admin 管理员组文章数量: 1184232
java
camera.setPreviewCallback(new PreviewCallback()
{
@Override
public void onPreviewFrame(byte[] arg0, Camera arg1) {
// TODO Auto-generated method stub
if(mIsStartPre == true)
{
videostart(arg0);
}
}
});
对每一帧数据都进行编码
JNI接口:
三个接口,一个初始化接口,一个编码接口,一个结束释放接口
AVCodecContext *pCodecCtx= NULL;
AVPacket avpkt;
FILE * video_file;
unsigned char *outbuf=NULL;
unsigned char *yuv420buf=NULL;
AVFrame * yuv420pframe = NULL;
static int outsize=0;
static int mwidth = 480;
static int mheight = 272;
int count = 0;
/*
* encording init
*/
JNIEXPORT jint JNICALL Java_com_hua_cameraandroidtest_MainActivity_videoinit(JNIEnv * env, jclass obj,jbyteArray filename)
{
LOGI("%s\n",__func__);
AVCodec * pCodec=NULL;
avcodec_register_all();
pCodec=avcodec_find_encoder(AV_CODEC_ID_MPEG4); //AV_CODEC_ID_H264//AV_CODEC_ID_MPEG1VIDEO
if(pCodec == NULL) {
LOGE("++++++++++++codec not found\n");
return -1;
}
pCodecCtx=avcodec_alloc_context3(pCodec);
if (pCodecCtx == NULL) {
LOGE("++++++Could not allocate video codec context\n");
return -1;
}
/* put sample parameters */
pCodecCtx->bit_rate = 400000;
/* resolution must be a multiple of two */
pCodecCtx->width = mwidth;
pCodecCtx->height = mheight;
/* frames per second */
pCodecCtx->time_base= (AVRational){1,25};
pCodecCtx->gop_size = 10; /* emit one intra frame every ten frames */
pCodecCtx->max_b_frames=1;
pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;//AV_PIX_FMT_YUYV422;
/* open it */
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
LOGE("+++++++Could not open codec\n");
return -1;
}
outsize = mwidth * mheight*2;
outbuf = malloc(outsize*sizeof(char));
yuv420buf = malloc(outsize*sizeof(char));
jbyte *filedir = (jbyte*)(*env)->GetByteArrayElements(env, filename, 0);
if ((video_file = fopen(filedir, "wb")) == NULL) {
LOGE("++++++++++++open %s failed\n",filedir);
return -1;
}
(*env)->ReleaseByteArrayElements(env, filename, filedir, 0);
return 1;
}
JNIEXPORT jint JNICALL Java_com_hua_cameraandroidtest_MainActivity_videostart(JNIEnv * env, jclass obj,jbyteArray yuvdata)
{
int frameFinished=0,size=0;
LOGE("JNICALL Java_com_hua_cameraandroidtest_MainActivity_videostart1");
jbyte *ydata = (jbyte*)(*env)->GetByteArrayElements(env, yuvdata, 0);
LOGE("JNICALL Java_com_hua_cameraandroidtest_MainActivity_videostart2");
yuv420pframe=NULL;
//AVFrame * yuv422frame=NULL;
//struct SwsContext *swsctx = NULL;
av_init_packet(&avpkt);
avpkt.data = NULL; // packet data will be allocated by the encoder
avpkt.size = 0;
yuv420pframe=avcodec_alloc_frame();
//yuv422frame=avcodec_alloc_frame();
avpicture_fill((AVPicture *) yuv420pframe, (uint8_t *)yuv420buf, AV_PIX_FMT_YUV420P,mwidth,mheight);
yuv420pframe->pts = count;
LOGE("JNICALL Java_com_hua_cameraandroidtest_MainActivity_videostart3");
//avpicture_fill((AVPicture *) yuv422frame, (uint8_t *)ydata, AV_PIX_FMT_YUYV422,mwidth,mheight);
LOGE("JNICALL Java_com_hua_cameraandroidtest_MainActivity_videostart4");
//swsctx = sws_getContext(mwidth,mheight, AV_PIX_FMT_YUYV422, mwidth, mheight,AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
LOGE("JNICALL Java_com_hua_cameraandroidtest_MainActivity_videostart5");
//sws_scale(swsctx,(const uint8_t* const*)yuv422frame->data,yuv422frame->linesize,0,mheight,yuv420pframe->data,yuv420pframe->linesize);
LOGE("JNICALL Java_com_hua_cameraandroidtest_MainActivity_videostart6");
LOGE("JNICALL Java_com_hua_cameraandroidtest_MainActivity_videostart7");
size = avcodec_encode_video2(pCodecCtx, &avpkt, yuv420pframe, &frameFinished);
LOGE("JNICALL Java_com_hua_cameraandroidtest_MainActivity_videostart8");
count++;
if (size < 0) {
LOGE("+++++Error encoding frame\n");
return -1;
}
if(frameFinished)
fwrite(avpkt.data,1,avpkt.size,video_file);
LOGE("JNICALL Java_com_hua_cameraandroidtest_MainActivity_videostart9");
av_free_packet(&avpkt);
//sws_freeContext(swsctx);
av_free(yuv420pframe);
//av_free(yuv422frame);
(*env)->ReleaseByteArrayElements(env, yuvdata, ydata, 0);
}
JNIEXPORT jint JNICALL Java_com_hua_cameraandroidtest_MainActivity_videoclose(JNIEnv * env, jclass obj)
{
fclose(video_file);
avcodec_close(pCodecCtx);
av_free(pCodecCtx);
av_freep(&yuv420pframe->data[0]);
av_frame_free(&yuv420pframe);
free(outbuf);
}
求大神分析一下,为什么会出现这样的情况。
版权声明:本文标题:FFmpeg 在 Android 上处理 H264 文件时,为什么出现偏绿色号的现象? 内容由网友自发贡献,该文观点仅代表作者本人, 转载请联系作者并注明出处:http://www.roclinux.cn/p/1772205556a3553310.html, 本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌抄袭侵权/违法违规的内容,一经查实,本站将立刻删除。
发表评论