Earlier I spent some time looking into H.264 encoding. After two days I found that H.264 encoding in ffmpeg appears to depend on the third-party library x264, so to keep things simple I integrated MPEG encoding instead for now. Getting ffmpeg itself set up was covered in an earlier post; the details are here: http://blog.csdn.net/hclydao/article/details/18546757
How to use it was also described there. The overall flow is: use ffmpeg to convert the YUV422 frames to RGB for display, and to convert YUV422 into an MPEG stream for recording. The YUV422 data itself is captured as described in the previous articles. To display it, the first step is converting to RGB565; the interface function is as follows:
/** yuv to rgb */
JNIEXPORT jint JNICALL Java_com_hclydao_webcam_Ffmpeg_yuvtorgb(JNIEnv * env, jclass obj, const jbyteArray yuvdata, jbyteArray rgbdata, const jint dwidth, const jint dheight)
{
    jbyte *ydata = (jbyte*)(*env)->GetByteArrayElements(env, yuvdata, 0);
    jbyte *rdata = (jbyte*)(*env)->GetByteArrayElements(env, rgbdata, 0);
    AVFrame *rpicture = NULL;
    AVFrame *ypicture = NULL;
    struct SwsContext *swsctx = NULL;

    rpicture = avcodec_alloc_frame();
    ypicture = avcodec_alloc_frame();

    /* wrap the Java byte arrays in AVPicture structures (no copy) */
    avpicture_fill((AVPicture *) rpicture, (uint8_t *)rdata, AV_PIX_FMT_RGB565, dwidth, dheight);
    avpicture_fill((AVPicture *) ypicture, (uint8_t *)ydata, AV_PIX_FMT_YUYV422, mwidth, mheight);

    /* convert (and scale) packed YUYV422 at the capture size to RGB565 at the display size */
    swsctx = sws_getContext(mwidth, mheight, AV_PIX_FMT_YUYV422,
                            dwidth, dheight, AV_PIX_FMT_RGB565,
                            SWS_BICUBIC, NULL, NULL, NULL);
    sws_scale(swsctx, (const uint8_t* const*)ypicture->data, ypicture->linesize,
              0, mheight, rpicture->data, rpicture->linesize);

    sws_freeContext(swsctx);
    av_free(rpicture);
    av_free(ypicture);

    /* copy the converted RGB data back to the Java array and release both arrays */
    (*env)->ReleaseByteArrayElements(env, yuvdata, ydata, 0);
    (*env)->ReleaseByteArrayElements(env, rgbdata, rdata, 0);
    return 0;
}
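A quick aside on buffer sizes: packed YUYV422 and RGB565 both use 2 bytes per pixel, so the Java byte arrays passed in as yuvdata and rgbdata must hold mwidth*mheight*2 and dwidth*dheight*2 bytes respectively. If you prefer not to hard-code that, the same (deprecated) avpicture API used above can compute it. A minimal sketch with hypothetical helper names:

#include <libavcodec/avcodec.h>

/* Bytes the Java byte[] passed as yuvdata must hold: packed YUYV422 is
 * 2 bytes per pixel, so this works out to mwidth * mheight * 2. */
static int camera_yuv_buffer_size(int mwidth, int mheight)
{
    return avpicture_get_size(AV_PIX_FMT_YUYV422, mwidth, mheight);
}

/* Bytes the Java byte[] passed as rgbdata must hold: RGB565 is also
 * 2 bytes per pixel, so this works out to dwidth * dheight * 2. */
static int display_rgb_buffer_size(int dwidth, int dheight)
{
    return avpicture_get_size(AV_PIX_FMT_RGB565, dwidth, dheight);
}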
Next comes the MPEG encoding, which mostly follows the examples you can find online. The MPEG-1 encoder only accepts YUV420P input, so each YUV422 frame is first converted to YUV420P and then encoded. The related interface functions are as follows:
AVCodecContext *pCodecCtx = NULL;
AVPacket avpkt;
FILE *video_file;
unsigned char *outbuf = NULL;
unsigned char *yuv420buf = NULL;
static int outsize = 0;

/** encoding init */
JNIEXPORT jint JNICALL Java_com_hclydao_webcam_Ffmpeg_videoinit(JNIEnv * env, jclass obj, jbyteArray filename)
{
    LOGI("%s\n", __func__);
    AVCodec *pCodec = NULL;

    avcodec_register_all();
    pCodec = avcodec_find_encoder(AV_CODEC_ID_MPEG1VIDEO);
    if (pCodec == NULL) {
        LOGE("++++++++++++codec not found\n");
        return -1;
    }
    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (pCodecCtx == NULL) {
        LOGE("++++++Could not allocate video codec context\n");
        return -1;
    }
    /* put sample parameters */
    pCodecCtx->bit_rate = 400000;
    /* resolution must be a multiple of two */
    pCodecCtx->width = mwidth;
    pCodecCtx->height = mheight;
    /* frames per second */
    pCodecCtx->time_base = (AVRational){1, 25};
    pCodecCtx->gop_size = 10;      /* emit one intra frame every ten frames */
    pCodecCtx->max_b_frames = 1;
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;   /* the encoder needs planar 4:2:0, not AV_PIX_FMT_YUYV422 */

    /* open it */
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("+++++++Could not open codec\n");
        return -1;
    }

    outsize = mwidth * mheight * 2;
    outbuf = malloc(outsize * sizeof(char));     /* not used by the avcodec_encode_video2() path below */
    yuv420buf = malloc(outsize * sizeof(char));  /* holds the converted YUV420P frame */

    jbyte *filedir = (jbyte*)(*env)->GetByteArrayElements(env, filename, 0);
    if ((video_file = fopen((const char *)filedir, "wb")) == NULL) {
        LOGE("++++++++++++open %s failed\n", filedir);
        return -1;
    }
    (*env)->ReleaseByteArrayElements(env, filename, filedir, 0);
    return 1;
}

JNIEXPORT jint JNICALL Java_com_hclydao_webcam_Ffmpeg_videostart(JNIEnv * env, jclass obj, jbyteArray yuvdata)
{
    int frameFinished = 0, size = 0;
    jbyte *ydata = (jbyte*)(*env)->GetByteArrayElements(env, yuvdata, 0);
    AVFrame *yuv420pframe = NULL;
    AVFrame *yuv422frame = NULL;
    struct SwsContext *swsctx = NULL;

    yuv420pframe = avcodec_alloc_frame();
    yuv422frame = avcodec_alloc_frame();
    avpicture_fill((AVPicture *) yuv420pframe, (uint8_t *)yuv420buf, AV_PIX_FMT_YUV420P, mwidth, mheight);
    avpicture_fill((AVPicture *) yuv422frame, (uint8_t *)ydata, AV_PIX_FMT_YUYV422, mwidth, mheight);

    /* convert the packed YUYV422 capture frame to planar YUV420P for the encoder */
    swsctx = sws_getContext(mwidth, mheight, AV_PIX_FMT_YUYV422,
                            mwidth, mheight, AV_PIX_FMT_YUV420P,
                            SWS_BICUBIC, NULL, NULL, NULL);
    sws_scale(swsctx, (const uint8_t* const*)yuv422frame->data, yuv422frame->linesize,
              0, mheight, yuv420pframe->data, yuv420pframe->linesize);

    /* encode the frame and append the resulting packet to the file */
    av_init_packet(&avpkt);
    avpkt.data = NULL;   /* let the encoder allocate the packet buffer */
    avpkt.size = 0;
    size = avcodec_encode_video2(pCodecCtx, &avpkt, yuv420pframe, &frameFinished);
    if (size < 0) {
        LOGE("+++++Error encoding frame\n");
        return -1;
    }
    if (frameFinished)
        fwrite(avpkt.data, avpkt.size, 1, video_file);
    av_free_packet(&avpkt);

    sws_freeContext(swsctx);
    av_free(yuv420pframe);
    av_free(yuv422frame);
    (*env)->ReleaseByteArrayElements(env, yuvdata, ydata, 0);
    return 0;
}

JNIEXPORT jint JNICALL Java_com_hclydao_webcam_Ffmpeg_videoclose(JNIEnv * env, jclass obj)
{
    fclose(video_file);
    avcodec_close(pCodecCtx);
    av_free(pCodecCtx);
    free(outbuf);
    free(yuv420buf);
    return 0;
}
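One detail worth mentioning: with max_b_frames set to 1 the encoder can buffer frames internally, and the stock ffmpeg encoding example drains those delayed frames and appends the MPEG end-of-sequence code before closing the output file. Below is a minimal sketch of how videoclose could do the same before its fclose() call. This is not part of the project code; it assumes it lives in the same source file as the functions above (same pCodecCtx, avpkt and video_file globals), and flush_encoder_and_finish is just a hypothetical helper name.

/* Sketch: drain the encoder's delayed frames and write the MPEG
 * end-of-sequence code, following ffmpeg's standard encoding example.
 * Assumes the pCodecCtx / avpkt / video_file globals declared above. */
static void flush_encoder_and_finish(void)
{
    static const uint8_t endcode[] = { 0x00, 0x00, 0x01, 0xb7 };
    int got_output = 1;

    while (got_output) {
        av_init_packet(&avpkt);
        avpkt.data = NULL;   /* the encoder allocates the packet buffer */
        avpkt.size = 0;
        /* Passing NULL instead of a frame asks the encoder for buffered output. */
        if (avcodec_encode_video2(pCodecCtx, &avpkt, NULL, &got_output) < 0)
            break;
        if (got_output) {
            fwrite(avpkt.data, 1, avpkt.size, video_file);
            av_free_packet(&avpkt);
        }
    }
    fwrite(endcode, 1, sizeof(endcode), video_file);
}

Players will generally still play the raw MPEG-1 stream without the end code (which is why the recording above works), but without draining the encoder the last buffered frames are simply dropped.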
The recorded video plays back fine in the end. The code itself may not be entirely correct; what matters is the overall process and the principles, and once those are clear the rest is easy to work out.
Below is a sample of what I recorded with the current camera.
Please look in my resources/downloads for the source. I uploaded it, but the link does not show up here; the archive is around 20 MB.
This was not written very rigorously, so there may be quite a few bugs. If there are problems with the principles or the process, I hope you will point them out.