The previous post covered compiling FFmpeg under Linux.
So how do we use the libraries once they are compiled? Following FFmpeg's decoding example code, this post shows one way to wrap the compiled .so libraries behind a JNI decoding interface.
After compiling the FFmpeg source, create a new project with a directory structure along the lines of the sketch below.
Copy the header files from the compiled FFmpeg source tree into the include folder, and copy the compiled .so libraries into the prebuilt folder.
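Roughly, the layout looks like this. The folder names are inferred from the Android.mk, Application.mk, and Java code that follow; treat it as one plausible arrangement rather than a required one:

    jni/
        Android.mk
        Application.mk
        ffmpeg.c
        com_android_FFmpeg.h
        include/        (FFmpeg headers: libavcodec/, libavformat/, libavutil/, libswresample/, libswscale/)
        prebuilt/       (libavutil-54.so, libswresample-1.so, libswscale-3.so,
                         libavcodec-56.so, libavformat-56.so, libavfilter-5.so)
    src/
        com/android/FFmpeg.java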
Create a new Android.mk with the following contents:
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil-54-prebuilt
LOCAL_SRC_FILES := prebuilt/libavutil-54.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avswresample-1-prebuilt
LOCAL_SRC_FILES := prebuilt/libswresample-1.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale-3-prebuilt
LOCAL_SRC_FILES := prebuilt/libswscale-3.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avcodec-56-prebuilt
LOCAL_SRC_FILES := prebuilt/libavcodec-56.so
include $(PREBUILT_SHARED_LIBRARY)

#include $(CLEAR_VARS)
#LOCAL_MODULE := avdevice-56-prebuilt
#LOCAL_SRC_FILES := prebuilt/libavdevice-56.so
#include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat-56-prebuilt
LOCAL_SRC_FILES := prebuilt/libavformat-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avfilter-5-prebuilt
LOCAL_SRC_FILES := prebuilt/libavfilter-5.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := libffmpegutil
LOCAL_SRC_FILES := ffmpeg.c
LOCAL_LDLIBS := -llog -ljnigraphics -lz -landroid -lm -pthread
LOCAL_SHARED_LIBRARIES := avcodec-56-prebuilt avdevice-56-prebuilt avfilter-5-prebuilt avformat-56-prebuilt avutil-54-prebuilt avswresample-1-prebuilt swscale-3-prebuilt
include $(BUILD_SHARED_LIBRARY)
Create a new Application.mk with the following content:
APP_ABI := armeabi-v7a
APP_PLATFORM := android-9
Create the JNI interface class FFmpeg.java:
package com.android;

import android.util.Log;

public class FFmpeg {
    static {
        try {
            System.loadLibrary("avutil-54");
            System.loadLibrary("swresample-1");
            System.loadLibrary("swscale-3");
            System.loadLibrary("avcodec-56");
            System.loadLibrary("avformat-56");
            System.loadLibrary("avfilter-5");
            System.loadLibrary("ffmpegutil");
        } catch (UnsatisfiedLinkError ule) {
            Log.d("FFMPEG", ule.getMessage());
        }
    }

    public native int H264DecoderInit(int width, int height);

    public native int H264DecoderRelease();

    public native int H264Decode(byte[] in, int insize, byte[] out);

    public native int GetFFmpegVersion();
}
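Before going further, it can help to confirm that the libraries load and that the decoder initialises. The snippet below is a minimal sketch of such a check; the FFmpegSmokeTest class and the 640x480 size are illustrative assumptions, not part of the original project.

package com.android;

import android.util.Log;

// Minimal sketch: checks that the native libraries load and that the
// decoder can be initialised and released. Class name and frame size
// are illustrative only.
public class FFmpegSmokeTest {
    public static void run() {
        FFmpeg ffmpeg = new FFmpeg();   // constructing the class triggers the static loadLibrary block
        Log.d("FFMPEG", "avcodec_version(): " + ffmpeg.GetFFmpegVersion());

        if (ffmpeg.H264DecoderInit(640, 480) < 0) {
            Log.e("FFMPEG", "decoder init failed");
            return;
        }
        Log.d("FFMPEG", "decoder initialised");
        ffmpeg.H264DecoderRelease();
    }
}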
To generate the JNI header file, open a cmd window in the root of the compiled class output (the folder that contains com/android/FFmpeg.class) and run javah -jni com.android.FFmpeg; this produces com_android_FFmpeg.h.
Create the native implementation, ffmpeg.c:
#include <math.h>
#include <libavutil/opt.h>
#include <libavcodec/avcodec.h>
#include <libavutil/channel_layout.h>
#include <libavutil/common.h>
#include <libavutil/imgutils.h>
#include <libavutil/mathematics.h>
#include <libavutil/samplefmt.h>
#include <android/log.h>
#include "com_android_FFmpeg.h"

#define LOG_TAG "H264Android.c"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)

#ifdef __cplusplus
extern "C" {
#endif

// video
struct AVCodecContext *pAVCodecCtx = NULL;
struct AVCodec *pAVCodec;
struct AVPacket mAVPacket;
struct AVFrame *pAVFrame = NULL;

// audio
struct AVCodecContext *pAUCodecCtx = NULL;
struct AVCodec *pAUCodec;
struct AVPacket mAUPacket;
struct AVFrame *pAUFrame = NULL;

int iWidth = 0;
int iHeight = 0;

int *colortab = NULL;
int *u_b_tab = NULL;
int *u_g_tab = NULL;
int *v_g_tab = NULL;
int *v_r_tab = NULL;

//short *tmp_pic = NULL;

unsigned int *rgb_2_pix = NULL;
unsigned int *r_2_pix = NULL;
unsigned int *g_2_pix = NULL;
unsigned int *b_2_pix = NULL;

void DeleteYUVTab() {
    //av_free(tmp_pic);
    av_free(colortab);
    av_free(rgb_2_pix);
}

// Build the lookup tables used to convert YUV420P to RGB565.
void CreateYUVTab_16() {
    int i;
    int u, v;

    //tmp_pic = (short *) av_malloc(iWidth * iHeight * 2); // iWidth * iHeight * 16 bits

    colortab = (int *) av_malloc(4 * 256 * sizeof(int));
    u_b_tab = &colortab[0 * 256];
    u_g_tab = &colortab[1 * 256];
    v_g_tab = &colortab[2 * 256];
    v_r_tab = &colortab[3 * 256];

    for (i = 0; i < 256; i++) {
        u = v = (i - 128);
        u_b_tab[i] = (int) (1.772 * u);
        u_g_tab[i] = (int) (0.34414 * u);
        v_g_tab[i] = (int) (0.71414 * v);
        v_r_tab[i] = (int) (1.402 * v);
    }

    rgb_2_pix = (unsigned int *) av_malloc(3 * 768 * sizeof(unsigned int));
    r_2_pix = &rgb_2_pix[0 * 768];
    g_2_pix = &rgb_2_pix[1 * 768];
    b_2_pix = &rgb_2_pix[2 * 768];

    for (i = 0; i < 256; i++) {
        r_2_pix[i] = 0;
        g_2_pix[i] = 0;
        b_2_pix[i] = 0;
    }

    for (i = 0; i < 256; i++) {
        r_2_pix[i + 256] = (i & 0xF8) << 8;
        g_2_pix[i + 256] = (i & 0xFC) << 3;
        b_2_pix[i + 256] = (i) >> 3;
    }

    for (i = 0; i < 256; i++) {
        r_2_pix[i + 512] = 0xF8 << 8;
        g_2_pix[i + 512] = 0xFC << 3;
        b_2_pix[i + 512] = 0x1F;
    }

    r_2_pix += 256;
    g_2_pix += 256;
    b_2_pix += 256;
}

// Convert one YUV420P frame (y/u/v planes) to RGB565 and write it into pdst1.
void DisplayYUV_16(unsigned int *pdst1, unsigned char *y, unsigned char *u,
        unsigned char *v, int width, int height, int src_ystride,
        int src_uvstride, int dst_ystride) {
    int i, j;
    int r, g, b, rgb;
    int yy, ub, ug, vg, vr;
    unsigned char *yoff;
    unsigned char *uoff;
    unsigned char *voff;
    unsigned int *pdst = pdst1;
    int width2 = width / 2;
    int height2 = height / 2;

    if (width2 > iWidth / 2) {
        width2 = iWidth / 2;
        y += (width - iWidth) / 4 * 2;
        u += (width - iWidth) / 4;
        v += (width - iWidth) / 4;
    }

    if (height2 > iHeight)
        height2 = iHeight;

    for (j = 0; j < height2; j++) {
        yoff = y + j * 2 * src_ystride;
        uoff = u + j * src_uvstride;
        voff = v + j * src_uvstride;

        for (i = 0; i < width2; i++) {
            yy = *(yoff + (i << 1));
            ub = u_b_tab[*(uoff + i)];
            ug = u_g_tab[*(uoff + i)];
            vg = v_g_tab[*(voff + i)];
            vr = v_r_tab[*(voff + i)];

            b = yy + ub;
            g = yy - ug - vg;
            r = yy + vr;
            rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];

            yy = *(yoff + (i << 1) + 1);
            b = yy + ub;
            g = yy - ug - vg;
            r = yy + vr;
            pdst[(j * dst_ystride + i)] = (rgb)
                    + ((r_2_pix[r] + g_2_pix[g] + b_2_pix[b]) << 16);

            yy = *(yoff + (i << 1) + src_ystride);
            b = yy + ub;
            g = yy - ug - vg;
            r = yy + vr;
            rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];

            yy = *(yoff + (i << 1) + src_ystride + 1);
            b = yy + ub;
            g = yy - ug - vg;
            r = yy + vr;
            pdst[((2 * j + 1) * dst_ystride + i * 2) >> 1] = (rgb)
                    + ((r_2_pix[r] + g_2_pix[g] + b_2_pix[b]) << 16);
        }
    }
}

/*
 * Class:     com_android_FFmpeg
 * Method:    H264DecoderInit
 * Signature: (II)I
 */
JNIEXPORT jint JNICALL Java_com_android_FFmpeg_H264DecoderInit(JNIEnv *env,
        jobject jobj, jint width, jint height) {
    iWidth = width;
    iHeight = height;

    if (pAVCodecCtx != NULL) {
        avcodec_close(pAVCodecCtx);
        pAVCodecCtx = NULL;
    }
    if (pAVFrame != NULL) {
        av_free(pAVFrame);
        pAVFrame = NULL;
    }

    // register all formats and codecs
    av_register_all();
    LOGD("avcodec register success");

    //CODEC_ID_PCM_ALAW
    pAVCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (pAVCodec == NULL)
        return -1;

    // init AVCodecContext
    pAVCodecCtx = avcodec_alloc_context3(pAVCodec);
    if (pAVCodecCtx == NULL)
        return -1;

    /* we do not send complete frames */
    if (pAVCodec->capabilities & CODEC_CAP_TRUNCATED)
        pAVCodecCtx->flags |= CODEC_FLAG_TRUNCATED;

    /* open it */
    if (avcodec_open2(pAVCodecCtx, pAVCodec, NULL) < 0)
        return avcodec_open2(pAVCodecCtx, pAVCodec, NULL);

    av_init_packet(&mAVPacket);

    pAVFrame = av_frame_alloc();
    if (pAVFrame == NULL)
        return -1;

    //pImageConvertCtx = sws_getContext(pAVCodecCtx->width, pAVCodecCtx->height,
    //        PIX_FMT_YUV420P, pAVCodecCtx->width, pAVCodecCtx->height,
    //        PIX_FMT_RGB565LE, SWS_BICUBIC, NULL, NULL, NULL);
    //LOGD("sws_getContext return =%d", pImageConvertCtx);

    LOGD("avcodec context success");

    CreateYUVTab_16();
    LOGD("create yuv table success");

    return 1;
}

/*
 * Class:     com_android_FFmpeg
 * Method:    H264DecoderRelease
 * Signature: ()I
 */
JNIEXPORT jint JNICALL Java_com_android_FFmpeg_H264DecoderRelease(JNIEnv *env,
        jobject jobj) {
    if (pAVCodecCtx != NULL) {
        avcodec_close(pAVCodecCtx);
        pAVCodecCtx = NULL;
    }
    if (pAVFrame != NULL) {
        av_free(pAVFrame);
        pAVFrame = NULL;
    }
    DeleteYUVTab();
    return 1;
}

/*
 * Class:     com_android_FFmpeg
 * Method:    H264Decode
 * Signature: ([BI[B)I
 */
JNIEXPORT jint JNICALL Java_com_android_FFmpeg_H264Decode(JNIEnv *env,
        jobject thiz, jbyteArray in, jint inbuf_size, jbyteArray out) {
    int i;
    jbyte *inbuf = (jbyte *) (*env)->GetByteArrayElements(env, in, 0);
    jbyte *picture = (jbyte *) (*env)->GetByteArrayElements(env, out, 0);

    av_frame_unref(pAVFrame);

    mAVPacket.data = inbuf;
    mAVPacket.size = inbuf_size;
    LOGD("mAVPacket.size:%d\n", mAVPacket.size);

    int len = -1, got_picture = 0;
    len = avcodec_decode_video2(pAVCodecCtx, pAVFrame, &got_picture,
            &mAVPacket);
    LOGD("len:%d\n", len);
    if (len < 0) {
        LOGD("len=-1, decode error");
        return len;
    }

    if (got_picture > 0) {
        LOGD("got picture");
        /*
        pImageConvertCtx = sws_getContext(pAVCodecCtx->width,
                pAVCodecCtx->height, pAVCodecCtx->pix_fmt,
                pAVCodecCtx->width, pAVCodecCtx->height, PIX_FMT_RGB565LE,
                SWS_BICUBIC, NULL, NULL, NULL);
        sws_scale(pImageConvertCtx, pAVFrame->data, pAVFrame->linesize, 0,
                pAVCodecCtx->height, pAVFrame->data, pAVFrame->linesize);
        */
        DisplayYUV_16((int *) picture, pAVFrame->data[0], pAVFrame->data[1],
                pAVFrame->data[2], pAVCodecCtx->width, pAVCodecCtx->height,
                pAVFrame->linesize[0], pAVFrame->linesize[1], iWidth);
    } else
        LOGD("got picture fail");

    (*env)->ReleaseByteArrayElements(env, in, inbuf, 0);
    (*env)->ReleaseByteArrayElements(env, out, picture, 0);

    return len;
}

/*
 * Class:     com_android_FFmpeg
 * Method:    GetFFmpegVersion
 * Signature: ()I
 */
JNIEXPORT jint JNICALL Java_com_android_FFmpeg_GetFFmpegVersion(JNIEnv *env,
        jobject jobj) {
    return avcodec_version();
}

#ifdef __cplusplus
}
#endif
Configure the NDK builder in Eclipse and build the project to generate the libffmpegutil library.
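Once the library builds, the decoder can be driven from Java. The sketch below assumes the out buffer filled by H264Decode holds an RGB565 frame (which is what DisplayYUV_16 writes) and wraps it in a Bitmap for display; the FramePresenter class, the fixed frame size, and the ImageView target are illustrative assumptions, not part of the original code.

package com.android;

import java.nio.ByteBuffer;
import android.graphics.Bitmap;
import android.widget.ImageView;

// Sketch only: decodes one H.264 access unit and shows it in an ImageView.
// Assumes the stream resolution is known up front and matches the init size.
public class FramePresenter {
    private final FFmpeg decoder = new FFmpeg();
    private final int width;
    private final int height;
    private final byte[] rgb565;   // output buffer filled by H264Decode
    private final Bitmap bitmap;

    public FramePresenter(int width, int height) {
        this.width = width;
        this.height = height;
        this.rgb565 = new byte[width * height * 2];   // RGB565 = 2 bytes per pixel
        this.bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
        decoder.H264DecoderInit(width, height);
    }

    public void decodeAndShow(byte[] accessUnit, ImageView view) {
        int len = decoder.H264Decode(accessUnit, accessUnit.length, rgb565);
        if (len > 0) {
            bitmap.copyPixelsFromBuffer(ByteBuffer.wrap(rgb565));
            view.setImageBitmap(bitmap);
        }
    }

    public void release() {
        decoder.H264DecoderRelease();
    }
}

Note that H264Decode returns the number of bytes consumed and only fills the output buffer when the decoder actually produced a picture (got_picture in ffmpeg.c), so the len > 0 check above is just a rough stand-in; a production caller would surface that flag separately.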
That covers the basic use of the FFmpeg decoding library on Android.