在續系列文章在32位的Ubuntu 11.04中為Android NDK r6編譯FFmpeg0.8.1版-Android中使用FFmpeg媒體庫(一)和在Android中通過jni方式使用編譯好的FFmpeg庫-Android中使用FFmpeg媒體庫(二)文章後,本文將根據github中churnlabs的一個開源項目,來深入展開說明如何使用FFmpeg庫進行多媒體的開發。
本文中的代碼來自於https://github.com/churnlabs/android-ffmpeg-sample,更多的可以參考這個項目代碼。我會在代碼中加一些自己的注釋。感謝作者churnlabs給我們提供這麼好的例子以供我們學習。
在Android的一些系統層應用開發大多數是採用jni的方式調用,另外對於一些比較吃CPU或者處理邏輯比較複雜的程式,也可以考慮使用jni方式來封裝。可以提高程式的執行效率。
本文涉及到以下幾個方面:
1 將3gp檔案push到模擬器的sdcard中
2 寫jni代碼,內部調用ffmpeg庫的方法,編譯jni庫
3 loadLibrary產生的庫,然後撰寫相應的java代碼
4 執行程式,並查看最終運行結果。
最終程式的顯示效果如下:
1 使用eclipse的DDMS工具,將vid.3gp push到sdcard中
2 撰寫相應的jni檔案
/*
 * Copyright 2011 - Churn Labs, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * This is mostly based off of the FFMPEG tutorial:
 * http://dranger.com/ffmpeg/
 * With a few updates to support Android output mechanisms and to update
 * places where the APIs have shifted.
 */

#include <jni.h>
#include <string.h>
#include <stdio.h>
#include <android/log.h>
#include <android/bitmap.h>

/* FFmpeg headers, resolved through LOCAL_C_INCLUDES ($(LOCAL_PATH)/include)
 * declared in Android.mk. */
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>

#define LOG_TAG "FFMPEGSample"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)

/* Cheat to keep things simple and just use some globals.
 * NOTE(review): this makes the code single-stream and not thread-safe. */
AVFormatContext *pFormatCtx;   /* demuxer context for the opened file */
AVCodecContext *pCodecCtx;     /* decoder context of the video stream */
AVFrame *pFrame;               /* decoded frame in the codec's native pixel format */
AVFrame *pFrameRGB;            /* same frame converted to RGB24 for drawing */
int videoStream;               /* index of the first video stream in the file */

/*
 * Write a frame worth of video (in pFrame, expected RGB24) into the Android
 * bitmap described by info using the raw pixel buffer. It's a very
 * inefficient draw routine, but it's easy to read. Relies on the format of
 * the bitmap being 8 bits per color component plus an 8-bit alpha channel
 * (ARGB_8888).
 */
static void fill_bitmap(AndroidBitmapInfo* info, void *pixels, AVFrame *pFrame)
{
    uint8_t *frameLine;
    int yy;
    for (yy = 0; yy < info->height; yy++) {
        uint8_t* line = (uint8_t*)pixels;
        /* Start of row yy in the source frame; linesize may exceed width*3. */
        frameLine = (uint8_t *)pFrame->data[0] + (yy * pFrame->linesize[0]);
        int xx;
        for (xx = 0; xx < info->width; xx++) {
            int out_offset = xx * 4;   /* 4 bytes per destination pixel */
            int in_offset  = xx * 3;   /* 3 bytes per RGB24 source pixel */
            line[out_offset]   = frameLine[in_offset];
            line[out_offset+1] = frameLine[in_offset+1];
            line[out_offset+2] = frameLine[in_offset+2];
            /* NOTE(review): alpha written as 0 (transparent in ARGB_8888);
             * 255 would be fully opaque — kept as the original sample had it,
             * confirm intended. */
            line[out_offset+3] = 0;
        }
        /* Advance one bitmap row using the bitmap's stride, not width*4. */
        pixels = (char*)pixels + info->stride;
    }
}

/*
 * JNI entry point for MainActivity.openFile(): opens /sdcard/vid.3gp,
 * locates its first video stream, opens a decoder for it and allocates the
 * decode and RGB conversion frames. Errors are logged and the function
 * returns early, leaving the globals partially initialized.
 */
void Java_com_churnlabs_ffmpegsample_MainActivity_openFile(JNIEnv * env, jobject this)
{
    int err;
    int i;
    AVCodec *pCodec;
    uint8_t *buffer;
    int numBytes;

    /* Register all demuxers/decoders (required before any other call
     * in this FFmpeg version). */
    av_register_all();
    LOGE("Registered formats");

    /* Open the test clip pushed onto the sdcard via DDMS. */
    err = av_open_input_file(&pFormatCtx, "file:/sdcard/vid.3gp", NULL, 0, NULL);
    LOGE("Called open file");
    if(err!=0) {
        LOGE("Couldn't open file");
        return;
    }
    LOGE("Opened file");

    if(av_find_stream_info(pFormatCtx)<0) {
        LOGE("Unable to get stream info");
        return;
    }

    /* Find the first video stream. */
    videoStream = -1;
    for (i=0; i<pFormatCtx->nb_streams; i++) {
        if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }
    if(videoStream==-1) {
        LOGE("Unable to find video stream");
        return;
    }
    LOGI("Video stream is [%d]", videoStream);

    /* Look up and open a decoder for the stream's codec. */
    pCodecCtx=pFormatCtx->streams[videoStream]->codec;
    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec==NULL) {
        LOGE("Unsupported codec");
        return;
    }
    if(avcodec_open(pCodecCtx, pCodec)<0) {
        LOGE("Unable to open codec");
        return;
    }

    /* Allocate the decode frame and the RGB conversion frame. */
    pFrame=avcodec_alloc_frame();
    pFrameRGB=avcodec_alloc_frame();
    LOGI("Video size is [%d x %d]", pCodecCtx->width, pCodecCtx->height);

    /* Allocate the RGB24 pixel buffer and attach it to pFrameRGB.
     * (BUGFIX: the "分配空間" comment in the original had lost its "//"
     * marker, which made this line a syntax error.) */
    numBytes=avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
    buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
    avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
                   pCodecCtx->width, pCodecCtx->height);
}

/*
 * JNI entry point for MainActivity.drawFrame(Bitmap): decodes the next video
 * frame, converts it to 320x240 RGB24 and paints it into the bitmap.
 * BUGFIX: the bitmap parameter was declared jstring although the Java side
 * passes an android.graphics.Bitmap — jobject is the correct JNI type (the
 * native symbol name is unchanged, so linkage is unaffected).
 */
void Java_com_churnlabs_ffmpegsample_MainActivity_drawFrame(JNIEnv * env, jobject this, jobject bitmap)
{
    AndroidBitmapInfo info;
    void* pixels;
    int ret;
    int i;
    int frameFinished = 0;
    AVPacket packet;
    static struct SwsContext *img_convert_ctx;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return;
    }
    LOGE("Checked on the bitmap");

    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        /* BUGFIX: the original fell through with an uninitialized pixel
         * pointer; bail out instead. */
        return;
    }
    LOGE("Grabbed the pixels");

    /* Read packets until one full video frame has been decoded. */
    i = 0;
    while((i==0) && (av_read_frame(pFormatCtx, &packet)>=0)) {
        if(packet.stream_index==videoStream) {
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
            if(frameFinished) {
                LOGE("packet pts %llu", packet.pts);
                /* This is much different than the tutorial, sws_scale
                 * replaces img_convert, but it's not a complete drop in.
                 * This version scales to a fixed 320x240 and swaps to RGB24
                 * format, which works perfect for PPM output. */
                int target_width = 320;
                int target_height = 240;
                img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                                                 pCodecCtx->pix_fmt,
                                                 target_width, target_height,
                                                 PIX_FMT_RGB24, SWS_BICUBIC,
                                                 NULL, NULL, NULL);
                if(img_convert_ctx == NULL) {
                    LOGE("could not initialize conversion context\n");
                    return;
                }
                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGB->data, pFrameRGB->linesize);
                /* BUGFIX: the context was allocated per decoded frame and
                 * never released — free it to stop the leak. */
                sws_freeContext(img_convert_ctx);
                img_convert_ctx = NULL;

                /* save_frame(pFrameRGB, target_width, target_height, i); */
                fill_bitmap(&info, pixels, pFrameRGB);
                i = 1;
            }
        }
        av_free_packet(&packet);
    }

    AndroidBitmap_unlockPixels(env, bitmap);
}

/*
 * Internal helper (not exported to Java): seek the video stream to tsms
 * milliseconds and flush the decoder. Returns 1 on success, 0 on failure.
 */
int seek_frame(int tsms)
{
    int64_t frame;

    /* Convert milliseconds to the stream's time base. */
    frame = av_rescale(tsms,
                       pFormatCtx->streams[videoStream]->time_base.den,
                       pFormatCtx->streams[videoStream]->time_base.num);
    frame/=1000;

    if(avformat_seek_file(pFormatCtx,videoStream,0,frame,frame,AVSEEK_FLAG_FRAME)<0) {
        return 0;
    }

    /* Drop any buffered frames that predate the seek target. */
    avcodec_flush_buffers(pCodecCtx);
    return 1;
}

/*
 * JNI entry point for MainActivity.drawFrameAt(Bitmap, int): seeks to `secs`
 * seconds, then decodes and draws one frame exactly like drawFrame().
 * BUGFIX: bitmap parameter corrected from jstring to jobject (see drawFrame).
 */
void Java_com_churnlabs_ffmpegsample_MainActivity_drawFrameAt(JNIEnv * env, jobject this, jobject bitmap, jint secs)
{
    AndroidBitmapInfo info;
    void* pixels;
    int ret;
    int i;
    int frameFinished = 0;
    AVPacket packet;
    static struct SwsContext *img_convert_ctx;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return;
    }
    LOGE("Checked on the bitmap");

    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        /* BUGFIX: bail out rather than use an uninitialized pixel pointer. */
        return;
    }
    LOGE("Grabbed the pixels");

    /* Position the stream at the requested second before decoding. */
    seek_frame(secs * 1000);

    i = 0;
    while ((i== 0) && (av_read_frame(pFormatCtx, &packet)>=0)) {
        if(packet.stream_index==videoStream) {
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
            if(frameFinished) {
                /* Scale to a fixed 320x240 RGB24 image (see drawFrame). */
                int target_width = 320;
                int target_height = 240;
                img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                                                 pCodecCtx->pix_fmt,
                                                 target_width, target_height,
                                                 PIX_FMT_RGB24, SWS_BICUBIC,
                                                 NULL, NULL, NULL);
                if(img_convert_ctx == NULL) {
                    LOGE("could not initialize conversion context\n");
                    return;
                }
                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGB->data, pFrameRGB->linesize);
                /* BUGFIX: free the per-frame context (was leaked). */
                sws_freeContext(img_convert_ctx);
                img_convert_ctx = NULL;

                /* save_frame(pFrameRGB, target_width, target_height, i); */
                fill_bitmap(&info, pixels, pFrameRGB);
                i = 1;
            }
        }
        av_free_packet(&packet);
    }

    AndroidBitmap_unlockPixels(env, bitmap);
}
3 撰寫相應的Android.mk檔案
# Android.mk for the ffmpegutils JNI library.
# BUGFIX: in the original listing the variable assignments had run together
# on one line (e.g. "ffmpegutilsLOCAL_SRC_FILES"), which would not parse.
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE := ffmpegutils
LOCAL_SRC_FILES := native.c

# FFmpeg headers live under jni/include (see the article's directory layout).
LOCAL_C_INCLUDES := $(LOCAL_PATH)/include

# Link against the prebuilt FFmpeg 0.8.x libs plus the Android log and
# jnigraphics (AndroidBitmap_*) system libraries.
LOCAL_LDLIBS := -L$(NDK_PLATFORMS_ROOT)/$(TARGET_PLATFORM)/arch-arm/usr/lib -L$(LOCAL_PATH) -lavformat -lavcodec -lavdevice -lavfilter -lavcore -lavutil -lswscale -llog -ljnigraphics -lz -ldl -lgcc

include $(BUILD_SHARED_LIBRARY)
這裡需要注意一下檔案的目錄情況,我說明一下。
在Android.mk中有一個LOCAL_C_INCLUDES := $(LOCAL_PATH)/include,指明了相應的FFmpeg的標頭檔路徑。故在代碼中包含
#include <libavcodec/avcodec.h>#include <libavformat/avformat.h>#include <libswscale/swscale.h>
就可以。
4 調用ndk-build,產生libffmpegutils.so檔案,將這個檔案拷貝到/root/develop/android-ndk-r6/platforms/android-8/arch-arm/usr/lib目錄,使得我們在下面使用Android AVD2.2的時候,可以載入到這個so檔案。
5 撰寫相應的Eclipse項目代碼,由於在native.c檔案中指明了項目的套件名稱以及類別名稱還有函數名稱,故我們的項目為com.churnlabs.ffmpegsample下面的MainActivity.java檔案
package com.churnlabs.ffmpegsample; import android.app.Activity;import android.graphics.Bitmap;import android.os.Bundle;import android.view.View;import android.view.View.OnClickListener;import android.widget.Button;import android.widget.ImageView; public class MainActivity extends Activity { private static native void openFile(); private static native void drawFrame(Bitmap bitmap); private static native void drawFrameAt(Bitmap bitmap, int secs); private Bitmap mBitmap; private int mSecs = 0; static { System.loadLibrary("ffmpegutils"); } /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); //setContentView(new VideoView(this)); setContentView(R.layout.main); mBitmap = Bitmap.createBitmap(320, 240, Bitmap.Config.ARGB_8888); openFile(); Button btn = (Button)findViewById(R.id.frame_adv); btn.setOnClickListener(new OnClickListener() { public void onClick(View v) { drawFrame(mBitmap); ImageView i = (ImageView)findViewById(R.id.frame); i.setImageBitmap(mBitmap); } }); Button btn_fwd = (Button)findViewById(R.id.frame_fwd); btn_fwd.setOnClickListener(new OnClickListener() { public void onClick(View v) { mSecs += 5; drawFrameAt(mBitmap, mSecs); ImageView i = (ImageView)findViewById(R.id.frame); i.setImageBitmap(mBitmap); } }); Button btn_back = (Button)findViewById(R.id.frame_back); btn_back.setOnClickListener(new OnClickListener() { public void onClick(View v) { mSecs -= 5; drawFrameAt(mBitmap, mSecs); ImageView i = (ImageView)findViewById(R.id.frame); i.setImageBitmap(mBitmap); } }); }}
6 編譯運行即可,最終效果如上文截圖所示。
7 項目代碼下載:
https://github.com/churnlabs/android-ffmpeg-sample/zipball/master
參考:
1 https://github.com/churnlabs/android-ffmpeg-sample
2 http://www.360doc.com/content/10/1216/17/474846_78726683.shtml
3 https://github.com/prajnashi
本文同發布地址:
http://doandroid.info/?p=497
感謝原作者分享,轉載:http://www.cnblogs.com/doandroid/archive/2011/11/09/2242558.html