Pushing an ApsaraVideo for Live Stream to a Server from Android with FFmpeg


Background

In 2015, the new darling of live streaming was without question outdoor broadcasting. As 4G networks spread and coverage improved, hosts could broadcast outdoors from nothing more than a phone, and viewers proved willing to pay for a service that let them watch the world without leaving home. Against this background, this article shows how to capture video on an Android device and push the stream to a server.

Overview

Capturing and pushing a stream on Android mainly involves two classes. The first is Camera, built into the Android API, which captures images from the device's camera. The second is JavaCV's FFmpegFrameRecorder class, which encodes the frames captured by Camera and pushes the stream to the server.
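Before the details, here is a minimal sketch of how the two classes fit together; the URL, resolution, and callback body are placeholders rather than values from the project:

// Orientation sketch: Camera produces NV21 preview frames, and
// FFmpegFrameRecorder encodes them and pushes the result over RTMP.
Camera camera = Camera.open();
FFmpegFrameRecorder recorder =
        new FFmpegFrameRecorder("rtmp://example.com/live/stream", 640, 480, 1);
recorder.setFormat("flv");
try {
    recorder.start();   // connects to the RTMP endpoint
} catch (FFmpegFrameRecorder.Exception e) {
    e.printStackTrace();
}
camera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera c) {
        // copy data into a JavaCV Frame and call recorder.record(frame);
        // the full version is in the CameraView class below
    }
});
camera.startPreview();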

Key Steps and Code

The key steps of video capture are given below. The first is initialization of the Camera class.

// Initialize the Camera device
cameraDevice = Camera.open();
Log.i(LOG_TAG, "camera open");
cameraView = new CameraView(this, cameraDevice);
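Note that the manifest must declare the CAMERA, RECORD_AUDIO, and INTERNET permissions for capture and streaming to work. Camera.open() with no arguments returns the first back-facing camera; if you want to be explicit about which camera you get, a hypothetical helper like the following works on the same pre-Camera2 API:

// Hypothetical helper (not part of the original project): explicitly pick
// the first back-facing camera instead of relying on Camera.open()'s default.
private Camera openBackCamera() {
    Camera.CameraInfo info = new Camera.CameraInfo();
    for (int id = 0; id < Camera.getNumberOfCameras(); id++) {
        Camera.getCameraInfo(id, info);
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
            return Camera.open(id);   // may throw RuntimeException if the camera is in use
        }
    }
    return null; // no back-facing camera found
}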

The CameraView class used above is our own implementation; it is responsible for previewing the video capture and for writing the captured frames into the FFmpegFrameRecorder. The code is as follows:

class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

    private SurfaceHolder mHolder;
    private Camera mCamera;

    public CameraView(Context context, Camera camera) {
        super(context);
        Log.w("camera", "camera view");
        mCamera = camera;
        mHolder = getHolder();
        // Register this view as the SurfaceHolder's callback
        mHolder.addCallback(CameraView.this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        // Register this view as the Camera preview callback
        mCamera.setPreviewCallback(CameraView.this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        try {
            stopPreview();
            mCamera.setPreviewDisplay(holder);
        } catch (IOException exception) {
            mCamera.release();
            mCamera = null;
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        stopPreview();
        Camera.Parameters camParams = mCamera.getParameters();
        List<Camera.Size> sizes = camParams.getSupportedPreviewSizes();
        // Sort the supported preview sizes in ascending order
        Collections.sort(sizes, new Comparator<Camera.Size>() {
            public int compare(final Camera.Size a, final Camera.Size b) {
                return a.width * a.height - b.width * b.height;
            }
        });
        // Pick the first preview size that is equal or bigger, or pick the last (biggest)
        // option if we cannot reach the initial settings of imageWidth/imageHeight.
        for (int i = 0; i < sizes.size(); i++) {
            if ((sizes.get(i).width >= imageWidth && sizes.get(i).height >= imageHeight) || i == sizes.size() - 1) {
                imageWidth = sizes.get(i).width;
                imageHeight = sizes.get(i).height;
                Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth + "x" + imageHeight);
                break;
            }
        }
        camParams.setPreviewSize(imageWidth, imageHeight);
        Log.v(LOG_TAG, "Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
        camParams.setPreviewFrameRate(frameRate);
        Log.v(LOG_TAG, "Preview Framerate: " + camParams.getPreviewFrameRate());
        mCamera.setParameters(camParams);
        // Set the holder (which might have changed) again
        try {
            mCamera.setPreviewDisplay(holder);
            mCamera.setPreviewCallback(CameraView.this);
            startPreview();
        } catch (Exception e) {
            Log.e(LOG_TAG, "Could not set preview display in surfaceChanged");
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        try {
            mHolder.removeCallback(CameraView.this);
            mCamera.setPreviewCallback(null);
        } catch (RuntimeException e) {
            // The camera has probably just been released; ignore.
        }
    }

    public void startPreview() {
        if (!isPreviewOn && mCamera != null) {
            isPreviewOn = true;
            mCamera.startPreview();
        }
    }

    public void stopPreview() {
        if (isPreviewOn && mCamera != null) {
            isPreviewOn = false;
            mCamera.stopPreview();
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
            startTime = System.currentTimeMillis();
            return;
        }
        // For buffered (delayed) recording, stash the frame in memory first
        if (RECORD_LENGTH > 0) {
            int i = imagesIndex++ % images.length;
            yuvImage = images[i];
            timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
        }
        if (yuvImage != null && recording) {
            ((ByteBuffer) yuvImage.image[0].position(0)).put(data);
            // For a live stream, write the frame straight into the FFmpegFrameRecorder
            if (RECORD_LENGTH <= 0) try {
                Log.v(LOG_TAG, "Writing Frame");
                long t = 1000 * (System.currentTimeMillis() - startTime);
                if (t > recorder.getTimestamp()) {
                    recorder.setTimestamp(t);
                }
                recorder.record(yuvImage);
            } catch (FFmpegFrameRecorder.Exception e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }
    }
}
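The fields yuvImage, images, and timestamps that onPreviewFrame() reads and writes are never allocated in the snippet above. A minimal setup sketch, modeled on JavaCV's RecordActivity sample (the field names are the article's; the allocation itself is an assumption about the project):

// Assumed initialization of the frame buffers (not shown in the article):
if (RECORD_LENGTH > 0) {
    imagesIndex = 0;
    images = new Frame[RECORD_LENGTH * frameRate];
    timestamps = new long[images.length];
    for (int i = 0; i < images.length; i++) {
        // DEPTH_UBYTE with 2 channels leaves enough room for an NV21 preview frame
        images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
        timestamps[i] = -1;
    }
} else if (yuvImage == null) {
    yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
}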

Next, initialize the FFmpegFrameRecorder class:

recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
// Set the video codec; 28 is the FFmpeg codec ID for H.264
recorder.setVideoCodec(28);
recorder.setFormat("flv");
// Set the audio sample rate
recorder.setSampleRate(sampleAudioRateInHz);
// Set the frame rate, i.e. the number of images per second
recorder.setFrameRate(frameRate);

// Audio capture thread
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
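Two details of this block are worth unpacking. The magic number 28 is the value of AV_CODEC_ID_H264 in the FFmpeg builds JavaCV wrapped at the time, and ffmpeg_link is simply the RTMP ingest URL of the streaming server. A sketch using the named constant instead (the URL below is a placeholder, and the constant's package depends on the JavaCV version, e.g. org.bytedeco.javacpp.avcodec in older releases):

// Equivalent to setVideoCodec(28), but robust against the raw ID changing
// between FFmpeg versions (in FFmpeg 4.x, for example, H.264 is 27, not 28)
recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);

// ffmpeg_link is the RTMP ingest address of the streaming server, e.g.:
String ffmpeg_link = "rtmp://example.com/live/myStream";  // placeholder address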

AudioRecordRunnable here is the audio capture thread we implemented ourselves; its code is as follows:

class AudioRecordRunnable implements Runnable {

    @Override
    public void run() {
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

        int bufferSize;
        ShortBuffer audioData;
        int bufferReadResult;

        bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

        if (RECORD_LENGTH > 0) {
            // Buffered recording needs enough buffers to hold RECORD_LENGTH seconds of audio
            samplesIndex = 0;
            samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
            for (int i = 0; i < samples.length; i++) {
                samples[i] = ShortBuffer.allocate(bufferSize);
            }
        } else {
            // A live stream only needs a buffer for one chunk of audio data
            audioData = ShortBuffer.allocate(bufferSize);
        }

        Log.d(LOG_TAG, "audioRecord.startRecording()");
        audioRecord.startRecording();

        /* ffmpeg_audio encoding loop */
        while (runAudioThread) {
            if (RECORD_LENGTH > 0) {
                audioData = samples[samplesIndex++ % samples.length];
                audioData.position(0).limit(0);
            }
            bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
            audioData.limit(bufferReadResult);
            if (bufferReadResult > 0) {
                Log.v(LOG_TAG, "bufferReadResult: " + bufferReadResult);
                if (recording) {
                    // For a live stream, write the samples straight into the recorder
                    if (RECORD_LENGTH <= 0) try {
                        recorder.recordSamples(audioData);
                    } catch (FFmpegFrameRecorder.Exception e) {
                        Log.v(LOG_TAG, e.getMessage());
                        e.printStackTrace();
                    }
                }
            }
        }
        Log.v(LOG_TAG, "AudioThread Finished, release audioRecord");

        /* encoding finished, release the recorder */
        if (audioRecord != null) {
            audioRecord.stop();
            audioRecord.release();
            audioRecord = null;
            Log.v(LOG_TAG, "audioRecord released");
        }
    }
}
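One caveat: recording and runAudioThread are shared between the UI thread, the camera preview callback, and this audio thread, yet the snippets leave them as plain booleans. Declaring them volatile is a suggested hardening (not something the original code does) that guarantees the writes made in the start/stop methods shown next are visible to the audio loop:

// Suggested declarations for the shared flags (names from the article; volatile is an addition):
private volatile boolean recording = false;      // set in startRecording(), read by both capture callbacks
private volatile boolean runAudioThread = true;  // polled by the audio loop, cleared in stopRecording()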

Next come the methods for starting and stopping the live stream:

// Start the live stream
public void startRecording() {
    initRecorder();
    try {
        recorder.start();
        startTime = System.currentTimeMillis();
        recording = true;
        audioThread.start();
    } catch (FFmpegFrameRecorder.Exception e) {
        e.printStackTrace();
    }
}

// Stop the live stream
public void stopRecording() {
    // Stop the audio thread
    runAudioThread = false;
    try {
        audioThread.join();
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
    audioRecordRunnable = null;
    audioThread = null;

    if (recorder != null && recording) {
        // For buffered recording, timestamp the buffered frames and write them out now
        if (RECORD_LENGTH > 0) {
            Log.v(LOG_TAG, "Writing frames");
            try {
                int firstIndex = imagesIndex % images.length;
                int lastIndex = (imagesIndex - 1) % images.length;
                if (imagesIndex <= images.length) {
                    firstIndex = 0;
                    lastIndex = imagesIndex - 1;
                }
                if ((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
                    startTime = 0;
                }
                if (lastIndex < firstIndex) {
                    lastIndex += images.length;
                }
                for (int i = firstIndex; i <= lastIndex; i++) {
                    long t = timestamps[i % timestamps.length] - startTime;
                    if (t >= 0) {
                        if (t > recorder.getTimestamp()) {
                            recorder.setTimestamp(t);
                        }
                        recorder.record(images[i % images.length]);
                    }
                }

                firstIndex = samplesIndex % samples.length;
                lastIndex = (samplesIndex - 1) % samples.length;
                if (samplesIndex <= samples.length) {
                    firstIndex = 0;
                    lastIndex = samplesIndex - 1;
                }
                if (lastIndex < firstIndex) {
                    lastIndex += samples.length;
                }
                for (int i = firstIndex; i <= lastIndex; i++) {
                    recorder.recordSamples(samples[i % samples.length]);
                }
            } catch (FFmpegFrameRecorder.Exception e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }
        recording = false;
        Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
        try {
            recorder.stop();
            recorder.release();
        } catch (FFmpegFrameRecorder.Exception e) {
            e.printStackTrace();
        }
        recorder = null;
    }
}
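As a usage sketch, the two methods are typically driven by a toggle button in the recording Activity; the button field and its labels below are assumptions, not taken from the project:

// Hypothetical wiring inside the recording Activity:
btnRecorderControl.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        if (!recording) {
            startRecording();
            btnRecorderControl.setText("Stop");
        } else {
            stopRecording();
            btnRecorderControl.setText("Start");
        }
    }
});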

Those are the key steps and code. The complete project is available at RtmpRecorder.
