標籤:
前面已經介紹了通過live555來實現媒體檔案的播放。這篇主要和大家說一下如何通過live555播放即時資料流。
相對之前的檔案流,這裡即時資料流只需要多實現一個子類:通過繼承RTSPServer類來實現一些自己的相關操作。
如:有用戶端請求過來的時候,需要先通過lookupServerMediaSession找到對應的session,這裡可以定義自己的streamName,也就是url後面那個字串。如果沒有找到,則建立產生自己需要的不同的session,還有填充自己的SDP資訊等等操作。
繼承RTSPServer的子類實現如下:具體的一些實現可以參考RTSPServer的實現,只需要修改填充自己的session的即可。
#include "DemoH264RTSPServer.h"
#include "DemoH264Interface.h"
#include "DemoH264MediaSubsession.h"

// Factory: bind the RTSP listening socket first, then build the server.
// Returns NULL (and logs) when the socket cannot be set up.
DemoH264RTSPServer* DemoH264RTSPServer::createNew(UsageEnvironment& env, Port rtspPort,
        UserAuthenticationDatabase* authDatabase, unsigned reclamationTestSeconds)
{
    int rtspSock = setUpOurSocket(env, rtspPort);
    if (rtspSock == -1) {
        DBG_LIVE555_PRINT("setUpOurSocket failed\n");
        return NULL;
    }
    return new DemoH264RTSPServer(env, rtspSock, rtspPort, authDatabase, reclamationTestSeconds);
}

DemoH264RTSPServer::DemoH264RTSPServer(UsageEnvironment& env, int ourSock, Port rtspPort,
        UserAuthenticationDatabase* authDatabase, unsigned reclamationTestSeconds)
    : RTSPServer(env, ourSock, rtspPort, authDatabase, reclamationTestSeconds),
      fRTSPServerState(true)
{
    DBG_LIVE555_PRINT("create DemoH264RTSPServer \n");
}

DemoH264RTSPServer::~DemoH264RTSPServer()
{
}

// Map the RTSP URL suffix ("stream name") to a ServerMediaSession, creating
// one on first request.  E.g. for rtsp://10.0.2.15/streamNameCH00StreamType00
// streamName == "streamNameCH00StreamType00"; a real implementation would
// parse channel / stream type out of that suffix.
ServerMediaSession* DemoH264RTSPServer::lookupServerMediaSession(const char* streamName)
{
    // Demo values; URL parsing is intentionally not done here.
    int channelNO   = 0; // channel number
    int streamType  = 0; // main / sub stream
    int videoType   = 1; // video or audio
    int requestType = 0; // live preview or playback

    ServerMediaSession* sms = NULL;
    switch (requestType) {
    case 0: // live preview
        sms = RTSPServer::lookupServerMediaSession(streamName);
        if (NULL == sms) {
            sms = ServerMediaSession::createNew(envir(), streamName, NULL, NULL);
            DemoH264MediaSubsession* subsession =
                DemoH264MediaSubsession::createNew(envir(), streamType, videoType, channelNO, false);
            sms->addSubsession(subsession);
            // BUGFIX: register only a *newly created* session.  The old code
            // called addServerMediaSession() unconditionally, re-adding an
            // already-registered session and even a NULL one on the
            // playback / unknown-request paths.
            addServerMediaSession(sms);
        }
        break;
    case 1: // playback - not implemented in this demo
        DBG_LIVE555_PRINT("play back request !\n");
        break;
    default:
        DBG_LIVE555_PRINT("unknown request type!\n");
        break;
    }
    return sms;
}

// Track each new client session so stopDemoH264RTSPServer() can tear them
// all down later.
DemoH264RTSPServer::DemoH264RTSPClientSession* DemoH264RTSPServer::createNewClientSession(
        unsigned clientSessionID, int clientSocket, struct sockaddr_in clientAddr)
{
    DemoH264RTSPServer::DemoH264RTSPClientSession* client =
        new DemoH264RTSPClientSession(*this, clientSessionID, clientSocket, clientAddr);
    fClientSessionList.push_back(client);
    DBG_LIVE555_PRINT("add client session success!\n");
    return client;
}

// Delete every tracked client session, then the server itself.
// NOTE: this deletes `this`; the caller must not touch the object afterwards.
int DemoH264RTSPServer::stopDemoH264RTSPServer()
{
    std::list<DemoH264RTSPServer::DemoH264RTSPClientSession*>::iterator pos;
    for (pos = fClientSessionList.begin(); pos != fClientSessionList.end(); ++pos) {
        delete *pos;
    }
    fClientSessionList.clear(); // BUGFIX: the list held dangling pointers after the deletes
    delete this;
    return 0; // BUGFIX: function is declared int but had no return statement (UB)
}

DemoH264RTSPServer::DemoH264RTSPClientSession::DemoH264RTSPClientSession(
        DemoH264RTSPServer& rtspServer, unsigned clietnSessionID,
        int clientSocket, struct sockaddr_in clientAddr)
    : RTSPServer::RTSPClientSession(rtspServer, clietnSessionID, clientSocket, clientAddr)
{
}

DemoH264RTSPServer::DemoH264RTSPClientSession::~DemoH264RTSPClientSession()
{
    /* The original (disabled) code removed this session from the owning
       server's fClientSessionList by matching fOurSessionId.  Kept disabled:
       stopDemoH264RTSPServer() would otherwise erase from the list while
       iterating over it. */
}
因為這些實現,就要請求不同的碼流類型,主次碼流,或者音頻視頻這些,所以Source和Session的子類實現也得作相應的修改:
#include "DemoH264FrameSource.h"
#include "DemoH264Interface.h"

DemoH264FrameSource::DemoH264FrameSource(UsageEnvironment& env, long sourceHandle, int sourceType)
    : FramedSource(env), fSourceHandle(sourceHandle), fLastBufSize(0),
      fLeftDataSize(0), fSourceType(sourceType), fFirstFrame(1)
{
    // For a live stream this is where pre-streaming setup would go.
    // 2 MB frame buffer; must match the size check in getStreamData().
    fDataBuf = (char*)malloc(2 * 1024 * 1024);
    if (fDataBuf == NULL) {
        DBG_LIVE555_PRINT(" create source data buf failed!\n");
    }
}

DemoH264FrameSource::~DemoH264FrameSource()
{
    if (fDataBuf) {
        free(fDataBuf);
        fDataBuf = NULL;
    }
}

// Create a source for the requested channel / stream type by opening the
// matching stream handle.
DemoH264FrameSource* DemoH264FrameSource::createNew(UsageEnvironment& env,
        int streamType, int channelNO, int sourceType)
{
    long sourceHandle = openStreamHandle(channelNO, streamType);
    // BUGFIX: openStreamHandle() reports failure with a non-positive value;
    // the old `== 0` test let a -1 error value slip through as a "valid"
    // handle.
    if (sourceHandle <= 0) {
        DBG_LIVE555_PRINT("open the source stream failed!\n");
        return NULL;
    }
    DBG_LIVE555_PRINT("create H264FrameSource !\n");
    return new DemoH264FrameSource(env, sourceHandle, sourceType);
}

// Total size of a file; helper kept for reference (live555 limits how much
// data may be delivered per doGetNextFrame() call).
long filesize(FILE* stream)
{
    long curpos = ftell(stream);
    fseek(stream, 0L, SEEK_END);
    long length = ftell(stream);
    fseek(stream, curpos, SEEK_SET);
    return length;
}

// Deliver (at most fMaxSize bytes of) the next frame to the downstream
// object, then signal completion via FramedSource::afterGetting().
void DemoH264FrameSource::doGetNextFrame()
{
    // Fetch a new frame from the device only once the previous one has been
    // fully delivered.
    if (fLeftDataSize == 0) {
        int ret = getStreamData(fSourceHandle, fDataBuf, &fLastBufSize,
                                &fLeftDataSize, fSourceType);
        if (ret <= 0) {
            DBG_LIVE555_PRINT("getStreamData failed!\n");
            // NOTE(review): returning without ever calling afterGetting()
            // stalls the stream permanently; scheduling a retry or calling
            // handleClosure() would be more robust - confirm desired policy.
            return;
        }
    }

    unsigned newFrameSize = fLeftDataSize; // unsigned: avoids signed/unsigned compare with fMaxSize
    if (newFrameSize > fMaxSize) {
        // The sink cannot take the whole frame: deliver fMaxSize bytes now
        // and keep the remainder in fDataBuf for the next call.
        fFrameSize = fMaxSize;
        // BUGFIX: the remainder is *saved*, not dropped, so it must not be
        // reported via fNumTruncatedBytes (which means "bytes discarded" to
        // live555 and triggers data loss / warnings downstream).
        fNumTruncatedBytes = 0;
        fLeftDataSize = newFrameSize - fMaxSize;
        memmove(fTo, fDataBuf, fFrameSize);
        // memmove (not memcpy): source and destination ranges overlap.
        memmove(fDataBuf, fDataBuf + fMaxSize, fLeftDataSize);
    } else {
        // Everything fits: deliver the whole frame.
        fFrameSize = newFrameSize;
        fLeftDataSize = 0;
        memmove(fTo, fDataBuf, fFrameSize);
    }
    gettimeofday(&fPresentationTime, NULL);

    if (fFirstFrame) {
        // Delay the very first delivery slightly (25 fps -> 40 ms/frame).
        fDurationInMicroseconds = 40000;
        nextTask() = envir().taskScheduler().scheduleDelayedTask(
            100000, (TaskFunc*)FramedSource::afterGetting, this);
        fFirstFrame = 0;
    } else {
        FramedSource::afterGetting(this);
    }
}

void DemoH264FrameSource::doStopGetFrame()
{
    closeStreamHandle(fSourceHandle);
}
session的子類實現
#include "DemoH264MediaSubsession.h"
#include "DemoH264FrameSource.h"
#include "DemoH264Interface.h"
#include "H264VideoStreamFramer.hh"
#include "H264VideoRTPSink.hh"

DemoH264MediaSubsession::DemoH264MediaSubsession(UsageEnvironment& env, int streamType,
        int videoType, int channelNO, bool reuseFirstSource, portNumBits initalNumPort)
    : OnDemandServerMediaSubsession(env, reuseFirstSource),
      fStreamType(streamType), fVideoType(videoType), fChannelNO(channelNO)
{
}

DemoH264MediaSubsession::~DemoH264MediaSubsession()
{
}

DemoH264MediaSubsession* DemoH264MediaSubsession::createNew(UsageEnvironment& env,
        int streamType, int videoType, int channelNO, bool reuseFirstSource,
        portNumBits initalNumPort)
{
    return new DemoH264MediaSubsession(env, streamType, videoType, channelNO,
                                       reuseFirstSource, initalNumPort);
}

// Create the media source that matches the requested payload type.
FramedSource* DemoH264MediaSubsession::createNewStreamSource(unsigned clientsessionId,
        unsigned& estBitrate)
{
    DBG_LIVE555_PRINT("create new stream source !\n");
    if (fVideoType == 0x01) { // H264 video
        estBitrate = 2000; // kbps
        DemoH264FrameSource* source =
            DemoH264FrameSource::createNew(envir(), fStreamType, fChannelNO, 0);
        if (source == NULL) {
            DBG_LIVE555_PRINT("create source failed videoType:%d!\n", fVideoType);
            return NULL;
        }
        // Wrap the raw byte stream in a framer so the sink receives NAL units.
        return H264VideoStreamFramer::createNew(envir(), source);
    } else if (fVideoType == 0x2) { // MPEG-4 video - not implemented
    } else if (fVideoType == 0x04) { // G711 audio
        estBitrate = 128; // kbps
        DemoH264FrameSource* source =
            DemoH264FrameSource::createNew(envir(), fStreamType, fChannelNO, 1);
        if (source == NULL) {
            DBG_LIVE555_PRINT("create source failed videoType:%d!\n", fVideoType);
            return NULL;
        }
        return source;
    }
    // Unknown / unimplemented type.
    return NULL;
}

// Create the RTP sink that matches the source created above.  Different
// payload types would subclass / pick different sinks here.
RTPSink* DemoH264MediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock,
        unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource)
{
    DBG_LIVE555_PRINT("createNewRTPnk videoType:%d!\n", fVideoType);
    if (fVideoType == 0x01) { // H264 video
        return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
    } else if (fVideoType == 0x02) {
        // MPEG-4 - not implemented (would be e.g. MPEG4ESVideoRTPSink)
    } else if (fVideoType == 0x04) {
        // G711 audio - not implemented (would be e.g. SimpleRTPSink "PCMU")
    }
    // BUGFIX: the MPEG-4 / G711 branches previously fell off the end of a
    // non-void function (undefined behavior); every unhandled type now
    // returns NULL explicitly.
    return NULL;
}

/* Override sdpLines() here if the SDP must be hand-crafted:
char const* DemoH264MediaSubsession::sdpLines()
{
    // create sdp info
    return fSDPLines;
}
*/
最後一個Interface的類,主要是封裝一些live555的處理操作以及即時擷取碼流資料的介面(自己可以根據實際情況寫成回調或者其他,看怎麼實現方便),方便其他地方的調用。
#include "DemoH264Interface.h"
#include "DemoH264RTSPServer.h"

// Open a live-stream handle for the given channel / stream type.
// Demo implementation: reads a local file whose layout is
//   FrameHead_S + H264 payload + FrameHead_S + H264 payload + ...
// Returns 0 on failure (callers treat any non-positive value as failure).
long openStreamHandle(int channelNO, int streamType)
{
    FILE* fp = fopen("stream264file.h264", "rb+");
    if (NULL == fp) {
        DBG_LIVE555_PRINT("open streamhandle failed!\n");
        // BUGFIX: was -1, which DemoH264FrameSource::createNew's `== 0`
        // failure test did not catch, so a bad handle was used as valid.
        return 0;
    }
    return (long)fp;
}

// Read one frame into `buf`.
// On success returns the frame length and stores it in both *bufsize and
// *leftbufsize; returns 0 for a skippable oversized frame and -1 on error.
int getStreamData(long lHandle, char* buf, unsigned* bufsize,
                  unsigned* leftbufsize, int sourcetype)
{
    if (lHandle <= 0) {
        DBG_LIVE555_PRINT(" lHandle error !\n");
        return -1;
    }
    FILE* fp = (FILE*)lHandle;

    // 1. read the per-frame header
    FrameHead_S stFrameHead;
    memset(&stFrameHead, 0, sizeof(FrameHead_S));
    if (fread(&stFrameHead, 1, sizeof(FrameHead_S), fp) != sizeof(FrameHead_S)) {
        DBG_LIVE555_PRINT(" read Frame Header Failed !\n");
        return -1;
    }

    // 2. read the H264 payload
    if (stFrameHead.FrameLen > 2 * 1024 * 1024) { // source's data buffer is only 2 MB
        DBG_LIVE555_PRINT("data is too long:framlen=%d\n", stFrameHead.FrameLen);
        // BUGFIX: skip the oversized payload so the next read starts on a
        // frame header again instead of desynchronizing the file position.
        fseek(fp, stFrameHead.FrameLen, SEEK_CUR);
        return 0;
    }
    if (fread(buf, 1, stFrameHead.FrameLen, fp) != (size_t)stFrameHead.FrameLen) {
        DBG_LIVE555_PRINT("read Frame rawdata Failed!\n");
        return -1;
    }

    // BUGFIX: the output sizes were never written, so the caller
    // (DemoH264FrameSource::doGetNextFrame) always saw 0 bytes to deliver.
    *bufsize = stFrameHead.FrameLen;
    *leftbufsize = stFrameHead.FrameLen;
    return stFrameHead.FrameLen;
}

// Close the stream handle and do any cleanup.
void closeStreamHandle(long lHandle)
{
    fclose((FILE*)lHandle);
}

DemoH264Interface* DemoH264Interface::m_Instance = NULL;

// Singleton accessor (lazily constructed, never freed in this demo).
DemoH264Interface* DemoH264Interface::createNew()
{
    if (NULL == m_Instance) {
        m_Instance = new DemoH264Interface();
    }
    return m_Instance;
}

DemoH264Interface::DemoH264Interface()
{
    m_liveServerFlag = false;
}

DemoH264Interface::~DemoH264Interface()
{
}

// Set up the live555 usage environment (and optional authentication).
void DemoH264Interface::InitLive555(void* param)
{
    DBG_LIVE555_PRINT(" ~~~~Init live555 stream\n");
    // Begin by setting up the live555 usage environment.
    m_scheduler = BasicTaskScheduler::createNew();
    m_env = BasicUsageEnvironment::createNew(*m_scheduler);
    // BUGFIX: always initialize m_authDB - startLive555() hands it to
    // DemoH264RTSPServer::createNew(), and it was left uninitialized
    // whenever ACCESS_CONTROL was disabled.
    m_authDB = NULL;
#if ACCESS_CONTROL // authentication
    m_authDB = new UserAuthenticationDatabase;
    m_authDB->addUserRecord("username", "password");
#endif
    m_rtspServer = NULL;
    m_rtspServerPortNum = 554; // configurable
    m_liveServerFlag = true;
}

// Create the RTSP server and enter the live555 event loop.
// NOTE: doEventLoop() never returns under normal operation.
int DemoH264Interface::startLive555()
{
    if (!m_liveServerFlag) {
        DBG_LIVE555_PRINT("Not Init the live server !\n");
        return -1;
    }
    DBG_LIVE555_PRINT(" ~~~~Start live555 stream\n");
    m_rtspServer = DemoH264RTSPServer::createNew(*m_env, m_rtspServerPortNum, m_authDB);
    if (m_rtspServer == NULL) {
        DBG_LIVE555_PRINT("create RTSPServer Failed:%s\n", m_env->getResultMsg());
        return -1;
    }
    m_env->taskScheduler().doEventLoop(); // loop and not come back~
    return 0;
}

int DemoH264Interface::stopLive555()
{
    DBG_LIVE555_PRINT(" ~~~~stop live555 stream\n");
    if (m_liveServerFlag) {
        if (m_rtspServer)
            m_rtspServer->stopDemoH264RTSPServer();
        m_liveServerFlag = false;
    }
    return 0; // BUGFIX: declared int but had no return statement (UB)
}
最後相當於一個demo調用程式:
#include <stdio.h>#include "DemoH264Interface.h"int main(int argc, char* argv[]){// Init// 添加一些需要設定的rtsp服務資訊,如使用者名稱,密碼 連接埠等,通過參數傳遞void* param = NULL;DemoH264Interface::createNew()->InitLive555(param);// start if( -1 == DemoH264Interface::createNew()->startLive555()){DBG_LIVE555_PRINT(" start live555 moudle failed!\n");return 0;}//stop DemoH264Interface::createNew()->stopLive555();return 0;}
完成這些操作後,我在Windows下測試是能正常預覽的,結果如下:
但是在linux系統下貌似不行,出了問題,錯誤如下:
未找到stream,排查了幾天也沒查出來原因,在網上看了其他網友的解惑,貌似說trackId不對,但是我此處ID只有一個,應該不會引起這個問題,估計病因不在這裡。
因為沒有研究源碼的實現,所以暫時未能定位到問題,希望有遇到的知道緣由的大俠能留言告訴我,我將萬分感謝。
上面代碼的完整路徑可以到這裡下載:live555完整代碼
著作權聲明:本文為博主原創文章,未經博主允許不得轉載。
淺析live555媒體庫之實現即時碼流預覽