/*
 * H264DeviceSource.hh
 *
 *  Created on: Jul 19, 2014
 *      Author: zjzhang
 */

#ifndef H264DEVICESOURCE_HH_
#define H264DEVICESOURCE_HH_

#include <DeviceSource.hh>

class H264DeviceSource: public DeviceSource {
public:
    static DeviceSource* createNew(UsageEnvironment& env, u_int8_t index = 1,
                                   u_int width = 352, u_int height = 288,
                                   u_int fps = 15, u_int kbps = 100);

protected:
    H264DeviceSource(UsageEnvironment& env, u_int8_t index, u_int width,
                     u_int height, u_int fps, u_int kbps);
    virtual ~H264DeviceSource();

private:
    virtual void doGetNextFrame();
    virtual unsigned maxFrameSize() const;

    int fHeight;
    int fWidth;
    void* fH264Encoder;
    u_int8_t* fBuffer;
    u_int fBufferSize;
};

#endif /* H264DEVICESOURCE_HH_ */
/*
 * H264DeviceSource.cpp
 *
 *  Created on: Jul 19, 2014
 *      Author: zjzhang
 */

#include "H264DeviceSource.hh"

#ifdef __cplusplus
extern "C" {
#endif
#include "H264Stream.h"
#ifdef __cplusplus
}
#endif

DeviceSource* H264DeviceSource::createNew(UsageEnvironment& env, u_int8_t index,
        u_int width, u_int height, u_int fps, u_int kbps) {
    return new H264DeviceSource(env, index, width, height, fps, kbps);
}

H264DeviceSource::H264DeviceSource(UsageEnvironment& env, u_int8_t index,
        u_int width, u_int height, u_int fps, u_int kbps) :
        DeviceSource(env, DeviceParameters()) {
    openCamera(1);
    getFrame(1); // grab one frame so the driver reports the real capture size
    fHeight = getHeight(1);
    fWidth = getWidth(1);
    openH264Encoder(fWidth, fHeight, fps, kbps, &fH264Encoder);
    fBufferSize = fHeight * fWidth * 3 / 2; // one YUV420 frame
    fBuffer = new uint8_t[fBufferSize];
}

H264DeviceSource::~H264DeviceSource() {
    delete[] fBuffer;
    closeH264Encoder(fH264Encoder);
    closeCamera(1);
}

unsigned H264DeviceSource::maxFrameSize() const {
    // Upper bound on the size of the frames we deliver downstream:
    return 4096;
}

void H264DeviceSource::doGetNextFrame() {
    if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet

    // Capture an RGB frame, convert it to YUV420, and encode it:
    unsigned char* rgbBuffer = getFrame(1);
    convertRGB2YUV(fWidth, fHeight, rgbBuffer, fBuffer);
    int newFrameSize = encodeFrame(fH264Encoder, fBuffer, fBufferSize);

    // Deliver the data here:
    if (newFrameSize < 0) {
        handleClosure();
        return;
    }
    if (newFrameSize > fMaxSize) {
        fFrameSize = fMaxSize;
        fNumTruncatedBytes = newFrameSize - fMaxSize;
    } else {
        fFrameSize = newFrameSize;
    }
    if (fFrameSize > 0) {
        // Drain the encoder's output packets into fBuffer:
        int result = 0;
        int p = 0;
        do {
            unsigned long len = 0;
            result = getNextPacket(fH264Encoder, fBuffer + p, &len);
            p += len;
        } while (result > 0);
    }
    // If you have a more accurate time (e.g., from an encoder), use that instead:
    gettimeofday(&fPresentationTime, NULL);
    // If the device is *not* a 'live source' (e.g., it comes instead from a file
    // or buffer), then set "fDurationInMicroseconds" here.
    memmove(fTo, fBuffer, fFrameSize);
    FramedSource::afterGetting(this);
}
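The capture and encoding calls above (openCamera, getFrame, getWidth, getHeight, convertRGB2YUV, openH264Encoder, encodeFrame, getNextPacket, closeH264Encoder, closeCamera) come from the C shim included as "H264Stream.h", which is not shown here. A minimal sketch of that interface, inferred purely from how it is used above (all names and signatures are assumptions; the real header may differ):

/* H264Stream.h - hypothetical interface, reconstructed from the calls
 * made in H264DeviceSource.cpp. */
#ifndef H264STREAM_H_
#define H264STREAM_H_

/* Camera capture; the integer argument selects the device (1 above). */
int openCamera(int deviceId);
void closeCamera(int deviceId);
unsigned char* getFrame(int deviceId); /* returns one captured RGB frame */
int getWidth(int deviceId);
int getHeight(int deviceId);

/* Color-space conversion: RGB in, YUV420 out. */
void convertRGB2YUV(int width, int height,
                    const unsigned char* rgb, unsigned char* yuv);

/* H.264 encoder behind an opaque handle. */
int openH264Encoder(int width, int height, int fps, int kbps, void** encoder);
void closeH264Encoder(void* encoder);
int encodeFrame(void* encoder, unsigned char* yuv, unsigned yuvSize);
int getNextPacket(void* encoder, unsigned char* dst, unsigned long* len);

#endif /* H264STREAM_H_ */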
#ifndef _DEVICE_SERVER_MEDIA_SUBSESSION_HH
#define _DEVICE_SERVER_MEDIA_SUBSESSION_HH

#ifndef _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH
#include "OnDemandServerMediaSubsession.hh"
#endif

class DeviceSource;

class DeviceServerMediaSubsession: public OnDemandServerMediaSubsession {
public:
    static DeviceServerMediaSubsession* createNew(UsageEnvironment& env,
                                                  Boolean reuseFirstSource);

    // Used to implement "getAuxSDPLine()":
    void checkForAuxSDPLine1();
    void afterPlayingDummy1();

protected: // we're a virtual base class
    DeviceServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource);
    virtual ~DeviceServerMediaSubsession();

    void setDoneFlag() { fDoneFlag = ~0; }

protected: // redefined virtual functions
    virtual char const* getAuxSDPLine(RTPSink* rtpSink,
                                      FramedSource* inputSource);
    virtual FramedSource* createNewStreamSource(unsigned clientSessionId,
                                                unsigned& estBitrate);
    virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
                                      unsigned char rtpPayloadTypeIfDynamic,
                                      FramedSource* inputSource);

private:
    char* fAuxSDPLine;
    char fDoneFlag; // used when setting up "fAuxSDPLine"
    RTPSink* fDummyRTPSink; // ditto
};

#endif // _DEVICE_SERVER_MEDIA_SUBSESSION_HH
#include "DeviceServerMediaSubsession.hh"#include "H264VideoRTPSink.hh"#include "DeviceSource.hh"#include "H264VideoStreamFramer.hh"#include "H264DeviceSource.hh"DeviceServerMediaSubsession*DeviceServerMediaSubsession::createNew(UsageEnvironment& env,Boolean reuseFirstSource) {return new DeviceServerMediaSubsession(env, reuseFirstSource);}DeviceServerMediaSubsession::DeviceServerMediaSubsession(UsageEnvironment& env,Boolean reuseFirstSource) :OnDemandServerMediaSubsession(env, reuseFirstSource) {}DeviceServerMediaSubsession::~DeviceServerMediaSubsession() {}FramedSource* DeviceServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) {DeviceSource* source = H264DeviceSource::createNew(envir());return H264VideoStreamFramer::createNew(envir(), source);}RTPSink* DeviceServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock,unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/) {return H264VideoRTPSink::createNew(envir(), rtpGroupsock,rtpPayloadTypeIfDynamic);}static void afterPlayingDummy(void* clientData) {DeviceServerMediaSubsession* subsess =(DeviceServerMediaSubsession*) clientData;subsess->afterPlayingDummy1();}void DeviceServerMediaSubsession::afterPlayingDummy1() {// Unschedule any pending 'checking' task:envir().taskScheduler().unscheduleDelayedTask(nextTask());// Signal the event loop that we're done:setDoneFlag();}static void checkForAuxSDPLine(void* clientData) {DeviceServerMediaSubsession* subsess =(DeviceServerMediaSubsession*) clientData;subsess->checkForAuxSDPLine1();}void DeviceServerMediaSubsession::checkForAuxSDPLine1() {char const* dasl;if (fAuxSDPLine != NULL) {// Signal the event loop that we're done:setDoneFlag();} else if (fDummyRTPSink != NULL&& (dasl = fDummyRTPSink->auxSDPLine()) != NULL) {fAuxSDPLine = strDup(dasl);fDummyRTPSink = NULL;// Signal the event loop that we're done:setDoneFlag();} else if (!fDoneFlag) {// try again after a brief delay:int uSecsToDelay = 100000; // 100 msnextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,(TaskFunc*) checkForAuxSDPLine, this);}}char const* DeviceServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource) {if (fAuxSDPLine != NULL)return fAuxSDPLine; // it's already been set up (for a previous client)if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream// Note: For H264 video files, the 'config' information ("profile-level-id" and "sprop-parameter-sets") isn't known// until we start reading the file. This means that "rtpSink"s "auxSDPLine()" will be NULL initially,// and we need to start reading data from our file until this changes.fDummyRTPSink = rtpSink;// Start reading the file:fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);// Check whether the sink's 'auxSDPLine()' is ready:checkForAuxSDPLine(this);}envir().taskScheduler().doEventLoop(&fDoneFlag);return fAuxSDPLine;}