Use live555 to stream live camera images from V4L2 over RTSP

Source: Internet
Author: User

Building on the earlier article about capturing V4L2 video, this one uses live555 to publish a live stream via RTSP. The files capture.h, capture.cpp, vcompress.h, and vcompress.cpp are described in the previous articles; this article covers only v4l2_x264_service.cpp.

#include <stdio.h> #include <stdlib.h> #include <unistd.h> #include <assert.h> #include < livemedia.hh> #include <BasicUsageEnvironment.hh> #include <GroupsockHelper.hh> #include <sys/ types.h> #include <sys/syscall.h> #include "capture.h" #include "vcompress.h" static usageenvironment *_env = 0

; #define SINK_PORT 3030 #define VIDEO_WIDTH #define VIDEO_HEIGHT #define FRAME_PER_SEC 5.0 pid_t Gettid () {Retu
RN Syscall (Sys_gettid); }//Using webcam + x264 class webcamframesource:public Framedsource {void *mp_capture, *mp_compress;//v4l2 + x264 en
	coder int m_started;

void *mp_token; Public:webcamframesource (usageenvironment &env): Framedsource (env) {fprintf (stderr, "[%d]%s .... calling\n")
		, Gettid (), __func__);
		Mp_capture = Capture_open ("/dev/video0", Video_width, Video_height, pix_fmt_yuv420p);
			if (!mp_capture) {fprintf (stderr, "%s:open/dev/video0 err\n", __func__);
		Exit (-1); } MP_Compress = Vc_open (Video_width, Video_height, frame_per_sec);
			if (!mp_compress) {fprintf (stderr, "%s:open x264 err\n", __func__);
		Exit (-1);
		} m_started = 0;
	Mp_token = 0;
		
		} ~webcamframesource () {fprintf (stderr, "[%d]%s ... calling\n", Gettid (), __func__);
		if (m_started) {envir (). TaskScheduler (). Unscheduledelayedtask (Mp_token);
		} if (mp_compress) vc_close (mp_compress);
	if (mp_capture) capture_close (mp_capture);
		} protected:virtual void Dogetnextframe () {if (m_started) return;

		m_started = 1;
		Based on FPS, calculate the wait time double delay = 1000.0/frame_per_sec;	int to_delay = delay * 1000;
	US mp_token = EnviR (). TaskScheduler (). Scheduledelayedtask (To_delay, GetNextFrame, this); }
        Virtual unsigned maxframesize () const        //This is important, if not set, may cause getnextframe () to appear fmaxsize less than the actual encoded frame, resulting in an incomplete image
        {    return 100*1024;}
private:static void GetNextFrame (void *ptr) {(webcamframesource*) PTR)->getnextframe1 ();
		} void GetNextFrame1 () {//capture:picture pic; if (Capture_get_picture (Mp_capture, &pic) < 0) {fprintf (stderr, "= =%s:capture_get_picture err\n", __func__)
			;
			m_started = 0;
		Return
		}//Compress const void *OUTBUF;
		int Outlen; if (Vc_compress (mp_compress, Pic.data, Pic.stride, &outbuf, &outlen) < 0) {fprintf (stderr, "= =%s:vc_com
			Press err\n ", __func__);
			m_started = 0;
		Return
		} int64_t pts, DTS;
		int key;

		Vc_get_last_frame_info (mp_compress, &key, &pts, &dts);
		Save Outbuf Gettimeofday (&fpresentationtime, 0);
		Fframesize = Outlen;
			if (Fframesize > fmaxsize) {fnumtruncatedbytes = fframesize-fmaxsize;
		Fframesize = fmaxsize;
		} else {fnumtruncatedbytes = 0;

		} memmove (FTo, Outbuf, fframesize);

		Notify Aftergetting (this);
	m_started = 0;

}
}; Class WebcamondeMandmediasubsession:public ondemandservermediasubsession {public:static webcamondemandmediasubsession *createNew (
	Usageenvironment &env, Framedsource *source) {return new webcamondemandmediasubsession (env, source); } protected:webcamondemandmediasubsession (Usageenvironment &env, Framedsource *source): OnDemandServerMediaSubs
		Ession (env, True)//reuse the first source {fprintf (stderr, "[%d]%s .... calling\n", Gettid (), __func__);
		Mp_source = source;
	Mp_sdp_line = 0;
		} ~webcamondemandmediasubsession () {fprintf (stderr, "[%d]%s ... calling\n", Gettid (), __func__);
	if (mp_sdp_line) free (mp_sdp_line); 
		} private:static void Afterplayingdummy (void *ptr) {fprintf (stderr, "[%d]%s .... calling\n", Gettid (), __func__);
		OK webcamondemandmediasubsession *this = (webcamondemandmediasubsession*) ptr;
	This->m_done = 0xFF; } static void Chkforauxsdpline (void *ptr) {webcamondemandmediasubsession *this = (webcamondemandmediasubsessIon *) ptr;
	This->chkforauxsdpline1 ();
		} void ChkForAuxSDPLine1 () {fprintf (stderr, "[%d]%s ... calling\n", Gettid (), __func__);
		if (Mp_dummy_rtpsink->auxsdpline ()) M_done = 0xFF; else {int delay = 100*1000;//100ms nexttask () = EnviR (). TaskScheduler (). Scheduledelayedtask (Delay, Chkforaux
		Sdpline, this); }} protected:virtual const char *getauxsdpline (Rtpsink *sink, Framedsource *source) {fprintf (stderr, "[%d]%s: ..
		Calling\n ", Gettid (), __func__);

		if (mp_sdp_line) return mp_sdp_line;
		Mp_dummy_rtpsink = sink;
		Mp_dummy_rtpsink->startplaying (*source, 0, 0);
		Mp_dummy_rtpsink->startplaying (*source, Afterplayingdummy, this);
		Chkforauxsdpline (this);
		M_done = 0;
		EnviR (). TaskScheduler (). Doeventloop (&m_done);
		Mp_sdp_line = StrDup (Mp_dummy_rtpsink->auxsdpline ());

		Mp_dummy_rtpsink->stopplaying ();
	return mp_sdp_line; } Virtual Rtpsink *createnewrtpsink (Groupsock *rtpsock, unsigned char type, Framedsource *soUrce) {fprintf (stderr, "[%d]%s ... calling\n", Gettid (), __func__);
	Return H264videortpsink::createnew (EnviR (), rtpsock, type); } virtual Framedsource *createnewstreamsource (unsigned sid, unsigned &bitrate) {fprintf (stderr, "[%d]%s .... c)
		Alling\n ", Gettid (), __func__);
		bitrate = 500;
	Return H264videostreamframer::createnew (EnviR (), New Webcamframesource (EnviR ()));	} Private:framedsource *mp_source;
	Corresponds to Webcamframesource char *mp_sdp_line;
	Rtpsink *mp_dummy_rtpsink;
Char M_done;

};
	static void Test_task (void *ptr) {fprintf (stderr, "test:task .... \ n");
_env->taskscheduler (). Scheduledelayedtask (100000, test_task, 0);

	} static void Test (Usageenvironment &env) {fprintf (stderr, "test:begin...\n");
	char done = 0;
	int delay = 100 * 1000;
	Env.taskscheduler (). Scheduledelayedtask (delay, test_task, 0);

	Env.taskscheduler (). Doeventloop (&done); fprintf (stderr, "test:end.
\ n "); } int main (int argc, char **argv) {//env TaskScheduler*scheduler = Basictaskscheduler::createnew ();

	_env = Basicusageenvironment::createnew (*scheduler);

	Test//test (*_ENV);
	RTSP server Rtspserver *rtspserver = rtspserver::createnew (*_env, 8554);
		if (!rtspserver) {fprintf (stderr, "Err:create rtspserver err\n");
	:: Exit (-1);

		}//Add live stream do {Webcamframesource *webcam_source = 0; 
		Servermediasession *sms = servermediasession::createnew (*_env, "webcam", 0, "Session from/dev/video0");
		Sms->addsubsession (Webcamondemandmediasubsession::createnew (*_env, Webcam_source));

		Rtspserver->addservermediasession (SMS);
		Char *url = rtspserver->rtspurl (SMS);
		*_env << "Using URL \" "<< url <<" \ "\ n";
	delete [] URL;

	} while (0);

	Run Loop _env->taskscheduler (). Doeventloop ();
return 1;
 }

Requires live555 + libavcodec + libswscale + libx264. Clients can play the stream with VLC, MPlayer, QuickTime, etc.

Contact Us

The content on this page is sourced from the Internet and does not represent Alibaba Cloud's opinion; the products and services mentioned on this page have no relationship with Alibaba Cloud. If the content of the page is confusing, please write us an email and we will handle the problem within 5 days of receiving it.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.