Currently, there are two ways to capture mobile video data in real time. The first is to grab the source data through the camera preview callback; the data delivered there is not encoded.
Some people simply send that raw YUV data and draw the frames on the receiving side, but video chat done that way is really not practical. The approach can support video chat, but an encoding library has to be ported to the device first.
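As a rough illustration of that first approach, this is roughly what grabbing the unencoded NV21 frames from the preview callback looks like with the android.hardware.Camera API of that era; the class name, preview size and wiring are only an assumed sketch, not code from this post:

import java.io.IOException;

import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.view.SurfaceHolder;

// Rough sketch (assumed, not from this post): receive unencoded NV21 preview
// frames through the camera callback. The class name and preview size are
// placeholders.
public class PreviewGrabber implements Camera.PreviewCallback {
    private Camera mCamera;

    public void start(SurfaceHolder holder) throws IOException {
        mCamera = Camera.open();
        Camera.Parameters params = mCamera.getParameters();
        params.setPreviewFormat(ImageFormat.NV21);   // raw YUV, not encoded
        params.setPreviewSize(320, 240);
        mCamera.setParameters(params);
        mCamera.setPreviewDisplay(holder);
        mCamera.setPreviewCallback(this);
        mCamera.startPreview();
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // 'data' holds one uncompressed NV21 frame; to stream it you would
        // still have to compress it (e.g. with a ported encoder) first.
    }

    public void stop() {
        mCamera.setPreviewCallback(null);
        mCamera.stopPreview();
        mCamera.release();
    }
}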
Currently, the Android OpenCORE code can be ported; for details, see http://www.shouyanwang.org/thread-184-1-1.html.
Encoding with FFmpeg is another option. Working code is hard to find on the Internet, but some people have implemented it. (Encoding efficiency is high, but it does not support HD video.)
Once the video is encoded, it can be sent out over RTP. I will not go into the RTP protocol itself here.
Code for the method above is available at the link below (you need to reply in the thread to download it), but it is not complete: http://www.eoeandroid.com/thread-51460-1-1.html
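Purely for orientation, here is a minimal sketch of what wrapping one encoded NAL unit in an RTP packet and sending it over UDP might look like (single-NAL packets only, no marker bit or fragmentation; the payload type, SSRC and destination are placeholders, not values from this post):

import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;

// Sketch only: wrap one H.264 NAL unit in a standard 12-byte RTP header and
// send it over UDP.
public class RtpSender {
    private final DatagramSocket socket;
    private final InetAddress dest;
    private final int port;
    private int seq = 0;

    public RtpSender(String host, int port) throws Exception {
        this.socket = new DatagramSocket();
        this.dest = InetAddress.getByName(host);
        this.port = port;
    }

    public void send(byte[] nal, int len, long ts90kHz) throws Exception {
        byte[] pkt = new byte[12 + len];
        pkt[0] = (byte) 0x80;                       // V=2, no padding/extension/CSRC
        pkt[1] = (byte) 96;                         // dynamic payload type
        pkt[2] = (byte) (seq >> 8);                 // 16-bit sequence number
        pkt[3] = (byte) seq;
        seq = (seq + 1) & 0xFFFF;
        for (int i = 0; i < 4; i++)                 // 32-bit timestamp, 90 kHz clock
            pkt[4 + i] = (byte) (ts90kHz >> (24 - 8 * i));
        pkt[8] = 0x12; pkt[9] = 0x34; pkt[10] = 0x56; pkt[11] = 0x78; // placeholder SSRC
        System.arraycopy(nal, 0, pkt, 12, len);
        socket.send(new DatagramPacket(pkt, pkt.length, dest, port));
    }
}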
The second method is the one I used: record through MediaRecorder, then bind a LocalSocket so the encoded video data can be read out and transmitted as it is produced.
The code is as follows:
package com.pei;

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

import android.app.Activity;
import android.graphics.PixelFormat;
import android.media.MediaRecorder;
import android.net.LocalServerSocket;
import android.net.LocalSocket;
import android.net.LocalSocketAddress;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;

/**
 * Class name: VideoCameraActivity<br>
 * Class description: catch the video data and send it to red5<br>
 *
 * @version 1.00 2011/11/05
 * @author (codyy) peijiangping
 */
public class VideoCameraActivity extends Activity implements
        SurfaceHolder.Callback, MediaRecorder.OnErrorListener,
        MediaRecorder.OnInfoListener {

    private static final int mVideoEncoder = MediaRecorder.VideoEncoder.H264;
    private LocalSocket receiver, sender;
    private LocalServerSocket lss;
    private MediaRecorder mMediaRecorder = null;
    private boolean mMediaRecorderRecording = false;
    private SurfaceView mSurfaceView = null;
    private SurfaceHolder mSurfaceHolder = null;
    private Thread t;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.main);
        mSurfaceView = (SurfaceView) this.findViewById(R.id.surface_camera);
        SurfaceHolder holder = mSurfaceView.getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        mSurfaceView.setVisibility(View.VISIBLE);
        // Pair of local sockets: MediaRecorder writes into "sender",
        // the publishing thread reads the encoded stream from "receiver".
        receiver = new LocalSocket();
        try {
            lss = new LocalServerSocket("VideoCamera");
            receiver.connect(new LocalSocketAddress("VideoCamera"));
            receiver.setReceiveBufferSize(500000);
            receiver.setSendBufferSize(500000);
            sender = lss.accept();
            sender.setReceiveBufferSize(500000);
            sender.setSendBufferSize(500000);
        } catch (IOException e) {
            finish();
            return;
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mMediaRecorderRecording) {
            stopVideoRecording();
            try {
                lss.close();
                receiver.close();
                sender.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        finish();
    }

    private void stopVideoRecording() {
        System.out.println("stopVideoRecording");
        if (mMediaRecorderRecording || mMediaRecorder != null) {
            if (t != null)
                t.interrupt();
            releaseMediaRecorder();
        }
    }

    private void startVideoRecording() {
        (t = new Thread() {
            public void run() {
                int frame_size = 20000;
                byte[] buffer = new byte[1024 * 64];
                int num, number = 0;
                InputStream fis = null;
                try {
                    fis = receiver.getInputStream();
                } catch (IOException e1) {
                    return;
                }
                number = 0;
                // Stop the current recorder, then drain whatever it has already
                // written; a short read marks the end of that first pass.
                releaseMediaRecorder();
                while (true) {
                    System.out.println("ok");
                    try {
                        num = fis.read(buffer, number, frame_size);
                        number += num;
                        if (num < frame_size) {
                            System.out.println("recoend break");
                            break;
                        }
                    } catch (IOException e) {
                        System.out.println("exception break");
                        break;
                    }
                }
                initializeVideo();
                number = 0;
                // Consumer/Publisher are the project's red5 publishing classes
                // (not shown in this excerpt); Publisher extends Consumer.
                Consumer consumer = new Publisher();
                Thread consumerThread = new Thread((Runnable) consumer);
                consumer.setRecording(true); // set the thread state
                consumerThread.start();      // start publishing the data stream
                DataInputStream dis = new DataInputStream(fis);
                try {
                    // Read past the first 32 bytes the recorder writes before
                    // the frame data.
                    dis.read(buffer, 0, 32);
                } catch (IOException e1) {
                    e1.printStackTrace();
                }
                // Hard-coded AVC decoder configuration record (SPS/PPS) that
                // matches the fixed recorder settings; pushed once before the frames.
                byte[] aa = { 0x01, 0x42, (byte) 0x80, 0x0a, (byte) 0xff,
                        (byte) 0xe1, 0x00, 0x12, 0x67, 0x42, (byte) 0x80, 0x0a,
                        (byte) 0xe9, 0x02, (byte) 0xc1, 0x29, 0x08, 0x00, 0x00,
                        0x1f, 0x40, 0x00, 0x04, (byte) 0xe2, 0x00, 0x20, 0x01,
                        0x00, 0x04, 0x68, (byte) 0xce, 0x3c, (byte) 0x80 };
                consumer.putData(System.currentTimeMillis(), aa, 33);
                while (true) {
                    try {
                        int h264length = dis.readInt();
                        number = 0;
                        while (number < h264length) {
                            // The original listing is cut off here; the rest of
                            // this loop is reconstructed from context: read one
                            // complete length-prefixed frame, then publish it.
                            num = fis.read(buffer, number, h264length - number);
                            if (num < 0) {
                                return;
                            }
                            number += num;
                        }
                        consumer.putData(System.currentTimeMillis(), buffer, h264length);
                    } catch (IOException e) {
                        break;
                    }
                }
            }
        }).start();
    }

    // initializeVideo(), releaseMediaRecorder() and the SurfaceHolder.Callback
    // methods (surfaceCreated/surfaceChanged/surfaceDestroyed) are also cut off
    // in this excerpt; a sketch of initializeVideo() follows after the listing.

    @Override
    public void onInfo(MediaRecorder mr, int what, int extra) {
        switch (what) {
        case MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED:
            System.out.println("MEDIA_RECORDER_INFO_MAX_DURATION_REACHED");
            break;
        case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED:
            System.out.println("MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED");
            break;
        }
    }

    @Override
    public void onError(MediaRecorder mr, int what, int extra) {
        if (what == MediaRecorder.MEDIA_RECORDER_ERROR_UNKNOWN) {
            System.out.println("MEDIA_RECORDER_ERROR_UNKNOWN");
            finish();
        }
    }
}
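The listing above is missing initializeVideo() and releaseMediaRecorder(). Judging from how they are called, initializeVideo() must point MediaRecorder's output at the sender end of the LocalSocket pair instead of a file; here is a rough sketch under that assumption (the output format, size, frame rate and error handling are placeholder choices, not the original settings):

// Rough sketch of the missing initializeVideo(), assuming the recorder output
// is redirected into the LocalSocket created in onCreate(). Concrete values
// (320x240, 15 fps, MPEG_4) are placeholders.
private boolean initializeVideo() {
    try {
        mMediaRecorder = new MediaRecorder();
        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
        mMediaRecorder.setVideoEncoder(mVideoEncoder);   // H.264
        mMediaRecorder.setVideoSize(320, 240);
        mMediaRecorder.setVideoFrameRate(15);
        mMediaRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
        // The key trick: write the encoded stream into the local socket so the
        // thread in startVideoRecording() can read it in real time.
        mMediaRecorder.setOutputFile(sender.getFileDescriptor());
        mMediaRecorder.setOnErrorListener(this);
        mMediaRecorder.setOnInfoListener(this);
        mMediaRecorder.prepare();
        mMediaRecorder.start();
        mMediaRecorderRecording = true;
        return true;
    } catch (Exception e) {
        releaseMediaRecorder();
        return false;
    }
}

// Matching sketch of releaseMediaRecorder(): stop and free the recorder.
private void releaseMediaRecorder() {
    if (mMediaRecorder != null) {
        try {
            mMediaRecorder.stop();
        } catch (Exception e) {
            // stop() throws if the recorder never started; ignore that here.
        }
        mMediaRecorder.release();
        mMediaRecorder = null;
        mMediaRecorderRecording = false;
    }
}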