Android 4.0 USB Camera Example (5): JPEG Compression


Most of my time lately has gone into the USB camera, so I might as well write up the JPEG encoding too. Most of the functions below were found online and only needed small changes before they worked, although tracking them down took quite a while. There are plenty of explanations of JPEG encoding on the net and the overall flow is always the same, so I won't go into it here. Continuing from the earlier Camera articles, this one compresses the captured YUV data to JPEG and also strings the frames together into an MJPEG video stream. First, a note on the data format: the camera delivers YUV422 (16 bits per pixel). This comes from the pixel format chosen when the camera is initialized; we set it to V4L2_PIX_FMT_YUYV, which is in fact packed YUV 4:2:2. You can look up the exact byte layout online. The pipeline used here is: first convert YUV422 to RGB888, then compress the RGB888 to JPEG. It is actually possible to convert YUV422 to JPEG directly, but I did not get that working; the related code is included at the end. Now straight to the code.
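As a quick recap, here is a minimal sketch of the kind of V4L2 format setup this assumes. The real code lives in the earlier articles of this series; the function name and error handling below are illustrative only:

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Illustrative sketch: ask the driver for packed YUYV (YUV 4:2:2) frames. */
static int set_yuyv_format(int fd, int width, int height)
{
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width       = width;
    fmt.fmt.pix.height      = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;  /* packed Y0 Cb Y1 Cr, 16 bpp */
    fmt.fmt.pix.field       = V4L2_FIELD_ANY;
    return ioctl(fd, VIDIOC_S_FMT, &fmt);         /* negative on failure */
}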

First, YUV422 to RGB888:

static void YUV422toRGB888(int width, int height, unsigned char *src, unsigned char *dst)
{
    int line, column;
    unsigned char *py, *pu, *pv;
    unsigned char *tmp = dst;

    /* In this format each four bytes is two pixels. Each four bytes is two Y's, a Cb
       and a Cr. Each Y goes to one of the pixels, and the Cb and Cr belong to both
       pixels. */
    py = src;
    pu = src + 1;
    pv = src + 3;

#define CLIP(x) ( (x) >= 0xFF ? 0xFF : ( (x) <= 0x00 ? 0x00 : (x) ) )

    for (line = 0; line < height; ++line) {
        for (column = 0; column < width; ++column) {
            *tmp++ = CLIP((double)*py + 1.402*((double)*pv - 128.0));                                   /* R */
            *tmp++ = CLIP((double)*py - 0.344*((double)*pu - 128.0) - 0.714*((double)*pv - 128.0));     /* G */
            *tmp++ = CLIP((double)*py + 1.772*((double)*pu - 128.0));                                   /* B */
            /* increase py every time */
            py += 2;
            /* increase pu, pv every second time */
            if ((column & 1) == 1) {
                pu += 4;
                pv += 4;
            }
        }
    }
}

Then RGB888 to JPEG:

static int jpeg_mem_copy(unsigned char *img, unsigned char *dest)
{
    struct jpeg_compress_struct cinfo;
    struct jpeg_error_mgr jerr;
    JSAMPROW row_pointer[1];
    unsigned char *pbuf = NULL;
    unsigned long jpglen = 0;   /* jpeg_mem_dest expects an unsigned long * */

    /* create jpeg data */
    cinfo.err = jpeg_std_error(&jerr);
    jpeg_create_compress(&cinfo);
    /* jpeg_stdio_dest(&cinfo, fp); */
    jpeg_mem_dest(&cinfo, &pbuf, &jpglen);

    /* set image parameters */
    cinfo.image_width = mwidth;
    cinfo.image_height = mheight;
    cinfo.input_components = 3;
    cinfo.in_color_space = JCS_RGB;

    /* set jpeg compression parameters to default */
    jpeg_set_defaults(&cinfo);
    /* and then adjust quality setting */
    jpeg_set_quality(&cinfo, 80, TRUE);

    /* start compress */
    jpeg_start_compress(&cinfo, TRUE);

    /* feed data */
    while (cinfo.next_scanline < cinfo.image_height) {
        row_pointer[0] = &img[cinfo.next_scanline * cinfo.image_width * cinfo.input_components];
        jpeg_write_scanlines(&cinfo, row_pointer, 1);
    }

    /* finish compression */
    jpeg_finish_compress(&cinfo);
    /* destroy jpeg data */
    jpeg_destroy_compress(&cinfo);

    memcpy(dest, pbuf, jpglen);
    /* LOGD("++++++++++++++++len is %d\n", jpglen); */
    if (pbuf)
        free(pbuf);
    return (int)jpglen;
}
Here I am using the latest jpeg library, version 9a, which already includes the jpeg_mem_dest function; there are plenty of guides online on porting libjpeg.
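For reference, jpeg_mem_dest has shipped with libjpeg since version 8, and its prototype is effectively the following; note that the output size parameter is an unsigned long *, which is why jpglen above is declared as unsigned long:

void jpeg_mem_dest(j_compress_ptr cinfo,
                   unsigned char **outbuffer,
                   unsigned long *outsize);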

Below is an interface I expose to the upper (Java) layer:

JNIEXPORT jint JNICALL Java_com_hclydao_usbcamera_Fimcgzsd_writefile(JNIEnv *env, jclass obj,
        jbyteArray yuvdata, jbyteArray filename) /* jintArray rgbdata */
{
    jbyte *ydata = (jbyte *)(*env)->GetByteArrayElements(env, yuvdata, 0);
    jbyte *filedir = (jbyte *)(*env)->GetByteArrayElements(env, filename, 0);
    FILE *outfile;

    if ((outfile = fopen((const char *)filedir, "wb")) == NULL) {
        LOGE("++++++++++++open %s failed\n", filedir);
        return -1;
    }
    /* yuv422_to_jpeg(ydata, mwidth, mheight, outfile, 80); */
    unsigned char *src = (unsigned char *)ydata;
    unsigned char *dst = malloc(mwidth * mheight * 3 * sizeof(char));
    unsigned char *jpgdata = malloc(mwidth * mheight * 3 * sizeof(char));

    YUV422toRGB888(mwidth, mheight, src, dst);
    int size = jpeg_mem_copy(dst, jpgdata);
    fwrite(jpgdata, size, 1, outfile);

    if (dst)
        free(dst);
    if (jpgdata)
        free(jpgdata);
    fclose(outfile);
    (*env)->ReleaseByteArrayElements(env, yuvdata, ydata, 0);
    (*env)->ReleaseByteArrayElements(env, filename, filedir, 0);
    return 0;
}
The caller passes in the captured YUV data and the path of the JPEG file to save. Some of the parameters used here are global variables I declared; see my earlier articles for details.
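For completeness, the globals these snippets assume look roughly like this; the real values are set during camera initialization in the earlier articles, so the numbers below are placeholders only:

/* Assumed globals from the earlier articles (placeholder values): */
static int mwidth  = 640;   /* frame width negotiated with the camera */
static int mheight = 480;   /* frame height negotiated with the camera */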


Below are the interfaces related to the video stream:

FILE *video_file;

/*
 * put in frame buffer to queue
 */
JNIEXPORT jint JNICALL Java_com_hclydao_usbcamera_Fimcgzsd_videoopen(JNIEnv *env, jclass obj,
        jbyteArray filename)
{
    jbyte *filedir = (jbyte *)(*env)->GetByteArrayElements(env, filename, 0);

    if ((video_file = fopen((const char *)filedir, "wb")) == NULL) {
        LOGE("++++++++++++open %s failed\n", filedir);
        return -1;
    }
    (*env)->ReleaseByteArrayElements(env, filename, filedir, 0);
    return 0;
}

JNIEXPORT jint JNICALL Java_com_hclydao_usbcamera_Fimcgzsd_videostart(JNIEnv *env, jclass obj,
        jbyteArray yuvdata)
{
    jbyte *ydata = (jbyte *)(*env)->GetByteArrayElements(env, yuvdata, 0);
    unsigned char *src = (unsigned char *)ydata;
    unsigned char *dst = malloc(mwidth * mheight * 3 * sizeof(char));
    unsigned char *jpgdata = malloc(mwidth * mheight * 3 * sizeof(char));

    YUV422toRGB888(mwidth, mheight, src, dst);
    int size = jpeg_mem_copy(dst, jpgdata);
    fwrite(jpgdata, size, 1, video_file);
    /* fwrite(dst, (mwidth * mheight * 3 * sizeof(char)), 1, video_file); */

    if (dst)
        free(dst);
    if (jpgdata)
        free(jpgdata);
    (*env)->ReleaseByteArrayElements(env, yuvdata, ydata, 0);
    return 0;
}

JNIEXPORT jint JNICALL Java_com_hclydao_usbcamera_Fimcgzsd_videoclose(JNIEnv *env, jclass obj)
{
    fclose(video_file);
    return 0;
}
This simply keeps appending JPEG frames to the same file. The resulting file cannot be played directly; after converting it to AVI with Format Factory it plays fine.


Below is a function that converts YUV to JPEG directly. It expects YUV420P input, while my data is YUV422; after many attempts the saved images still came out wrong, so I clearly need to study the differences between these formats (the difference is sketched right after the function).

/*
 * put_jpeg_yuv420p_memory converts an input image in the YUV420P format into a jpeg image and puts
 * it in a memory buffer.
 * Inputs:
 * - input_image is the image in YUV420P format.
 * - width and height are the dimensions of the image
 * Output:
 * - dest_image is a pointer to the jpeg image buffer
 * Returns buffer size of jpeg image
 */
static int put_jpeg_yuv420p_memory(unsigned char *dest_image,
                                   unsigned char *input_image, int width, int height)
{
    int i, j;
    JSAMPROW y[16], cb[16], cr[16];  /* y[2][5] = color sample of row 2 and pixel column 5; (one plane) */
    JSAMPARRAY data[3];              /* t[0][2][5] = color sample 0 of row 2 and column 5 */
    struct jpeg_compress_struct cinfo;
    struct jpeg_error_mgr jerr;
    unsigned char *pbuf = NULL;      /* jpeg_mem_dest wants unsigned char ** */
    unsigned long jpglen = 0;        /* and unsigned long * */

    data[0] = y;
    data[1] = cb;
    data[2] = cr;

    cinfo.err = jpeg_std_error(&jerr);  /* errors get written to stderr */

    jpeg_create_compress(&cinfo);
    cinfo.image_width = width;
    cinfo.image_height = height;
    cinfo.input_components = 3;
    cinfo.in_color_space = JCS_YCbCr;   /* must be set before jpeg_set_defaults() */
    jpeg_set_defaults(&cinfo);

    jpeg_set_colorspace(&cinfo, JCS_YCbCr);

    cinfo.raw_data_in = TRUE;                  /* supply downsampled data */
    cinfo.do_fancy_downsampling = FALSE;       /* fix segfaults with v7 */
    cinfo.comp_info[0].h_samp_factor = 2;
    cinfo.comp_info[0].v_samp_factor = 2;
    cinfo.comp_info[1].h_samp_factor = 1;
    cinfo.comp_info[1].v_samp_factor = 1;
    cinfo.comp_info[2].h_samp_factor = 1;
    cinfo.comp_info[2].v_samp_factor = 1;

    jpeg_set_quality(&cinfo, 80, TRUE);
    cinfo.dct_method = JDCT_FASTEST;

    jpeg_mem_dest(&cinfo, &pbuf, &jpglen);     /* data written to mem */

    jpeg_start_compress(&cinfo, TRUE);

    for (j = 0; j < height; j += 16) {
        for (i = 0; i < 16; i++) {
            y[i] = input_image + width * (i + j);
            if (i % 2 == 0) {
                cb[i / 2] = input_image + width * height + width / 2 * ((i + j) / 2);
                cr[i / 2] = input_image + width * height + width * height / 4 + width / 2 * ((i + j) / 2);
            }
        }
        jpeg_write_raw_data(&cinfo, data, 16);
    }

    jpeg_finish_compress(&cinfo);
    jpeg_destroy_compress(&cinfo);

    memcpy(dest_image, pbuf, jpglen);
    if (pbuf)
        free(pbuf);
    return (int)jpglen;
}
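The difference this function stumbles over is the chroma layout: YUV420P is planar, with the Cb and Cr planes subsampled both horizontally and vertically (each chroma plane is width/2 by height/2), while the camera's YUYV is packed 4:2:2 with chroma subsampled only horizontally. Feeding YUYV into the 4:2:0 raw-data path above therefore cannot work as is. Below is a minimal sketch of the first step one would need, splitting packed YUYV into 4:2:2 planes; the function name is mine, not from the original code. From there the planes could either be handed to libjpeg's raw-data path with sampling factors 2x1 for Y and 1x1 for Cb/Cr, or averaged vertically into true 4:2:0 first.

/* Illustrative sketch: de-interleave packed YUYV (4:2:2) into planar Y, Cb, Cr.
 * Each 4-byte group Y0 Cb Y1 Cr covers two horizontally adjacent pixels. */
static void yuyv_to_yuv422p(const unsigned char *src,
                            unsigned char *y, unsigned char *cb, unsigned char *cr,
                            int width, int height)
{
    int i;
    int pairs = width * height / 2;   /* number of 2-pixel groups in the frame */
    for (i = 0; i < pairs; i++) {
        *y++  = src[0];   /* Y of the left pixel */
        *cb++ = src[1];   /* Cb shared by both pixels */
        *y++  = src[2];   /* Y of the right pixel */
        *cr++ = src[3];   /* Cr shared by both pixels */
        src += 4;
    }
}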
I have been feeling rather unfocused lately, so I have not dug into this any further; I also keep wondering whether some of these things are really worth studying in depth. Next I plan to look at H.264 compression with ffmpeg.

