Acquiring Depth and Color Data from Kinect with OpenCV in C++

Development environment: Visual Studio 2010 + OpenCV 2.4.10

First, download the latest Kinect for Windows SDK: http://www.microsoft.com/en-us/kinectforwindows/develop/downloads-docs.aspx

Do not plug in the Kinect before installing; ideally, have no USB devices attached other than the keyboard and mouse. Install the SDK first, then plug in the Kinect, and a prompt will appear saying that a new device is being installed. After installation you can find two newly installed programs under "Start": one displays the Kinect depth map, and the other demonstrates the various sample programs in the SDK.
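Once the SDK is installed, a quick way to confirm that the runtime actually sees the sensor is to call NuiGetSensorCount from a tiny console program. This is a minimal sketch using the same NUI API as the full example below:

#include <windows.h>
#include "NuiApi.h"
#include <iostream>

int main()
{
    // ask the NUI runtime how many Kinect sensors are attached
    int iSensorCount = 0;
    HRESULT hr = NuiGetSensorCount(&iSensorCount);
    if (FAILED(hr))
    {
        std::cout << "NuiGetSensorCount failed" << std::endl;
        return hr;
    }
    std::cout << "Kinect sensors detected: " << iSensorCount << std::endl;
    return 0;
}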

Go into the SDK installation directory and you will find the Samples folder, which contains samples written in four languages: Native is C++, Managed is C#, and I am not familiar with the other two. Since I know C++, I went with the C++ samples.
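Before building the example below, the project must be able to find the SDK and OpenCV headers and link against their import libraries. As a sketch (the library names and paths below assume the Kinect for Windows SDK 1.x and an OpenCV 2.4.10 build; adjust them to your installation):

// Linker inputs, assuming Kinect for Windows SDK 1.x and OpenCV 2.4.10.
// On a default install the SDK headers live under $(KINECTSDK10_DIR)\inc
// and the libraries under $(KINECTSDK10_DIR)\lib\x86.
#pragma comment(lib, "Kinect10.lib")
#pragma comment(lib, "opencv_core2410.lib")
#pragma comment(lib, "opencv_highgui2410.lib")

Equivalently, these can be set in VS2010 under Project Properties -> Linker -> Input.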

OpenCV+Kinect.cpp

#include <opencv2/opencv.hpp>
#include <iostream>
// windows.h must be included before NuiApi.h, otherwise the NUI types will not compile
#include <windows.h>
// Kinect for Windows header file
#include "NuiApi.h"
#include <d3d11.h>

using namespace std;
using namespace cv;

// farthest distance (mm)
const int MAX_DISTANCE = 3500;
// nearest distance (mm)
const int MIN_DISTANCE = 200;

const LONG m_depthWidth = 640;
const LONG m_depthHeight = 480;
const LONG m_colorWidth = 640;
const LONG m_colorHeight = 480;
const LONG cBytesPerPixel = 4;

int main()
{
    // color image
    Mat image_rgb;
    // depth image
    Mat image_depth;

    // create the Mats
    image_rgb.create(480, 640, CV_8UC3);
    image_depth.create(480, 640, CV_8UC1);

    // pointer to a Kinect instance
    INuiSensor *m_pNuiSensor = NULL;
    if (m_pNuiSensor != NULL)
    {
        return 0;
    }

    // number of currently connected Kinects (in preparation for multiple sensors)
    int iSensorCount = 0;
    // get the number of Kinects
    HRESULT hr = NuiGetSensorCount(&iSensorCount);

    // initialize the Kinect instance by index; only one Kinect is connected here, so no loop is needed
    hr = NuiCreateSensorByIndex(iSensorCount - 1, &m_pNuiSensor);
    // initialize it so that it can receive the color and depth data streams
    hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH);

    // check for errors
    if (FAILED(hr))
    {
        cout << "NuiInitialize failed" << endl;
        return hr;
    }

    // event signaling the next color frame
    HANDLE nextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    // color stream handle
    HANDLE colorStreamHandle = NULL;
    // event signaling the next depth frame
    HANDLE nextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    // depth stream handle
    HANDLE depthStreamHandle = NULL;

    // open the data stream on the instance; NUI_IMAGE_TYPE_COLOR selects the color image
    hr = m_pNuiSensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480,
                                          0, 2, nextColorFrameEvent, &colorStreamHandle);
    if (FAILED(hr)) // check that the stream opened correctly
    {
        cout << "Could not open color image stream video" << endl;
        m_pNuiSensor->NuiShutdown();
        return hr;
    }

    // open the data stream on the instance; NUI_IMAGE_TYPE_DEPTH selects the depth image
    hr = m_pNuiSensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH, NUI_IMAGE_RESOLUTION_640x480,
                                          0, 2, nextDepthFrameEvent, &depthStreamHandle);
    if (FAILED(hr)) // check that the stream opened correctly
    {
        cout << "Could not open depth image stream video" << endl;
        m_pNuiSensor->NuiShutdown();
        return hr;
    }

    cv::namedWindow("depth", CV_WINDOW_AUTOSIZE);
    moveWindow("depth", 300, 600);
    cv::namedWindow("colorImage", CV_WINDOW_AUTOSIZE);
    moveWindow("colorImage", 0, 200);

    while (1)
    {
        NUI_IMAGE_FRAME pImageFrame_rgb;
        NUI_IMAGE_FRAME pImageFrame_depth;

        // proceed when new color data is available
        if (WaitForSingleObject(nextColorFrameEvent, 0) == 0)
        {
            // get a frame from the stream handle opened above; the address of the data is stored in pImageFrame_rgb
            hr = m_pNuiSensor->NuiImageStreamGetNextFrame(colorStreamHandle, 0, &pImageFrame_rgb);
            if (FAILED(hr))
            {
                cout << "Could not get color image" << endl;
                m_pNuiSensor->NuiShutdown();
                return -1;
            }

            INuiFrameTexture *pTexture = pImageFrame_rgb.pFrameTexture;
            NUI_LOCKED_RECT lockedRect;
            // extract the frame data into lockedRect, which has two members: Pitch (bytes per row) and
            // pBits (address of the first byte); locking keeps the Kinect from modifying the data while we read it
            pTexture->LockRect(0, &lockedRect, NULL, 0);
            // verify that the data we obtained is valid
            if (lockedRect.Pitch != 0)
            {
                // convert the data to the OpenCV Mat format
                for (int i = 0; i < image_rgb.rows; i++)
                {
                    // pointer to row i
                    uchar *prt = image_rgb.ptr(i);
                    // each byte is one color component, so uchar can be used directly
                    uchar *pBuffer = (uchar *)(lockedRect.pBits) + i * lockedRect.Pitch;
                    for (int j = 0; j < image_rgb.cols; j++)
                    {
                        // the source is 4 bytes per pixel: bytes 0-2 are BGR, the 4th is unused
                        prt[3 * j] = pBuffer[4 * j];
                        prt[3 * j + 1] = pBuffer[4 * j + 1];
                        prt[3 * j + 2] = pBuffer[4 * j + 2];
                    }
                }
                imshow("colorImage", image_rgb);
                // unlock
                pTexture->UnlockRect(0);
                // release the frame
                m_pNuiSensor->NuiImageStreamReleaseFrame(colorStreamHandle, &pImageFrame_rgb);
            }
            else
            {
                cout << "Buffer length of received texture is bogus\r\n" << endl;
            }

            BOOL nearMode;
            INuiFrameTexture *pColorToDepthTexture;

            // processing of the depth image
            if (WaitForSingleObject(nextDepthFrameEvent, INFINITE) == 0)
            {
                hr = m_pNuiSensor->NuiImageStreamGetNextFrame(depthStreamHandle, 0, &pImageFrame_depth);
                if (FAILED(hr))
                {
                    cout << "Could not get depth image" << endl;
                    m_pNuiSensor->NuiShutdown();
                    return -1;
                }
                hr = m_pNuiSensor->NuiImageFrameGetDepthImagePixelFrameTexture(
                    depthStreamHandle, &pImageFrame_depth, &nearMode, &pColorToDepthTexture);

                INuiFrameTexture *pTexture = pImageFrame_depth.pFrameTexture;
                NUI_LOCKED_RECT lockedRect;
                NUI_LOCKED_RECT colorToDepthLockRect;
                pTexture->LockRect(0, &lockedRect, NULL, 0);
                pColorToDepthTexture->LockRect(0, &colorToDepthLockRect, NULL, 0);

                // normalize
                for (int i = 0; i < image_depth.rows; i++)
                {
                    uchar *prt = image_depth.ptr<uchar>(i);
                    uchar *pBuffer = (uchar *)(lockedRect.pBits) + i * lockedRect.Pitch;
                    // each depth value is 2 bytes, so the buffer must be reinterpreted as USHORT
                    USHORT *pBufferRun = (USHORT *)pBuffer;
                    for (int j = 0; j < image_depth.cols; j++)
                    {
                        // map pixels whose depth lies within [MIN_DISTANCE, MAX_DISTANCE] to 0-255;
                        // out-of-range pixels are clamped to the edge values
                        if (pBufferRun[j] >> 3 > MAX_DISTANCE)
                            prt[j] = 255;
                        else if (pBufferRun[j] >> 3 < MIN_DISTANCE)
                            prt[j] = 0;
                        else
                            prt[j] = (BYTE)(256 * (pBufferRun[j] >> 3) / MAX_DISTANCE);
                    }
                }
                imshow("depth", image_depth);

                // next comes the alignment part, which pulls out the foreground;
                // this array stores the mapped depth points
                NUI_DEPTH_IMAGE_POINT *depthPoints = new NUI_DEPTH_IMAGE_POINT[640 * 480];
                if (colorToDepthLockRect.Pitch != 0)
                {
                    HRESULT hrState = S_OK;
                    // a class that maps between the different spatial coordinate systems (depth, color, skeleton)
                    INuiCoordinateMapper *pMapper;
                    // get the coordinate mapper for this Kinect instance
                    hrState = m_pNuiSensor->NuiGetCoordinateMapper(&pMapper);
                    if (FAILED(hrState))
                    {
                        return hrState;
                    }
                    // the important step: map from color space to depth space. Parameters:
                    //   1: color image type
                    //   2: color image resolution
                    //   3: depth image resolution
                    //   4: number of depth pixels
                    //   5: depth pixels (of type NUI_DEPTH_IMAGE_PIXEL)
                    //   6: size of the output buffer, i.e. the number of points
                    //   7: array that receives the mapped points
                    hrState = pMapper->MapColorFrameToDepthFrame(
                        NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, NUI_IMAGE_RESOLUTION_640x480,
                        640 * 480, (NUI_DEPTH_IMAGE_PIXEL *)colorToDepthLockRect.pBits,
                        640 * 480, depthPoints);
                    if (FAILED(hrState))
                    {
                        return hrState;
                    }

                    // display image
                    Mat show;
                    show.create(480, 640, CV_8UC3);
                    show = 0;
                    for (int i = 0; i < image_rgb.rows; i++)
                    {
                        for (int j = 0; j < image_rgb.cols; j++)
                        {
                            uchar *prt_rgb = image_rgb.ptr(i);
                            uchar *prt_show = show.ptr(i);
                            // offset into the mapped array
                            long index = i * 640 + j;
                            // get the mapped coordinate for this color pixel from the array
                            NUI_DEPTH_IMAGE_POINT depthPointAtIndex = depthPoints[index];
                            // bounds check
                            if (depthPointAtIndex.x >= 0 && depthPointAtIndex.x < image_depth.cols &&
                                depthPointAtIndex.y >= 0 && depthPointAtIndex.y < image_depth.rows)
                            {
                                // depth check: pixels between MIN_DISTANCE and MAX_DISTANCE count as foreground
                                // and are shown; this matters because reading the true depth back out of the
                                // rendered depth image would introduce errors
                                if (depthPointAtIndex.depth >= MIN_DISTANCE && depthPointAtIndex.depth <= MAX_DISTANCE)
                                {
                                    prt_show[3 * j] = prt_rgb[j * 3];
                                    prt_show[3 * j + 1] = prt_rgb[j * 3 + 1];
                                    prt_show[3 * j + 2] = prt_rgb[j * 3 + 2];
                                }
                            }
                        }
                    }
                    imshow("show", show);
                }
                delete[] depthPoints;
                pTexture->UnlockRect(0);
                pColorToDepthTexture->UnlockRect(0);
                m_pNuiSensor->NuiImageStreamReleaseFrame(depthStreamHandle, &pImageFrame_depth);
            }
            else
            {
                cout << "Buffer length of received texture is bogus\r\n" << endl;
            }
        }

        // press ESC to quit
        if (cvWaitKey(20) == 27)
            break;
    }

    // shut down the sensor and release the event handles before exiting
    m_pNuiSensor->NuiShutdown();
    CloseHandle(nextColorFrameEvent);
    CloseHandle(nextDepthFrameEvent);
    return 0;
}
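One detail worth calling out in the normalization loop: a raw 16-bit depth value from the v1 SDK packs the distance in millimeters into the upper 13 bits, with the lower 3 bits reserved for a player index, which is why the code shifts right by 3 before comparing against MIN_DISTANCE and MAX_DISTANCE. The SDK also ships helpers that do the same unpacking; a minimal sketch (both helpers are declared by the NuiApi.h headers):

#include <windows.h>
#include "NuiApi.h"

// unpack a raw 16-bit depth pixel with the SDK helpers instead of a manual shift
USHORT DepthInMillimeters(USHORT packedPixel)
{
    // upper 13 bits: distance in mm (equivalent to packedPixel >> 3)
    return NuiDepthPixelToDepth(packedPixel);
}

USHORT PlayerIndex(USHORT packedPixel)
{
    // lower 3 bits: player index (0 when skeleton tracking is not enabled)
    return NuiDepthPixelToPlayerIndex(packedPixel);
}

Using the helpers instead of the bare shift does not change the output; it only makes the intent of the code clearer.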
