Kinect Learning Notes One

Source: Internet
Author: User
Tags: Kinect, OpenCV, preparation notes
Color and depth image display follows the same pattern: initialize, bind the stream, extract the stream.

1. Extract Color data:

#include <iostream> #include "Windows.h" #include "msr_nuiapi.h" #include "cv.h" #include "highgui.h" using Namespa

CE std;
	int main (int argc,char * argv[]) {iplimage *colorimage=null;

	Colorimage = Cvcreateimage (Cvsize (640, 480), 8, 3);
	Initialize NUI HRESULT hr = nuiinitialize (Nui_initialize_flag_uses_color);
		if (HR!= S_OK) {cout<< "Nuiinitialize failed" <<endl;
	return HR; }//define event handle HANDLE H1 = CreateEvent (null, TRUE, FALSE, NULL)//control Kinect can start reading the next frame of data HANDLE H2 = null;//Save the address of the data stream to extract the number

	According to hr = Nuiimagestreamopen (NUI_IMAGE_TYPE_COLOR,NUI_IMAGE_RESOLUTION_640X480,0,2,H1,&AMP;H2);//Open the color map information channel for the Kinect device
		if (FAILED (HR))//To determine whether to extract the correct {cout<< "could not open color image stream Video" <<endl;
		Nuishutdown ();
	return HR;

		//Start reading color graph data while (1) {Const NUI_IMAGE_FRAME * pimageframe = NULL; if (WaitForSingleObject (H1, INFINITE) ==0)//Determine whether the new data is obtained {nuiimagestreamgetnextframe (H2, 0, &pimageframe);/Get the frame number According to Nuiimagebuffer *ptexture = pimageframe->pframetexture;
			Kinect_locked_rect Lockedrect; Ptexture->lockrect (0, &lockedrect, NULL, 0),//extract the data frame to the Lockedrect, it includes two data objects: Pitch the number of bytes per line, pbits the first byte address if (
				Lockedrect.pitch!= 0) {Cvzero (colorimage);
					for (int i=0; i<480; i++) {uchar* ptr = (uchar*) (colorimage->imagedata+i*colorimage->widthstep);
					BYTE * pbuffer = (byte*) (lockedrect.pbits) +i*lockedrect.pitch;//each byte represents a color information, directly using byte for (int j=0; j<640; j +)
						{Ptr[3*j] = pbuffer[4*j];//Internal data is 4 bytes, 0-1-2 is BGR, 4th is now unused ptr[3*j+1] = pbuffer[4*j+1];
					PTR[3*J+2] = pbuffer[4*j+2]; } cvshowimage ("Colorimage", colorimage);//Show image} else {cout<< "Buffer length of Receive
			D texture is bogus\r\n "<<endl;
		/release this frame data and prepare for the next frame nuiimagestreamreleaseframe (H2, pimageframe);
	} if (Cvwaitkey =) break;
	}//Close Nui link nuishutdown ();
return 0;
 }


Experimental results:


2. Extract depth data with User ID

#include <iostream> #include "Windows.h" #include "msr_nuiapi.h" #include "cv.h" #include "highgui.h" using Namespa

CE std;

Rgbquad nui_shorttoquad_depth (USHORT s)//The function I am calling the SDK with an example of the function. {USHORT realdepth = (S & 0xfff8) >> 3;//extract distance information USHORT Player = S & 7;//extract ID information//16bit information, where the lowest 3 digits are IDs (captured ID of the person caught), the remaining 13 bits is the information BYTE L = 255-(byte) (256*REALDEPTH/0X0FFF);//Because the information is extracted from the distance information, here is normalized to 0-255.

	= = = = = = = = = = = = = It is not clear why divide by 0x0fff, want to understand comrade to explain.
	Rgbquad Q;

	q.rgbred = Q.rgbblue = Q.rgbgreen = 0;
		Switch (Player) {case 0:q.rgbred = L/2;
		Q.rgbblue = L/2;
		Q.rgbgreen = L/2;
	Break
		Case 1:q.rgbred = l;
	Break
		Case 2:q.rgbgreen = l;
	Break
		Case 3:q.rgbred = L/4;
		Q.rgbgreen = l;
		Q.rgbblue = l;
	Break
		Case 4:q.rgbred = l;
		Q.rgbgreen = l;
		Q.rgbblue = L/4;
	Break
		Case 5:q.rgbred = l;
		Q.rgbgreen = L/4;
		Q.rgbblue = l;
	Break
		Case 6:q.rgbred = L/2;
		Q.rgbgreen = L/2;
		Q.rgbblue = l;
	Break Case 7:q.rgbred = 255-(L/2);
		Q.rgbgreen = 255-(L/2);
	Q.rgbblue = 255-(L/2);
} return q;
	int main (int argc,char * argv[]) {iplimage *depthindeximage=null;

	Depthindeximage = Cvcreateimage (Cvsize (320, 240), 8, 3);
	Initialize NUI HRESULT hr = nuiinitialize (Nui_initialize_flag_uses_depth_and_player_index);
		if (HR!= S_OK) {cout<< "Nuiinitialize failed" <<endl;
	return HR;
	//Open the color graph information channel for the Kinect device HANDLE h1 = CreateEvent (null, TRUE, FALSE, NULL);

	HANDLE H2 = NULL; hr = Nuiimagestreamopen (NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,NUI_IMAGE_RESOLUTION_320X240,0,2,H1,&AMP;H2);// Here according to the document information, when initialization is nui_initialize_flag_uses_depth_and_player_index, the resolution can only be 320*240 or 80*60 if (FAILED (HR)) {cout<<
		"Could not open color image stream Video" <<endl;
		Nuishutdown ();
	return HR;

		while (1) {Const NUI_IMAGE_FRAME * pimageframe = NULL;
			if (WaitForSingleObject (H1, INFINITE) ==0) {nuiimagestreamgetnextframe (H2, 0, &pimageframe); Nuiimagebuffer *ptextuRe = pimageframe->pframetexture;
			Kinect_locked_rect Lockedrect;
			Ptexture->lockrect (0, &lockedrect, NULL, 0);
				if (lockedrect.pitch!= 0) {Cvzero (depthindeximage); for (int i=0; i<240; i++) {uchar* ptr = (uchar*) (depthindeximage->imagedata+i*depthindeximage->widthste
					p);
					BYTE * pbuffer = (BYTE *) (lockedrect.pbits) +i*lockedrect.pitch;  USHORT * Pbufferrun = (ushort*) pbuffer;//Note that there is a need for conversion, because each data is 2 bytes, stored in the same color information as above, this is 2 bytes a message, can not be used in byte, converted to USHORT for (int j=0; j<320;
						J + +) {Rgbquad RGB = nui_shorttoquad_depth (Pbufferrun[j]);//Invoke function to convert ptr[3*j] = Rgb.rgbblue;
						PTR[3*J+1] = Rgb.rgbgreen;
					PTR[3*J+2] = rgb.rgbred;
			} cvshowimage ("Depthindeximage", depthindeximage);
			else {cout<< "Buffer length of received texture is bogus\r\n" <<endl;
		/release this frame data and prepare for the next frame nuiimagestreamreleaseframe (H2, pimageframe);
	} if (Cvwaitkey =) break;
}//Close Nui Link	Nuishutdown ();
return 0; }

Experimental results:


3. Extracting depth data without the user ID

#include <iostream> #include "Windows.h" #include "msr_nuiapi.h" #include "cv.h" #include "highgui.h" using Namespa


CE std;
	int main (int argc,char * argv[]) {iplimage *depthindeximage=null;

	Depthindeximage = Cvcreateimage (Cvsize (320, 240), 8, 1)//Here we use grayscale to express depth data, the farther the data darker.
	Initialize NUI HRESULT hr = nuiinitialize (nui_initialize_flag_uses_depth);
		if (HR!= S_OK) {cout<< "Nuiinitialize failed" <<endl;
	return HR;
	//Open the color graph information channel for the Kinect device HANDLE h1 = CreateEvent (null, TRUE, FALSE, NULL);

	HANDLE H2 = NULL; hr = Nuiimagestreamopen (NUI_IMAGE_TYPE_DEPTH,NUI_IMAGE_RESOLUTION_320X240,0,2,H1,&AMP;H2);//Here according to document information, when initialization is Nui_ Initialize_flag_uses_depth_and_player_index, the resolution can only be 320*240 or 80*60 if (FAILED (HR) {cout<< "could not open co
		Lor image Stream Video "<<endl;
		Nuishutdown ();
	return HR;

		while (1) {Const NUI_IMAGE_FRAME * pimageframe = NULL; if (WaitForSingleObject (H1, INFINITE) ==0) {nuiimagestreamgetnextframe (H2, 0, &AMP;PIMAGEFRAME);
			Nuiimagebuffer *ptexture = pimageframe->pframetexture;
			Kinect_locked_rect Lockedrect;
			Ptexture->lockrect (0, &lockedrect, NULL, 0);
				if (lockedrect.pitch!= 0) {Cvzero (depthindeximage); for (int i=0; i<240; i++) {uchar* ptr = (uchar*) (depthindeximage->imagedata+i*depthindeximage->widthste
					p);
					BYTE * pbuffer = (BYTE *) (lockedrect.pbits) +i*lockedrect.pitch;  USHORT * Pbufferrun = (ushort*) pbuffer;//Note that there is a need for conversion, because each data is 2 bytes, stored in the same color information as above, this is 2 bytes a message, can not be used in byte, converted to USHORT for (int j=0; j<320; J + +) {Ptr[j] = 255-(BYTE) (256*PBUFFERRUN[J]/0X0FFF);//directly normalized data to}} cvshowimage ("Depthindexim
			Age ", depthindeximage);
			else {cout<< "Buffer length of received texture is bogus\r\n" <<endl;
		/release this frame data and prepare for the next frame nuiimagestreamreleaseframe (H2, pimageframe);
	} if (Cvwaitkey =) break;
	}//Close Nui link nuishutdown ();
return 0; }

Experimental results:


4. Points to note

① NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX and NUI_INITIALIZE_FLAG_USES_DEPTH cannot create data streams at the same time; I confirmed this experimentally. Also, the pure depth image is mirrored left-to-right.

② On why the normalization divides by 0x0FFF: the official effective range of the Kinect is 1.2 m to 3.5 m. In millimeters, 3.5 m is 0x0DAC in hexadecimal, and the maximum distance I could measure in my lab was 0x0F87 (3975 mm). Presumably the SDK authors simply use the limit value 0x0FFF as the divisor.

③ The headers cv.h and highgui.h come from OpenCV, which I use for display because I am familiar with it.


5. Skeleton (bone) data extraction:

#include <iostream> #include "Windows.h" #include "msr_nuiapi.h" #include "cv.h" #include "highgui.h" usi  

NG namespace Std;
void Nui_drawskeleton (Nui_skeleton_data * pskel,int whichone, iplimage *skeletonimage)//Draw The skeleton, the second parameter is not used, want to track many people's children's shoes can consider using
	{float FX, FY;
    Cvpoint Skeletonpoint[nui_skeleton_position_count]; for (int i = 0; i < nui_skeleton_position_count i++)//all coordinates are converted to the coordinates of the depth diagram {NUITRANSFORMSKELETONTODEPTHIMAGEF (
		Pskel->skeletonpositions[i], &AMP;FX, &fy);
		skeletonpoint[i].x = (int) (fx*320+0.5f);
    SKELETONPOINT[I].Y = (int) (fy*240+0.5f); for (int i = 0; i < Nui_skeleton_position_count i++) {if (pskel->eskeletonpositiontrackingstat E[i]//Tracking point has three states: 1 is not tracked, 2 is tracked, 3 is traced to {cvcircle (skeletonimag
        E, Skeletonpoint[i], 3, cvscalar (0, 255, 255),-1, 8, 0);

} return; int main (int argc,char * argv[]) {iplimage *skeletonimage=nulL  

	Skeletonimage = Cvcreateimage (Cvsize (320, 240), 8, 3);  
	Initialize NUI HRESULT hr = nuiinitialize (Nui_initialize_flag_uses_skeleton);  
		if (HR!= S_OK) {cout<< "Nuiinitialize failed" <<endl;  
	return HR;   

	//Open the color graph information channel for the Kinect device HANDLE h1 = CreateEvent (null, TRUE, FALSE, NULL);  hr = nuiskeletontrackingenable (h1, 0);//Open Skeleton trace event if (FAILED (HR)) {cout << "nuiskeletontrackingenable fail"
		<< Endl;
		Nuishutdown ();
	return HR; } while (1) {if (WaitForSingleObject (H1, INFINITE) ==0) {Nui_skeleton_frame definition of skeleton frame bool BF

			Oundskeleton = false;
			if (SUCCEEDED (nuiskeletongetnextframe (0, &skeletonframe))//get the next frame of skeleton data. Extract skeleton frames directly from Kinect {for (int i = 0; i < Nui_skeleton_count i++) {if (skeletonframe.skeletondata[i].etrackingstate = = NU
					i_skeleton_tracked)//tracking up to six people, check each "person" (may be empty, not a person) whether tracking to {Bfoundskeleton = true; }} if (!bfoundskeleton) {continue;; }//Smooth out the skeleton data Nuitransformsmooth (&skeletonframe,null);//smooth skeleton frame, eliminate jitter//Draw each Skelet
			On color according to the slot within they are found.
			Cvzero (Skeletonimage); for (int i = 0; i < Nui_skeleton_count i++) {//show skeleton only if it's tracked, and the Center-shoulde
                                R Joint is at least inferred.

				Determine if the condition is a correct skeleton: The skeleton is tracked and the shoulder Center (neck position) must be traced. if (skeletonframe.skeletondata[i].etrackingstate = = nui_skeleton_tracked && skeletonframe.skeletondata[i]. Eskeletonpositiontrackingstate[nui_skeleton_position_shoulder_center]!= nui_skeleton_position_not_tracked) {NUI
				_drawskeleton (&skeletonframe.skeletondata[i], I, skeletonimage);
			} cvshowimage ("Skeletonimage", skeletonimage);//show skeleton images.
		Cvwaitkey (30);  
	}//Close Nui link nuishutdown ();  
return 0; }


Experimental results: only joints are drawn, with no connecting lines; if you want the bones drawn as well, you can post-process the joint array and draw the connections yourself.



Contact Us

The content source of this page is from Internet, which doesn't represent Alibaba Cloud's opinion; products and services mentioned on that page don't have any relationship with Alibaba Cloud. If the content of the page makes you feel confusing, please write us an email, we will handle the problem within 5 days after receiving your email.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.