Demonstration Program for Detecting Gestures Using Kinect + OpenCV


Changes: added the result image, updated the code, and increased the number of templates to 6 (digits 0-5).

 

1. Principle: read the Kinect depth data, convert it to a binary image, find the contours, compare the largest contour against each contour template, and report the template with the smallest Hu-moment distance (a minimal sketch of this matching step follows this list).

2. Basis: OpenNI, OpenCV 2.2, and the routine at http://blog.163.com/gz_ricky/blog/static/182049118201122311118325/.

3. Results: the program only demonstrates OpenCV + OpenNI programming; neither accuracy nor processing speed has been optimized. It is for reference only.
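To make the principle in item 1 concrete, here is a minimal, self-contained sketch of the contour-matching step, assuming the OpenCV 2.x C++ API. The function name bestMatch and its rejection threshold are illustrative choices that mirror the demo code further down, not part of any official API:

#include <opencv2/opencv.hpp>
#include <cmath>
#include <vector>

using namespace cv;
using namespace std;

// Return the index of the template whose Hu moments are closest to the
// largest contour in a binary image, or -1 if nothing is close enough.
int bestMatch(const Mat &binaryHand, const vector<vector<Point> > &templates)
{
    vector<vector<Point> > contours;
    Mat work = binaryHand.clone();   // findContours modifies its input
    findContours(work, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);
    if (contours.empty())
        return -1;

    // keep the largest contour, assumed to be the hand
    int maxId = 0;
    double maxArea = 0.0;
    for (size_t i = 0; i < contours.size(); i++)
    {
        double area = fabs(contourArea(Mat(contours[i])));
        if (area > maxArea) { maxArea = area; maxId = (int)i; }
    }

    // smaller matchShapes value = more similar Hu moments
    int bestId = -1;
    double bestHu = 0.25;            // rejection threshold, same as the demo
    for (size_t i = 0; i < templates.size(); i++)
    {
        double hu = matchShapes(Mat(templates[i]), Mat(contours[maxId]),
                                CV_CONTOURS_MATCH_I1, 0);
        if (hu < bestHu) { bestHu = hu; bestId = (int)i; }
    }
    return bestId;
}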

 

Matching the gestures 0, 1, and 5 is comparatively accurate.

 

In short, everything is in the code below.

// KinectOpenCVTest.cpp : defines the entry point for the console application.
//

#include "stdafx.h"

#include <stdlib.h>
#include <iostream>
#include <string>
#include <XnCppWrapper.h>
#include <opencv2/opencv.hpp>

//#include "opencv/cv.h"
//#include "opencv/highgui.h"

using namespace std;
using namespace cv;

#define SAMPLE_XML_PATH "../../Data/SamplesConfig.xml"

// global template contours
vector<vector<Point> > g_templateContours;

// number of templates
int g_handTNum = 6;

void CheckOpenNIError(XnStatus eResult, string sStatus)
{
    if (eResult != XN_STATUS_OK)
    {
        cerr << sStatus << " error: " << xnGetStatusString(eResult) << endl;
    }
}

// load the contour of each template image
void init_hand_template()
{
    //int handTNum = 10;
    string temp = "handtemplate/";

    for (int i = 0; i < g_handTNum; i++)
    {
        stringstream ss;
        ss << i << ".bmp";
        string fileName = temp + ss.str();

        // read as a grayscale image
        Mat src = imread(fileName, 0);
        if (!src.data)
        {
            printf("file not found: %s\n", fileName.c_str());
            continue;
        }

        vector<vector<Point> > contours;
        vector<Vec4i> hierarchy;
        findContours(src, contours, hierarchy, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);

        g_templateContours.push_back(contours[0]);
    }
}

// match a hand contour against the templates
int hand_template_match(Mat &hand)
{
    int minId = -1;
    double minHu = 1;

    double hu;
    int method = CV_CONTOURS_MATCH_I1;

    for (int i = 0; i < g_handTNum; i++)
    {
        Mat temp(g_templateContours.at(i));
        hu = matchShapes(temp, hand, method, 0);

        // keep the template with the smallest Hu-moment distance
        if (hu < minHu)
        {
            minHu = hu;
            minId = i;
        }
        //printf("%f ", hu);
    }

    // matching threshold (hMatch_value / 100)
    int hMatch_value = 25;

    if (minHu < ((double)hMatch_value) / 100)
        return minId;
    else
        return -1;
}

void findHand(Mat &src, Mat &dst)
{
    vector<vector<Point> > contours;
    vector<Vec4i> hierarchy;

    // find the external contours only
    //findContours(src, contours, hierarchy, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);
    findContours(src, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);
    //findContours(src, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);

    Mat dst_r = Mat::zeros(src.rows, src.cols, CV_8UC3);
    dst_r.copyTo(dst);

    // find the largest contour, assumed to be the hand
    double maxArea = 0.0;
    int maxId = -1;

    for (unsigned int i = 0; i < contours.size(); i++)
    {
        Mat temp(contours.at(i));
        double area = fabs(contourArea(temp));
        if (area > maxArea)
        {
            maxId = i;
            maxArea = area;
        }
    }

    //for (int idx = 0; idx >= 0; idx = hierarchy[idx][0])
    //{
    //    Scalar color(rand()&255, rand()&255, rand()&255);
    //    drawContours(dst, contours, idx, color, CV_FILLED, 8, hierarchy);
    //    double area = fabs(contourArea(contours.at(idx)));
    //    if (area > maxArea)
    //    {
    //        maxId = idx;
    //        maxArea = area;
    //    }
    //}

    // draw the largest contour and the ID of the best-matching template
    if (contours.size() > 0)
    {
        Scalar color(0, 255, 255);
        drawContours(dst, contours, maxId, color);

        Mat hand(contours.at(maxId));
        int value = hand_template_match(hand);

        if (value >= 0)
        {
            Scalar templateColor(255, 0, 255);
            drawContours(dst, g_templateContours, value, templateColor);

            printf("match %d\r\n", value);

            stringstream ss;
            ss << "match " << value;
            string text = ss.str();
            putText(dst, text, Point(300, 30), FONT_HERSHEY_SIMPLEX, 1.0, templateColor);
        }
    }
}

int handDetect()
{
    init_hand_template();

    XnStatus eResult = XN_STATUS_OK;

    // 1. initialize metadata
    xn::DepthMetaData m_depthMD;
    xn::ImageMetaData m_imageMD;

    // OpenCV Mats
    Mat m_depth16u(480, 640, CV_16UC1);
    Mat m_rgb8u(480, 640, CV_8UC3);
    Mat m_depthShow(480, 640, CV_8UC1);
    Mat m_imageShow(480, 640, CV_8UC3);

    Mat m_depthThreshShow(480, 640, CV_8UC1);
    Mat m_handShow(480, 640, CV_8UC3);

    //cvNamedWindow("depth");
    //cvNamedWindow("image");
    //cvNamedWindow("depthThresh");

    char key = 0;

    // 2. initialize context
    xn::Context mContext;
    eResult = mContext.Init();

    //xn::EnumerationErrors errors;
    //eResult = mContext.InitFromXmlFile(SAMPLE_XML_PATH, &errors);

    CheckOpenNIError(eResult, "initialize context");

    // set mirror
    mContext.SetGlobalMirror(!mContext.GetGlobalMirror());

    // 3. create depth generator
    xn::DepthGenerator mDepthGenerator;
    eResult = mDepthGenerator.Create(mContext);
    CheckOpenNIError(eResult, "create depth generator");

    // 4. create image generator
    xn::ImageGenerator mImageGenerator;
    eResult = mImageGenerator.Create(mContext);
    CheckOpenNIError(eResult, "create image generator");

    // 5. set map mode
    XnMapOutputMode mapMode;
    mapMode.nXRes = 640;
    mapMode.nYRes = 480;
    mapMode.nFPS = 30;
    eResult = mDepthGenerator.SetMapOutputMode(mapMode);
    eResult = mImageGenerator.SetMapOutputMode(mapMode);

    // The depth camera and the color camera sit at different positions and
    // have different intrinsics, so their images differ slightly.
    // Shift the depth view to the RGB camera's viewpoint.
    // 6. correct viewport
    mDepthGenerator.GetAlternativeViewPointCap().SetViewPoint(mImageGenerator);

    // 7. start generating data
    eResult = mContext.StartGeneratingAll();

    // 8. read data
    eResult = mContext.WaitNoneUpdateAll();
    while ((key != 27) && !(eResult = mContext.WaitNoneUpdateAll()))
    {
        // 9a. get the depth map
        mDepthGenerator.GetMetaData(m_depthMD);
        memcpy(m_depth16u.data, m_depthMD.Data(), 640*480*2);

        // 9b. get the image map
        mImageGenerator.GetMetaData(m_imageMD);
        memcpy(m_rgb8u.data, m_imageMD.Data(), 640*480*3);

        // map unknown depth (0) to white to ease analysis in OpenCV
        XnDepthPixel *pDepth = (XnDepthPixel *)m_depth16u.data;
        for (XnUInt32 y = 0; y < m_depthMD.YRes(); ++y)
        {
            for (XnUInt32 x = 0; x < m_depthMD.XRes(); ++x, ++pDepth)
            {
                if (*pDepth == 0)
                {
                    *pDepth = 0xFFFF;
                }
            }
        }

        // OpenNI depth is a 16-bit unsigned integer, while OpenCV displays
        // 8 bits, so it must be converted: map distance to gray value
        // (0-2550 mm to 0-255); e.g. 1000 mm becomes 1000*255/2550 = 100.
        //m_depth16u.convertTo(m_depthShow, CV_8U, 255/2096.0);
        m_depth16u.convertTo(m_depthShow, CV_8U, 255/2550.0);

        // the image could also be cropped to the valid data range here

        // smooth and denoise the grayscale image
        //medianBlur(m_depthShow, m_depthThreshShow, 3);
        //medianBlur(m_depthThreshShow, m_depthShow, 3);
        blur(m_depthShow, m_depthThreshShow, Size(3, 3));
        blur(m_depthThreshShow, m_depthShow, Size(3, 3));

        Mat pyrTemp(240, 320, CV_8UC1);
        pyrDown(m_depthShow, pyrTemp);
        pyrUp(pyrTemp, m_depthShow);

        //dilate(m_depthShow, m_depthThreshShow, Mat(), Point(-1,-1), 3);
        //erode(m_depthThreshShow, m_depthShow, Mat(), Point(-1,-1), 3);

        // RGB and BGR byte orders differ in memory, so convert as well
        cvtColor(m_rgb8u, m_imageShow, CV_RGB2BGR);

        //imshow("depth", m_depthShow);
        //imshow("image", m_imageShow);

        double thd_max = 0xFFFF;
        double thd_val = 100.0;

        // invert black and white so near objects become the foreground
        // whose outer contour we look for
        //threshold(m_depthShow, m_depthThreshShow, thd_val, thd_max, CV_THRESH_BINARY);
        threshold(m_depthShow, m_depthThreshShow, thd_val, thd_max, CV_THRESH_BINARY_INV);
        imshow("depthThresh", m_depthThreshShow);

        findHand(m_depthThreshShow, m_handShow);
        imshow("hand", m_handShow);

        key = cvWaitKey(20);
    }

    // 10. stop
    mContext.StopGeneratingAll();
    mContext.Shutdown();

    return 0;
}

int _tmain(int argc, _TCHAR *argv[])
{
    handDetect();
    return 0;
}
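A note on the inputs: init_hand_template() loads the six templates from handtemplate/0.bmp through handtemplate/5.bmp, relative to the working directory, and keeps only the first external contour of each. Because findContours treats non-zero pixels as foreground, each template should be a white hand silhouette on a black background. The SamplesConfig.xml path is only needed if you re-enable the commented-out InitFromXmlFile() initialization.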

 
