Demonstration of gesture recognition using the OpenNI user generator and OpenCV


1. Principle: read the Kinect's depth data and user (scene) data, keep the part of the user silhouette that lies within a fixed distance band behind its nearest point, extract the contour of that region, and compare it against a set of template contours. The template with the smallest Hu-moment distance is the matching result. (Both steps are sketched after this list.)

2. Basis: OpenNI, OpenCV 2.2, the earlier post "Using Kinect + OpenCV gesture detection demo program", and the routine at http://blog.163.com/gz_ricky/blog/static/182049118201122311118325/.

3. Result: the judgment of the hand's position is improved, but processing is noticeably slower. The program only demonstrates how to combine OpenCV and OpenNI; accuracy and processing speed are not optimized, and there is no error handling. For reference only.
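As a standalone illustration of the segmentation step from point 1, here is a minimal sketch in pure OpenCV terms. It is an assumption-laden rewrite rather than the demo's own code: the depth and user-mask Mats stand in for OpenNI's DepthMetaData and SceneMetaData, the function name segmentHand is made up for this sketch, and the 100 mm band corresponds to g_backwardDist in the listing below.

#include <opencv2/opencv.hpp>

// Build a binary hand mask: keep user pixels whose depth lies within
// `band` millimetres behind the nearest valid user pixel.
// (Hypothetical helper; depth is CV_16UC1 in mm, user is CV_8UC1, nonzero = user.)
cv::Mat segmentHand(const cv::Mat& depth, const cv::Mat& user, unsigned short band = 100)
{
    // pass 1: find the nearest valid depth inside the user silhouette
    unsigned short nearest = 10000;
    for (int y = 0; y < depth.rows; y++)
        for (int x = 0; x < depth.cols; x++)
        {
            unsigned short d = depth.at<unsigned short>(y, x);
            if (user.at<unsigned char>(y, x) != 0 && d != 0 && d < nearest)
                nearest = d;
        }

    // pass 2: keep everything from the nearest point up to nearest + band
    cv::Mat hand = cv::Mat::zeros(depth.size(), CV_8UC1);
    for (int y = 0; y < depth.rows; y++)
        for (int x = 0; x < depth.cols; x++)
        {
            unsigned short d = depth.at<unsigned short>(y, x);
            if (user.at<unsigned char>(y, x) != 0 && d != 0 && d < nearest + band)
                hand.at<unsigned char>(y, x) = 0xff;
        }
    return hand;
}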
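The matching step can likewise be sketched on its own: extract the outer contours of two binary silhouettes and compare them with matchShapes, which computes a distance from the Hu moment invariants (smaller means more similar). The file names template.bmp and hand.bmp are placeholders; the 0.25 acceptance threshold mirrors hmatch_value = 25 in the listing below.

// hu_match_sketch.cpp - compare two silhouettes by Hu moments (sketch)
#include <cstdio>
#include <vector>
#include <opencv2/opencv.hpp>

int main()
{
    // load the binary silhouettes as grayscale images (placeholder file names)
    cv::Mat tmpl = cv::imread("template.bmp", 0);
    cv::Mat hand = cv::imread("hand.bmp", 0);
    if (!tmpl.data || !hand.data)
    {
        printf("image not found\n");
        return 1;
    }

    // extract the outer contour of each silhouette
    std::vector<std::vector<cv::Point> > tc, hc;
    cv::findContours(tmpl, tc, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);
    cv::findContours(hand, hc, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);
    if (tc.empty() || hc.empty())
    {
        printf("no contour found\n");
        return 1;
    }

    // matchShapes builds a distance from the Hu moment invariants;
    // 0 means identical shapes, larger means less similar
    double hu = cv::matchShapes(cv::Mat(tc[0]), cv::Mat(hc[0]), CV_CONTOURS_MATCH_I1, 0);
    printf("Hu distance: %f -> %s\n", hu, hu < 0.25 ? "match" : "no match");
    return 0;
}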

 

// HandDetectByUser.cpp : Defines the entry point for the console application.
//

#include "stdafx.h"

#include <stdlib.h>
#include <iostream>
#include <sstream>
#include <string>
#include <XnCppWrapper.h>
#include <opencv2/opencv.hpp>

using namespace std;
using namespace cv;

//#define SAMPLE_XML_PATH "../../Data/SamplesConfig.xml"

// distance band behind the nearest point (mm)
//int g_forwardDist = 50;
int g_backwardDist = 100;

// global template contours
vector<vector<Point> > g_templateContours;

// number of templates
int g_handTNum = 6;

void CheckOpenNIError(XnStatus eResult, string sStatus)
{
    if (eResult != XN_STATUS_OK)
    {
        cerr << sStatus << " Error: " << xnGetStatusString(eResult) << endl;
        return;
    }
}

// callback: a new user was found
void XN_CALLBACK_TYPE User_NewUser(xn::UserGenerator& generator, XnUserID nId, void* pCookie)
{
    printf("New user %d\n", nId);
}

// callback: an existing user was lost
void XN_CALLBACK_TYPE User_LostUser(xn::UserGenerator& generator, XnUserID nId, void* pCookie)
{
    printf("Lost user %d\n", nId);
}

// load the template contours
void init_hand_template()
{
    //int handTNum = 10;
    string temp = "handtemplate/";

    for (int i = 0; i < g_handTNum; i++)
    {
        stringstream ss;
        ss << i << ".bmp";
        string filename = temp + ss.str();

        // read as a grayscale image
        Mat src = imread(filename, 0);
        if (!src.data)
        {
            printf("file not found: %s\n", filename.c_str());
            continue;
        }

        vector<vector<Point> > contours;
        vector<Vec4i> hierarchy;

        findContours(src, contours, hierarchy, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);
        //findContours(src, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);

        g_templateContours.push_back(contours[0]);
    }
}

// match a hand contour against the templates
int hand_template_match(Mat& hand)
{
    //int handTNum = 10;
    int minId = -1;
    double minHu = 1;

    double hu;
    int method = CV_CONTOURS_MATCH_I1;

    for (int i = 0; i < g_handTNum; i++)
    {
        Mat temp(g_templateContours.at(i));
        hu = matchShapes(temp, hand, method, 0);

        // keep the template with the smallest Hu-moment distance
        if (hu < minHu)
        {
            minHu = hu;
            minId = i;
        }

        //printf("%f ", hu);
    }

    // accept the match only below the threshold
    int hmatch_value = 25;  // template matching coefficient

    if (minHu < ((double)hmatch_value) / 100)
        return minId;
    else
        return -1;
}

void findHand(Mat& src, Mat& dst)
{
    vector<vector<Point> > contours;
    vector<Vec4i> hierarchy;

    // find the external contours
    //findContours(src, contours, hierarchy, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);
    findContours(src, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);
    //findContours(src, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);

    Mat dst_r = Mat::zeros(src.rows, src.cols, CV_8UC3);
    dst_r.copyTo(dst);

    // find the contour with the largest area
    double maxArea = 0.0;
    int maxId = -1;

    for (unsigned int i = 0; i < contours.size(); i++)
    {
        Mat temp(contours.at(i));
        double area = fabs(contourArea(temp));
        if (area > maxArea)
        {
            maxId = i;
            maxArea = area;
        }
    }

    // alternative: iterate through all the top-level contours and
    // draw each connected component with its own random color
    //int idx = 0;
    //for (; idx >= 0; idx = hierarchy[idx][0])
    //{
    //    Scalar color(rand() & 255, rand() & 255, rand() & 255);
    //    drawContours(dst, contours, idx, color, CV_FILLED, 8, hierarchy);
    //
    //    double area = contourArea(contours.at(idx));
    //    if (area > maxArea)
    //    {
    //        maxId = idx;
    //        maxArea = area;
    //    }
    //}

    // draw the largest contour and the template that best matches it
    if (contours.size() > 0)
    {
        Scalar color(0, 255, 255);
        drawContours(dst, contours, maxId, color);

        Mat hand(contours.at(maxId));
        int value = hand_template_match(hand);

        if (value >= 0)
        {
            Scalar templateColor(255, 0, 255);
            drawContours(dst, g_templateContours, value, templateColor);

            printf("Match %d\r\n", value);

            stringstream ss;
            ss << "Match " << value;
            string text = ss.str();
            putText(dst, text, Point(300, 30), FONT_HERSHEY_SIMPLEX, 1.0, templateColor);
        }
    }
}

unsigned short getNearestPointByUser(const xn::DepthMetaData& dmd, const xn::SceneMetaData& smd)
{
    XnUInt16 g_nXRes = dmd.XRes();
    XnUInt16 g_nYRes = dmd.YRes();

    unsigned short minDepth = 10000;

    for (XnUInt16 nY = 0; nY < g_nYRes; nY++)
    {
        for (XnUInt16 nX = 0; nX < g_nXRes; nX++)
        {
            if ((smd(nX, nY) != 0) && (dmd(nX, nY) < minDepth))
                minDepth = dmd(nX, nY);

            //if ((smd(nY, nX) != 0) && (dmd(nY, nX) < minDepth))
            //    minDepth = dmd(nY, nX);
        }
    }

    return minDepth;
}

void getHandByUser(Mat& hand, unsigned short nearest, const xn::DepthMetaData& dmd, const xn::SceneMetaData& smd)
{
    XnUInt16 g_nXRes = dmd.XRes();
    XnUInt16 g_nYRes = dmd.YRes();

    Mat dst = Mat::zeros(g_nYRes, g_nXRes, CV_8UC1);
    dst.copyTo(hand);

    for (XnUInt16 nY = 0; nY < g_nYRes; nY++)
    {
        for (XnUInt16 nX = 0; nX < g_nXRes; nX++)
        {
            if ((smd(nX, nY) != 0) && (dmd(nX, nY) < (nearest + g_backwardDist)))
            {
                hand.at<unsigned char>(nY, nX) = 0xff;
                //uchar* p = hand.data + nY * g_nXRes + nX;
                //*p = 0xff;
            }
        }
    }
}

int handDetect()
{
    char key = 0;
    init_hand_template();

    XnStatus eResult = XN_STATUS_OK;

    // metadata
    xn::DepthMetaData m_DepthMD;
    //xn::ImageMetaData m_ImageMD;
    xn::SceneMetaData m_SceneMD;

    // OpenCV Mats
    //Mat m_depth16u(480, 640, CV_16UC1);
    //Mat m_user16u(480, 640, CV_16UC1);
    //Mat m_rgb8u(480, 640, CV_8UC3);
    Mat m_DepthShow(480, 640, CV_8UC1);
    Mat m_UserShow(480, 640, CV_8UC1);
    //Mat m_ImageShow(480, 640, CV_8UC3);
    Mat m_DepthThreshShow(480, 640, CV_8UC1);
    Mat m_HandShow(480, 640, CV_8UC3);

    // initialize context
    xn::Context mContext;
    eResult = mContext.Init();

    //xn::EnumerationErrors errors;
    //eResult = mContext.InitFromXmlFile(SAMPLE_XML_PATH, &errors);

    CheckOpenNIError(eResult, "initialize context");

    // set mirror
    mContext.SetGlobalMirror(!mContext.GetGlobalMirror());

    // create depth generator
    xn::DepthGenerator mDepthGenerator;
    eResult = mDepthGenerator.Create(mContext);
    CheckOpenNIError(eResult, "Create depth generator");

    // create image generator
    xn::ImageGenerator mImageGenerator;
    eResult = mImageGenerator.Create(mContext);
    CheckOpenNIError(eResult, "Create image generator");

    // set map mode
    XnMapOutputMode mapMode;
    mapMode.nXRes = 640;
    mapMode.nYRes = 480;
    mapMode.nFPS = 30;
    eResult = mDepthGenerator.SetMapOutputMode(mapMode);
    eResult = mImageGenerator.SetMapOutputMode(mapMode);

    // create user generator
    xn::UserGenerator mUserGenerator;
    eResult = mUserGenerator.Create(mContext);
    CheckOpenNIError(eResult, "Create user generator");

    // set callbacks (optional)
    //XnCallbackHandle hUserCallbacks;
    //mUserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);

    // The depth camera and the color camera sit at different positions, and their
    // lens parameters are not identical, so the images from the two cameras differ
    // slightly. The following call would warp the depth view to the RGB camera's
    // viewpoint to correct for this.
    //mDepthGenerator.GetAlternativeViewPointCap().SetViewPoint(mImageGenerator);

    // start generating data
    eResult = mContext.StartGeneratingAll();
    CheckOpenNIError(eResult, "Start generating");

    // read data
    eResult = mContext.WaitNoneUpdateAll();
    while ((key != 27) && !(eResult = mContext.WaitNoneUpdateAll()))
    {
        // get the depth map
        mDepthGenerator.GetMetaData(m_DepthMD);
        //memcpy(m_depth16u.data, m_DepthMD.Data(), 640*480*2);

        // get the user map
        mUserGenerator.GetUserPixels(0, m_SceneMD);
        //memcpy(m_user16u.data, m_SceneMD.Data(), 640*480*2);

        // get the image map
        //mImageGenerator.GetMetaData(m_ImageMD);
        //memcpy(m_rgb8u.data, m_ImageMD.Data(), 640*480*3);

        // get the nearest point of the user
        unsigned short nearest = getNearestPointByUser(m_DepthMD, m_SceneMD);
        //printf("nearest is %d\n", nearest);

        // get the user's hand as a binary mask
        getHandByUser(m_DepthShow, nearest, m_DepthMD, m_SceneMD);

        // smooth the binary image to remove noise
        //erode(m_DepthShow, m_DepthThreshShow, Mat(), Point(-1, -1), 3);
        //dilate(m_DepthThreshShow, m_DepthShow, Mat(), Point(-1, -1), 3);

        medianBlur(m_DepthShow, m_DepthThreshShow, 3);
        //m_DepthThreshShow.copyTo(m_DepthShow);
        medianBlur(m_DepthThreshShow, m_DepthShow, 3);
        //blur(m_DepthShow, m_DepthThreshShow, Size(3, 3));
        //m_DepthThreshShow.copyTo(m_DepthShow);
        //blur(m_DepthThreshShow, m_DepthShow, Size(3, 3));

        //Mat pyrTemp(240, 320, CV_8UC1);
        //pyrDown(m_DepthShow, pyrTemp);
        //pyrUp(pyrTemp, m_DepthShow);

        imshow("hand", m_DepthShow);

        findHand(m_DepthShow, m_HandShow);
        imshow("result", m_HandShow);

        key = cvWaitKey(20);
    }

    // stop
    mContext.StopGeneratingAll();
    mContext.Shutdown();

    return 0;
}

int _tmain(int argc, _TCHAR* argv[])
{
    handDetect();
    return 0;
}
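A note on the templates: init_hand_template() expects g_handTNum = 6 binary images named 0.bmp through 5.bmp in a handtemplate/ directory under the working directory. Each file should contain a single white hand silhouette on a black background, since the code keeps only the first outer contour it finds (contours[0]).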
