Using the GLFW library to render an image read with OpenCV as an OpenGL background texture

Source: Internet
Author: User
Tags generator

Reprint please indicate the source: Http://my.csdn.NET/ye_shen_wei_mian

Some time ago I experimented a little with GLFW. I personally do not like FreeGLUT's callback mechanism, and GLFW is an alternative choice.

The following points should be noted in the use of GLFW:

1. GLFW can be used in multi-threaded programs.

2. If you use multiple threads, remember that the GLFW initialization function glfwInit() may only be called once, and must be invoked from the main thread. Calling it multiple times is an error.

3. GLFW's keyboard callback appears to be callable only from the main thread.

The code below was obtained from the Internet and modified. It pastes an OpenCV capture into the OpenGL window as a background texture, and it can be run:

#include <iostream> #include <thread> #include <chrono> using Std::cout;


Using Std::endl;  #include "opencv.hpp"//#include "gl/glew.h" #include "gl/freeglut.h" #include "glfw/glfw3.h" glint   WindowWidth = 640;    //Define Our window width glint   windowheight = 480;    //Define Our window height glfloat fieldofview = 45.0f;  //FoV glfloat znear = 0.1f;    //Near clip plane glfloat zfar = 200.0f;
 //Far clip plane//Frame counting and limiting int    framecount = 0;


Double Framestarttime, Frameendtime, Framedrawtime;


BOOL quit = false;


glfwwindow* window;


void handlekeypress (int thekey, int theaction); Function turn a cv::mat into a texture, and return the texture ID as a gluint to use Gluint mattotexture (Cv::mat & Mat, Glenum Minfilter, Glenum magfilter, Glenum wrapfilter) {//Generate a number for our Textureid ' s unique handle GLu
	int Textureid; Glgentextures (1, &texturEID);


	Bind to our texture handle glbindtexture (gl_texture_2d, Textureid);
	Catch Silly-mistake texture interpolation method for magnification//if (Magfilter = = Gl_linear_mipmap_linear | |
	Magfilter = = Gl_linear_mipmap_nearest | |
	Magfilter = = Gl_nearest_mipmap_linear | | Magfilter = = gl_nearest_mipmap_nearest)//{//cout << "can ' t use mipmaps for magnification-setting Filte
	R to Gl_linear "<< Endl;
	Magfilter = Gl_linear;
	} glpixelstoref (Gl_unpack_alignment, 1); Set texture interpolation methods for minification and magnification gltexparameteri (gl_texture_2d, Gl_texture_min_fi
	Lter, Minfilter);


	Gltexparameteri (gl_texture_2d, Gl_texture_mag_filter, Magfilter);
	Set Texture Clamping Method Gltexparameteri (gl_texture_2d, gl_texture_wrap_s, Wrapfilter);


	Gltexparameteri (gl_texture_2d, gl_texture_wrap_t, Wrapfilter);


	GLTEXENVF (gl_texture_env, Gl_texture_env_mode, gl_decal); Set incoming texture format to://GL_BGR &NBsp     for Cv_cap_openni_bgr_image,//Gl_luminance for Cv_cap_openni_disparity_map,//Work out other mappings a
	s required (there ' a list in comments in Main ()) glenum Inputcolourformat = Gl_rgb;
	if (mat.channels () = = 1) {Inputcolourformat = gl_luminance; //Create The texture glteximage2d (gl_texture_2d,    //Type of texture 0,                //Pyramid level (for mip-mapping)-0 are the top level Gl_rgb,            //Internal colour format to convert to 640,          //Image width  i.e. 64 0 for the Kinect in standard mode,          //Image height i.e to Kinect in standard mod e 0,                //Border width in pixels (can either to 1 or 0) inputcol
		Ourformat,//Input image format (i.e. Gl_rgb, Gl_rgba, GL_BGR etc.) Gl_unsigned_byte,  //Image dataType Mat.data);        //The actual image data itself////If we ' re using mipmaps then generate them.
	Note:this requires OpenGL 3.0 or higher//if (minfilter = = Gl_linear_mipmap_linear | |
	Minfilter = = Gl_linear_mipmap_nearest | |
	Minfilter = = Gl_nearest_mipmap_linear | |
	Minfilter = = gl_nearest_mipmap_nearest)//{//Glgeneratemipmap (gl_texture_2d);
return Textureid; } void Draw (Cv::mat &camframe) {//clear the screen and depth buffer, and reset the Modelview matrix to Identi Ty//glclear (gl_color_buffer_bit |
	Gl_depth_buffer_bit);


	Glloadidentity (); Glclearcolor (1.0f, 1.0f, 1.0f, 1.0f);
	Set we clear colour to black///* Draw a triangle * *//glbegin (gl_triangles); GLCOLOR3F (1.0, 0.0, 0.0);


	   //Red//glvertex3f (1.0, 1.0, 0.0); glcolor3f (0.0, 1.0, 0.0);


	   //Green//glvertex3f (-1.0,-1.0, 0.0); glcolor3f (0.0, 0.0, 1.0);


	   //Blue//glvertex3f (1.0,-1.0, 0.0); //Glend ();


	Move things to the screen//gltranslatef (0.0f, 0.0f, -8.0f);


	Rotate around the y-axis//glrotatef (Framecount, 0.0f, 1.0f, 0.0f);
	Rotate around the x-axis//static float rateofchange = 0.01f;
	static float Degreestomovethrough = 180.0f;


	Glrotatef (Sin (framecount * rateofchange) * Degreestomovethrough, 1.0f, 0.0f, 0.0f);


	Rotate around the z-axis//glrotatef (cos (framecount * rateofchange) * Degreestomovethrough, 0.0f, 1.0f, 0.0f);


	Glenable (gl_texture_2d);
	Quad width and height float w = 6.4f;


	float h = 4.8f; Convert image and depth data to OpenGL textures gluint Imagetex = mattotexture (Camframe, Gl_linear, Gl_linear, Gl_clam
	P);


	Gluint Depthtex = Mattotexture (Depthframe, Gl_linear_mipmap_linear, Gl_linear, Gl_clamp);


	Draw the textures//Note:window co-ordinates origin is top left, texture co-ordinate origin are bottom left.
	Front facing texture glbindtexture (gl_texture_2d, Imagetex); Glbegin (gl_pOlygon);
	GLTEXCOORD2F (0, 0);
	GLVERTEX2F (-1,-1);
	GLTEXCOORD2F (1, 0);
	GLVERTEX2F (1,-1);
	GLTEXCOORD2F (1, 1);
	GLVERTEX2F (1, 1);
	GLTEXCOORD2F (0, 1);
	GLVERTEX2F (-1, 1);




	Glend ();	Glbegin (Gl_polygon); Set to polygon texture mapping and start texturing//gltexcoord2f (0.0f, 0.0f);	glvertex2f (0, 0); The upper left corner of the texture corresponds to the upper left corner of the window//gltexcoord2f (0.0f, 1.0f);	GLVERTEX2F (0, h); The lower left corner of the texture corresponds to the lower left corner of the window//gltexcoord2f (1.0f, 1.0f);	GLVERTEX2F (W, h); The lower right corner of the texture corresponds to the lower right corner of the window//gltexcoord2f (1.0f, 0.0f);	glvertex2f (w, 0);	The upper right corner of the texture corresponds to the upper right corner of//glend (); End map//////back facing texture (facing backward because of the reversed the vertex Winding)////glbindtexture (gl_
	texture_2d, Depthtex);
	Glbegin (gl_quads);
	GLTEXCOORD2F (1, 1);
	GLVERTEX2F (-W/2, H/2);
	GLTEXCOORD2F (1, 0);
	GLVERTEX2F (-W/2,-H/2);
	GLTEXCOORD2F (0, 0);
	GLVERTEX2F (W/2,-H/2);
	GLTEXCOORD2F (0, 1);
	GLVERTEX2F (W/2, H/2);


	Glend ();
	Free the Texture memory gldeletetextures (1, &imagetex); Gldeletetextures (1, &AMP;DEPThtex);


	Gldisable (gl_texture_2d);
Glfwswapbuffers (window); } void handlekeypress (int thekey, int theaction) {//If a key was pressed ... if (theaction = = glfw_press) {//.
		. act accordingly dependant on what key it was!
			Switch (thekey) {case 27:quit = true;


		Break


		Default:break;


	//end of Switch statement}//end of glfw_press} void Initgl () {//glenum err = Glewinit ();
	Define our buffer settings int redbits = 8, greenbits = 8, bluebits = 8;


	


	int alphabits = 8, Depthbits = N, stencilbits = 8;
	
	Initialise GLFW glfwinit (); Create a window if (!) ( window = Glfwcreatewindow (windowwidth, WindowHeight, "Hello world", NULL, NULL)) {cout << ' Failed to open Windo
		w! "<< Endl;
		Glfwterminate ();
	Exit (-1);


	}/* Make the window's context current */glfwmakecontextcurrent (window);


	Specify the callback function for key presses/releases//glfwsetkeycallback (handlekeypress);  initialise gleW (must occur after window creation or Glew would error)/*if (GLEW_OK!= err) {cout << "Glew initialisation er
	Ror: "<< glewgeterrorstring (Err) << Endl;
	System ("pause");
	Exit (-1);
	} cout << "Glew okay-using version:" << glewgetstring (glew_version) << Endl; *//Setup We viewport to be the entire size of the window glviewport (0, 0, (Glsizei) WindowWidth, (Glsizei) Windowheigh
	
	T); /*static const Glfloat proj[] = {0,-2.f/windowwidth,0,0, -2.f/windowheight,0,0,0, 0,0,1,0, 1,1,0,1,};*///glMa
	Trixmode (gl_projection);
	Glloadidentity ();
	GLLOADMATRIXF (proj);




	gluortho2d (0, WindowWidth, windowheight, 0);
	Change to the projection matrix and set our viewing volume Glmatrixmode (gl_projection);


	Glloadidentity (); The following code is a fancy bit of math this is equivilant to calling://Gluperspective (fieldofview/2.0f, Width/hei
	Ght, near, far); We do it the way simply to avoid requiring glu.h glfloat asPectratio = (WindowWidth > WindowHeight)?
	Float (windowwidth)/float (windowheight): float (windowheight)/float (windowwidth);
	Glfloat FH = tan (Float (fieldofview/360.0f * 3.14159f)) * znear;
	Glfloat FW = FH * aspectratio;


	Glfrustum (-FW, FW,-FH, FH, Znear, Zfar);		-----OpenGL Settings-----//gldepthfunc (gl_lequal); Specify depth function to use//glenable (gl_depth_test);    //Enable The depth buffer//glhint (gl_perspective_correction_hint, gl_nicest); Ask for nicest perspective correction//glenable (gl_cull_face);    //cull back facing polygons glfwswapinterval (1);        //Lock screen updates to vertical refresh//Switch to Modelview matrix and reset Glmatrix
	Mode (Gl_modelview);


	Glloadidentity (); Glclearcolor (1.0f, 1.0f, 1.0f, 1.0f);  Set we clear colour to black} void Lockframerate (double framerate) {//Note:framestarttime is called-A-Thing In the main loop//our allowed frame The 1 second divided by the desired FPS static double allowedframetime = 1.0/framerate;


	Get frameendtime = Glfwgettime ();


	Calc Frame Draw Time framedrawtime = Frameendtime-framestarttime;


	Double sleeptime = 0.0; Sleep if we ' ve got time to kill before the next frame if (Framedrawtime < allowedframetime) {sleeptime = Allowe
		Dframetime-framedrawtime;
		Glfwsleep (Sleeptime);
	Std::this_thread::sleep_for (std::chrono::microseconds (int) (sleeptime));
	}//Debug stuff Double potentialfps = 1.0/framedrawtime;
	Double Lockedfps = 1.0/(Glfwgettime ()-framestarttime);
	cout << "Draw:" << framedrawtime << "sleep:" << sleeptime; cout << "Pot.


FPS: "<< potentialfps <<" Locked fps: "<< lockedfps << Endl;}


	int main () {//Set up our OpenGL window, projection and Options INITGL (); Create A We video capture using the Kinect and Openni//note:to Use the Cv::viDeocapture class you must link in the Highgui lib (libopencv_highgui.so) cout << "Opening video device ..." <&lt ;
	Endl


	Cv::videocapture Capture (0);
	Set sensor to 640x480@30hz mode as opposed to 1024x768@15hz mode (which was available for image sensor only!) Note:cv_cap_openni_image_generator_output_mode = Cv_cap_openni_image_generator + CV_CAP_PROP_OPENNI_OUTPUT_MODE/ /capture.set (Cv_cap_openni_image_generator_output_mode, cv_cap_openni_vga_30hz);


	Default cout << "done." << Endl; Check that we have actually opened a connection to the sensor if (!capture.isopened ()) {cout << ' Can not ope
		n a Capture object. "<< Endl;
	return-1;
	//Create Our Cv::mat objects Cv::mat camframe;


	Cv::mat Depthframe; 
		do {framestarttime = Glfwgettime ();//Grab the time at the beginning of the frame//Grab a frame from the sensor
		Correct procedure is to grab once per frame, then retrieve as many fields as required. // *IMPORTANT Note:there  Appears to is a threading issue with the OpenCV grab () function//where if you try to grab the device before it ' s ready  To provide the next frame it takes//up to 2 seconds to provide the frame, which it might doing for a little while before Crashing//The Xnsensorserver process & then you can ' t get no more frames without restarting the//application .
		This is results in horrible, stuttery framerates and garbled sensor data. I ' ve found this can is worked around by playing a mp3 in the background.
		No, really. I ' m guessing the threading of the MP3 player introduces some kind of latency which//prevents the grab () function be ing called too soon.
		Try it if you don ' t believe me! Config:linux x64 Lmde, Kernel 3.1.0-5.dmz.1-liquorix-amd64, Nvidia 290.10 drivers,//OpenCV 2.3.2 (from git, b Uilt without TBB [same occurs with!]), openni-bin-x64-v1.5.2.23,//avin2-sensorkinect-git-unstable-branch-2011-01-04, nite-bin-unstable-x64-v1.5.2.21. if (!capture.grab () {cout << "could not grab frame ...
		Skipping frame. "<< Endl; else {/* Frame retrieval formats:data given from depth generator:openni_depth_map     &NBSP ;   -depth values in mm (CV_16UC1) openni_point_cloud_map  -XYZ in meters (CV_32FC3) Openni_disparity_map    -disparity in pixels (cv_8uc1) openni_disparity_map_32f-disparity in pixels (CV_32FC1) Openni_vali D_depth_mask  -MASK of valid pixels (not occluded, not shaded etc.) (CV_8UC1) data given from RGB image Generator:openni_bgr_image-color image (CV_8UC3) Openni_gray_image-gra
			Y image (CV_8UC1) *///Retrieve desired sensor data capture.retrieve (camframe); Cvtcolor (Camframe, Camframe, cv_BGR2RGB);
			Cv::flip (Camframe, camframe,0);
			Capture.retrieve (Depthframe, Cv_cap_openni_disparity_map);
			cout << camframe.size () << Endl;
			Cv::imwrite ("Tmp.jpg", camframe);
			Cv::waitkey (20);


			System ("pause");


			Draw Texture Contents Draw (camframe);
		Swap the active and Visual pages//glfwswapbuffers (window);


		} framecount++;




		Lock our main loop to 30fps lockframerate (30.0);


	if (Cv::waitkey () >= 0)//break;


	while (!glfwwindowshouldclose (window));


	Capture.release ();


	Glfwterminate ();
return 0;
 }

Contact Us

The content source of this page is from Internet, which doesn't represent Alibaba Cloud's opinion; products and services mentioned on that page don't have any relationship with Alibaba Cloud. If the content of the page makes you feel confusing, please write us an email, we will handle the problem within 5 days after receiving your email.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.