Arbitrary-N image stitching with good results — Computer Vision course project, final edition

Source: Internet
Author: User

#include <iostream> #include <fstream> #include <string> #include "opencv2/opencv_modules.hpp" # Include "Opencv2/highgui/highgui.hpp" #include "opencv2/stitching/detail/autocalib.hpp" #include "opencv2/stitching /detail/blenders.hpp "#include" opencv2/stitching/detail/camera.hpp "#include" opencv2/stitching/detail/exposure_ Compensate.hpp "#include" opencv2/stitching/detail/matchers.hpp "#include" opencv2/stitching/detail/motion_ Estimators.hpp "#include" opencv2/stitching/detail/seam_finders.hpp "#include" opencv2/stitching/detail/util.hpp "# Include "Opencv2/stitching/detail/warpers.hpp" #include "opencv2/stitching/warpers.hpp" #include <time.h> Using namespace Std;using namespace cv;using namespace CV::d etail;//define parameters vector<string> Img_names;bool Try_gpu = False;double Work_megapix = 0.6;//Image matches the resolution size, the area size of the image changes to work_megapix*100000double Seam_megapix = 0.1;//stitch pixel size Double Compose_megapix =0.6;//Stitching resolution FLOAT Conf_thresh = 1.f;//Two pictures from the confidence level of the same panorama wavecorrectkind wave_correct = detail::wave_correct_horiz;//waveform check, horizontal int expos_comp_type = exposurecompensator::gain_blocks;//Illumination compensation method, default is Gain_blocksfloat match_conf = 0.3f;//feature point detection confidence level, nearest neighbor match distance to the second nearest neighbor match distance, surf default is 0.65int Blend_type = blender::multi_band;//Fusion method, The default is multiband fusion float blend_strength = 5;//Fusion strength, 0-100. 
The default is 5.string result_name = "result.jpg";//output image filename int main () {clock_t   Start,finish;   Double totaltime; Start=clock (); int argc = 10;char* argv[] = {"1.jpg", "2.jpg", "3.jpg", "4.jpg", "5.jpg", "6.jpg", "7.jpg", "8.jpg", "9.jpg" "," 10.jpg "};for (int i = 0; i < argc; ++i) Img_names.push_back (argv[i]), int num_images = static_cast<int> (Img_names.size ());d ouble Work_scale = 1, Seam_scale = 1, Compose_scale = 1;//feature point detection and preprocessing (dimension scaling), then calculates the feature point of each graphic, and the feature point description sub cout<< "Finding Features ..." < <endl; ptr<featuresfinder> Finder;finder = new Surffeaturesfinder ()//////Use Surf feature points to detect Mat Full_img1,full_img, Img;vector <ImageFeatures> features (num_images);vector<mat> images (num_images);vector<size> full_img_sizes (num_images);d ouble seam_work_aspect = 1;for (int i = 0; i < num_images; ++i) {full_img1 = Imread (img_names[i]); Resize (fu ll_img1,full_img, size (400,300)); Full_img_sizes[i] = Full_img.size ();//calculation Work_scale, resize the image to an area of work_megapix*10^ 6 The following work_scale = Min (1.0, sqrt (Work_megapix * 1e6/full_img.size (). Area ())), Resize (full_img, IMG, size (), Work_scale, wor K_scale);//Resize the image to the area below work_megapix*10^6 Seam_scale = min (1.0, sqrt (Seam_megapix * 1e6/full_img.size (). 
Areas ())); Seam_work_aspect = seam_scale/work_scale;//Computes the feature point of the image, calculates the feature point descriptor, and sets the Img_idx to I (*finder) (IMG, features[i]); features[i ].img_idx = i;cout<< "Features in Image #" << i+1 << ":" << features[i].keypoints.size () <<en dl;//The source image resize to seam_megapix*10^6 and deposited in image[] Resize (full_img, IMG, Size (), Seam_scale, Seam_scale); Images[i] = Img.clone ();} Finder->collectgarbage (); Full_img.release (); Img.release ();//22 Matching of images cout<< "pairwise matching" << endl;//uses nearest neighbor and nearest neighbor match to match the feature points of any two images VECTOR&LT;MATCHESINFO&GT Pairwise_matches; Bestof2nearestmatcher Matcher (Try_gpu, match_conf);//nearest neighbor and sub-nearest neighbor Method Matcher (features, pairwise_matches); Match Matcher.collectgarbage () for every two images;//Combine all matches with confidence levels above the threshold into one set///leave only a picture that is determined to be from the same panorama vector<int> indices = Leavebiggestcomponent (features, pairwise_matches, Conf_thresh);vector<mat> img_subset;vector<string> Img_names_subset;vector<size> full_img_sizes_subset;for (size_t i = 0; i < indices.size (); ++i) {Img_names_ Subset.push_back (Img_names[indices[i]]); Img_subset.push_back (Images[indices[i]); full_img_sizes_subset.push_ Back (Full_img_sizes[indices[i]]);} Images = Img_subset;img_names = img_names_subset;full_img_sizes = full_img_sizes_subset;//Check whether the number of pictures still satisfies the requirements num_images = Static_cast<int> (Img_names.size ()); if (Num_images < 2) {cout<< "need more Images" <<endl;return-1 ;} Homographybasedestimator estimator;//based on the estimation of the vector<cameraparams> cameras;//camera Parameters Estimator (features, Pairwise_matches, cameras); for (size_t i = 0; i < CAMERAS.SIze (); ++i) {Mat r;cameras[i]. R.convertto (R, cv_32f); Cameras[i]. 
R = r;cout<< "Initial intrinsics #" << indices[i]+1 << ": \ n" << cameras[i]. K () <<endl;} Ptr<detail::bundleadjusterbase> adjuster;//Beam regulator parameter adjuster = new Detail::bundleadjusterray ();//Use bundle Adjustment (Light Bundle difference) method for all images of the camera parameter correction Adjuster->setconfthresh (CONF_THRESH);//Set configuration threshold mat_<uchar> Refine_mask = Mat::zeros (3, 3, cv_8u); Refine_mask (0,0) = 1;refine_mask (0,1) = 1;refine_mask (0,2) = 1;refine_mask (All) = 1;refine_mask ( ) = 1;adjuster->setrefinementmask (refine_mask);(*adjuster) (features, pairwise_matches, cameras);//Corrective/ The focal length is calculated to take the median and the focal length of all pictures and build the camera parameter, write the matrix to cameravector<double> focals;for (size_t i = 0; i < cameras.size (); ++i) {cout& lt;< "Camera #" << indices[i]+1 << ": \ n" << cameras[i]. K () <<endl;focals.push_back (cameras[i].focal);} Sort (Focals.begin (), Focals.end ()), Float warped_image_scale;if (focals.size ()% 2 = = 1) Warped_image_scale = static_cast <Float> (Focals[focals.size ()/2]) Elsewarped_image_scale = static_cast<float> (Focals[focals.size ()/2-1] + Focals[focals.size ()/2]) * 0.5f;///waveform correction vector<mat> rmats;for (size_t i = 0; i < cameras.size (); ++i) Rmats.push_ Back (Cameras[i]. R); Wavecorrect (Rmats, wave_correct);////waveform correction for (size_t i = 0; i < cameras.size (); ++i) Cameras[i]. R = rmats[i];cout<< "warping images ... 
"<<endl;vector<Point> Corners (num_images);//The Vertex vector<mat> masks_warped (num_images) after the unified coordinates; Vector<mat> images_warped (num_images);vector<size> sizes (num_images);vector<mat> Masks (num_ images);//Fusion mask//Prepare Image fusion mask for (int i = 0; i < num_images; ++i) {masks[i].create (Images[i].size (), cv_8u); Masks[i].setto (Scalar::all (255));} Curved image and Fusion mask ptr<warpercreator> warper_creator;warper_creator = new Cv::sphericalwarper (); Ptr<rotationwarper> warper = warper_creator->create (static_cast<float> (Warped_image_scale * SEAM_ Work_aspect)); for (int i = 0; I < Num_images; ++i) {mat_<float> K;cameras[i]. K (). ConvertTo (k, cv_32f); float swa = (float) seam_work_aspect; K (0,0) *= swa; K (0,2) *= swa; K (*=) SWA; K (*= swa;corners[i] = Warper->warp (Images[i], K, Cameras[i]. R, Inter_linear, Border_reflect, images_warped[i]);//calculate unified coordinate vertex sizes[i] = images_warped[i].size (); Warper->warp ( Masks[i], K, Cameras[i]. R, Inter_nearest, Border_constant, masks_warped[i]);//curved Current image}vector<mat> Images_warped_f (num_images); for (int i = 0; i < num_images; ++i) Images_warped[i].convertto (Images_warped_f[i], cv_32f); ptr<exposurecompensator> compensator = Exposurecompensator::createdefault (Expos_comp_type);//Set up a compensator for care compensation, The compensation method is gain_blockscompensator->feed (corners, images_warped, masks_warped);//Find seams ptr<seamfinder> seam_    Finder;seam_finder = new Detail::graphcutseamfinder (Graphcutseamfinderbase::cost_color); Seam_finder->find (Images_warped_f, corners, masks_warped);//Release unused memory Images.clear (); Images_warped.clear (); images_warped_f.clEar (); Masks.clear ();//////image fusion cout<< "compositing ..." 
<<endl; Mat img_warped, img_warped_s; Mat Dilated_mask, Seam_mask, Mask, mask_warped; ptr<blender> blender;double compose_work_aspect = 1;for (int img_idx = 0; img_idx < num_images; ++img_idx) {COUT&L t;< "Compositing image #" << indices[img_idx]+1<<endl;//because previously processed images are scaled in Work_scale, the image's internal// Corner (vertex after unified coordinates), mask (fused mask) needs to be recalculated//read the image and make necessary adjustments FULL_IMG1 = Imread (Img_names[img_idx]); Resize (full_img1,full_img , Size (400,300)); compose_scale = min (1.0, sqrt (Compose_megapix * 1e6/full_img.size (). Area ())); Compose_work_aspect = compose_scale/work_scale;//Update Curved Image scale Warped_image_scale *= static_cast<float> (compose_work_aspect); warper = Warper_creator->create (Warped_image_scale);//Update corners and sizesfor (int i = 0; i < num_images; ++i) {// Update camera following features cameras[i].focal *= compose_work_aspect;cameras[i].ppx *= compose_work_aspect;cameras[i].ppy *= compose_work _aspect;//update corners and sizessize sz = full_img_sizes[i]; if (Std::abs (compose_scale-1) > 1e-1) {sz.width = Cvround (Full_img_sizes[i].width * compose_scale); sz.height = CvRou nd (Full_img_sizes[i].height * compose_scale);} Mat K;cameras[i]. K (). ConvertTo (k, cv_32f); Rect ROI = Warper->warproi (sz, K, Cameras[i]. R); Corners[i] = roi.tl (); Sizes[i] = Roi.size ();} if (ABS (COMPOSE_SCALE-1) > 1e-1) Resize (full_img, IMG, Size (), Compose_scale, compose_scale); elseimg = Full_img;full_ Img.release (); Size img_size = Img.size (); Mat K;cameras[img_idx]. K (). ConvertTo (k, cv_32f);//Distort the current image Warper->warp (IMG, K, Cameras[img_idx]. R, Inter_linear, Border_reflect, img_warped);//distort Current image mask Mask.create (img_size, cv_8u); Mask.setto (Scalar::all (255)); Warper->warp (Mask, K, Cameras[img_idx]. 
R, Inter_nearest, Border_constant, mask_warped);//Exposure compensation compensator->apply (IMG_IDX, Corners[img_idx], img_warped, mask_warped); Img_warped.convertto (img_warped_s, cv_16s); Img_warped.release (); Img.release (); Mask.release (); Dilate (Masks_warped[img_idx], Dilated_mask, Mat ()); resize(Dilated_mask, Seam_mask, Mask_warped.size ()); mask_warped = seam_mask & mask_warped;//Initialize Blenderif (blender.empty ()) {blender = Blender::createdefault (Blend_type, TRY_GPU); Size DST_SZ = Resultroi (corners, sizes). Size (); float blend_width = sqrt (static_cast<float> (Dst_sz.area ())) * Blend_strength/100.f;if (Blend_width < 1.f) blender = Blender::createdefault (Blender::no, TRY_GPU); else { multibandblender* MB = dynamic_cast<multibandblender*> (static_cast<blender*> (Blender));mb-> Setnumbands (static_cast<int> (Ceil (blend_width)/log (2.))-1.); cout<< "Multi-Band blender, number of bands:" << mb->numbands () <<endl;} Determines the size of the final panorama based on the size of the corners vertex and image blender->prepare (corners, sizes);} Fused current Image blender->feed (img_warped_s, mask_warped, Corners[img_idx]);}  Mat result, Result_mask;blender->blend (result, result_mask); Imwrite (result_name, result);   Finish=clock ();   Totaltime= (Double) (Finish-start)/clocks_per_sec; cout<< "\" This program runs for "<<totaltime<< "Seconds! "<<endl;return 0;}

Arbitrary-N image stitching with good results — Computer Vision course project, final edition

Related Article

Contact Us

The content of this page is sourced from the Internet and does not represent Alibaba Cloud's opinion; the products and services mentioned on this page have no relationship with Alibaba Cloud. If you find the content of this page confusing, please write us an email, and we will handle the problem within 5 days of receiving your email.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.