Searching and matching of SURF feature points in OpenCV 3

Source: Internet
Author: User
Tags scalar


Not much to say — let's go straight to the code:

#include <iostream> #include <stdio.h> #include "opencv2/core.hpp" #include "opencv2/core/utility.hpp" # Include "Opencv2/core/ocl.hpp" #include "opencv2/imgcodecs.hpp" #include "opencv2/highgui.hpp" #include "opencv2/ Features2d.hpp "#include" opencv2/calib3d.hpp "#include" opencv2/imgproc.hpp "#include" opencv2/flann.hpp "#include"
Opencv2/xfeatures2d.hpp "#include" opencv2/ml.hpp "using namespace CV;
using namespace Std;
using namespace cv::xfeatures2d;

using namespace cv::ml;    int main () {Mat a = Imread ("Box.png", Imread_grayscale);

    Read grayscale image Mat B = imread ("Box_in_scene.png", Imread_grayscale);      Ptr<surf> SURF;

    The creation method is not the same as in 2 surf = surf::create (800);
    Bfmatcher Matcher;
    Mat C, D;
    Vector<keypoint>key1, Key2;

    Vector<dmatch> matches;
    Surf->detectandcompute (A, Mat (), Key1, C);

    Surf->detectandcompute (b, Mat (), Key2, D);       Matcher.match (c, D, matches); Match sort (matches.begin (), Matches.end ());              
    Filter matching points vector< dmatch > good_matches;
    int ptspairs = std::min ((int) (Matches.size () * 0.15));
    cout << ptspairs << Endl;
    for (int i = 0; i < ptspairs; i++) {good_matches.push_back (matches[i]);
    } Mat outimg; Drawmatches (A, key1, B, Key2, Good_matches, Outimg, Scalar::all ( -1), Scalar::all ( -1),vector<char> (),  drawmatchesflags::not_draw_single_points);
    Draw the matching point std::vector<point2f> obj;

    Std::vector<point2f> scene;
        for (size_t i = 0; i < good_matches.size (); i++) {obj.push_back (key1[good_matches[i].queryidx].pt);
    Scene.push_back (key2[good_matches[i].trainidx].pt);
    } std::vector<point2f> obj_corners (4);
    Obj_corners[0] = point (0, 0);
    OBJ_CORNERS[1] = point (a.cols, 0);
    OBJ_CORNERS[2] = Point (A.cols, a.rows);
    OBJ_CORNERS[3] = Point (0, a.rows);

    Std::vector<point2f> scene_corners (4); Mat H = findhomography (obj, ScenE, RANSAC);

    Look for matching images perspectivetransform (obj_corners, Scene_corners, H); Line (Outimg,scene_corners[0] + point2f (float) a.cols, 0), scene_corners[1] + point2f ((float) a.cols, 0), Scalar (0, 255, 0       ), 2, LINE_AA); Draw Line (Outimg,scene_corners[1] + point2f (float) a.cols, 0), scene_corners[2] + point2f ((float) a.cols, 0), Scalar (0,
    255, 0), 2, LINE_AA); Line (Outimg,scene_corners[2] + point2f (float) a.cols, 0), scene_corners[3] + point2f ((float) a.cols, 0), Scalar (0, 255, 0
    ), 2, LINE_AA); Line (Outimg,scene_corners[3] + point2f (float) a.cols, 0), Scene_corners[0] + point2f ((float) a.cols, 0), Scalar (0, 255, 0
    ), 2, LINE_AA);
    Imshow ("AAAA", outimg);
Cvwaitkey (0); }


Operation diagram:



-------------Read template------------cv::mat img_object = Imread ("/storage/emulated/0/applepearface/imgtemp.jpg");

    -------------image processing---------cv::mat img_scene (yimage);
    /*//Detect surf feature point int minhessian = 400;

    Orbdescriptorextractor detector (Minhessian);
    Std::vector<keypoint> Keypoints_1, keypoints_2;
    Detector.detect (img_1, keypoints_1);
    Detector.detect (Img_2, keypoints_2);
    --Step 2:calculate descriptors (feature vectors) orbdescriptorextractor extractor;
    Mat Descriptors_1, descriptors_2;
    Extractor.compute (img_1, Keypoints_1, descriptors_1);

    Extractor.compute (Img_2, keypoints_2, descriptors_2);
    --Step 3:matching descriptor vectors with a brute force Matcher bfmatcher matcher (NORM_L2);
    std::vector< Dmatch > matches;

    Matcher.match (Descriptors_1, descriptors_2, matches);
    --Draw matches Mat img_matches;

  Drawmatches (img_1, Keypoints_1, Img_2, keypoints_2, matches, img_matches); * *  Read data//cv::mat Img_object = Cv::imread ("doll01.jpg");
    Cv::mat Img_scene = Cv::imread ("doll012.jpg");
        if (!img_object.data | |!img_scene.data) {cout << "Error reading images." << Endl;
    return 0;
    }//Build feature detector and description sub-extractor CV::P tr<cv::featuredetector> detector = cv::featuredetector::create ("ORB");

    CV::P TR&LT;CV::D escriptorextractor> descriptor = CV::D escriptorextractor::create ("ORB");
    Detection feature point vector<cv::keypoint> kp_object, Kp_scene;
    Detector->detect (Img_object, kp_object);

    Detector->detect (Img_scene, Kp_scene);
    Calculation Description Sub Cv::mat desp_object, Desp_scene;
    Descriptor->compute (Img_object, Kp_object, Desp_object);

    Descriptor->compute (Img_scene, Kp_scene, Desp_scene);
    /* if (desp_object.type () = cv_32f) {Desp_object.convertto (desp_object, cv_32f);
    } if (Desp_scene.type ()! = cv_32f) {Desp_scene.convertto (Desp_scene, cv_32f); } *//MatchDescription Sub-VECTOR&LT;CV::D match> matches;
    Cv::flannbasedmatcher Matcher (New Cv::flann::lshindexparams (20, 10, 2));
    Matcher.match (Desp_object, desp_scene, matches);

    cout << "Find Total" << matches.size () << "matches." << Endl;
    Filter match//double min_dist = 100000;
    for (int i = 0; i < matches.size (); i++) {//float a = matches[i].distance;
    if (a < Min_dist) {//min_dist = matches[i].distance;
    }//}//VECTOR&LT;CV::D match> good_matches; for (int i = 0; i < matches.size (); i++) {////if (Matches[i].distance < 3 * min_dist) {//Good_ma
    Tches.push_back (Matches[i]);
    }//}//Display matching//cout << "good matches=" << matches.size () << Endl;
    Cv::mat img_matches;

    CV::d rawmatches (Img_object, Kp_object, Img_scene, Kp_scene, matches, img_matches);
    Locating target CV::VECTOR&LT;CV::P oint2f> obj_points; CV::VECTOR&LT;CV::P oint2f> SCene;
        for (int i = 0; i < matches.size (); i++) {obj_points.push_back (kp_object[matches[i].queryidx].pt);
    Scene.push_back (kp_scene[matches[i].trainidx].pt);


    } Cv::mat H = Cv::findhomography (obj_points, Scene, Cv_ransac);
    CV::VECTOR&LT;CV::P oint2f> obj_corners (4);
    CV::VECTOR&LT;CV::P oint2f> scene_corners (4);
    Obj_corners[0] = CV::P oint (0, 0);
    OBJ_CORNERS[1] = CV::P oint (img_object.cols, 0);
    OBJ_CORNERS[2] = CV::P oint (Img_object.cols, img_object.rows);

    OBJ_CORNERS[3] = CV::P oint (0, img_object.rows);

    CV::p erspectivetransform (Obj_corners, Scene_corners, H); Cv::line (Img_matches, scene_corners[0] + CV::P oint2f (img_object.cols, 0), Scene_corners[1] + CV::P oint2f (img_
    Object.cols, 0), cv::scalar (0, 255, 0), 4); Cv::line (Img_matches, scene_corners[1] + CV::P oint2f (img_object.cols, 0), scene_corners[2] + CV::P oint2f (img_
    Object.cols, 0), cv::scalar (0, 255, 0), 4); Cv::line (Img_matches, scene_corners[2] + CV::P oint2f (iMg_object.cols, 0), Scene_corners[3] + CV::P oint2f (img_object.cols, 0), cv::scalar (0, 255, 0), 4); Cv::line (Img_matches, scene_corners[3] + CV::P oint2f (img_object.cols, 0), Scene_corners[0] + CV::P oint2f (img_


    Object.cols, 0), cv::scalar (0, 255, 0), 4);
    Cv::mat dstsize; Cv::resize (Img_matches, Dstsize, Size (2 * H, W));


Contact Us

The content of this page is sourced from the Internet and does not represent Alibaba Cloud's opinion; the products and services mentioned on this page have no relationship with Alibaba Cloud. If you find the content of this page confusing, please write us an email, and we will handle the problem within 5 days of receiving it.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.