
Feature Matching


1. ORB feature matching with BruteForce-Hamming

// Match two images with ORB features; report the running time and apply a ratio test and a symmetry (cross-check) test

#include <iostream>
#include <ctime>
//#include <dirent.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/features2d/features2d.hpp>   // ORB, FeatureDetector, DescriptorMatcher (the nonfree module is not needed for ORB)

using namespace cv;
using namespace std;

/*
int main(int argc, const char *argv[])
{

if (argc != 4){
cout << "usage:match <image1> <image2> <ratio>\n";
exit(-1);
}

double ratio = (double)atof(argv[3]);
string image1_name = string(argv[1]), image2_name = string(argv[2]);

Mat image1 = imread(image1_name, 1);
Mat image2 = imread(image2_name, 1);
*/
int main()
{
    Mat image1 = imread("img_1.bmp", 1);
    Mat image2 = imread("img_2.bmp", 1);
    Ptr<FeatureDetector> detector;
    Ptr<DescriptorExtractor> extractor;
    float ratio = 0.8f;          // adjust to experiment with different results

    // Create the ORB keypoint detector and descriptor extractor
    detector = FeatureDetector::create("ORB");
    extractor = DescriptorExtractor::create("ORB");
    
    cout << "ORB特征点、描述子、BruteForce-Hamming匹配" << endl;
    cout << "ratio = " << ratio << endl;
    clock_t begin = clock();
    // Detect keypoints
    vector<KeyPoint> keypoints1, keypoints2;
    detector->detect(image1, keypoints1);
    detector->detect(image2, keypoints2);

    cout << "# keypoints of image1 :" << keypoints1.size() << endl;
    cout << "# keypoints of image2 :" << keypoints2.size() << endl;
    // Compute descriptors
    Mat descriptors1, descriptors2;
    extractor->compute(image1, keypoints1, descriptors1);
    extractor->compute(image2, keypoints2, descriptors2);

    cout << "Descriptors size :" << descriptors1.cols << ":" << descriptors1.rows << endl;

    vector< vector<DMatch> > matches12, matches21;
    Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
    // knnMatch: for each query descriptor, return the k nearest neighbours (k = 2 here)
    matcher->knnMatch(descriptors1, descriptors2, matches12, 2);
    matcher->knnMatch(descriptors2, descriptors1, matches21, 2);

    // BFMatcher bfmatcher(NORM_L2, true); 
    // vector<DMatch> matches;
    // bfmatcher.match(descriptors1, descriptors2, matches);
    cout << "Matches1-2:" << matches12.size() << endl;
    cout << "Matches2-1:" << matches21.size() << endl;

    // Ratio test proposed by David Lowe (threshold = 0.8)
    std::vector<DMatch> good_matches1, good_matches2;

    // Note: the two loops below are duplicated; see the sketch after this listing for a factored-out version
    for (int i = 0; i < matches12.size(); i++){
        if (matches12[i][0].distance < ratio * matches12[i][1].distance)
            good_matches1.push_back(matches12[i][0]);
    }

    for (int i = 0; i < matches21.size(); i++){
        if (matches21[i][0].distance < ratio * matches21[i][1].distance)
            good_matches2.push_back(matches21[i][0]);
    }

    cout << "Good matches1:" << good_matches1.size() << endl;
    cout << "Good matches2:" << good_matches2.size() << endl;

    // Symmetry (cross-check) test: keep only matches found in both directions
    std::vector<DMatch> better_matches;
    for (int i = 0; i<good_matches1.size(); i++){
        for (int j = 0; j<good_matches2.size(); j++){
            if (good_matches1[i].queryIdx == good_matches2[j].trainIdx && good_matches2[j].queryIdx == good_matches1[i].trainIdx){
                better_matches.push_back(DMatch(good_matches1[i].queryIdx, good_matches1[i].trainIdx, good_matches1[i].distance));
                break;
            }
        }
    }

    cout << "Better matches:" << better_matches.size() << endl;

    // Stop timing
    clock_t end = clock();
    double elapsed_secs = double(end - begin) / CLOCKS_PER_SEC;
    cout << "Time Costs : " << elapsed_secs << endl;

    // Draw and display the matches
    Mat output;
    drawMatches(image1, keypoints1, image2, keypoints2, better_matches, output);
    imshow("Matches result", output);
    waitKey(0);

    return 0;
}
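As the comment in the listing notes, the two ratio-test loops are duplicated. A minimal sketch (not part of the original program) of how they could be factored into a single helper, which also guards against knnMatch returning fewer than two neighbours for a descriptor:

// Hypothetical helper: Lowe's ratio test over a knnMatch result obtained with k = 2.
// Entries with fewer than two neighbours are skipped to avoid out-of-range access.
static void ratioTest(const vector< vector<DMatch> > &knnMatches, float ratio,
                      vector<DMatch> &goodMatches)
{
    for (size_t i = 0; i < knnMatches.size(); i++) {
        if (knnMatches[i].size() < 2)
            continue;
        if (knnMatches[i][0].distance < ratio * knnMatches[i][1].distance)
            goodMatches.push_back(knnMatches[i][0]);
    }
}

// Usage, replacing the two loops in main:
//   ratioTest(matches12, ratio, good_matches1);
//   ratioTest(matches21, ratio, good_matches2);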

2. SURF keypoints and descriptors with FLANN-based matching

// SURF feature matching: SURF keypoints and descriptors, matched with FlannBasedMatcher

#include "opencv2/core/core.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include <opencv2/nonfree/nonfree.hpp>
#include <iostream>
#include <ctime>
using namespace cv;
using namespace std;


int main(int argc, char** argv)
{
    Mat img_1 = imread("img_1.bmp", 1);
    Mat img_2 = imread("img_2.bmp", 1);
    if (!img_1.data || !img_2.data) {
        printf("Error reading the input images!\n");
        return -1;
    }

    cout << "SURF特征点、描述子、FLANN描述子匹配" << endl;
    cout << "筛选条件:5倍最小距离" << endl;

    clock_t begin = clock();
    // Detect keypoints
    int minHessian = 300;   // Hessian threshold for the SURF detector
    SURF detector(minHessian);
    std::vector<KeyPoint> keypoints_1, keypoints_2;
    detector.detect(img_1, keypoints_1);
    detector.detect(img_2, keypoints_2);
    cout << "# keypoints of image1 :" << keypoints_1.size() << endl;
    cout << "# keypoints of image2 :" << keypoints_2.size() << endl;

    // Compute descriptors (feature vectors)
    SURF extractor;
    Mat descriptors_1, descriptors_2;
    extractor.compute(img_1, keypoints_1, descriptors_1);
    extractor.compute(img_2, keypoints_2, descriptors_2);
    cout << "Descriptors size :" << descriptors_1.cols << ":" << descriptors_1.rows << endl;

    // Match descriptors with FLANN
    FlannBasedMatcher matcher;
    std::vector< DMatch > matches;
    matcher.match(descriptors_1, descriptors_2, matches);
    double max_dist = 0; double min_dist = 100;

    // Find the minimum and maximum match distances
    for (int i = 0; i < descriptors_1.rows; i++) {
        double dist = matches[i].distance;
        if (dist < min_dist) min_dist = dist;
        if (dist > max_dist) max_dist = dist;
    }
    // Report the distance range
    printf("> Max dist : %f \n", max_dist);
    printf("> Min dist : %f \n", min_dist);

    // Keep only matches whose distance is below 5x the minimum distance
    std::vector< DMatch > good_matches;
    for (int i = 0; i < descriptors_1.rows; i++) {
        if (matches[i].distance < 5 * min_dist) {
            good_matches.push_back(matches[i]);
        }
    }
    cout << "Good_matches:" << good_matches.size() << endl;
    // Stop timing
    clock_t end = clock();
    double elapsed_secs = double(end - begin) / CLOCKS_PER_SEC;
    cout << "Time Costs : " << elapsed_secs << endl;

    // Draw the good matches
    Mat img_matches;
    drawMatches(img_1, keypoints_1, img_2, keypoints_2,
        good_matches, img_matches, Scalar::all(-1), Scalar::all(-1),
        vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
    /*
    // Print each good match
    for (int i = 0; i < good_matches.size(); i++)
    {
        printf(">符合条件的匹配点 [%d] 特征点1: %d  -- 特征点2: %d  \n", i, good_matches[i].queryIdx, good_matches[i].trainIdx);
    }
    */
    // Show the result
    imshow("Matches result", img_matches);

    // Wait for a key press before exiting
    waitKey(0);
    return 0;
}
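One caveat about the 5x-minimum-distance filter above: if the best match distance is zero (or very close to it), the threshold collapses and nearly every match is rejected. A small sketch of a guarded variant, not from the original post; the floor value 0.02 is an arbitrary assumption to be tuned per descriptor:

// Hypothetical variant of the filtering step with a lower bound on the threshold.
// std::max requires <algorithm>.
double threshold = std::max(5 * min_dist, 0.02);
std::vector<DMatch> good_matches_guarded;
for (int i = 0; i < descriptors_1.rows; i++) {
    if (matches[i].distance < threshold) {
        good_matches_guarded.push_back(matches[i]);
    }
}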

3. SIFT keypoints and descriptors with FLANN matching

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/features2d/features2d.hpp> 
#include <opencv2/nonfree/nonfree.hpp>

#include <iostream>
//#include <dirent.h>
#include <ctime>
using namespace cv;
using namespace std;


int main(int argc, const char *argv[]){
    /*
    if (argc != 3){
        cout << "usage:match <level> <image1> <image2>\n";
        exit(-1);
    }

    string arg2 = string(argv[2]);
    int level = atoi(arg2.c_str());
    string image1_name = string(argv[1]), image2_name = string(argv[2]);
    // getline(cin, image1_name);
    // getline(cin, image2_name);
    */
    Mat image1 = imread("img_1.bmp", 1);
    Mat image2 = imread("img_2.bmp", 1);

    Ptr<FeatureDetector> detector;
    Ptr<DescriptorExtractor> extractor;

    // Register nonfree algorithms (required before creating SIFT/SURF through the factory)
    initModule_nonfree();
    // Available algorithm names: SIFT, SURF, ORB
    detector = FeatureDetector::create("SIFT");
    extractor = DescriptorExtractor::create("SIFT");

    cout << "Sift特征点、描述子、FLANN匹配" << endl;
    // cout << "ratio = " << ratio << endl;
    clock_t begin = clock();

    vector<KeyPoint> keypoints1, keypoints2;
    detector->detect(image1, keypoints1);
    detector->detect(image2, keypoints2);

    cout << "# keypoints of image1 :" << keypoints1.size() << endl;
    cout << "# keypoints of image2 :" << keypoints2.size() << endl;

    Mat descriptors1, descriptors2;
    extractor->compute(image1, keypoints1, descriptors1);
    extractor->compute(image2, keypoints2, descriptors2);



    cout << "Descriptors size :" << descriptors1.cols << ":" << descriptors1.rows << endl;

    vector< vector<DMatch> > matches12, matches21;
    Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("FlannBased");
    matcher->knnMatch(descriptors1, descriptors2, matches12, 2);
    matcher->knnMatch(descriptors2, descriptors1, matches21, 2);

    cout << "Matches1-2:" << matches12.size() << endl;
    cout << "Matches2-1:" << matches21.size() << endl;

    // Ratio test proposed by David Lowe (threshold = 0.8)
    std::vector<DMatch> good_matches1, good_matches2;

    for (int i = 0; i < matches12.size(); i++){
        const float ratio = 0.8;
        if (matches12[i][0].distance < ratio * matches12[i][1].distance)
            good_matches1.push_back(matches12[i][0]);
    }

    for (int i = 0; i < matches21.size(); i++){
        const float ratio = 0.8;
        if (matches21[i][0].distance < ratio * matches21[i][1].distance)
            good_matches2.push_back(matches21[i][0]);
    }

    cout << "Good matches1:" << good_matches1.size() << endl;
    cout << "Good matches2:" << good_matches2.size() << endl;

    // Symmetric Test
    std::vector<DMatch> better_matches;
    for (int i = 0; i<good_matches1.size(); i++){
        for (int j = 0; j<good_matches2.size(); j++){
            if (good_matches1[i].queryIdx == good_matches2[j].trainIdx && good_matches2[j].queryIdx == good_matches1[i].trainIdx){
                better_matches.push_back(DMatch(good_matches1[i].queryIdx, good_matches1[i].trainIdx, good_matches1[i].distance));
                break;
            }
        }
    }

    cout << "Better matches:" << better_matches.size() << endl;

    clock_t end = clock();
    double elapsed_secs = double(end - begin) / CLOCKS_PER_SEC;
    cout << "Time Costs : " << elapsed_secs << endl;

    // show it on an image
    Mat output;
    drawMatches(image1, keypoints1, image2, keypoints2, better_matches, output);
    imshow("Matches result", output);
    waitKey(0);

    return 0;
}
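A side note on the matcher choice: FLANN works here because SIFT descriptors are stored as 32-bit floats. FlannBasedMatcher's default KD-tree index does not accept binary descriptors such as ORB's, which is why example 1 uses BruteForce-Hamming instead. If FLANN is wanted for binary descriptors, an LSH index can be supplied; the sketch below is an assumption, not code from the original post, and the LSH parameters (12, 20, 2) are just commonly used defaults:

// Hypothetical FLANN setup for binary descriptors (e.g. ORB) using an LSH index.
// orbDescriptors1 / orbDescriptors2 are placeholder names for CV_8U binary descriptor matrices.
FlannBasedMatcher binaryMatcher(new flann::LshIndexParams(12, 20, 2));
vector< vector<DMatch> > lshMatches;
binaryMatcher.knnMatch(orbDescriptors1, orbDescriptors2, lshMatches, 2);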

4. SIFT keypoints and descriptors with brute-force matching

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/nonfree/features2d.hpp> //
#include <opencv2/nonfree/nonfree.hpp>

#include <iostream>
//#include <dirent.h>
#include <ctime>
using namespace cv;
using namespace std;


int main(int argc, const char *argv[]){
    /*
    if (argc != 3){
    cout << "usage:match <level> <image1> <image2>\n";
    exit(-1);
    }

    string arg2 = string(argv[2]);
    int level = atoi(arg2.c_str());
    string image1_name = string(argv[1]), image2_name = string(argv[2]);
    // getline(cin, image1_name);
    // getline(cin, image2_name);
    */
    Mat image1 = imread("img_1.bmp", 1);
    Mat image2 = imread("img_2.bmp", 1);

    Ptr<FeatureDetector> detector;
    Ptr<DescriptorExtractor> extractor;

    // Register nonfree algorithms (required before creating SIFT/SURF through the factory)
    initModule_nonfree();
    // Available algorithm names: SIFT, SURF, ORB
    detector = FeatureDetector::create("SIFT");
    extractor = DescriptorExtractor::create("SIFT");

    cout << "sift特征点、描述子、暴力匹配" << endl;
    //cout << "筛选条件:5倍最小距离" << endl;
    clock_t begin = clock();

    vector<KeyPoint> keypoints1, keypoints2;
    detector->detect(image1, keypoints1);
    detector->detect(image2, keypoints2);

    cout << "# keypoints of image1 :" << keypoints1.size() << endl;
    cout << "# keypoints of image2 :" << keypoints2.size() << endl;

    Mat descriptors1, descriptors2;
    extractor->compute(image1, keypoints1, descriptors1);
    extractor->compute(image2, keypoints2, descriptors2);



    cout << "Descriptors size :" << descriptors1.cols << ":" << descriptors1.rows << endl;

    vector< vector<DMatch> > matches12, matches21;
    Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce");
    matcher->knnMatch(descriptors1, descriptors2, matches12, 2);
    matcher->knnMatch(descriptors2, descriptors1, matches21, 2);

    // BFMatcher bfmatcher(NORM_L2, true);
    // vector<DMatch> matches;
    // bfmatcher.match(descriptors1, descriptors2, matches);
    cout << "Matches1-2:" << matches12.size() << endl;
    cout << "Matches2-1:" << matches21.size() << endl;

    // Ratio test proposed by David Lowe (threshold = 0.8)
    std::vector<DMatch> good_matches1, good_matches2;

    for (int i = 0; i < matches12.size(); i++){
        const float ratio = 0.8;
        if (matches12[i][0].distance < ratio * matches12[i][1].distance)
            good_matches1.push_back(matches12[i][0]);
    }

    for (int i = 0; i < matches21.size(); i++){
        const float ratio = 0.8;
        if (matches21[i][0].distance < ratio * matches21[i][1].distance)
            good_matches2.push_back(matches21[i][0]);
    }

    cout << "Good matches1:" << good_matches1.size() << endl;
    cout << "Good matches2:" << good_matches2.size() << endl;

    // Symmetric Test
    std::vector<DMatch> better_matches;
    for (int i = 0; i<good_matches1.size(); i++){
        for (int j = 0; j<good_matches2.size(); j++){
            if (good_matches1[i].queryIdx == good_matches2[j].trainIdx && good_matches2[j].queryIdx == good_matches1[i].trainIdx){
                better_matches.push_back(DMatch(good_matches1[i].queryIdx, good_matches1[i].trainIdx, good_matches1[i].distance));
                break;
            }
        }
    }

    cout << "Better matches:" << better_matches.size() << endl;

    clock_t end = clock();
    double elapsed_secs = double(end - begin) / CLOCKS_PER_SEC;
    cout << "Time Costs : " << elapsed_secs << endl;

    // show it on an image
    Mat output;
    drawMatches(image1, keypoints1, image2, keypoints2, better_matches, output);
    imshow("Matches result", output);
    waitKey(0);

    return 0;
}
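The commented-out BFMatcher(NORM_L2, true) lines above hint at a simpler route to the same symmetry test: constructing BFMatcher with crossCheck = true makes match() return only pairs that are each other's nearest neighbour. A minimal sketch under that assumption (note it uses plain match() rather than knnMatch(), so it cannot be combined with the ratio test in the same call):

// Hypothetical cross-check variant for the SIFT descriptors computed above.
// NORM_L2 suits float descriptors (SIFT/SURF); binary descriptors would use NORM_HAMMING.
BFMatcher crossChecker(NORM_L2, true);   // the second argument enables cross-checking
vector<DMatch> symmetricMatches;
crossChecker.match(descriptors1, descriptors2, symmetricMatches);
cout << "Cross-checked matches: " << symmetricMatches.size() << endl;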

5. FAST keypoints, SURF descriptors, brute-force matching

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/nonfree/features2d.hpp> //
#include <opencv2/nonfree/nonfree.hpp>

#include <iostream>
//#include <dirent.h>
#include <ctime>
using namespace cv;
using namespace std;

// Leftovers from a batch-processing variant of this program; unused below
#define IMG_DIR "./imgs/"

bool has_suffix(const std::string &str, const std::string &suffix);



int main(int argc, const char *argv[]){
    /*
    if (argc != 2){
        cout << "usage:match <method>\n";
        exit(-1);
    }
    string method = string(argv[1]);

    string image1_name, image2_name;

    getline(cin, image1_name);
    getline(cin, image2_name);
    */
    Mat image1 = imread("img_1.bmp", 1);
    Mat image2 = imread("img_2.bmp", 1);

    FastFeatureDetector fast(40);   // FAST detection threshold = 40
    SurfDescriptorExtractor extractor;

    // Ptr<DescriptorExtractor> extractor = DescriptorExtractor::create("SIFT");
    // The factory call above fails because SIFT is only registered after
    // initModule_nonfree() has been called (see the sketch after this listing).
    cout << "fast特征点、surf描述子、暴力匹配" << endl;
    clock_t begin = clock();




    vector<KeyPoint> keypoints1, keypoints2;
    fast.detect(image1, keypoints1);
    fast.detect(image2, keypoints2);

    cout << "# keypoints of image1 :" << keypoints1.size() << endl;
    cout << "# keypoints of image2 :" << keypoints2.size() << endl;

    Mat descriptors1, descriptors2;
    extractor.compute(image1, keypoints1, descriptors1);
    extractor.compute(image2, keypoints2, descriptors2);


    // Stop timing (note: unlike the other examples, the matching step below is not measured)
    clock_t end = clock();
    double elapsed_secs = double(end - begin) / CLOCKS_PER_SEC;
    cout << "Time Costs : " << elapsed_secs << endl;

    BFMatcher bfmatcher(NORM_L2, true);
    vector<DMatch> matches;
    bfmatcher.match(descriptors1, descriptors2, matches);
    cout << "# matches : " << matches.size() << endl;

    // show it on an image
    Mat output;
    drawMatches(image1, keypoints1, image2, keypoints2, matches, output);
    imshow("Matches result", output);
    waitKey(0);

    return 0;
}


bool has_suffix(const std::string &str, const std::string &suffix)
{
    return str.size() >= suffix.size() &&
        str.compare(str.size() - suffix.size(), suffix.size(), suffix) == 0;
}

6. FAST keypoints, ORB descriptors, BruteForce-Hamming matching

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/nonfree/features2d.hpp> //
#include <opencv2/nonfree/nonfree.hpp>

#include <iostream>
//#include <dirent.h>
#include <ctime>
using namespace cv;
using namespace std;

int main(int argc, const char *argv[]){
    /*
    if (argc != 3){
        cout << "usage:match <image1> <image2>\n";
        exit(-1);
    }

    string image1_name = string(argv[1]), image2_name = string(argv[2]);
    // getline(cin, image1_name);
    // getline(cin, image2_name);
    */
    Mat image1 = imread("img_1.bmp", 1);
    Mat image2 = imread("img_2.bmp", 1);

    vector<KeyPoint> keypoints_1, keypoints_2;
    Mat descriptors_1, descriptors_2;

    cout << "fast特征点、orb描述子、BruteForce-Hamming匹配" << endl;
    //cout << "筛选条件:5倍最小距离" << endl;
    clock_t begin = clock();

    Ptr<FeatureDetector> detector;
    // Adaptive FAST: adjust the threshold until between 3000 and 6000 keypoints are found (at most 8 iterations)
    detector = new DynamicAdaptedFeatureDetector(new FastAdjuster(10, true), 3000, 6000, 8);
    detector->detect(image1, keypoints_1);
    detector->detect(image2, keypoints_2);
    cout << "# keypoints of image1 :" << keypoints_1.size() << endl;
    cout << "# keypoints of image2 :" << keypoints_2.size() << endl;

    initModule_nonfree();   // NB: needed here; otherwise the create() call below crashes (core dump)
    Ptr<DescriptorExtractor> extractor = DescriptorExtractor::create("ORB");
    extractor->compute(image1, keypoints_1, descriptors_1);
    extractor->compute(image2, keypoints_2, descriptors_2);

    vector< vector<DMatch> > matches;
    Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
    matcher->knnMatch(descriptors_1, descriptors_2, matches, 500);   // up to 500 nearest neighbours per descriptor

    // Keep a match only if its two keypoints are spatially close:
    // within 25% of the image diagonal
    double thresholdDist = 0.25 * sqrt(double(image1.size().height*image1.size().height + image1.size().width*image1.size().width));

    vector< DMatch > good_matches2;
    good_matches2.reserve(matches.size());
    for (size_t i = 0; i < matches.size(); ++i){
        for (int j = 0; j < matches[i].size(); j++)    {
            Point2f from = keypoints_1[matches[i][j].queryIdx].pt;
            Point2f to = keypoints_2[matches[i][j].trainIdx].pt;

            //calculate local distance for each possible match
            double dist = sqrt((from.x - to.x) * (from.x - to.x) + (from.y - to.y) * (from.y - to.y));

            // Keep the match if it is within the distance threshold and at (nearly) the same height
            if (dist < thresholdDist && abs(from.y - to.y) < 5)  {
                good_matches2.push_back(matches[i][j]);
                break;   // take at most one match per query keypoint
            }
            }
        }
    }

    cout << "Good matches :" << good_matches2.size() << endl;

    clock_t end = clock();
    double elapsed_secs = double(end - begin) / CLOCKS_PER_SEC;
    cout << "Time Costs : " << elapsed_secs << endl;

    // show it on an image
    Mat output;
    drawMatches(image1, keypoints_1, image2, keypoints_2, good_matches2, output);
    imshow("Matches result", output);
    waitKey(0);

    return 0;
}
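The distance-and-height filter above is a fairly ad-hoc geometric check that assumes the two views are roughly aligned. A more general follow-up, sketched below but not part of the original post, is to fit a homography with RANSAC and keep only the inlier matches (this needs opencv2/calib3d/calib3d.hpp):

// Hypothetical RANSAC verification of the filtered matches.
if (good_matches2.size() >= 4) {            // findHomography needs at least 4 point pairs
    vector<Point2f> pts1, pts2;
    for (size_t i = 0; i < good_matches2.size(); i++) {
        pts1.push_back(keypoints_1[good_matches2[i].queryIdx].pt);
        pts2.push_back(keypoints_2[good_matches2[i].trainIdx].pt);
    }
    vector<uchar> inlierMask;
    Mat H = findHomography(pts1, pts2, CV_RANSAC, 3.0, inlierMask);
    vector<DMatch> inlierMatches;
    for (size_t i = 0; i < inlierMask.size(); i++) {
        if (inlierMask[i])
            inlierMatches.push_back(good_matches2[i]);
    }
    cout << "RANSAC inlier matches: " << inlierMatches.size() << endl;
}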

References

1. http://blog.csdn.net/vonzhoufz/article/details/46594369

2. 《OpenCV3编程入门》 (Introduction to OpenCV 3 Programming)


Original source: http://www.cnblogs.com/lizhongpingchn/p/5509298.html
