Jacob的全景圖像融合算法系列
OpenCV 尺度不變特征檢測:SIFT、SURF、BRISK、ORB
OpenCV 匹配興趣點:SIFT、SURF 和二值描述子
OpenCV 估算圖像的投影關(guān)系:基礎(chǔ)矩陣和RANSAC
OpenCV 單應(yīng)矩陣應(yīng)用:全景圖像融合原理
圖像融合:拉普拉斯金字塔融合算法
上一篇文章中講到如何檢測圖像中的興趣點,以便後續的局部圖像分析。為了進行基于興趣點的圖像分析，我們需要構建多種表征方式，精確地描述每個關鍵點。這些描述子通常是二值類型、整數型或浮點數型組成的向量。好的描述子要具有足夠的獨特性和魯棒性，能唯一地表示圖像中的每個特征點，并且在亮度和視角變化時仍能提取出同一批點集。此外，描述子應盡量簡潔，以減少計算資源的占用。
1.Harris、FAST
這兩個特征檢測算子不具有尺度不變等特性，所以使用這兩個算子進行檢測并匹配的效果一般不會很好。常見的方案是比較特征點附近一個方塊內像素集合的相似度，算法使用差的平方和(SSD)，效果如下，這里不進行代碼演示。可以看到，即使在視角差別不大的情況下，就已經有非常多的錯誤匹配項。
2.SIFT壮啊、SURF
so,尺度不變檢測算子的優(yōu)勢就體現(xiàn)出來了撑蒜。SIFT歹啼、SURF在檢測出特征點之后,可以生成相應(yīng)的描述子(Descriptor)减江。這些描述子具有的信息量比單純地比較像素塊的SSD多得多染突,于是能夠更好地進行圖像的匹配。至于描述子的數(shù)據(jù)結(jié)構(gòu)辈灼,上一篇文章中提到過份企,這里不再贅述。其中SIFT是128維的向量巡莹,SURF則是檢測Haar小波特征司志。
直接進行匹配的話，也會有很多的錯誤匹配項，比上面那兩種算子好不到哪里去。那么，算法研究員們想出了一些匹配策略，能夠在一定程度上減少錯誤項。主要有:
- 交叉檢查匹配項
交叉檢查是指在第一幅圖像匹配到第二幅圖像后，再用第二幅圖像的關鍵點逐個跟第一幅圖像進行比較，只有在兩個方向都匹配了同一對關鍵點時，才認為是一個有效的匹配項。
- 比率檢測法
我們?yōu)槊總€關(guān)鍵點找到兩個最佳的匹配項,方法是使用kNN最近鄰(可以看我的這篇文章东帅,其實在這里只是用了歐氏距離)压固。接下來計算排名第二的匹配項與排名第一的匹配項的差值之比(如果兩個匹配項近乎相等,則結(jié)果接近為1)靠闭。比率過高的匹配項作為模糊匹配項帐我,從結(jié)果中被排除。
- 匹配差值的閾值化
很簡單，就是將差值過大的匹配項排除掉。
上面的一些匹配策略可以結(jié)合使用來提升匹配效果。代碼實現(xiàn)如下
/******************************************************
* Created by 楊幫杰 on 10/5/18
* Right to use this code in any way you want without
* warranty, support or any guarantee of it working
* E-mail: yangbangjie1998@qq.com
* Association: SCAU 華南農(nóng)業(yè)大學
******************************************************/
#include <iostream>
#include <vector>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/objdetect.hpp>
#include <opencv2/xfeatures2d.hpp>
#define IMAGE1_PATH "/home/jacob/下載/church01.jpg"
#define IMAGE2_PATH "/home/jacob/下載/church02.jpg"
#define IMAGE3_PATH "/home/jacob/下載/church03.jpg"
using namespace cv;
using namespace std;
int main()
{
/*******************SIFT檩淋、SURF:描述并匹配局部強度值模式***********************/
Mat image1= imread(IMAGE1_PATH,IMREAD_GRAYSCALE);
Mat image2= imread(IMAGE2_PATH,IMREAD_GRAYSCALE);
vector<KeyPoint> keypoints1;
vector<KeyPoint> keypoints2;
//創(chuàng)建SURF特征檢測器
Ptr<Feature2D> ptrFeature2D = xfeatures2d::SURF::create(2000.0);
//創(chuàng)建SIFT特征檢測器
//Ptr<Feature2D> ptrFeature2D = xfeatures2d::SIFT::create(74);
//檢測特征點
ptrFeature2D->detect(image1,keypoints1);
ptrFeature2D->detect(image2,keypoints2);
Mat featureImage;
drawKeypoints(image1,keypoints1,featureImage,
Scalar(255,255,255),DrawMatchesFlags::DRAW_RICH_KEYPOINTS);
imshow("SURF",featureImage);
cout << "Number of SURF keypoints (image 1): " << keypoints1.size() << endl;
cout << "Number of SURF keypoints (image 2): " << keypoints2.size() << endl;
//提取特征描述子
Mat descriptors1;
Mat descriptors2;
ptrFeature2D->compute(image1,keypoints1,descriptors1);
ptrFeature2D->compute(image2,keypoints2,descriptors2);
//使用L2范式(歐氏距離)進行配對
BFMatcher matcher(NORM_L2);
//進行交叉匹配
//BFMatcher matcher(NORM_L2, true);
vector<DMatch> matches;
matcher.match(descriptors1,descriptors2, matches);
Mat imageMatches;
drawMatches(
image1, keypoints1, // 1st image and its keypoints
image2, keypoints2, // 2nd image and its keypoints
matches, // the matches
imageMatches, // the image produced
Scalar(255, 255, 255), // color of lines
Scalar(255, 255, 255), // color of points
vector< char >(), // masks if any
DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS | DrawMatchesFlags::DRAW_RICH_KEYPOINTS);
imshow("SURF Matches",imageMatches);
cout << "Number of matches: " << matches.size() << endl;
//使用比率檢測法
//為每個關(guān)鍵點找出兩個最佳匹配項
vector<vector<DMatch> > matches2;
matcher.knnMatch(descriptors1, descriptors2,
matches2,
2); // find the k (2) best matches
matches.clear();
//比率設(shè)定為0.6
double ratioMax= 0.6;
vector<vector<DMatch> >::iterator it;
for (it= matches2.begin(); it!= matches2.end(); ++it) {
// first best match/second best match
if ((*it)[0].distance/(*it)[1].distance < ratioMax) {
matches.push_back((*it)[0]);
}
}
drawMatches(
image1,keypoints1, // 1st image and its keypoints
image2,keypoints2, // 2nd image and its keypoints
matches, // the matches
imageMatches, // the image produced
Scalar(255,255,255), // color of lines
Scalar(255,255,255), // color of points
vector< char >(), // masks if any
DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS | DrawMatchesFlags::DRAW_RICH_KEYPOINTS);
cout << "Number of matches (after ratio test): " << matches.size() << endl;
imshow("SURF Matches (ratio test at 0.6)",imageMatches);
//差值閾值化匹配矿咕,這里設(shè)為0.3
float maxDist = 0.3;
matches2.clear();
matcher.radiusMatch(descriptors1, descriptors2, matches2,
maxDist); // maximum acceptable distance
drawMatches(
image1, keypoints1, // 1st image and its keypoints
image2, keypoints2, // 2nd image and its keypoints
matches2, // the matches
imageMatches, // the image produced
Scalar(255, 255, 255), // color of lines
Scalar(255, 255, 255), // color of points
vector<vector< char >>(), // masks if any
DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS | DrawMatchesFlags::DRAW_RICH_KEYPOINTS);
int nmatches = 0;
for (int i = 0; i< matches2.size(); i++)
nmatches += matches2[i].size();
cout << "Number of matches (with max radius): " << nmatches << endl;
imshow("SURF Matches (with max radius)", imageMatches);
/****************************尺度無關(guān)的匹配**************************************/
image1= imread(IMAGE1_PATH,CV_LOAD_IMAGE_GRAYSCALE);
image2= imread(IMAGE3_PATH,CV_LOAD_IMAGE_GRAYSCALE);
cout << "Number of SIFT keypoints (image 1): " << keypoints1.size() << endl;
cout << "Number of SIFT keypoints (image 2): " << keypoints2.size() << endl;
ptrFeature2D = xfeatures2d::SIFT::create();
ptrFeature2D->detectAndCompute(image1, noArray(), keypoints1, descriptors1);
ptrFeature2D->detectAndCompute(image2, noArray(), keypoints2, descriptors2);
matcher.match(descriptors1,descriptors2, matches);
//選取最好的50個
nth_element(matches.begin(),matches.begin()+50,matches.end());
matches.erase(matches.begin()+50,matches.end());
drawMatches(
image1, keypoints1, // 1st image and its keypoints
image2, keypoints2, // 2nd image and its keypoints
matches, // the matches
imageMatches, // the image produced
Scalar(255, 255, 255), // color of lines
Scalar(255, 255, 255), // color of points
vector<char>(), cv::DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS| cv::DrawMatchesFlags::DRAW_RICH_KEYPOINTS);
imshow("Multi-scale SIFT Matches",imageMatches);
cout << "Number of matches: " << matches.size() << endl;
waitKey();
return 0;
}
結(jié)果如下
3.ORB、BRISK
SIFT和SURF的描述子向量分別是128維和64維的浮點型向量，對它們進行操作耗資巨大。為了減少計算資源的使用，算法研究員們又引入了二值描述子的概念。其中ORB和BRISK生成的就是二值描述子。
其中，ORB實際上是在BRIEF描述子基礎上構建的。實現過程是在關鍵點周圍的鄰域內隨機選取一對像素點，從而創建一個二值描述子。比較這兩個像素點的強度值，如果第一個點的強度值較大，就把對應描述子的位(bit)設為1，否則為0。對一批隨機像素點對進行上述處理，就產生了一個由若干位組成的描述子，通常在128到512位。對于ORB，為了解決旋轉不變性，對256個隨機點對進行旋轉后再進行判別。
二值描述子之間的比較一般使用Hamming Distance(漢明距離)，表示的是兩個等長字符串或者二進制數之間不同位的個數，如
# 舉例說明以下字符串間的漢明距離為:
"karolin" and "kathrin" is 3.
"karolin" and "kerstin" is 3.
1011101 and 1001001 is 2.
2173896 and 2233796 is 3.
代碼實現(xiàn)如下
/******************************************************
* Created by 楊幫杰 on 10/5/18
* Right to use this code in any way you want without
* warranty, support or any guarantee of it working
* E-mail: yangbangjie1998@qq.com
* Association: SCAU 華南農(nóng)業(yè)大學
******************************************************/
#include <iostream>
#include <vector>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/objdetect.hpp>
#include <opencv2/xfeatures2d.hpp>
#define IMAGE1_PATH "/home/jacob/下載/church01.jpg"
#define IMAGE2_PATH "/home/jacob/下載/church02.jpg"
#define IMAGE3_PATH "/home/jacob/下載/church03.jpg"
using namespace cv;
using namespace std;
int main()
{
Mat image1= imread(IMAGE1_PATH,CV_LOAD_IMAGE_GRAYSCALE);
Mat image2= imread(IMAGE2_PATH,CV_LOAD_IMAGE_GRAYSCALE);
vector<KeyPoint> keypoints1;
vector<KeyPoint> keypoints2;
Mat descriptors1;
Mat descriptors2;
//構(gòu)建ORB特征檢測器
//Ptr<Feature2D> feature =ORB::create(60);
//構(gòu)建BRISK特征檢測器
Ptr<Feature2D> feature = BRISK::create(80);
feature->detectAndCompute(image1, noArray(), keypoints1, descriptors1);
feature->detectAndCompute(image2, noArray(), keypoints2, descriptors2);
Mat featureImage;
drawKeypoints(image1,keypoints1,featureImage,
Scalar(255,255,255),DrawMatchesFlags::DRAW_RICH_KEYPOINTS);
imshow("ORB",featureImage);
cout << "Number of ORB keypoints (image 1): " << keypoints1.size() << endl;
cout << "Number of ORB keypoints (image 2): " << keypoints2.size() << endl;
// 使用FREAK(快速視網(wǎng)膜關(guān)鍵點)都哭,配合BRISK
// feature = xfeatures2d::FREAK::create();
// feature->compute(image1, keypoints1, descriptors1);
// feature->compute(image1, keypoints2, descriptors2);
//二值描述子必須用Hamming規(guī)范
BFMatcher matcher(NORM_HAMMING);
vector<DMatch> matches;
matcher.match(descriptors1,descriptors2, matches);
Mat imageMatches;
drawMatches(
image1,keypoints1, // 1st image and its keypoints
image2,keypoints2, // 2nd image and its keypoints
matches, // the matches
imageMatches, // the image produced
Scalar(255,255,255), // color of lines
Scalar(255,255,255), // color of points
vector< char >(), // masks if any
DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS | DrawMatchesFlags::DRAW_RICH_KEYPOINTS);
imshow("ORB Matches", imageMatches);
//imshow("BRISK Matches", imageMatches);
//imshow("FREAK with BRISK Matches", imageMatches);
cout << "Number of matches: " << matches.size() << endl;
waitKey();
return 0;
}
結(jié)果如下
References:
OpenCV尺度不變特征檢測:SIFT、SURF、BRISK欺矫、ORB
Hamming Distance (漢明距離)
【特征檢測】ORB特征提取算法
opencv計算機視覺編程攻略(第三版) —— Robert