用opencv的findFundamentalMat计算图像的基础矩阵,得出的结果不对,麻烦各位看看是什么原因!
我用sift匹配后的匹配点计算基础矩阵,得出的结果明显不对,但也没找出问题出在哪,求各位大神指点,下面是我的代码和计算结果
#include <iostream>
#include <opencv2/opencv.hpp>
#include <opencv2/xfeatures2d.hpp>
#include <opencv2/imgproc/imgproc.hpp>
using namespace cv;
using namespace std;
int main(int argc ,char **argv)
{
Ptr<Feature2D> detector = xfeatures2d::SIFT::create();
Mat img_1 = imread(argv[1]);
Mat img_2 = imread(argv[2]);
if(img_1.empty() || img_2.empty())
{
printf("can't read one of the images\n");
return -1;
}
//检测特征点
vector<KeyPoint> keypoints_1, keypoints_2;
detector->detect(img_1, keypoints_1);
detector->detect(img_2, keypoints_2);
//计算特征描述子
Mat descriptors_1, descriptors_2;
detector->compute(img_1, keypoints_1, descriptors_1);
detector->compute(img_2, keypoints_2, descriptors_2);
//使用BFMatcher进行匹配
BFMatcher matcher;
vector<DMatch> matches;
matcher.match(descriptors_1, descriptors_2, matches);
//绘制匹配出的关键点
Mat img_matches;
drawMatches(img_1, keypoints_1, img_2, keypoints_2, matches, img_matches);
imshow("【匹配图】", img_matches);
vector<Point2f> p_keypoint_1;
vector<Point2f> p_keypoint_2;
for(size_t i=0 ;i<matches.size() ; i++)
{
p_keypoint_1.push_back(keypoints_1[ matches[i].queryIdx ].pt);
p_keypoint_2.push_back(keypoints_2[ matches[i].trainIdx ].pt);
}
Mat m_Fundamental;
m_Fundamental = findFundamentalMat(p_keypoint_1, p_keypoint_2, FM_RANSAC, 3, 0.99);
cout<<"基础矩阵:"<<endl;
for(int row=0; row<m_Fundamental.rows; row++)
{
for(int col=0; col<m_Fundamental.cols; col++)
cout << m_Fundamental.at<float>(row,col) <<"\t";
cout<<endl;
}
cout<<endl;
waitKey(0);
return 0;
}
基础矩阵:
-3.52364e+26 0.346281 4.87306e-15
-1.72984e-28 0.441426 -2.81974e-16
-2.79058e-39 -0.897757 6.43679e-35