Visual SLAM 14 Lectures, Chapter 7: 2D-2D Epipolar Geometry
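This program demonstrates how to estimate camera motion from 2D-2D feature matches with OpenCV. Writing p for pixel coordinates and x = K⁻¹p for normalized camera coordinates (what `pixel2cam` computes below), the relations the program estimates and then verifies are the epipolar constraint and the link between the essential matrix E and the fundamental matrix F:

$$
x_2^{\top} E\, x_1 = 0, \qquad
E = t^{\wedge} R \ \text{(up to scale)}, \qquad
F = K^{-\top} E\, K^{-1},
$$

where t^∧ denotes the skew-symmetric matrix built from t. The code matches ORB features between two images, estimates F (eight-point algorithm), E, and the homography H, recovers (R, t) from E with `recoverPose`, and finally checks that x₂ᵀ t^∧ R x₁ ≈ 0 for every accepted match.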
```cpp
#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/calib3d/calib3d.hpp>
// #include "extra.h" // use this if in OpenCV2

using namespace std;
using namespace cv;

/****************************************************
 * This program demonstrates how to estimate camera
 * motion from 2D-2D feature matches.
 ****************************************************/

// ORB feature extraction and matching from the previous section,
// wrapped into find_feature_matches
void find_feature_matches (
    const Mat& img_1, const Mat& img_2,
    std::vector<KeyPoint>& keypoints_1,
    std::vector<KeyPoint>& keypoints_2,
    std::vector<DMatch>& matches );

void pose_estimation_2d2d (
    std::vector<KeyPoint> keypoints_1,
    std::vector<KeyPoint> keypoints_2,
    std::vector<DMatch> matches,
    Mat& essential_matrix, Mat& R, Mat& t );

// Convert pixel coordinates to normalized camera coordinates
Point2d pixel2cam ( const Point2d& p, const Mat& K );

int main ( int argc, char** argv )
{
    if ( argc != 3 )
    {
        cout << "usage: pose_estimation_2d2d img1 img2" << endl;
        return 1;
    }
    //-- Read the two images
    Mat img_1 = imread ( argv[1], CV_LOAD_IMAGE_COLOR );
    Mat img_2 = imread ( argv[2], CV_LOAD_IMAGE_COLOR );

    vector<KeyPoint> keypoints_1, keypoints_2;
    vector<DMatch> matches;
    find_feature_matches ( img_1, img_2, keypoints_1, keypoints_2, matches );
    cout << "Found " << matches.size() << " matched point pairs in total" << endl;

    //-- Estimate the motion between the two images
    Mat R, t, essential_matrix;
    pose_estimation_2d2d ( keypoints_1, keypoints_2, matches, essential_matrix, R, t );

    //-- Verify E = t^ R * scale
    Mat t_x = ( Mat_<double> ( 3,3 ) <<
                 0,                     -t.at<double> ( 2,0 ),  t.at<double> ( 1,0 ),
                 t.at<double> ( 2,0 ),   0,                    -t.at<double> ( 0,0 ),
                -t.at<double> ( 1,0 ),   t.at<double> ( 0,0 ),  0 );
    cout << "t^R=" << endl << t_x*R << endl;

    //-- Verify the epipolar constraint x2^T t^ R x1 = 0
    Mat K = ( Mat_<double> ( 3,3 ) << 520.9, 0, 325.1, 0, 521.0, 249.7, 0, 0, 1 );
    for ( DMatch m : matches )
    {
        Point2d pt1 = pixel2cam ( keypoints_1[ m.queryIdx ].pt, K );
        Mat y1 = ( Mat_<double> ( 3,1 ) << pt1.x, pt1.y, 1 );
        Point2d pt2 = pixel2cam ( keypoints_2[ m.trainIdx ].pt, K );
        Mat y2 = ( Mat_<double> ( 3,1 ) << pt2.x, pt2.y, 1 );
        Mat d1 = y2.t() * t_x * R * y1;
        Mat d2 = y2.t() * essential_matrix * y1;
        cout << "epipolar constraint = " << d1 << endl;
        cout << "computed with E     = " << d2 << endl;
    }
    return 0;
}

// ORB feature extraction and matching from the previous section
void find_feature_matches ( const Mat& img_1, const Mat& img_2,
                            std::vector<KeyPoint>& keypoints_1,
                            std::vector<KeyPoint>& keypoints_2,
                            std::vector<DMatch>& matches )
{
    //-- Initialization
    Mat descriptors_1, descriptors_2;
    // used in OpenCV3
    Ptr<FeatureDetector> detector = ORB::create();
    Ptr<DescriptorExtractor> descriptor = ORB::create();
    // use this if you are in OpenCV2
    // Ptr<FeatureDetector> detector = FeatureDetector::create ( "ORB" );
    // Ptr<DescriptorExtractor> descriptor = DescriptorExtractor::create ( "ORB" );
    Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create ( "BruteForce-Hamming" );

    //-- Step 1: detect Oriented FAST corner locations
    detector->detect ( img_1, keypoints_1 );
    detector->detect ( img_2, keypoints_2 );

    //-- Step 2: compute BRIEF descriptors at the detected corners
    descriptor->compute ( img_1, keypoints_1, descriptors_1 );
    descriptor->compute ( img_2, keypoints_2, descriptors_2 );

    //-- Step 3: match the BRIEF descriptors of the two images using Hamming distance
    vector<DMatch> match;
    // BFMatcher matcher ( NORM_HAMMING );
    matcher->match ( descriptors_1, descriptors_2, match );

    //-- Step 4: filter the matched pairs
    double min_dist = 10000, max_dist = 0;

    // Find the minimum and maximum distances over all matches,
    // i.e. the distances of the most similar and least similar pairs
    for ( int i = 0; i < descriptors_1.rows; i++ )
    {
        double dist = match[i].distance;
        if ( dist < min_dist ) min_dist = dist;
        if ( dist > max_dist ) max_dist = dist;
    }

    printf ( "-- Max dist : %f \n", max_dist );
    printf ( "-- Min dist : %f \n", min_dist );

    // A match is considered wrong when the descriptor distance exceeds twice the
    // minimum distance. The minimum can be very small, so an empirical lower
    // bound of 30 is used as well.
    for ( int i = 0; i < descriptors_1.rows; i++ )
    {
        if ( match[i].distance <= max ( 2*min_dist, 30.0 ) )
        {
            matches.push_back ( match[i] );
        }
    }
}

// Convert pixel coordinates p to normalized camera coordinates x
Point2d pixel2cam ( const Point2d& p, const Mat& K )
{
    return Point2d
           (
               // Mat element access: M.at<double>(i,j)
               ( p.x - K.at<double> ( 0,2 ) ) / K.at<double> ( 0,0 ),
               ( p.y - K.at<double> ( 1,2 ) ) / K.at<double> ( 1,1 )
           );
}

// Pose estimation
void pose_estimation_2d2d ( std::vector<KeyPoint> keypoints_1,
                            std::vector<KeyPoint> keypoints_2,
                            std::vector<DMatch> matches,
                            Mat& essential_matrix, Mat& R, Mat& t )
{
    // Camera intrinsics, TUM Freiburg2
    Mat K = ( Mat_<double> ( 3,3 ) << 520.9, 0, 325.1, 0, 521.0, 249.7, 0, 0, 1 );

    //-- Convert the matched keypoints to vector<Point2f>
    vector<Point2f> points1;
    vector<Point2f> points2;
    for ( int i = 0; i < ( int ) matches.size(); i++ )  // iterate over all matches
    {
        // queryIdx: index of the query keypoint (the point in image 1 whose match is sought)
        // trainIdx: index of the matched keypoint in the train set (image 2)
        // KeyPoint::pt stores the coordinates as Point2f
        points1.push_back ( keypoints_1[matches[i].queryIdx].pt );
        points2.push_back ( keypoints_2[matches[i].trainIdx].pt );
    }

    //-- Compute the fundamental matrix F
    Mat fundamental_matrix;
    // Available methods: CV_FM_7POINT, CV_FM_8POINT, CV_FM_LMEDS, CV_FM_RANSAC
    fundamental_matrix = findFundamentalMat ( points1, points2, CV_FM_8POINT );  // eight-point algorithm
    cout << "fundamental_matrix is " << endl << fundamental_matrix << endl;

    //-- Compute the essential matrix E
    // F and E differ only by the camera intrinsics
    Point2d principal_point ( 325.1, 249.7 );  // principal point, TUM dataset calibration
    double focal_length = 521;                 // focal length, TUM dataset calibration
    essential_matrix = findEssentialMat ( points1, points2, focal_length, principal_point );
    cout << "essential_matrix is " << endl << essential_matrix << endl;

    //-- Compute the homography matrix H
    Mat homography_matrix;
    // ransacReprojThreshold: maximum allowed reprojection error for a point pair to be
    // treated as an inlier (used only by RANSAC and RHO), typically 1-10
    homography_matrix = findHomography ( points1, points2, RANSAC, 3 );
    cout << "homography_matrix is " << endl << homography_matrix << endl;

    //-- Recover rotation and translation from the essential matrix: E --> R, t
    // (transformation from image 1 to image 2)
    recoverPose ( essential_matrix, points1, points2, R, t, focal_length, principal_point );
    cout << "R is " << endl << R << endl;
    cout << "t is " << endl << t << endl;
}
```