Demo for 3D Reconstruction

Notes on the pipeline and code for 3D reconstruction.

Feature Extraction and Matching

There are many common feature extraction and matching methods: OpenCV's built-in algorithms such as SIFT, SURF, and ORB can complete the whole extraction-and-matching pipeline. However, the features extracted here belong to power lines (see the figure below; the wires in the middle are the reconstruction target), which these generic methods handle poorly. So the approach is: first extract feature points from the ground region of the images with a traditional algorithm, estimate the (relative) camera extrinsics from the matched points, and then find the matching points on the power lines via the epipolar constraint.

Here is a demo:

#include <algorithm>
#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>

using namespace std;
using namespace cv;

int main ( int argc, char** argv )
{
    if ( argc != 3 )
    {
        cout<<"usage: feature_extraction img1 img2"<<endl;
        return 1;
    }
    //-- Load the two input images
    Mat img_1 = imread ( argv[1], IMREAD_COLOR );
    Mat img_2 = imread ( argv[2], IMREAD_COLOR );

    //-- Initialization
    std::vector<KeyPoint> keypoints_1, keypoints_2;
    Mat descriptors_1, descriptors_2;
    Ptr<FeatureDetector> detector = ORB::create();
    Ptr<DescriptorExtractor> descriptor = ORB::create();
    // Ptr<FeatureDetector> detector = FeatureDetector::create(detector_name);
    // Ptr<DescriptorExtractor> descriptor = DescriptorExtractor::create(descriptor_name);
    Ptr<DescriptorMatcher> matcher  = DescriptorMatcher::create ( "BruteForce-Hamming" );

    //-- Step 1: detect Oriented FAST corner locations
    detector->detect ( img_1,keypoints_1 );
    detector->detect ( img_2,keypoints_2 );

    //-- Step 2: compute BRIEF descriptors at the detected corners
    descriptor->compute ( img_1, keypoints_1, descriptors_1 );
    descriptor->compute ( img_2, keypoints_2, descriptors_2 );

    Mat outimg1;
    drawKeypoints( img_1, keypoints_1, outimg1, Scalar::all(-1), DrawMatchesFlags::DEFAULT );
    imshow("ORB特征点",outimg1);

    //-- Step 3: match the BRIEF descriptors of the two images using Hamming distance
    vector<DMatch> matches;
    //BFMatcher matcher ( NORM_HAMMING );
    matcher->match ( descriptors_1, descriptors_2, matches );

    //-- Step 4: filter the matched pairs
    double min_dist=10000, max_dist=0;

    // Find the minimum and maximum distances over all matches, i.e. the distances between the most similar and the least similar pairs
    for ( int i = 0; i < descriptors_1.rows; i++ )
    {
        double dist = matches[i].distance;
        if ( dist < min_dist ) min_dist = dist;
        if ( dist > max_dist ) max_dist = dist;
    }
    
    // The same computation with STL algorithms, just for fun
    min_dist = min_element( matches.begin(), matches.end(), [](const DMatch& m1, const DMatch& m2) {return m1.distance<m2.distance;} )->distance;
    max_dist = max_element( matches.begin(), matches.end(), [](const DMatch& m1, const DMatch& m2) {return m1.distance<m2.distance;} )->distance;

    printf ( "-- Max dist : %f \n", max_dist );
    printf ( "-- Min dist : %f \n", min_dist );

    // Consider a match wrong when the descriptor distance exceeds twice the minimum distance. Since min_dist can be very small, use an empirical value of 30 as a lower bound.
    std::vector< DMatch > good_matches;
    for ( int i = 0; i < descriptors_1.rows; i++ )
    {
        if ( matches[i].distance <= max ( 2*min_dist, 30.0 ) )
        {
            good_matches.push_back ( matches[i] );
        }
    }

    //-- Step 5: draw the matching results
    Mat img_match;
    Mat img_goodmatch;
    drawMatches ( img_1, keypoints_1, img_2, keypoints_2, matches, img_match );
    drawMatches ( img_1, keypoints_1, img_2, keypoints_2, good_matches, img_goodmatch );
    imshow ( "所有匹配点对", img_match );
    imshow ( "优化后匹配点对", img_goodmatch );
    waitKey(0);

    return 0;
}
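
The demo above stops at 2D matching. To connect it to the pipeline described earlier, below is a minimal sketch, not this project's actual code, of the next step: estimating the relative pose from the ground matches with OpenCV's findEssentialMat/recoverPose and then computing the epipolar line on which a wire point's match in the second image must lie. The intrinsic matrix K and the sample wire pixel are placeholder assumptions.

#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/core/core.hpp>
#include <vector>

using namespace cv;
using namespace std;

// Sketch: relative pose from the filtered ground matches, then the epipolar
// line constraining where a wire point's correspondence can appear in image 2.
// K is the 3x3 camera intrinsic matrix (CV_64F, placeholder values).
void poseAndEpipolarLine ( const vector<KeyPoint>& keypoints_1,
                           const vector<KeyPoint>& keypoints_2,
                           const vector<DMatch>& good_matches,
                           const Mat& K )
{
    vector<Point2f> pts1, pts2;
    for ( const DMatch& m : good_matches )
    {
        pts1.push_back ( keypoints_1[m.queryIdx].pt );
        pts2.push_back ( keypoints_2[m.trainIdx].pt );
    }

    // Essential matrix from the ground matches; RANSAC rejects outliers.
    Mat E = findEssentialMat ( pts1, pts2, K, RANSAC );

    // Relative pose between the two views (t is only known up to scale).
    Mat R, t;
    recoverPose ( E, pts1, pts2, K, R, t );

    // Fundamental matrix from E: F = K^-T E K^-1.
    Mat F = K.inv().t() * E * K.inv();

    // Epipolar line in image 2 for a candidate wire pixel in image 1
    // (the pixel coordinates here are made up for illustration).
    vector<Point2f> wire_pts1 = { Point2f ( 1000.f, 800.f ) };
    vector<Vec3f> lines2;
    computeCorrespondEpilines ( wire_pts1, 1, F, lines2 );
    // A valid wire match (x, y) in image 2 should make
    // lines2[0][0]*x + lines2[0][1]*y + lines2[0][2] close to zero.
}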

Computing $F$ from the Estimated $R, t$

Here we assume the given $R, t$ are expressed in the world coordinate frame, so computing $F$ first requires converting them back into relative parameters between the two camera frames. For cameras $i$ and $j$ with world rotations $R_i, R_j$ and camera centers $c_i, c_j$, the relative pose is $R_{rel} = R_j R_i^T$ and $t_{rel} = R_j (c_i - c_j)$, and the fundamental matrix follows as $F = K_j^{-T} [t_{rel}]_\times R_{rel} K_i^{-1}$.

#include <Eigen/Dense>
#include <iostream>
#include <vector>

// Compute the fundamental matrix F between each pair of three cameras
// (left, middle, right) from their world-frame rotations R and centers c.
void CalculateFByRGlobal()
{
	Eigen::Matrix3d Rl0;
	/*Rl0 << 0.775071, -0.631781, -0.0108743,
		0.0352285, 0.0603884, -0.997553,
		0.630892, 0.772791, 0.069062;*/ //BA huzhou
	Rl0 << -0.75596, -0.654554, 0.00914511,
		-0.0120758, -2.38882e-05, -0.999927,
		0.654507, -0.756015, -0.0078862;  //dalian

	Eigen::Vector3d cl0;
	/*cl0 << 943.42, 1019.13, 54.7952;*/    //huzhou
	cl0 << 974.676, 1021.41, 16.9945;   //dalian

	Eigen::Matrix3d Rm0;
	/*Rm0 << 0.785548, -0.618716, -0.010268,
		0.0190466, 0.0407612, -0.998987,
		0.618508, 0.784557, 0.0438043;*/    //huzhou
	Rm0 << -0.740306, -0.672267, -0.00202373,
		0.00930091, -0.00723215, -0.999931,
		0.672205, -0.740274, 0.0116067;   //dalian
	Eigen::Vector3d cm0;
	/*cm0 << 947.823, 1016.18, 54.5612;*/   //huzhou
	cm0 << 970.431, 1017.26, 15.493;   //dalian


	Eigen::Matrix3d Rr0;
	/*Rr0 << 0.79421, -0.60764, 0.00190083,
		0.0225506, 0.0263483, -0.999398,
		0.607224, 0.793775, 0.0346288; */   //huzhou
	Rr0 << -0.762675, -0.646221, -0.0269323,
		0.0197389, 0.0183655, -0.999636,
		0.64648, -0.76293, -0.00125122;  //dalian
	Eigen::Vector3d cr0;
	/*cr0 << 951.094, 1014.74, 54.7585;*/   //huzhou
	cr0 << 966.922, 1014.19, 17.0275; //dalian


	Eigen::Matrix3d Kl;
	/*Kl << 3.7612906067774788e+003, 0., 2.0457995013785016e+003, 0.,
		3.7159736130516289e+003, 1.4796241830921265e+003, 0., 0., 1.;*/ //huzhou
	Kl << 3.7343664904677757e+003, 0., 2.0458971603779855e+003, 0.,
		3.6840622549040932e+003, 1.5309521054590580e+003, 0., 0., 1.;  //dalian

	Eigen::Matrix3d Km;
	/*Km << 3.7516261809333914e+003, 0., 2.0321668349416393e+003, 0.,
		3.7075460080696898e+003, 1.4757973862050546e+003, 0., 0., 1.;*///huzhou
	Km << 3.6807406030598072e+003, 0., 2.0575809009145160e+003, 0.,
		3.6316348410018245e+003, 1.5323338632609398e+003, 0., 0., 1.;  //dalian

	Eigen::Matrix3d Kr;
	/*Kr << 3.7604405383652875e+003, 0., 2.0415438075955501e+003, 0.,
		3.7160779331549415e+003, 1.4845370511494227e+003, 0., 0., 1.;*/ //huzhou
	Kr << 3.6902009246169255e+003, 0., 2.0795207000036444e+003, 0.,
		3.6414589667424284e+003, 1.5276047177099535e+003, 0., 0., 1.; //dalian


	std::vector<Eigen::Matrix3d> vec_rotation{ Rl0,Rm0,Rr0 };
	std::vector<Eigen::Vector3d> vec_center{ cl0,cm0,cr0 };
	std::vector<Eigen::Matrix3d> vec_k_inv{ Kl.inverse(),Km.inverse(),Kr.inverse() };

	std::cout << "0: l0, 1: m0, 2 r0 " << std::endl;
	for (size_t i = 0; i < vec_rotation.size(); i++)
	{
		for (size_t j = i + 1; j < vec_rotation.size(); j++)
		{
			// Relative pose from camera i to camera j
			Eigen::Matrix3d R_relative = vec_rotation[j] * (vec_rotation[i].transpose());
			Eigen::Vector3d t_relative = vec_rotation[j] * (vec_center[i] - vec_center[j]);
			// Skew-symmetric matrix [t]x of the relative translation
			Eigen::Matrix3d t_relative_AntisymmetricMatrix;
			t_relative_AntisymmetricMatrix << 0, -t_relative[2], t_relative[1],
				                              t_relative[2], 0, -t_relative[0],
				                              -t_relative[1], t_relative[0], 0;

			//cout << "t_relative_AntisymmetricMatrix" << endl << t_relative_AntisymmetricMatrix << endl;

			// F = K_j^{-T} [t]x R_relative K_i^{-1}, normalized so that F(2,2) = 1
			Eigen::Matrix3d F = vec_k_inv[j].transpose() * t_relative_AntisymmetricMatrix * R_relative * vec_k_inv[i];
			F /= F(2, 2);
			std::cout << "i: " << i << " , j: " << j << " F:" << std::endl << F << std::endl;
			std::cout << "c_relative: " << (-R_relative.transpose() * t_relative).transpose() << std::endl;
		}
	}

}
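
As a quick sanity check on each $F$ printed above, a true pixel correspondence $(x_1, x_2)$ should satisfy the epipolar constraint $x_2^T F x_1 \approx 0$. A minimal sketch (the helper name and any test pixel pair are hypothetical):

#include <Eigen/Dense>

// Residual of the epipolar constraint x2^T F x1 for a pixel pair;
// it should be close to zero for a correct correspondence.
double EpipolarResidual(const Eigen::Matrix3d& F,
	const Eigen::Vector2d& x1, const Eigen::Vector2d& x2)
{
	Eigen::Vector3d p1(x1.x(), x1.y(), 1.0);	// homogeneous pixel in image 1
	Eigen::Vector3d p2(x2.x(), x2.y(), 1.0);	// homogeneous pixel in image 2
	return p2.dot(F * p1);
}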

Power Line Extraction, Matching, and Reconstruction

The code for this part is on GitHub.
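
As a pointer to what that code does in the reconstruction step, here is a minimal triangulation sketch with OpenCV, assuming the $3 \times 4$ projection matrices come from the pose estimation above (e.g. $P_1 = K[I|0]$ and $P_2 = K[R|t]$); it is an illustration, not the project's implementation.

#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/core/core.hpp>
#include <vector>

using namespace cv;
using namespace std;

// Sketch: triangulate matched wire pixels into 3D points.
// P1 and P2 are the 3x4 projection matrices of the two views.
vector<Point3d> TriangulateWirePoints ( const Mat& P1, const Mat& P2,
                                        const vector<Point2f>& pts1,
                                        const vector<Point2f>& pts2 )
{
    Mat pts4d;
    triangulatePoints ( P1, P2, pts1, pts2, pts4d );  // 4xN homogeneous, CV_32F

    vector<Point3d> pts3d;
    for ( int i = 0; i < pts4d.cols; i++ )
    {
        float w = pts4d.at<float> ( 3, i );           // dehomogenize
        pts3d.push_back ( Point3d ( pts4d.at<float> ( 0, i ) / w,
                                    pts4d.at<float> ( 1, i ) / w,
                                    pts4d.at<float> ( 2, i ) / w ) );
    }
    return pts3d;
}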

Updated 2020-02-29