1. Problem
Given 3 (non-consecutive) frames of RGB + depth images captured by an RGB-D camera, together with the transformation matrices between them (using the first frame as the reference frame), generate a point cloud from each of the 3 RGB-D frames and fuse them into one final point cloud.
The data are as follows:
Camera pose file:
Contents of cameraTrajectory.txt:
# tx ty tz qx qy qz qw
0.000000000 0.000000000 0.000000000 0.000000000 0.000000000 0.000000000 1.000000000
-0.288952827 0.222811699 -0.252029210 0.054562528 -0.312418818 -0.288284063 0.903498590
-0.650643229 0.383824050 -0.501303971 -0.016285975 -0.159155473 -0.111743204 0.980774045
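Each line stores the pose of one frame relative to the first frame as a translation (tx, ty, tz) followed by a unit quaternion (qx, qy, qz, qw); a point p_i in camera i's coordinates maps into the reference frame as p_0 = R(q_i) * p_i + t_i. The short standalone sketch below is only an illustration of this convention (it is not part of the assignment code); it builds an Eigen::Isometry3d from the second trajectory line. Note that Eigen's Quaterniond constructor takes (w, x, y, z), while the file stores x y z w.

#include <eigen3/Eigen/Dense>
#include <iostream>

int main()
{
    // Second line of cameraTrajectory.txt: tx ty tz qx qy qz qw
    double tx = -0.288952827, ty = 0.222811699, tz = -0.252029210;
    double qx = 0.054562528, qy = -0.312418818, qz = -0.288284063, qw = 0.903498590;

    // Eigen's Quaterniond constructor takes (w, x, y, z)
    Eigen::Quaterniond q(qw, qx, qy, qz);
    Eigen::Isometry3d T = Eigen::Isometry3d::Identity();
    T.rotate(q.normalized());
    T.pretranslate(Eigen::Vector3d(tx, ty, tz));

    // Map a point from this frame's camera coordinates into the reference frame
    Eigen::Vector3d p_cam(0.0, 0.0, 1.0);
    Eigen::Vector3d p_ref = T * p_cam;
    std::cout << "p_ref = " << p_ref.transpose() << std::endl;
    return 0;
}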
2. RGB-D image to point cloud + point cloud fusion code
// Convert an RGB-D image into a point cloud
#include <pcl/point_types.h>
// Point cloud file I/O (PCD and PLY files)
#include <pcl/io/pcd_io.h>
#include <pcl/io/ply_io.h>
// k-d tree
#include <pcl/kdtree/kdtree_flann.h>
// Feature extraction (normals)
#include <pcl/features/normal_3d_omp.h>
#include <pcl/features/normal_3d.h>
// Surface reconstruction
#include <pcl/surface/gp3.h>
#include <pcl/surface/poisson.h>
// Visualization
#include <pcl/visualization/pcl_visualizer.h>
// Point cloud transformations (pcl::transformPointCloud)
#include <pcl/common/transforms.h>
// Multithreading
#include <boost/thread/thread.hpp>
#include <fstream>
#include <iostream>
#include <stdio.h>
#include <string.h>
#include <string>
#include <opencv2/opencv.hpp>
#include <eigen3/Eigen/Dense>
using namespace std;
using namespace cv;
typedef pcl::PointXYZRGB PointT;
typedef pcl::PointCloud<PointT> PointCloud;
// Camera intrinsic parameters
struct CAMERA_INTRINSIC_PARAMETERS
{
double fx, fy, cx, cy, scale;
};
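// fx, fy: focal lengths in pixels; cx, cy: principal point (optical center).
// scale converts raw 16-bit depth values to meters; e.g. the TUM RGB-D dataset
// stores depth * 5000, hence scale = 5000 is used below.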
// Convert an RGB-D image into a colored point cloud
PointCloud::Ptr image2PointCloud(
Mat rgb,
Mat depth,
CAMERA_INTRINSIC_PARAMETERS camera)
{
PointCloud::Ptr cloud(new PointCloud);
for (int m = 0; m < depth.rows; m++)
for (int n = 0; n < depth.cols; n++)
{
// Get the depth value at (m, n)
ushort d = depth.ptr<ushort>(m)[n];
// d may be missing (0); if so, skip this point
if (d == 0)
continue;
// d is valid, so add a new point to the cloud
PointT p;
// Back-project the pixel to 3D camera coordinates
p.z = double(d) / camera.scale;
p.x = (n - camera.cx) * p.z / camera.fx;
p.y = (m - camera.cy) * p.z / camera.fy;
// Get its color from the RGB image (OpenCV stores channels as BGR)
p.b = rgb.ptr<uchar>(m)[n * 3];
p.g = rgb.ptr<uchar>(m)[n * 3 + 1];
p.r = rgb.ptr<uchar>(m)[n * 3 + 2];
// Add p to the point cloud
cloud->points.push_back(p);
}
// Set the cloud metadata (unorganized cloud) and return it
cloud->height = 1;
cloud->width = cloud->points.size();
cloud->is_dense = false;
return cloud;
}
// Read the camera trajectory (one pose per line)
// Line format: tx ty tz qx qy qz qw
void readCameraTrajectory(
string camTransFile,
vector<Eigen::Isometry3d> &poses)
{
ifstream fcamTrans(camTransFile);
if (!fcamTrans.is_open())
{
cerr << "Failed to open trajectory file: " << camTransFile << endl;
return;
}
else
{
string str;
while ((getline(fcamTrans, str)))
{
Eigen::Quaterniond q; // quaternion (stored in the file as x y z w)
Eigen::Vector3d t;
Eigen::Isometry3d T = Eigen::Isometry3d::Identity();
// Skip empty lines and the comment header
if (str.empty() || str.at(0) == '#')
{
cout << "Skipping comment line: " << str << endl;
continue;
}
istringstream strdata(str);
strdata >> t[0] >> t[1] >> t[2] >> q.x() >> q.y() >> q.z() >> q.w();
q.normalize(); // guard against numerical drift in the stored quaternion
T.rotate(q);
T.pretranslate(t);
poses.push_back(T);
}
}
}
// Simple point cloud fusion by concatenation
PointCloud::Ptr pointCloudFusion(
PointCloud::Ptr &original,
cv::Mat curr_rgb_im,
cv::Mat curr_depth_im,
Eigen::Isometry3d T,
CAMERA_INTRINSIC_PARAMETERS camera )
{
// ---------- begin your code ---------- //
// Convert the current frame to a point cloud, transform it into the
// reference (first-frame) coordinate system, and append it to the fused map.
PointCloud::Ptr newCloud(new PointCloud()), transCloud(new PointCloud());
newCloud = image2PointCloud(curr_rgb_im, curr_depth_im, camera);
pcl::transformPointCloud(*newCloud, *transCloud, T.matrix());
*original += *transCloud;
return original;
// ---------- end your code ---------- //
}
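// Note: this is a naive concatenation in the reference frame. For larger maps the
// fused cloud can be thinned with a voxel-grid filter (pcl::VoxelGrid); an optional
// post-processing sketch is given at the end of this post.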
// Display an RGB point cloud
boost::shared_ptr<pcl::visualization::PCLVisualizer> rgbVis(
pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr cloud)
{
// --------------------------------------------
// -----Open 3D viewer and add point cloud-----
// --------------------------------------------
boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer(new pcl::visualization::PCLVisualizer("3D Viewer"));
viewer->setBackgroundColor(0, 0, 0);
pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(cloud);
viewer->addPointCloud<pcl::PointXYZRGB>(cloud, rgb, "sample cloud");
viewer->setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "sample cloud");
viewer->addCoordinateSystem(1.0);
viewer->initCameraParameters();
return (viewer);
}
int main()
{
// Camera intrinsics: fx, fy, cx, cy, depth scale
CAMERA_INTRINSIC_PARAMETERS cameraParams{517.0, 516.0, 318.6, 255.3, 5000.0};
int frameNum = 3;
vector<Eigen::Isometry3d> poses;
PointCloud::Ptr fusedCloud(new PointCloud());
// Replace "xxx" with your data directory; frame idx is expected as rgb<idx>.png / depth<idx>.png
string color_im_path = "xxx/rgb/rgb";
string depth_im_path = "xxx/depth/depth";
string cameraPosePath = "xxx/cameraTrajectory.txt";
readCameraTrajectory(cameraPosePath, poses);
for (int idx = 0; idx < frameNum; idx++)
{
string rgbPath = color_im_path + to_string(idx) + ".png";
string depthPath = depth_im_path + to_string(idx) + ".png";
cv::Mat color_im = cv::imread(rgbPath);
if (color_im.empty())
{
cerr << "Fail to load rgb image: " << rgbPath << endl;
return -1;
}
// Read the 16-bit depth image unchanged
cv::Mat depth_im = cv::imread(depthPath, cv::IMREAD_UNCHANGED);
if (depth_im.empty())
{
cerr << "Fail to load depth image: " << depthPath << endl;
return -1;
}
if (idx == 0)
{
// The first frame is the reference frame (its pose is the identity), so no transform is needed
fusedCloud = image2PointCloud(color_im, depth_im, cameraParams);
boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer1;
viewer1 = rgbVis(fusedCloud);
}
else
{
fusedCloud = pointCloudFusion(fusedCloud, color_im,depth_im, poses[idx], cameraParams);
}
}
boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer;
viewer = rgbVis(fusedCloud);
while (!viewer->wasStopped())
{
viewer->spinOnce(100);
boost::this_thread::sleep(boost::posix_time::microseconds(100000));
}
pcl::io::savePCDFile( "fusedCloud.pcd", *fusedCloud );
return 0;
}
Figure: point cloud of the first frame
Figure: fused point cloud of the 3 frames
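Optional post-processing: a plain concatenation of three frames contains many redundant points. The standalone sketch below is my own addition (assuming fusedCloud.pcd sits in the working directory and a 1 cm voxel size is acceptable); it reloads the fused cloud, thins it with pcl::VoxelGrid, and saves the result.

#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/filters/voxel_grid.h>
#include <iostream>

int main()
{
    using PointT = pcl::PointXYZRGB;
    pcl::PointCloud<PointT>::Ptr cloud(new pcl::PointCloud<PointT>());
    // Load the fused cloud written by the program above (assumed file name)
    if (pcl::io::loadPCDFile("fusedCloud.pcd", *cloud) < 0)
    {
        std::cerr << "Failed to load fusedCloud.pcd" << std::endl;
        return -1;
    }
    std::cout << "Loaded " << cloud->size() << " points." << std::endl;

    // Thin the cloud with a 1 cm voxel grid to keep its size manageable
    pcl::VoxelGrid<PointT> voxel;
    voxel.setInputCloud(cloud);
    voxel.setLeafSize(0.01f, 0.01f, 0.01f);
    pcl::PointCloud<PointT>::Ptr downsampled(new pcl::PointCloud<PointT>());
    voxel.filter(*downsampled);
    std::cout << "After voxel filtering: " << downsampled->size() << " points." << std::endl;

    pcl::io::savePCDFileBinary("fusedCloud_down.pcd", *downsampled);
    return 0;
}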
References:
- https://mp.weixin.qq.com/s?__biz=MzIxOTczOTM4NA==&mid=2247486281&idx=1&sn=1b36bcfd9f492dabc44ae2f10562e040&chksm=97d7eedea0a067c89eb9b1e71f7cf5dd12410c8c81d43dc2a21f9c6f28babd7add017ad14705&scene=21#wechat_redirect
- https://blog.csdn.net/weixin_42905141/article/details/100765920
- WeChat official account: 計(jì)算機(jī)視覺life