点云处理STEP 1 :Kinect+PCL 获取原始点云
程序员文章站
2022-03-03 20:12:55
...
论文方向是点云处理,最近几个月要参加实习,所以用博客随时记录论文的进程。
这是使用 Kinect v2 和 PCL 1.8.0 采集物体点云(由深度图像映射得到)的程序:
#include <vtkAutoInit.h>
#include <Windows.h>
#include <iostream>
#include <kinect.h>
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl/filters/radius_outlier_removal.h>
#include <pcl/filters/conditional_removal.h>
#include <pcl/filters/statistical_outlier_removal.h>
#include <string.h>
using namespace std;
// Point type used throughout: plain XYZ, no color (only the depth stream is captured).
using MyPointDataType = pcl::PointXYZ;
/// Release a COM-style interface pointer and null it out, so a later
/// double-release or use-after-release on the same variable is harmless.
/// @param pInterfaceToRelease  reference to the pointer; set to nullptr after Release().
template<class Interface>
inline void SafeRelease(Interface *& pInterfaceToRelease)
{
    if (pInterfaceToRelease != nullptr)  // nullptr over NULL (modern C++ idiom)
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = nullptr;
    }
}
/// Convert an integer to its decimal string form (used to serial-number saved PCD files).
/// @param i  value to convert (negative values keep their sign).
/// @return   decimal representation, e.g. 42 -> "42".
std::string num2str(int i)
{
    // std::to_string produces the same text as the original stringstream
    // round-trip for int, without the stream machinery.
    return std::to_string(i);
}
//主函数
int main()
{
//调用Kinect库函数获取Kinect传感器
IKinectSensor* m_pKinectSensor = nullptr;
HRESULT hr;
hr = GetDefaultKinectSensor(&m_pKinectSensor);
if (FAILED(hr))
{
return hr;
}
IMultiSourceFrameReader* m_pMultiFrameReader = nullptr;
if (m_pKinectSensor)
{
hr = m_pKinectSensor->Open();
if (SUCCEEDED(hr))
{
hr = m_pKinectSensor->OpenMultiSourceFrameReader(
FrameSourceTypes::FrameSourceTypes_Depth,
&m_pMultiFrameReader);
}
}
if (!m_pKinectSensor || FAILED(hr))
{
return E_FAIL;
}
UINT16 *depthData = new UINT16[424 * 512];
IDepthFrameReference* m_pDepthFrameReference = nullptr;
IColorFrameReference* m_pColorFrameReference = nullptr;
IDepthFrame* m_pDepthFrame = nullptr;
IColorFrame* m_pColorFrame = nullptr;
IMultiSourceFrame* m_pMultiFrame = nullptr;
ICoordinateMapper* m_pCoordinateMapper = nullptr;
int count = 0;
while (count <= 30)
{
Sleep(5000);
while (true)
{
hr = m_pMultiFrameReader->AcquireLatestFrame(&m_pMultiFrame);
if (FAILED(hr) || !m_pMultiFrame)
{
continue;
}
break;
}
if (SUCCEEDED(hr))
hr = m_pMultiFrame->get_DepthFrameReference(&m_pDepthFrameReference);
if (SUCCEEDED(hr))
hr = m_pDepthFrameReference->AcquireFrame(&m_pDepthFrame);
hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
pcl::PointCloud<MyPointDataType>::Ptr cloud(new pcl::PointCloud<MyPointDataType>);
cloud->width = 512 * 424;
cloud->height = 1;
cloud->is_dense = false;
cloud->points.resize(cloud->width * cloud->height);
if (SUCCEEDED(hr))
{
hr = m_pDepthFrame->CopyFrameDataToArray(424 * 512, depthData);
CameraSpacePoint* m_pCameraCoordinates = new CameraSpacePoint[512 * 424];
hr = m_pCoordinateMapper->MapDepthFrameToCameraSpace(512 * 424, depthData, 512 * 424, m_pCameraCoordinates);
for (int i = 0; i < 512 * 424; i++)
{
CameraSpacePoint cameraP = m_pCameraCoordinates[i];
if (cameraP.X != -std::numeric_limits<float>::infinity() && cameraP.Y != -std::numeric_limits<float>::infinity() && cameraP.Z != -std::numeric_limits<float>::infinity())
{
float cameraX = static_cast<float>(cameraP.X);
float cameraY = static_cast<float>(cameraP.Y);
float cameraZ = static_cast<float>(cameraP.Z);
cloud->points[i].x = cameraX;
cloud->points[i].y = cameraY;
cloud->points[i].z = cameraZ;
}
}
}
string s = "pcd文件名称";
s += ".pcd";
pcl::io::savePCDFile(s, *cloud, false);
std::cerr << "Saved " << cloud->points.size() << " data points." << std::endl;
s.clear();
boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer(new pcl::visualization::PCLVisualizer("3D viewer"));
viewer->addPointCloud(cloud);
viewer->resetCamera();
viewer->addCoordinateSystem(0.1);
viewer->initCameraParameters();
while (!viewer->wasStopped()){
viewer->spinOnce();
}
count++;
cout << "test" << endl;
SafeRelease(m_pDepthFrame);
SafeRelease(m_pDepthFrameReference);
SafeRelease(m_pColorFrame);
SafeRelease(m_pColorFrameReference);
SafeRelease(m_pMultiFrame);
}
m_pKinectSensor->Close();
SafeRelease(m_pKinectSensor);
std::system("pause");
return 0;
}