Advertisement

Kinect的RGB和Depth数据融合

阅读量:
复制代码
    #include "stdafx.h"
    #include "kinect.h"
    
    #include <iostream>
    #include <opencv2\opencv.hpp>
    #include <opencv2/highgui/highgui.hpp>
    #include <opencv2/imgproc/imgproc.hpp>
    #include <time.h>
    #include <opencv2/core/core.hpp>  
    #include <thread>
    #include <mutex>
    //#include "PracticalSocket.h"
    #include <queue>
    
    using namespace cv;
    using namespace std;
    /// Release a COM-style interface pointer and null it out.
    /// Safe to call with a null pointer (no-op). After the call the caller's
    /// pointer is guaranteed to be nullptr, preventing accidental double-Release.
    template<class Interface>      // works for any type exposing Release()
    inline void SafeRelease(Interface *& pInterfaceToRelease)
    {
    	if (pInterfaceToRelease != nullptr)
    	{
    		pInterfaceToRelease->Release();
    		pInterfaceToRelease = nullptr;
    	}
    }
    int main()
    {
    
    	setUseOptimized(true);
    	int depthWidth = 512;
    	int depthHeight = 424;
    
    	
    	HRESULT hResult = S_OK;
    	
    	//ColorSpacePoint* pointsCloud = nullptr;
    	IKinectSensor* pSensor;
    	ICoordinateMapper* pCoordinateMapper;
    	hResult = GetDefaultKinectSensor(&pSensor);
    	if (FAILED(hResult)) { cerr << "Error:GetDefaultKinectSensor" << endl; return -1; }
    
    
    	IMultiSourceFrameReader* m_pMultiFrameReader;
    //	IBodyFrameSource* m_pBodyFrameSource;
    	//IBodyFrameReader* m_pBodyFrameReader;
    
    	hResult = pSensor->Open();
    
    	//pSensor->get_BodyFrameSource(&m_pBodyFrameSource);
    	// 获取多数据源到读取器  
    	 pSensor->OpenMultiSourceFrameReader(
    		FrameSourceTypes::FrameSourceTypes_Color |
    		FrameSourceTypes::FrameSourceTypes_Infrared |
    		FrameSourceTypes::FrameSourceTypes_Depth,
    		&m_pMultiFrameReader);
    	if (FAILED(hResult)) { cerr << "Error:IKinectSensor::Open()" << endl; return -11; }
    
    	pSensor->get_CoordinateMapper(&pCoordinateMapper);
    	
    	if (FAILED(hResult)) { cerr << "Error:IKinectSensor::get_DepthFrameSource()" << endl; return -1; }
    	// 三个数据帧及引用
    	IDepthFrame* m_pDepthFrame = NULL;
    	IColorFrame* m_pColorFrame = NULL;
    	IDepthFrameReference* m_pDepthFrameReference = NULL;
    	IColorFrameReference* m_pColorFrameReference = NULL;
    	ColorSpacePoint*        m_pColorCoordinates = NULL;
    //	CameraSpacePoint*        m_pCameraCoordinates = NULL;
    	IMultiSourceFrame* m_pMultiFrame = NULL;//多源数据帧初始化
    	
    	//unsigned int bufferSize = depthWidth * depthHeight * sizeof(unsigned short);
    	//Mat bufferMat(depthHeight, depthWidth, CV_16SC1);
    	//Mat depthMat(depthHeight, depthWidth, CV_8UC1);
    	//Mat i_depthToRgb(424, 512, CV_8UC4);
    	BYTE *bgraData = new BYTE[1080 * 1920 * 4];
    	UINT16 *depthData = new UINT16[424 * 512];
    	//m_pCameraCoordinates = new CameraSpacePoint[512 * 424];
    	m_pColorCoordinates = new ColorSpacePoint[512 * 424];
    	//Mat i_rgb(1080, 1920, CV_8UC4);
    	//Mat bufferMat;
    	while (1)
    	{
    		//Frame
    		//IDepthFrame* pDepthFrame = nullptr;
    		// 获取新的一个多源数据帧
    		hResult = m_pMultiFrameReader->AcquireLatestFrame(&m_pMultiFrame);
    		//UINT16 *depthData = new UINT16[424 * 512];
    		if (SUCCEEDED(hResult))
    			hResult = m_pMultiFrame->get_ColorFrameReference(&m_pColorFrameReference);
    		if (SUCCEEDED(hResult))
    			hResult = m_pColorFrameReference->AcquireFrame(&m_pColorFrame);
    		if (SUCCEEDED(hResult))
    			hResult = m_pMultiFrame->get_DepthFrameReference(&m_pDepthFrameReference);
    		if (SUCCEEDED(hResult))
    			hResult = m_pDepthFrameReference->AcquireFrame(&m_pDepthFrame);
    
    		// color拷贝到图片中
    		UINT nColorBufferSize = 1920 * 1080 * 4;
    		if (SUCCEEDED(hResult))
    			hResult = m_pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, bgraData, ColorImageFormat::ColorImageFormat_Bgra);
    		Mat color(1080,1920,CV_8UC4 ,bgraData);
    		
    		UINT nDepthBufferSize = 424 * 512;
    		if (SUCCEEDED(hResult))
    		{
    			hResult = m_pDepthFrame->CopyFrameDataToArray(nDepthBufferSize, reinterpret_cast<UINT16*>(depthData));
    		}
    		Mat bufferMat(424,512,CV_16UC1,depthData);
    		bufferMat.convertTo(bufferMat, CV_8U, -255.0f / 4500.0f, 255.0f);
    		imshow("depth",bufferMat);
    		// 深度图映射到彩色图上
    		if (SUCCEEDED(hResult))
    		{
    			HRESULT hr = pCoordinateMapper->MapDepthFrameToColorSpace(512 * 424, depthData, 512 * 424, m_pColorCoordinates);		
    	// 注意这里的彩色点数要写成424*512,至于为什么。。。可能是为了代表下一个参数colorSpacePoints的大小
    		}//里m_pColorCoordinates是最终得到的512*424大小的数组,元素为一个个坐标,代表了对应的彩色图上的坐标。
    		Mat i_depthToRgb(424, 512, CV_8UC4);
    		if (SUCCEEDED(hResult))
    		{
    			for (int i = 0; i < 424 * 512; i++)
    			{
    				ColorSpacePoint p = m_pColorCoordinates[i];
    				if (p.X != -std::numeric_limits<float>::infinity() && p.Y != -std::numeric_limits<float>::infinity())
    				{
    					int colorX = static_cast<int>(p.X + 0.5f);
    					int colorY = static_cast<int>(p.Y + 0.5f);
    
    					if ((colorX >= 0 && colorX < 1920) && (colorY >= 0 && colorY < 1080))
    					{
    						i_depthToRgb.data[i * 4] = color.data[(colorY * 1920 + colorX) * 4];
    						i_depthToRgb.data[i * 4 + 1] = color.data[(colorY * 1920 + colorX) * 4 + 1];
    						i_depthToRgb.data[i * 4 + 2] = color.data[(colorY * 1920 + colorX) * 4 + 2];
    						i_depthToRgb.data[i * 4 + 3] = color.data[(colorY * 1920 + colorX) * 4 + 3];
    					}
    				}
    			}
    		}
    		imshow("caoa", color);
    		imshow("rgb2depth", i_depthToRgb);
    		if (waitKey(1) == VK_ESCAPE)
    		{
    			break;
    		}
    		SafeRelease(m_pColorFrame);
    		SafeRelease(m_pDepthFrame);
    		SafeRelease(m_pColorFrameReference);
    		SafeRelease(m_pDepthFrameReference);
    		SafeRelease(m_pMultiFrame);
    
    	}
    	SafeRelease(pCoordinateMapper);
    	pSensor->Close();
    	return 0;
    }
    	// ---- Dead experimental code left over from earlier revisions (kept for reference). ----
    	/*	m_pDepthFrame->CopyFrameDataToArray(424 * 512, depthData);
    		if (SUCCEEDED(hResult)) { cerr << "Error:IDepthFrameSource::OpenReader()" << endl; return -1234; }*/
    		//		hResult = pDepthFrame->AccessUnderlyingBuffer(&bufferSize,
    		//			reinterpret_cast<UINT16**>(&bufferMat.data));
    		//			if (SUCCEEDED(hResult)){
    		//					bufferMat.convertTo(depthMat, CV_8U, -255.0f / 4500.0f, 255.0f);
    		//			}
    		//}

    		// BUG FIX: the next line was a live statement sitting outside any
    		// function (and pDepthFrame is not declared at file scope), which
    		// broke compilation. Commented out like the rest of this region.
    		//	SafeRelease(pDepthFrame);
    		//	imshow("depth", depthMat);


    		/*IColorFrame* pColorFrame = nullptr;
    		hResult = pColorReader->AcquireLatestFrame(&pColorFrame);*/
    	
    	
    		/*	hResult = pDepthFrame->AccessUnderlyingBuffer(&bufferSize,
    				reinterpret_cast<UINT16**>(&bufferMat.data));*/

    	/*	
    		pCoordinateMapper->MapDepthFrameToColorSpace(
    			depthHeight*depthWidth, depthData, depthHeight*depthWidth, pointsCloud);*/
    		//	pointsCloud ends up as a 512 * 424 array; each element is the coordinate in the color image corresponding to that depth pixel.
    			/*hResult = pColorFrame->CopyConvertedFrameDataToArray(
    				bufferSize, reinterpret_cast<BYTE*>(bufferMat.data), ColorImageFormat::ColorImageFormat_Bgra);*/

    				//	resize(bufferMat, colorMat, cv::Size(), 0.25, 0.25);

    		//for (int i = 0; i < 424 * 512; i++)
    		//{
    		//	ColorSpacePoint p = pointsCloud[i];
    		//	if (p.X != -std::numeric_limits<float>::infinity() && p.Y != -std::numeric_limits<float>::infinity())
    		//	{
    		//		int colorX = static_cast<int>(p.X + 0.5f);
    		//		int colorY = static_cast<int>(p.Y + 0.5f);

    		//		if ((colorX >= 0 && colorX < 1920) && (colorY >= 0 && colorY < 1080))
    		//		{
    		//			i_depthToRgb.data[i * 4] = bufferMat.data[(colorY * 1920 + colorX) * 4];
    		//			i_depthToRgb.data[i * 4 + 1] = bufferMat.data[(colorY * 1920 + colorX) * 4 + 1];
    		//			i_depthToRgb.data[i * 4 + 2] = bufferMat.data[(colorY * 1920 + colorX) * 4 + 2];
    		//			i_depthToRgb.data[i * 4 + 3] = bufferMat.data[(colorY * 1920 + colorX) * 4 + 3];
    		//		}
    		//	}
    		//}
    		//imshow("rgb2depth", i_depthToRgb);

全部评论 (0)

还没有任何评论哟~