Preface
Previous posts showed how to obtain the color image, the depth image, and the human skeleton joints from the Kinect:
- Kinect Learning (3): Capturing RGB Color Data
- Kinect Learning (4): Extracting Depth Data
- Kinect Learning (5): Extracting Depth Data with Player IDs
- Kinect Learning (6): Extracting Human Skeleton Joint Data
This time the goal is to combine them and pull all of these data streams from the Kinect at the same time.
Code
The code here simply merges the content of the previous posts, so I will not explain it in much detail again.
#include <Windows.h>
#include <iostream>
#include <NuiApi.h>
#include <opencv2/opencv.hpp>
using namespace std;
using namespace cv;
typedef struct structBGR {
BYTE blue;
BYTE green;
BYTE red;
BYTE player;
} BGR;
bool tracked[NUI_SKELETON_COUNT] = { FALSE };
cv::Point skeletonPoint[NUI_SKELETON_COUNT][NUI_SKELETON_POSITION_COUNT] = { cv::Point(0, 0) };
cv::Point colorPoint[NUI_SKELETON_COUNT][NUI_SKELETON_POSITION_COUNT] = { cv::Point(0, 0) };
void getColorImage(HANDLE & colorStreamHandle, cv::Mat & colorImg);
BGR Depth2RGB(USHORT depthID);
void getDepthImage(HANDLE & depthStreamHandle, cv::Mat & depthImg, cv::Mat & mask);
void drawSkeleton(cv::Mat &img, cv::Point pointSet[], int which_one);
void getSkeletonImage(cv::Mat & skeletonImg, cv::Mat & colorImg);
int main(int argc, char* argv[])
{
cv::Mat colorImg;
colorImg.create(480, 640, CV_8UC3);    // color stream is 640x480
cv::Mat depthImg;
depthImg.create(240, 320, CV_8UC3);    // depth stream is 320x240
cv::Mat skeletonImg;
skeletonImg.create(240, 320, CV_8UC3); // skeleton is drawn in depth-image coordinates
cv::Mat mask;
mask.create(240, 320, CV_8UC3);        // per-player mask, same size as the depth image
HANDLE colorEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
HANDLE depthEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
HANDLE skeletonEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
HANDLE colorStreamHandle = NULL;
HANDLE depthStreamHandle = NULL;
HRESULT hr;
hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX
| NUI_INITIALIZE_FLAG_USES_SKELETON);
if (FAILED(hr))
{
cout << "Nui initialize failed." << endl;
return hr;
}
hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, colorEvent, &colorStreamHandle);
if (FAILED(hr))
{
cout << "Can not open color stream." << endl;
return hr;
}
hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_320x240, 0, 2, depthEvent, &depthStreamHandle);
if (FAILED(hr))
{
cout << "Can not open depth stream." << endl;
return hr;
}
hr = NuiSkeletonTrackingEnable(skeletonEvent, 0);
if (FAILED(hr))
{
cout << "Can not enable skeleton tracking." << endl;
return hr;
}
cv::namedWindow("mask", CV_WINDOW_AUTOSIZE);
cv::namedWindow("colorImg", CV_WINDOW_AUTOSIZE);
cv::namedWindow("depthImg", CV_WINDOW_AUTOSIZE);
cv::namedWindow("skeletonImg", CV_WINDOW_AUTOSIZE);
while (1)
{
// Poll each event; a signaled event means a new frame of that type is ready.
if (WaitForSingleObject(colorEvent, 0) == WAIT_OBJECT_0)
{
getColorImage(colorStreamHandle, colorImg);
}
if (WaitForSingleObject(depthEvent, 0) == WAIT_OBJECT_0)
{
getDepthImage(depthStreamHandle, depthImg, mask);
}
if (WaitForSingleObject(skeletonEvent, 0) == WAIT_OBJECT_0)
{
getSkeletonImage(skeletonImg, colorImg);
}
cv::imshow("mask", mask);
cv::imshow("colorImg", colorImg);
cv::imshow("depthImg", depthImg);
cv::imshow("skeletonImg", skeletonImg);
if (cv::waitKey(1) == 27) // exit on ESC
break;
}
NuiShutdown();
cv::destroyAllWindows();
return 0;
}
void getColorImage(HANDLE & colorStreamHandle, cv::Mat & colorImg)
{
const NUI_IMAGE_FRAME * pImageFrame = NULL;
HRESULT hr = NuiImageStreamGetNextFrame(colorStreamHandle, 0, &pImageFrame);
if (FAILED(hr))
{
cout << "Could not get color image" << endl;
NuiShutdown();
return;
}
INuiFrameTexture * pTexture = pImageFrame->pFrameTexture;
NUI_LOCKED_RECT LockedRect;
pTexture->LockRect(0, &LockedRect, NULL, 0);
if (LockedRect.Pitch != 0)
{
for (int i = 0; i < colorImg.rows; i++)
{
uchar *ptr = colorImg.ptr<uchar>(i); // pointer to row i
// each byte holds one channel value, so uchar is used directly
uchar *pBuffer = (uchar*)(LockedRect.pBits) + i * LockedRect.Pitch;
for (int j = 0; j < colorImg.cols; j++)
{
// each source pixel is 4 bytes: bytes 0-1-2 are BGR, the 4th is currently unused
ptr[3 * j] = pBuffer[4 * j];
ptr[3 * j + 1] = pBuffer[4 * j + 1];
ptr[3 * j + 2] = pBuffer[4 * j + 2];
}
}
}
else
{
cout << "捕獲彩色圖像出錯" << endl;
}
pTexture->UnlockRect(0);
NuiImageStreamReleaseFrame(colorStreamHandle, pImageFrame);
}
// For every depth pixel: pixels belonging to the same player ID are painted the same color,
// different players get different colors, and pixels belonging to no player keep a gray value
// derived from the original depth.
BGR Depth2RGB(USHORT depthID)
{
// Each pixel carries 16 bits of information: the lowest 3 bits are the player ID
// (the index of the captured person) and the remaining 13 bits are the depth.
USHORT realDepth = (depthID & 0xfff8) >> 3; // depth value, upper 13 bits
USHORT player = depthID & 0x0007;           // player ID, lower 3 bits
// The extracted value is a distance, so normalize it to 0-255 for display.
BYTE depth = (BYTE)(255 * realDepth / 0x0fff);
BGR color_data;
color_data.blue = color_data.green = color_data.red = 0;
color_data.player = player;
// When the three channels are equal the pixel appears gray.
// Kinect can recognize up to 6 people in front of the sensor, but at most 2 of them
// can have their skeletons tracked at the same time.
switch (player)
{
case 0: // no player: plain gray depth
color_data.blue = depth / 2;
color_data.green = depth / 2;
color_data.red = depth / 2;
break;
case 1:
color_data.blue = depth;
break;
case 2:
color_data.green = depth;
break;
case 3:
color_data.red = depth;
break;
case 4:
color_data.blue = depth;
color_data.green = depth;
color_data.red = depth / 4;
break;
case 5:
color_data.blue = depth;
color_data.green = depth / 4;
color_data.red = depth;
break;
case 6:
color_data.blue = depth / 4;
color_data.green = depth;
color_data.red = depth;
break;
}
return color_data;
}
void getDepthImage(HANDLE & depthStreamHandle, cv::Mat & depthImg, cv::Mat & mask)
{
const NUI_IMAGE_FRAME * pImageFrame = NULL;
HRESULT hr = NuiImageStreamGetNextFrame(depthStreamHandle, 0, &pImageFrame);
if (FAILED(hr))
{
cout << "Could not get depth image" << endl;
NuiShutdown();
return;
}
INuiFrameTexture * pTexture = pImageFrame->pFrameTexture;
NUI_LOCKED_RECT LockedRect;
pTexture->LockRect(0, &LockedRect, NULL, 0);
if (LockedRect.Pitch != 0)
{
for (int i = 0; i < depthImg.rows; i++)
{
uchar * ptr = depthImg.ptr<uchar>(i);
uchar * ptr_mask = mask.ptr<uchar>(i);
uchar * pBufferRun = (uchar*)(LockedRect.pBits) + i * LockedRect.Pitch;
USHORT * pBuffer = (USHORT*)pBufferRun;
for (int j = 0; j < depthImg.cols; j++)
{
// ptr[j] = 255 - (uchar)(255 * pBuffer[j] / 0x0fff); // normalize the raw value directly
// ptr[j] = (uchar)(255 * pBuffer[j] / 0x0fff);       // normalize the raw value directly
BGR rgb = Depth2RGB(pBuffer[j]);
ptr[3 * j] = rgb.blue;
ptr[3 * j + 1] = rgb.green;
ptr[3 * j + 2] = rgb.red;
// Build the per-player mask: one color per player ID, black where there is no player.
switch (rgb.player)
{
case 0:
ptr_mask[3 * j] = 0;
ptr_mask[3 * j + 1] = 0;
ptr_mask[3 * j + 2] = 0;
break;
case 1:
ptr_mask[3 * j] = 255;
ptr_mask[3 * j + 1] = 0;
ptr_mask[3 * j + 2] = 0;
break;
case 2:
ptr_mask[3 * j] = 0;
ptr_mask[3 * j + 1] = 255;
ptr_mask[3 * j + 2] = 0;
break;
case 3:
ptr_mask[3 * j] = 0;
ptr_mask[3 * j + 1] = 0;
ptr_mask[3 * j + 2] = 255;
break;
case 4:
ptr_mask[3 * j] = 255;
ptr_mask[3 * j + 1] = 255;
ptr_mask[3 * j + 2] = 0;
break;
case 5:
ptr_mask[3 * j] = 255;
ptr_mask[3 * j + 1] = 0;
ptr_mask[3 * j + 2] = 255;
break;
case 6:
ptr_mask[3 * j] = 0;
ptr_mask[3 * j + 1] = 255;
ptr_mask[3 * j + 2] = 255;
break;
default:
ptr_mask[3 * j] = 0;
ptr_mask[3 * j + 1] = 0;
ptr_mask[3 * j + 2] = 0;
break;
}
}
}
}
else
{
cout << "捕獲深度圖像出錯" << endl;
}
pTexture->UnlockRect(0);
NuiImageStreamReleaseFrame(depthStreamHandle, pImageFrame);
}
// Draw one bone, but only when both joints were mapped to valid (non-zero) image coordinates.
static void drawBone(cv::Mat &img, cv::Point pointSet[], int jointA, int jointB, const cv::Scalar &color)
{
if ((pointSet[jointA].x != 0 || pointSet[jointA].y != 0) &&
(pointSet[jointB].x != 0 || pointSet[jointB].y != 0))
cv::line(img, pointSet[jointA], pointSet[jointB], color, 2);
}
void drawSkeleton(cv::Mat &img, cv::Point pointSet[], int which_one)
{
// One color per skeleton slot so several tracked people can be told apart.
cv::Scalar color;
switch (which_one)
{
case 0:
color = cv::Scalar(255, 0, 0);
break;
case 1:
color = cv::Scalar(0, 255, 0);
break;
case 2:
color = cv::Scalar(0, 0, 255);
break;
case 3:
color = cv::Scalar(255, 255, 0);
break;
case 4:
color = cv::Scalar(255, 0, 255);
break;
case 5:
color = cv::Scalar(0, 255, 255);
break;
}
// Spine
drawBone(img, pointSet, NUI_SKELETON_POSITION_HEAD, NUI_SKELETON_POSITION_SHOULDER_CENTER, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SPINE, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_SPINE, NUI_SKELETON_POSITION_HIP_CENTER, color);
// Left arm
drawBone(img, pointSet, NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_LEFT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_SHOULDER_LEFT, NUI_SKELETON_POSITION_ELBOW_LEFT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_ELBOW_LEFT, NUI_SKELETON_POSITION_WRIST_LEFT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_WRIST_LEFT, NUI_SKELETON_POSITION_HAND_LEFT, color);
// Right arm
drawBone(img, pointSet, NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_RIGHT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_SHOULDER_RIGHT, NUI_SKELETON_POSITION_ELBOW_RIGHT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_ELBOW_RIGHT, NUI_SKELETON_POSITION_WRIST_RIGHT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_WRIST_RIGHT, NUI_SKELETON_POSITION_HAND_RIGHT, color);
// Left leg
drawBone(img, pointSet, NUI_SKELETON_POSITION_HIP_CENTER, NUI_SKELETON_POSITION_HIP_LEFT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_HIP_LEFT, NUI_SKELETON_POSITION_KNEE_LEFT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_KNEE_LEFT, NUI_SKELETON_POSITION_ANKLE_LEFT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_ANKLE_LEFT, NUI_SKELETON_POSITION_FOOT_LEFT, color);
// Right leg
drawBone(img, pointSet, NUI_SKELETON_POSITION_HIP_CENTER, NUI_SKELETON_POSITION_HIP_RIGHT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_HIP_RIGHT, NUI_SKELETON_POSITION_KNEE_RIGHT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_KNEE_RIGHT, NUI_SKELETON_POSITION_ANKLE_RIGHT, color);
drawBone(img, pointSet, NUI_SKELETON_POSITION_ANKLE_RIGHT, NUI_SKELETON_POSITION_FOOT_RIGHT, color);
}
void getSkeletonImage(cv::Mat & skeletonImg, cv::Mat & colorImg)
{
NUI_SKELETON_FRAME skeletonFrame = { 0 }; // skeleton frame
bool foundSkeleton = false;
HRESULT hr = NuiSkeletonGetNextFrame(0, &skeletonFrame);
if (SUCCEEDED(hr))
{
// NUI_SKELETON_COUNT is the number of skeleton slots (i.e. how many people can be detected).
for (int i = 0; i < NUI_SKELETON_COUNT; i++)
{
NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;
// Kinect can detect up to 6 people but tracks at most 2 skeletons; check whether this one is tracked.
if (trackingState == NUI_SKELETON_TRACKED)
{
foundSkeleton = true;
}
}
}
if (!foundSkeleton)
{
return;
}
NuiTransformSmooth(&skeletonFrame, NULL);
skeletonImg.setTo(cv::Scalar(0, 0, 0)); // clear the skeleton image before redrawing
for (int i = 0; i < NUI_SKELETON_COUNT; i++)
{
// A skeleton counts as valid only if it is tracked and the shoulder-center (neck) joint is tracked.
if (skeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED &&
skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER] != NUI_SKELETON_POSITION_NOT_TRACKED)
{
float fx, fy;
// Take the coordinates of every tracked joint and convert them to depth-image coordinates,
// because the joints are marked on the depth-sized images.
// NUI_SKELETON_POSITION_COUNT is the number of joints per skeleton, which is 20.
for (int j = 0; j < NUI_SKELETON_POSITION_COUNT; j++)
{
NuiTransformSkeletonToDepthImage(skeletonFrame.SkeletonData[i].SkeletonPositions[j], &fx, &fy);
skeletonPoint[i][j].x = (int)fx;
skeletonPoint[i][j].y = (int)fy;
}
for (int j = 0; j < NUI_SKELETON_POSITION_COUNT; j++)
{
if (skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[j] != NUI_SKELETON_POSITION_NOT_TRACKED)
{
cv::circle(skeletonImg, skeletonPoint[i][j], 3, cv::Scalar(0, 255, 255), 1, 8, 0);
tracked[i] = true;
// Also draw the skeleton joints on the color image.
LONG color_x, color_y;
NuiImageGetColorPixelCoordinatesFromDepthPixel(NUI_IMAGE_RESOLUTION_640x480, 0,
skeletonPoint[i][j].x, skeletonPoint[i][j].y, 0, &color_x, &color_y);
colorPoint[i][j].x = (int)color_x;
colorPoint[i][j].y = (int)color_y;
cv::circle(colorImg, colorPoint[i][j], 4, cv::Scalar(0, 255, 255), 1, 8, 0);
}
}
drawSkeleton(skeletonImg, skeletonPoint[i], i);
drawSkeleton(colorImg, colorPoint[i], i);
}
}
}
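To make the pixel layout that Depth2RGB relies on a bit more concrete, here is a minimal stand-alone sketch. It is independent of the Kinect SDK and uses a made-up raw sample value; it unpacks one 16-bit value from the depth-with-player-index stream into the 3-bit player ID and the 13-bit depth in millimeters, then applies the same 0x0fff scaling used above:
#include <cstdint>
#include <iostream>
int main()
{
// Hypothetical raw 16-bit sample from the depth-with-player-index stream:
// low 3 bits = player index, high 13 bits = depth in millimeters.
uint16_t raw = (2000 << 3) | 2;         // 2000 mm, player index 2
uint16_t depthMM = (raw & 0xfff8) >> 3; // -> 2000
uint16_t player = raw & 0x0007;         // -> 2
int gray = 255 * depthMM / 0x0fff;      // -> 124, same scaling as in Depth2RGB
std::cout << "depth = " << depthMM << " mm, player = " << player
<< ", gray = " << gray << std::endl;
return 0;
}
With this scaling, depths up to 0x0fff (4095 mm) map onto 0-255; anything farther falls outside the 8-bit range, which is acceptable for a quick visualization.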
Results
(The result screenshots include a classmate of mine, so the faces have been pixelated.)
References
- https://blog.csdn.net/zouxy09/article/details/8163265
- https://blog.csdn.net/timebomb/article/details/7169372