标签:
环境:kinect1.7+opencv2.4+vc2015
#include <afxsock.h>
#include <cmath>
#include <Windows.h>
#include <NuiApi.h>
#include <opencv.hpp>
using namespace std;
using namespace cv;
//---Tuning parameters-------------------------------------------------------
#define NUM_OF_MODEL 3 //number of action templates stored on disk (monitor_file.xml)
#define NUM_OF_SKELETONS 10 //joints used per frame; in seated mode only 10 upper-body joints (skeleton indices 2-11) are read
#define NUM_OF_FRAME 120 //number of video frames processed as one matching batch
#define SHAKE_THRESHOLD_VALUE 0.1 //anti-jitter threshold: a joint displacement only counts if it exceeds 0.1 m
#define INERRED_THRESHOLD_VALUE 0.3 //when a joint is lost the Kinect infers its position; reject inferred positions that moved more than 0.3 m
Mat modelskeleton[NUM_OF_MODEL]; //the per-frame displacement templates loaded from disk live here
//Map a template/action id to its display name ("no action" for id 0,
//empty string for any id outside 0..3).
string int2type(int _intval)
{
	switch (_intval)
	{
	case 0: return "no action";
	case 1: return "drinking"; //first template action: drinking water
	case 2: return "writing";
	case 3: return "on_phone";
	}
	return string(); //unknown id: same empty result as the original switch
}
//Convert an integer to its decimal string representation.
//Fixed: the original formatted into a 10-char stack buffer with sprintf_s,
//assigned it to a local string it never returned, and then returned the raw
//char array instead; std::to_string is bounds-safe and direct.
string int2str(int _intval)
{
	return to_string(_intval);
}
//Convert a recorded template (NUM_OF_FRAME+1 rows of absolute joint
//positions) into NUM_OF_FRAME rows of per-frame displacement vectors.
//The result is stored by the caller into modelskeleton[].
//NOTE: the `id` parameter is accepted but not used here.
Mat setInput0(Mat now, int id)
{
	cout << "done" << endl;
	Mat temp(NUM_OF_FRAME, NUM_OF_SKELETONS, CV_32FC3, Scalar(0, 0, 0));
	const int floatsPerRow = NUM_OF_SKELETONS * 3; //x,y,z per joint
	for (int i = 0; i < NUM_OF_FRAME; i++)
	{
		const float *cur = now.ptr<float>(i);
		const float *next = now.ptr<float>(i + 1);
		float *out = temp.ptr<float>(i);
		//displacement = position in frame i+1 minus position in frame i
		for (int k = 0; k < floatsPerRow; k++)
			out[k] = next[k] - cur[k];
	}
	return temp;
}
//Draw the tracked skeleton: bones as white lines, joints as circles
//(inferred / untracked joints are drawn gray, fully tracked ones white).
//image    - grayscale canvas (depth-image resolution)
//pointSet - the 20 joints already projected to depth-image coordinates
//now      - the raw skeleton frame (for per-joint tracking state)
//userid   - skeleton slot of the tracked user
void drawSkeleton(Mat &image, CvPoint pointSet[], NUI_SKELETON_FRAME now,int userid)
{
	//Bone list (endpoint A, endpoint B). Table-driven to replace twenty
	//copy-pasted "both endpoints valid -> draw line" blocks.
	static const int bones[][2] = {
		//torso
		{ NUI_SKELETON_POSITION_HEAD,            NUI_SKELETON_POSITION_SHOULDER_CENTER },
		{ NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SPINE },
		{ NUI_SKELETON_POSITION_SPINE,           NUI_SKELETON_POSITION_HIP_CENTER },
		//left arm
		{ NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_LEFT },
		{ NUI_SKELETON_POSITION_SHOULDER_LEFT,   NUI_SKELETON_POSITION_ELBOW_LEFT },
		{ NUI_SKELETON_POSITION_ELBOW_LEFT,      NUI_SKELETON_POSITION_WRIST_LEFT },
		{ NUI_SKELETON_POSITION_WRIST_LEFT,      NUI_SKELETON_POSITION_HAND_LEFT },
		//right arm
		{ NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_RIGHT },
		{ NUI_SKELETON_POSITION_SHOULDER_RIGHT,  NUI_SKELETON_POSITION_ELBOW_RIGHT },
		{ NUI_SKELETON_POSITION_ELBOW_RIGHT,     NUI_SKELETON_POSITION_WRIST_RIGHT },
		{ NUI_SKELETON_POSITION_WRIST_RIGHT,     NUI_SKELETON_POSITION_HAND_RIGHT },
		//left leg
		{ NUI_SKELETON_POSITION_HIP_CENTER,      NUI_SKELETON_POSITION_HIP_LEFT },
		{ NUI_SKELETON_POSITION_HIP_LEFT,        NUI_SKELETON_POSITION_KNEE_LEFT },
		{ NUI_SKELETON_POSITION_KNEE_LEFT,       NUI_SKELETON_POSITION_ANKLE_LEFT },
		{ NUI_SKELETON_POSITION_ANKLE_LEFT,      NUI_SKELETON_POSITION_FOOT_LEFT },
		//right leg
		{ NUI_SKELETON_POSITION_HIP_CENTER,      NUI_SKELETON_POSITION_HIP_RIGHT },
		{ NUI_SKELETON_POSITION_HIP_RIGHT,       NUI_SKELETON_POSITION_KNEE_RIGHT },
		{ NUI_SKELETON_POSITION_KNEE_RIGHT,      NUI_SKELETON_POSITION_ANKLE_RIGHT },
		{ NUI_SKELETON_POSITION_ANKLE_RIGHT,     NUI_SKELETON_POSITION_FOOT_RIGHT },
	};
	CvScalar color = cvScalar(255);
	for (int b = 0; b < (int)(sizeof(bones) / sizeof(bones[0])); b++)
	{
		const CvPoint &p0 = pointSet[bones[b][0]];
		const CvPoint &p1 = pointSet[bones[b][1]];
		//A joint projected to (0,0) means "no data": skip bones with a missing endpoint.
		if ((p0.x != 0 || p0.y != 0) && (p1.x != 0 || p1.y != 0))
			line(image, p0, p1, color, 2);
	}
	for (int i = 0; i < 20; i++)
	{
		if (now.SkeletonData[userid].SkeletonPositions[i].x)
		{
			//tracking state 0 (NOT_TRACKED) or 1 (INFERRED): gray circle;
			//state 2 (TRACKED): white circle
			if (now.SkeletonData[userid].eSkeletonPositionTrackingState[i] == 1 || now.SkeletonData[userid].eSkeletonPositionTrackingState[i] == 0)
				circle(image, pointSet[i], 5, Scalar(128), 2);
			else
				circle(image, pointSet[i], 5, Scalar(255), 2);
		}
	}
}
//用于匹配的类
class Process {
private:
Mat framess;
Vector4 mark;//追踪到当前最后一帧人物的中心坐标
int matching;
float disvalsModel[NUM_OF_MODEL][NUM_OF_FRAME] = { 0 };
public:
Process() :matching(0)
{
framess.create(NUM_OF_FRAME, NUM_OF_SKELETONS, CV_32FC3);
mark = { 0 };
}
void classify(Mat &outmark)//将匹配到的动作编号转化为文字保存进mark里
{
float fx, fy;
NuiTransformSkeletonToDepthImage(mark, &fx, &fy);
fx *= 2;
fy *= 2;
circle(outmark, cvPoint((int)fx,(int)fy),2,Scalar(64,255,255 ),3);
const string str = "the pose is " + int2type(matching);
putText(outmark, str, cvPoint((int)fx, (int)fy), CV_FONT_HERSHEY_COMPLEX,1,Scalar(64, 255, 255));
}
void shakeMask(NUI_SKELETON_FRAME past, NUI_SKELETON_FRAME &now, int userid)
{
for (int j = 3;j < NUM_OF_SKELETONS;j++)
{
if (now.SkeletonData[userid].eSkeletonPositionTrackingState[j] == 1 && (
(now.SkeletonData[userid].SkeletonPositions[j].x - past.SkeletonData[userid].SkeletonPositions[j].x)>INERRED_THRESHOLD_VALUE ||
(now.SkeletonData[userid].SkeletonPositions[j].y - past.SkeletonData[userid].SkeletonPositions[j].y) > INERRED_THRESHOLD_VALUE ||
(now.SkeletonData[userid].SkeletonPositions[j].z - past.SkeletonData[userid].SkeletonPositions[j].z) > INERRED_THRESHOLD_VALUE))
{
now.SkeletonData[userid].SkeletonPositions[j].x = past.SkeletonData[userid].SkeletonPositions[j].x;
now.SkeletonData[userid].SkeletonPositions[j].y = past.SkeletonData[userid].SkeletonPositions[j].y;
now.SkeletonData[userid].SkeletonPositions[j].z = past.SkeletonData[userid].SkeletonPositions[j].z;
}
if (now.SkeletonData[userid].SkeletonPositions[j].x - past.SkeletonData[userid].SkeletonPositions[j].x < SHAKE_THRESHOLD_VALUE)
now.SkeletonData[userid].SkeletonPositions[j].x = past.SkeletonData[userid].SkeletonPositions[j].x;
if (now.SkeletonData[userid].SkeletonPositions[j].y - past.SkeletonData[userid].SkeletonPositions[j].y < SHAKE_THRESHOLD_VALUE)
now.SkeletonData[userid].SkeletonPositions[j].y = past.SkeletonData[userid].SkeletonPositions[j].y;
if (now.SkeletonData[userid].SkeletonPositions[j].z - past.SkeletonData[userid].SkeletonPositions[j].z < SHAKE_THRESHOLD_VALUE)
now.SkeletonData[userid].SkeletonPositions[j].z = past.SkeletonData[userid].SkeletonPositions[j].z;
}
}
void setInput(NUI_SKELETON_FRAME past, NUI_SKELETON_FRAME now, int counter, int userid)//前后两帧做差求位移向量
{
if (counter == NUM_OF_FRAME-1)
{
mark.x = now.SkeletonData[userid].Position.x;
mark.y = now.SkeletonData[userid].Position.y;
mark.z = now.SkeletonData[userid].Position.z;
}
for (int j = 0;j < NUM_OF_SKELETONS;j++)
{
framess.ptr<float>(counter)[3 * j] = now.SkeletonData[userid].SkeletonPositions[j].x - past.SkeletonData[userid].SkeletonPositions[j].x;
framess.ptr<float>(counter)[3 * j + 1] = now.SkeletonData[userid].SkeletonPositions[j].y - past.SkeletonData[userid].SkeletonPositions[j].y;
framess.ptr<float>(counter)[3 * j + 2] = now.SkeletonData[userid].SkeletonPositions[j].z - past.SkeletonData[userid].SkeletonPositions[j].z;
}
}
float rtwCal(int flag)
{
float rtwMat[3] = { 0 };
Mat rtwmat(NUM_OF_FRAME, NUM_OF_FRAME, CV_32FC1, Scalar(0));
for (int i = 0;i < NUM_OF_FRAME;i++)
{
for (int j = 0;j < NUM_OF_FRAME;j++)
{
for (int k = 0;k < (NUM_OF_SKELETONS * 3);k++)//多维向量余弦计算
{
rtwMat[0] += framess.ptr<float>(i)[k] * modelskeleton[flag].ptr<float>(j)[k];
rtwMat[1] += framess.ptr<float>(i)[k] * framess.ptr<float>(i)[k];
rtwMat[2] += modelskeleton[flag].ptr<float>(j)[k] * modelskeleton[flag].ptr<float>(j)[k];
}
rtwmat.ptr<float>(i)[j] = rtwMat[0] / (sqrt(rtwMat[1])*sqrt(rtwMat[2])); }
}
float routeVal[NUM_OF_FRAME] = { 0 };
for (int i = 0;i < NUM_OF_FRAME;i++)
{
for (int j = 0;j < NUM_OF_FRAME;j++)
{
routeVal[i] += rtwmat.ptr<float>(j)[(j + i) % NUM_OF_FRAME];
}
}
float result = routeVal[0];
for (int i = 0;i < NUM_OF_FRAME-1;i++)
{
if (result<routeVal[i+1])
result = routeVal[i + 1];
}
return result;
}
void processBegin()
{
int result = 0;
float rtwDistance[NUM_OF_MODEL] = { 0 };
for (int i = 0;i < NUM_OF_MODEL;i++)
{
rtwDistance[i] = rtwCal(i);
cout << "the distance between " + int2str(i+1) << "is" << rtwDistance[i] << endl;
}
float temp = rtwDistance[0];
for (int i = 0;i < NUM_OF_MODEL - 1;i++)
{
if (temp < rtwDistance[i + 1])
{
temp = rtwDistance[i + 1];
result = i + 1;
}
}
matching = result+1;
if (temp < 0.3)
matching = 0;
cout << "this time is type" << int2str(matching) << endl << endl;
}
};
//Recognizer entry point: initialize the sensor, wait for a tracked user,
//load the templates, then repeatedly collect NUM_OF_FRAME skeleton frames,
//classify the batch, and overlay the result on the color stream.
int main()
{
Mat image(480, 640, CV_8UC3, Scalar(0, 0, 0)); //BGR buffer the Kinect color stream is copied into
//Initialize the sensor for skeleton + color streams.
//NOTE(review): hr from NuiInitialize / NuiSkeletonTrackingEnable is
//overwritten below without ever being checked - confirm that is intended.
HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR);
HANDLE skeletonEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
hr = NuiSkeletonTrackingEnable(skeletonEvent, NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT);
//Open the 640x480 color stream.
HANDLE ColorFrameStream = NULL;
HANDLE NextFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, NextFrameEvent, &ColorFrameStream);
if (FAILED(hr))
{
cout << "Could not open image stream video" << endl;
return hr;
}
//Previous and current skeleton frames; differenced to get displacements.
NUI_SKELETON_FRAME pastFrame = { 0 };
NUI_SKELETON_FRAME nowFrame = { 0 };
Mat mark(image.size(), CV_8UC3, Scalar(0, 0, 0));//the matching result is drawn here and composited onto every color frame shown on screen
int userid = 0;
bool bFoundSkeleton = FALSE;
//Startup: block until the sensor reports at least one fully tracked
//skeleton and remember the slot index of the (last) tracked user.
while (!bFoundSkeleton)
{
if (WaitForSingleObject(skeletonEvent, INFINITE) == 0)
{
hr = NuiSkeletonGetNextFrame(0, &nowFrame);
for (int i = 0;i < NUI_SKELETON_COUNT;i++)
{
NUI_SKELETON_TRACKING_STATE trackingState = nowFrame.SkeletonData[i].eTrackingState;
if (trackingState == NUI_SKELETON_TRACKED)
{
bFoundSkeleton = TRUE;
userid = i;
}
}
}
}
int idcounter = 0;
//Load the NUM_OF_MODEL recorded templates and convert each one from
//absolute joint positions to per-frame displacement vectors (setInput0).
FileStorage fds("monitor_file.xml", FileStorage::READ);
for (int i = 0;i < NUM_OF_MODEL;i++)
{
modelskeleton[i].create(NUM_OF_FRAME, NUM_OF_SKELETONS, CV_32FC3);
Mat temp1(NUM_OF_FRAME + 1, NUM_OF_SKELETONS, CV_32FC3);
fds["mat_id" + int2str(idcounter)] >> temp1;
modelskeleton[i] = setInput0(temp1, idcounter);
idcounter++;
}
fds.release();
while (1)
{
//One matching batch: collect NUM_OF_FRAME frames, then classify.
int frameCounter = 0;
Process process=Process();
//NOTE(review): userid is never reset, so once this for-loop exhausts the
//skeleton slots the outer while(1) spins forever with an empty body.
for (;userid < NUI_SKELETON_COUNT;userid++)
{
if (nowFrame.SkeletonData[userid].eTrackingState == NUI_SKELETON_TRACKED &&
nowFrame.SkeletonData[userid].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER] != NUI_SKELETON_POSITION_NOT_TRACKED)
{
while (frameCounter < NUM_OF_FRAME)
{
//Grab and display one color frame.
//NOTE(review): this waits on skeletonEvent, not NextFrameEvent - confirm
//the color stream is really meant to be paced by skeleton availability.
const NUI_IMAGE_FRAME * pImageFrame = NULL;
if (WaitForSingleObject(skeletonEvent, INFINITE) == 0)
{
NuiImageStreamGetNextFrame(ColorFrameStream, 0, &pImageFrame);
INuiFrameTexture * pTexture = pImageFrame->pFrameTexture;
NUI_LOCKED_RECT LockedRect;
pTexture->LockRect(0, &LockedRect, NULL, 0);
//Copy the locked BGRA buffer into the 3-channel BGR Mat, row by row.
for (int i = 0; i < image.rows; i++)
{
uchar *ptr = image.ptr<uchar>(i);
uchar *pBuffer = (uchar*)(LockedRect.pBits) + i * LockedRect.Pitch;
for (int j = 0; j < image.cols; j++)
{
ptr[3 * j] = pBuffer[4 * j];
ptr[3 * j + 1] = pBuffer[4 * j + 1];
ptr[3 * j + 2] = pBuffer[4 * j + 2];
}
}
imshow("colorImage", image + mark); //color frame plus the last classification overlay
pTexture->UnlockRect(0);
cvWaitKey(33);
NuiImageStreamReleaseFrame(ColorFrameStream, pImageFrame);
//Advance the skeleton stream, keeping the previous frame for differencing.
//NOTE(review): shakeMask() is defined but never invoked between these two
//frames - confirm whether the de-jitter step was meant to run here.
pastFrame = nowFrame;
hr = NuiSkeletonGetNextFrame(0, &nowFrame);
NuiTransformSmooth(&nowFrame, NULL);
Mat skeletonimg(240, 320, CV_8UC1, Scalar(0)); //depth-resolution canvas for the skeleton drawing
Vector4 cpoint[20] = { 0 };
CvPoint cvp[20];
for (int j = 0;j < 20;j++) //copy all 20 joint positions of the tracked user
{
cpoint[j].x = nowFrame.SkeletonData[userid].SkeletonPositions[j].x;
cpoint[j].y = nowFrame.SkeletonData[userid].SkeletonPositions[j].y;
cpoint[j].z = nowFrame.SkeletonData[userid].SkeletonPositions[j].z;
}
float fx, fy;
//Project every joint from skeleton space into 320x240 depth-image coordinates.
for (int i = 0;i < 20;i++)
{
NuiTransformSkeletonToDepthImage(cpoint[i], &fx, &fy);
cvp[i].x = (int)fx;
cvp[i].y = (int)fy;
}
drawSkeleton(skeletonimg, cvp,nowFrame,userid);
imshow("skeleton", skeletonimg);
process.setInput(pastFrame, nowFrame, frameCounter, userid); //store this frame's displacement row
frameCounter++;
}
}
process.processBegin(); //match the collected batch against the templates
Mat mark1(image.size(), CV_8UC3, Scalar(0, 0, 0));
process.classify(mark1);
mark = mark1; //keep the overlay for the next batch's display
imshow("colorImage", mark1);
waitKey(33);
break;
}
}
}
NuiShutdown();
return 0;
}
kinect每读入一帧骨骼数据,消除抖动后,与上一帧做差,得到这一帧里发生的位移向量,存入Mat framess(NUM_OF_FRAME,NUM_OF_SKELETONS,CV_32FC3)矩阵中。
获得了NUM_OF_FRAME帧数据,即framess写满了后,计算framess中每一帧的位移向量分别与模板各帧位移向量的夹角余弦。当前程序一批读入120帧,framess是120*10*3的结构,匹配用的模板计算后也是120*10*3的结构,两个结构按行求夹角余弦,即30维向量与30维向量的运算。得到的余弦值放入用于动态时间规整(DTW)的120*120矩阵中。
对于两个相同的动作,如果它们存在相位差,而且无法判断用于匹配的动作的起始点在哪里,就对动态时间规整矩阵求各条斜率为1的(环绕)对角线上矩阵元素之和,取最大者作为匹配得分。要注意这个方法只适用于帧与帧一一对应的动态时间规整。
//@这个程序就是随便写的专门用来采集模板动作的程序
#include <afxsock.h>
#include<opencv.hpp>
#include<Windows.h>
#include<NuiApi.h>
using namespace std;
using namespace cv;
#define NUM_OF_SKELETONS 10 //joints recorded per frame (seated-mode upper-body joints)
#define NUM_OF_FRAMES 121 //must be one more than the recognizer's batch size: N+1 absolute positions yield N displacement vectors after differencing
//Draw the skeleton's bones as white lines on a grayscale canvas.
//image    - drawing target (depth-image resolution)
//pointSet - the 20 joints already projected to depth-image coordinates
void drawSkeleton(Mat &image, CvPoint pointSet[])
{
	//Bone list (endpoint A, endpoint B). Table-driven to replace twenty
	//copy-pasted "both endpoints valid -> draw line" blocks.
	static const int bones[][2] = {
		//torso
		{ NUI_SKELETON_POSITION_HEAD,            NUI_SKELETON_POSITION_SHOULDER_CENTER },
		{ NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SPINE },
		{ NUI_SKELETON_POSITION_SPINE,           NUI_SKELETON_POSITION_HIP_CENTER },
		//left arm
		{ NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_LEFT },
		{ NUI_SKELETON_POSITION_SHOULDER_LEFT,   NUI_SKELETON_POSITION_ELBOW_LEFT },
		{ NUI_SKELETON_POSITION_ELBOW_LEFT,      NUI_SKELETON_POSITION_WRIST_LEFT },
		{ NUI_SKELETON_POSITION_WRIST_LEFT,      NUI_SKELETON_POSITION_HAND_LEFT },
		//right arm
		{ NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_RIGHT },
		{ NUI_SKELETON_POSITION_SHOULDER_RIGHT,  NUI_SKELETON_POSITION_ELBOW_RIGHT },
		{ NUI_SKELETON_POSITION_ELBOW_RIGHT,     NUI_SKELETON_POSITION_WRIST_RIGHT },
		{ NUI_SKELETON_POSITION_WRIST_RIGHT,     NUI_SKELETON_POSITION_HAND_RIGHT },
		//left leg
		{ NUI_SKELETON_POSITION_HIP_CENTER,      NUI_SKELETON_POSITION_HIP_LEFT },
		{ NUI_SKELETON_POSITION_HIP_LEFT,        NUI_SKELETON_POSITION_KNEE_LEFT },
		{ NUI_SKELETON_POSITION_KNEE_LEFT,       NUI_SKELETON_POSITION_ANKLE_LEFT },
		{ NUI_SKELETON_POSITION_ANKLE_LEFT,      NUI_SKELETON_POSITION_FOOT_LEFT },
		//right leg
		{ NUI_SKELETON_POSITION_HIP_CENTER,      NUI_SKELETON_POSITION_HIP_RIGHT },
		{ NUI_SKELETON_POSITION_HIP_RIGHT,       NUI_SKELETON_POSITION_KNEE_RIGHT },
		{ NUI_SKELETON_POSITION_KNEE_RIGHT,      NUI_SKELETON_POSITION_ANKLE_RIGHT },
		{ NUI_SKELETON_POSITION_ANKLE_RIGHT,     NUI_SKELETON_POSITION_FOOT_RIGHT },
	};
	CvScalar color = cvScalar(255);
	for (int b = 0; b < (int)(sizeof(bones) / sizeof(bones[0])); b++)
	{
		const CvPoint &p0 = pointSet[bones[b][0]];
		const CvPoint &p1 = pointSet[bones[b][1]];
		//A joint projected to (0,0) means "no data": skip bones with a missing endpoint.
		if ((p0.x != 0 || p0.y != 0) && (p1.x != 0 || p1.y != 0))
			line(image, p0, p1, color, 2);
	}
}
//Convert an integer to its decimal string representation.
//Fixed: the original formatted into a 10-char stack buffer with sprintf_s,
//assigned it to a local string it never returned, and then returned the raw
//char array instead; std::to_string is bounds-safe and direct.
string int2str(int _intval)
{
	return to_string(_intval);
}
//Template-capture entry point: record three actions of NUM_OF_FRAMES frames
//each (absolute joint positions) and append them to monitor_file.xml as
//"mat_id0" .. "mat_id2" for the recognizer to load.
int main()
{
Mat image(480, 640, CV_8UC3, Scalar(0, 0, 0)); //BGR buffer for the Kinect color stream
//Initialize the sensor for skeleton + color capture.
HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR);
HANDLE skeletonEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
hr = NuiSkeletonTrackingEnable(skeletonEvent, NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT);
int userid = 0;
if (FAILED(hr))
{
cout << "Could not open skeleton stream video" << endl;
NuiShutdown();
return hr;
}
//Open the 640x480 color stream.
HANDLE ColorFrameStream = NULL;
HANDLE NextFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, NextFrameEvent, &ColorFrameStream);
if (FAILED(hr))
{
cout << "Could not open image stream video" << endl;
return hr;
}
//Capture state.
NUI_SKELETON_FRAME nowFrame = { 0 };
bool bFoundSkeleton = FALSE;
bool getdata = FALSE; //set once all three templates have been recorded
int counter = 0; //number of templates recorded so far
Mat temp(NUM_OF_FRAMES, NUM_OF_SKELETONS, CV_32FC3,Scalar(0,0,0)); //absolute joint positions of one template
//Truncate/initialize the template file; each template is APPENDed below
//(writing several matrices in one WRITE session produced an unreadable file).
FileStorage fs("monitor_file.xml", FileStorage::WRITE);
fs.release();
//Startup: block until at least one skeleton is fully tracked; remember its slot.
while (!bFoundSkeleton)
{
if (WaitForSingleObject(skeletonEvent, INFINITE) == 0)
{
hr = NuiSkeletonGetNextFrame(0, &nowFrame);
for (int i=0;i < NUI_SKELETON_COUNT;i++)
{
NUI_SKELETON_TRACKING_STATE trackingState = nowFrame.SkeletonData[i].eTrackingState;
if (trackingState == NUI_SKELETON_TRACKED)
{
bFoundSkeleton = TRUE;
userid = i;
}
}
}
}
imshow("colorImage", Mat(480, 640, CV_8UC3, Scalar(64, 255, 255))); //solid flash: recording is about to start
waitKey(1000);
while (!getdata)
{
int frameCounter = 0;
for (;userid < NUI_SKELETON_COUNT;userid++)
{
if (nowFrame.SkeletonData[userid].eTrackingState == NUI_SKELETON_TRACKED &&
nowFrame.SkeletonData[userid].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER] != NUI_SKELETON_POSITION_NOT_TRACKED)
{
while (frameCounter < NUM_OF_FRAMES)
{
//Grab and display one color frame.
//NOTE(review): paced by skeletonEvent rather than NextFrameEvent - confirm intended.
const NUI_IMAGE_FRAME * pImageFrame = NULL;
if (WaitForSingleObject(skeletonEvent, INFINITE) == 0)
{
NuiImageStreamGetNextFrame(ColorFrameStream, 0, &pImageFrame);
INuiFrameTexture * pTexture = pImageFrame->pFrameTexture;
NUI_LOCKED_RECT LockedRect;
pTexture->LockRect(0, &LockedRect, NULL, 0);
//Copy the locked BGRA buffer into the 3-channel BGR Mat, row by row.
for (int i = 0; i < image.rows; i++)
{
uchar *ptr = image.ptr<uchar>(i);
uchar *pBuffer = (uchar*)(LockedRect.pBits) + i * LockedRect.Pitch;
for (int j = 0; j < image.cols; j++)
{
ptr[3 * j] = pBuffer[4 * j];
ptr[3 * j + 1] = pBuffer[4 * j + 1];
ptr[3 * j + 2] = pBuffer[4 * j + 2];
}
}
imshow("colorImage", image);
pTexture->UnlockRect(0);
NuiImageStreamReleaseFrame(ColorFrameStream, pImageFrame);
cvWaitKey(33);
//Fetch and smooth the next skeleton frame.
hr = NuiSkeletonGetNextFrame(0, &nowFrame);
NuiTransformSmooth(&nowFrame, 0);
Mat skeletonimg(240, 320, CV_8UC1, Scalar(0));//the skeleton, projected to depth-image space, is drawn here
Vector4 cpoint[20] = { 0 };
CvPoint cvp[20];
for (int j = 0;j < 20;j++)//the Kinect reports 20 joint positions in total
{
cpoint[j].x = nowFrame.SkeletonData[userid].SkeletonPositions[j].x;
cpoint[j].y = nowFrame.SkeletonData[userid].SkeletonPositions[j].y;
cpoint[j].z = nowFrame.SkeletonData[userid].SkeletonPositions[j].z;
}
float fx, fy;
//Project every joint from skeleton space into 320x240 depth-image coordinates.
for (int i = 0;i < 20;i++)
{
NuiTransformSkeletonToDepthImage(cpoint[i], &fx, &fy);
cvp[i].x = (int)fx;
cvp[i].y = (int)fy;
}
drawSkeleton(skeletonimg, cvp);//connect the joints with lines
imshow("skeleton", skeletonimg);
//NOTE(review): hr is checked only after the frame has already been drawn.
if (FAILED(hr))
{
cout << "Could not get next skeleton stream " << endl;
return hr;
}
//Record this frame's absolute joint positions into row frameCounter.
for (int j = 0;j < NUM_OF_SKELETONS;j++)
{
temp.ptr<float>(frameCounter)[3*j] = nowFrame.SkeletonData[userid].SkeletonPositions[j].x;
temp.ptr<float>(frameCounter)[3 * j+1] = nowFrame.SkeletonData[userid].SkeletonPositions[j].y;
temp.ptr<float>(frameCounter)[3 * j+2] = nowFrame.SkeletonData[userid].SkeletonPositions[j].z;
//cout << "model" + int2str(counter) << " frame" + int2str(frameCounter) << "data is: " << nowFrame.SkeletonData[userid].SkeletonPositions[j].x << nowFrame.SkeletonData[userid].SkeletonPositions[j].y << nowFrame.SkeletonData[userid].SkeletonPositions[j].z << endl;
}
}
//NOTE(review): this increment sits OUTSIDE the WaitForSingleObject block,
//so a failed wait still consumes a frame slot (row stays all zeros).
frameCounter++;
}
//Append the captured template under key "mat_id<counter>".
FileStorage fs("monitor_file.xml", FileStorage::APPEND);
fs << "mat_id" + int2str(counter) << temp;
fs.release();
counter++;
break;
}
}
if (counter == 3) //NOTE(review): magic 3 - presumably NUM_OF_MODEL in the recognizer; keep in sync
getdata = TRUE;
imshow("colorImage", Mat(480, 640, CV_8UC3, Scalar(255, 64, 255))); //solid flash: one template finished
waitKey(1000);
}
NuiShutdown();
return 0;
}
值得一提的是,如果直接用WRITE方式在同一次会话里连续写入三个矩阵,比如:
fs << "mat_" << image1 << image2 << image3;
那么读取的时候会出现能打开xml文件却读不到任何数据的情况。所以我先在循环外用WRITE方式打开xml文件将其初始化(清空),再在循环里用APPEND方式逐个追加写入。
标签:
原文地址:http://blog.csdn.net/daky_u/article/details/51405913