Microsoft Azure Kinect DK: How to Output Body Tracking JSON Data to a Local File
1. Install dependencies: in Solution Explorer, right-click -> Manage NuGet Packages for Solution and install the following three packages (the equivalent Package Manager Console commands are shown after the list):
Microsoft.Azure.Kinect.BodyTracking
Microsoft.Azure.Kinect.BodyTracking.ONNXRuntime
Microsoft.Azure.Kinect.Sensor
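Alternatively, the same three packages can be installed from the Package Manager Console (Tools -> NuGet Package Manager -> Package Manager Console); no version needs to be given, the latest stable one is picked:

Install-Package Microsoft.Azure.Kinect.Sensor
Install-Package Microsoft.Azure.Kinect.BodyTracking
Install-Package Microsoft.Azure.Kinect.BodyTracking.ONNXRuntime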
2. Create a blank C++ project in Visual Studio and run the code below.
Common abbreviations:
k4a: Kinect for Azure
k4abt: Kinect for Azure Body Tracking
3. Notes: check whether memory usage keeps growing, and make sure resources are released in the appropriate places.
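As a reference for what "released in the appropriate places" means in the program below: the capture and the body frame are released inside the loop once they have been consumed, while the tracker and the device are torn down once at the end. A minimal sketch of that final teardown, collected into a helper (the function name is my own):

#include <k4a/k4a.h>
#include <k4abt.h>

// End-of-program teardown: the same four calls appear at the end of main below.
// Per-frame resources (k4a_capture_t, k4abt_frame_t) must additionally be released
// inside the capture loop with k4a_capture_release / k4abt_frame_release.
static void shutdown_body_tracking(k4a_device_t device, k4abt_tracker_t tracker)
{
    k4abt_tracker_shutdown(tracker);
    k4abt_tracker_destroy(tracker);
    k4a_device_stop_cameras(device);
    k4a_device_close(device);
}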
Calibration between the depth sensor and the color camera: the two cameras have different coordinate origins and different units (millimeters for the depth sensor, pixels for the color image).
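For example, each joint position reported by the body tracker is a 3D point in millimeters in the depth camera's coordinate system. The SDK function k4a_calibration_3d_to_2d can project such a point into pixel coordinates of the color image. A minimal sketch (the helper name is my own; it requires the calibration to have been retrieved with a color resolution enabled, unlike the sample below, which turns the color camera off):

#include <stdio.h>
#include <k4a/k4a.h>
#include <k4abt.h>

// Project a joint position (3D, millimeters, depth camera coordinate system) into
// 2D pixel coordinates of the color image. The calibration must have been obtained
// with a color resolution such as K4A_COLOR_RESOLUTION_720P.
static void print_joint_in_color_pixels(const k4a_calibration_t* calibration, k4abt_joint_t joint)
{
    k4a_float2_t pixel;
    int valid = 0;
    if (k4a_calibration_3d_to_2d(calibration,
                                 &joint.position,            // source point, in mm
                                 K4A_CALIBRATION_TYPE_DEPTH, // joint positions live in depth camera space
                                 K4A_CALIBRATION_TYPE_COLOR, // project into the color image
                                 &pixel,
                                 &valid) == K4A_RESULT_SUCCEEDED && valid)
    {
        printf("joint maps to color pixel (%.1f, %.1f)\n", pixel.xy.x, pixel.xy.y);
    }
}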
If you cannot find the create_json_string function, see this post on my blog: "Write a C function that takes a keys array and a values array and outputs a JSON string".
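In case that post is not at hand, here is a minimal sketch of such a helper, matching the way it is called in the code below; all values are emitted as JSON strings for simplicity, and the returned buffer is heap-allocated, so the caller must free() it:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

// Build a JSON object such as {"positionX":"12.5","confidence_level":"2"} from
// parallel key/value arrays.
char* create_json_string(const char* keys[], const char* values[], int key_count)
{
    size_t cap = 3; // '{' + '}' + terminating NUL
    for (int i = 0; i < key_count; i++)
        cap += strlen(keys[i]) + strlen(values[i]) + 6; // quotes, colon, comma
    char* json = (char*)malloc(cap);
    if (json == NULL)
        return NULL;
    strcpy_s(json, cap, "{");
    for (int i = 0; i < key_count; i++)
    {
        strcat_s(json, cap, "\"");
        strcat_s(json, cap, keys[i]);
        strcat_s(json, cap, "\":\"");
        strcat_s(json, cap, values[i]);
        strcat_s(json, cap, "\"");
        if (i + 1 < key_count)
            strcat_s(json, cap, ",");
    }
    strcat_s(json, cap, "}");
    return json;
}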
#include <stdio.h>
#include <stdlib.h>
#include <limits.h>   // INT_MAX, used in the main loop condition
#include <iostream>
#include <fstream>
#include <map>
#include <k4a/k4a.h>
#include <k4abt.h>
#include "Tools.h"    // declares create_json_string (see the note above)
using namespace std;

const int num_size = 30;
#define VERIFY(result, error)                                                                            \
    if (result != K4A_RESULT_SUCCEEDED)                                                                  \
    {                                                                                                    \
        printf("%s \n - (File: %s, Function: %s, Line: %d)\n", error, __FILE__, __FUNCTION__, __LINE__); \
        exit(1);                                                                                         \
    }
// Serialize a single joint (position in mm, orientation quaternion, confidence level)
// to a JSON object and write it to a local file.
void writeToFileWithJsonFormat(k4abt_joint_t joint)
{
    FILE* fp;
    errno_t err;
    // Note: "w" truncates the file on every call, so only the last joint survives.
    // Open with "a" instead if every record should be kept (see the variant after main).
    err = fopen_s(&fp, "C://Users//Joe//Desktop//c#.txt", "w");
    if (fp)
    {
        char positionX[num_size] = "";
        sprintf_s(positionX, "%f", joint.position.xyz.x);
        char positionY[num_size] = "";
        sprintf_s(positionY, "%f", joint.position.xyz.y);
        char positionZ[num_size] = "";
        sprintf_s(positionZ, "%f", joint.position.xyz.z);
        char orientationW[num_size] = "";
        sprintf_s(orientationW, "%f", joint.orientation.wxyz.w);
        char orientationX[num_size] = "";
        sprintf_s(orientationX, "%f", joint.orientation.wxyz.x);
        char orientationY[num_size] = "";
        sprintf_s(orientationY, "%f", joint.orientation.wxyz.y);
        char orientationZ[num_size] = "";
        sprintf_s(orientationZ, "%f", joint.orientation.wxyz.z);
        char confidence_level[num_size] = "";
        sprintf_s(confidence_level, "%d", joint.confidence_level);

        const char* keys[] = { "positionX", "positionY", "positionZ", "orientationW", "orientationX", "orientationY", "orientationZ", "confidence_level" };
        const char* values[] = { positionX, positionY, positionZ, orientationW, orientationX, orientationY, orientationZ, confidence_level };
        int key_count = sizeof(keys) / sizeof(keys[0]);

        char* joint_string = create_json_string(keys, values, key_count);
        int res = fputs(joint_string, fp);
        printf("Json write result:%d %s", res, joint_string);
        free(joint_string); // release the string returned by create_json_string (assuming it is heap-allocated)
        fclose(fp);
    }
}
int main()
{
    k4a_device_t device = NULL;
    VERIFY(k4a_device_open(0, &device), "Open K4A Device failed!");

    // Start camera. Make sure depth camera is enabled.
    k4a_device_configuration_t deviceConfig = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
    deviceConfig.depth_mode = K4A_DEPTH_MODE_NFOV_UNBINNED;
    deviceConfig.color_resolution = K4A_COLOR_RESOLUTION_OFF;
    VERIFY(k4a_device_start_cameras(device, &deviceConfig), "Start K4A cameras failed!");

    k4a_calibration_t sensor_calibration;
    VERIFY(k4a_device_get_calibration(device, deviceConfig.depth_mode, deviceConfig.color_resolution, &sensor_calibration),
           "Get depth camera calibration failed!");

    k4abt_tracker_t tracker = NULL;
    k4abt_tracker_configuration_t tracker_config = K4ABT_TRACKER_CONFIG_DEFAULT;
    VERIFY(k4abt_tracker_create(&sensor_calibration, tracker_config, &tracker), "Body tracker initialization failed!");

    int frame_count = 0;
    do
    {
        k4a_capture_t sensor_capture;
        k4a_wait_result_t get_capture_result = k4a_device_get_capture(device, &sensor_capture, K4A_WAIT_INFINITE);
        if (get_capture_result == K4A_WAIT_RESULT_SUCCEEDED)
        {
            frame_count++;
            k4a_wait_result_t queue_capture_result = k4abt_tracker_enqueue_capture(tracker, sensor_capture, K4A_WAIT_INFINITE);
            k4a_capture_release(sensor_capture); // Remember to release the sensor capture once you finish using it
            if (queue_capture_result == K4A_WAIT_RESULT_TIMEOUT)
            {
                // It should never hit timeout when K4A_WAIT_INFINITE is set.
                printf("Error! Add capture to tracker process queue timeout!\n");
                break;
            }
            else if (queue_capture_result == K4A_WAIT_RESULT_FAILED)
            {
                printf("Error! Add capture to tracker process queue failed!\n");
                break;
            }

            k4abt_frame_t body_frame = NULL;
            k4a_wait_result_t pop_frame_result = k4abt_tracker_pop_result(tracker, &body_frame, K4A_WAIT_INFINITE);
            if (pop_frame_result == K4A_WAIT_RESULT_SUCCEEDED)
            {
                // Successfully popped the body tracking result. Start your processing
                size_t num_bodies = k4abt_frame_get_num_bodies(body_frame);
                for (size_t i = 0; i < num_bodies; i++)
                {
                    k4abt_skeleton_t skeleton;
                    k4abt_frame_get_body_skeleton(body_frame, i, &skeleton);
                    uint32_t id = k4abt_frame_get_body_id(body_frame, i);
                    // Extract the head joint (K4ABT_JOINT_NECK, enum value 3, would select the neck instead)
                    k4abt_joint_t joint = skeleton.joints[K4ABT_JOINT_HEAD];
                    writeToFileWithJsonFormat(joint);
                }
                printf("%zu bodies are detected!\n", num_bodies);
                k4abt_frame_release(body_frame); // Remember to release the body frame once you finish using it
            }
            else if (pop_frame_result == K4A_WAIT_RESULT_TIMEOUT)
            {
                // It should never hit timeout when K4A_WAIT_INFINITE is set.
                printf("Error! Pop body frame result timeout!\n");
                break;
            }
            else
            {
                printf("Pop body frame result failed!\n");
                break;
            }
        }
        else if (get_capture_result == K4A_WAIT_RESULT_TIMEOUT)
        {
            // It should never hit timeout when K4A_WAIT_INFINITE is set.
            printf("Error! Get depth frame time out!\n");
            break;
        }
        else
        {
            printf("Get depth capture returned error: %d\n", get_capture_result);
            break;
        }
    } while (frame_count < INT_MAX);

    printf("Finished body tracking processing!\n");

    k4abt_tracker_shutdown(tracker);
    k4abt_tracker_destroy(tracker);
    k4a_device_stop_cameras(device);
    k4a_device_close(device);
    return 0;
}
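As written, writeToFileWithJsonFormat stores only the head joint and reopens the file in "w" mode, so each call overwrites the previous record. If every joint of every frame should be kept, a variant along these lines could be used instead (appendSkeletonToFile is a name of my own choosing, and it relies on create_json_string behaving as sketched earlier):

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <k4abt.h>
#include "Tools.h" // create_json_string

// Append one JSON object per joint of a whole skeleton, one per line, so records
// from successive frames accumulate instead of being overwritten.
void appendSkeletonToFile(const k4abt_skeleton_t* skeleton, uint32_t body_id)
{
    FILE* fp = NULL;
    if (fopen_s(&fp, "C://Users//Joe//Desktop//c#.txt", "a") != 0 || fp == NULL)
        return;
    for (int j = 0; j < (int)K4ABT_JOINT_COUNT; j++)
    {
        k4abt_joint_t joint = skeleton->joints[j];
        char body[16], jointId[16], px[30], py[30], pz[30], conf[16];
        sprintf_s(body, "%u", body_id);
        sprintf_s(jointId, "%d", j);
        sprintf_s(px, "%f", joint.position.xyz.x);
        sprintf_s(py, "%f", joint.position.xyz.y);
        sprintf_s(pz, "%f", joint.position.xyz.z);
        sprintf_s(conf, "%d", joint.confidence_level);
        const char* keys[] = { "bodyId", "jointId", "positionX", "positionY", "positionZ", "confidence_level" };
        const char* values[] = { body, jointId, px, py, pz, conf };
        char* json = create_json_string(keys, values, (int)(sizeof(keys) / sizeof(keys[0])));
        fputs(json, fp);
        fputs("\n", fp);
        free(json); // assuming create_json_string heap-allocates, as in the sketch above
    }
    fclose(fp);
}

Inside the body loop it would be called as appendSkeletonToFile(&skeleton, id); in place of extracting a single joint.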