基本思想:手中有一个 Kinect 深度相机,需要获取被检测目标的深度图并测距,下面从零开始开发。

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_json

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_sed_02


43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_深度学习_03

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_json_04

一、下载sdk安装sdk,sdk是后续写vs代码导入库的关键头文件和库文件:​ ​Azure Kinect 传感器 SDK 下载 | Microsoft Learn​ ​ 测试一下可视化

TCP工具

链接: https://pan.baidu.com/s/1VMSDgsT8vHCwxmiKMLlKrA?pwd=8q94 提取码: 8q94

链接:https://pan.baidu.com/s/1qrtv6RJV1q57tm16NNoaBg?pwd=dvga
提取码:dvga

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_sed_05

二、测试​ ​https://github.com/microsoft/Azure-Kinect-Sensor-SDK/tree/develop/examples​ ​中的demo

设置vs的工程

包含的头文件
C:\Program Files\Azure Kinect SDK v1.4.1\sdk\include
包含的库文件
C:\Program Files\Azure Kinect SDK v1.4.1\sdk\windows-desktop\amd64\release\lib
链接的库
k4a.lib
k4arecord.lib
运行过程中需要使用dll
depthengine_2_0.dll
k4a.dll
k4arecord.dll

测试代码

#include <iostream>
#include <fstream>
#include <string>
#include <iomanip>
#include <vector>
#include <k4a/k4a.h>
using namespace std;

// Returns the serial-number string of an open device.
// On any failure the device is closed and the process exits with -1.
static string get_serial(k4a_device_t device)
{
    size_t serial_number_length = 0;

    // Size query: a NULL buffer makes the call report the required length
    // via K4A_BUFFER_RESULT_TOO_SMALL.
    if (K4A_BUFFER_RESULT_TOO_SMALL != k4a_device_get_serialnum(device, NULL, &serial_number_length))
    {
        cout << "Failed to get serial number length" << endl;
        k4a_device_close(device);
        exit(-1);
    }

    // std::vector owns the buffer, so no delete[] is needed on any path
    // (the original used raw new/delete and had to free it manually).
    vector<char> serial_number(serial_number_length);

    if (K4A_BUFFER_RESULT_SUCCEEDED != k4a_device_get_serialnum(device, serial_number.data(), &serial_number_length))
    {
        cout << "Failed to get serial number" << endl;
        k4a_device_close(device);
        exit(-1);
    }

    // The reported length includes the terminating '\0'.
    return string(serial_number.data());
}

// Opens every connected device in turn and prints its depth-camera
// calibration (resolution, intrinsics, distortion coefficients) to stdout.
static void print_calibration()
{
    uint32_t device_count = k4a_device_get_installed_count();
    cout << "Found " << device_count << " connected devices:" << endl;
    cout << fixed << setprecision(6);

    // BUG FIX: the index is now uint32_t. The original uint8_t index was
    // streamed as a *character* in the error message below, and would wrap
    // if device_count ever exceeded 255.
    for (uint32_t deviceIndex = 0; deviceIndex < device_count; deviceIndex++)
    {
        k4a_device_t device = NULL;
        if (K4A_RESULT_SUCCEEDED != k4a_device_open(deviceIndex, &device))
        {
            cout << deviceIndex << ": Failed to open device" << endl;
            exit(-1);
        }

        // The calibration query only needs the depth mode and colour
        // resolution the camera would run with; cameras are never started.
        k4a_device_configuration_t deviceConfig = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
        deviceConfig.color_format = K4A_IMAGE_FORMAT_COLOR_MJPG;
        deviceConfig.color_resolution = K4A_COLOR_RESOLUTION_1080P;
        deviceConfig.depth_mode = K4A_DEPTH_MODE_NFOV_UNBINNED;
        deviceConfig.camera_fps = K4A_FRAMES_PER_SECOND_30;
        deviceConfig.wired_sync_mode = K4A_WIRED_SYNC_MODE_STANDALONE;
        deviceConfig.synchronized_images_only = true;

        // get calibration
        k4a_calibration_t calibration;
        if (K4A_RESULT_SUCCEEDED !=
            k4a_device_get_calibration(device, deviceConfig.depth_mode, deviceConfig.color_resolution, &calibration))
        {
            cout << "Failed to get calibration" << endl;
            // BUG FIX: close the device before exiting, as get_serial does.
            k4a_device_close(device);
            exit(-1);
        }

        // References avoid copying the whole calibration struct
        // (the original `auto calib = ...` made a full copy).
        const auto& calib = calibration.depth_camera_calibration;
        const auto& param = calib.intrinsics.parameters.param;

        cout << "\n===== Device " << (int)deviceIndex << ": " << get_serial(device) << " =====\n";
        cout << "resolution width: " << calib.resolution_width << endl;
        cout << "resolution height: " << calib.resolution_height << endl;
        cout << "principal point x: " << param.cx << endl;
        cout << "principal point y: " << param.cy << endl;
        cout << "focal length x: " << param.fx << endl;
        cout << "focal length y: " << param.fy << endl;
        cout << "radial distortion coefficients:" << endl;
        cout << "k1: " << param.k1 << endl;
        cout << "k2: " << param.k2 << endl;
        cout << "k3: " << param.k3 << endl;
        cout << "k4: " << param.k4 << endl;
        cout << "k5: " << param.k5 << endl;
        cout << "k6: " << param.k6 << endl;
        cout << "center of distortion in Z=1 plane, x: " << param.codx << endl;
        cout << "center of distortion in Z=1 plane, y: " << param.cody << endl;
        cout << "tangential distortion coefficient x: " << param.p1 << endl;
        cout << "tangential distortion coefficient y: " << param.p2 << endl;
        cout << "metric radius: " << param.metric_radius << endl;

        k4a_device_close(device);
    }
}

static void calibration_blob(uint8_t deviceIndex = 0, string filename = "calibration.json")
{
k4a_device_t device = NULL;

if (K4A_RESULT_SUCCEEDED != k4a_device_open(deviceIndex, &device))
{
cout << deviceIndex << ": Failed to open device" << endl;
exit(-1);
}

size_t calibration_size = 0;
k4a_buffer_result_t buffer_result = k4a_device_get_raw_calibration(device, NULL, &calibration_size);
if (buffer_result == K4A_BUFFER_RESULT_TOO_SMALL)
{
vector<uint8_t> calibration_buffer = vector<uint8_t>(calibration_size);
buffer_result = k4a_device_get_raw_calibration(device, calibration_buffer.data(), &calibration_size);
if (buffer_result == K4A_BUFFER_RESULT_SUCCEEDED)
{
ofstream file(filename, ofstream::binary);
file.write(reinterpret_cast<const char*>(&calibration_buffer[0]), (long)calibration_size);
file.close();
cout << "Calibration blob for device " << (int)deviceIndex << " (serial no. " << get_serial(device)
<< ") is saved to " << filename << endl;
}
else
{
cout << "Failed to get calibration blob" << endl;
exit(-1);
}
}
else
{
cout << "Failed to get calibration blob size" << endl;
exit(-1);
}
}

// Prints the command-line usage text for the calibration_info tool.
static void print_usage()
{
    std::cout << "Usage: calibration_info [device_id] [output_file]" << std::endl;
    std::cout << "Using calibration_info.exe without any command line arguments will display" << std::endl
              << "calibration info of all connected devices in stdout. If a device_id is given" << std::endl
              << "(0 for default device), the calibration.json file of that device will be" << std::endl
              << "saved to the current directory." << std::endl;
}

// Entry point: no args -> dump calibration of every device; one arg ->
// save that device's calibration blob as calibration.json; two args ->
// save it under the given filename; anything else -> usage text.
int main(int argc, char** argv)
{
    switch (argc)
    {
    case 1:
        print_calibration();
        break;
    case 2:
        calibration_blob((uint8_t)atoi(argv[1]), "calibration.json");
        break;
    case 3:
        calibration_blob((uint8_t)atoi(argv[1]), argv[2]);
        break;
    default:
        print_usage();
        break;
    }

    return 0;
}

测试结果

Found 1 connected devices:

===== Device 0: 000102414212 =====
resolution width: 640
resolution height: 576
principal point x: 318.544495
principal point y: 324.605713
focal length x: 504.977936
focal length y: 505.119293
radial distortion coefficients:
k1: 1.385396
k2: 0.693018
k3: 0.031932
k4: 1.721525
k5: 1.104018
k6: 0.179520
center of distortion in Z=1 plane, x: 0.000000
center of distortion in Z=1 plane, y: 0.000000
tangential distortion coefficient x: 0.000076
tangential distortion coefficient y: 0.000153
metric radius: 0.000000

F:\Project1\x64\Release\Project1.exe (进程 3320)已退出,代码为 0。
按任意键关闭此窗口. . .

三、贴一个测试深度图的代码(vs2022 工程目录),需要引入一下 OpenCV 的库

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_深度学习_06

main.cpp

#include <k4a/k4a.hpp>

#include <fstream>
#include <iostream>
#include <vector>
#include <array>

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui_c.h>


#include "Pixel.h"
#include "DepthPixel.h"
#include "ImageProperties.h"

using namespace std;
using namespace cv;
using namespace sen;

// Shared state between the mouse callback and the main loop (the callback
// returns void, so results travel through these globals).
Rect select; // rectangle currently selected with the mouse
bool mousedown_flag = false; // true while the left mouse button is held down
bool select_flag = false; // true once a selection rectangle has been completed
Point origin; // corner where the current drag started
Mat frame;


// Mouse callback: lets the user drag a selection rectangle on the depth
// window. The callback has no return value, so results are published
// through the file-scope globals (select, mousedown_flag, select_flag,
// origin).
void onMouse(int event, int x, int y, int, void*)
{
    if (mousedown_flag)
    {
        // Update the rectangle continuously while dragging instead of
        // waiting for the button release.
        select.x = MIN(origin.x, x);
        select.y = MIN(origin.y, y);
        select.width = abs(x - origin.x);
        select.height = abs(y - origin.y);
        cout << "rect: " << select.x << " " << select.y << " " << select.width << " " << select.height << endl;
    }

    switch (event)
    {
    case CV_EVENT_LBUTTONDOWN:
        mousedown_flag = true;
        select_flag = false;
        origin = Point(x, y);
        // Start from an empty rectangle; cv::Rect contains its top-left
        // corner but not its bottom-right one.
        select = Rect(x, y, 0, 0);
        break;
    case CV_EVENT_LBUTTONUP:
        mousedown_flag = false;
        select_flag = true;
        break;
    default:
        break;
    }
}


// Prints the mean value over `select` within a 16-bit depth image.
// Does nothing for an empty selection.
// BUG FIX: the original indexed the buffer with unclamped mouse
// coordinates, reading out of bounds whenever the drag left the window;
// it also printed the average from inside the innermost loop.
void ave_depth(Rect& select, const k4a_image_t& depthImage)
{
    const int width = k4a_image_get_width_pixels(depthImage);
    const int height = k4a_image_get_height_pixels(depthImage);

    if (select.width <= 0 || select.height <= 0)
    {
        return; // empty selection: nothing to average
    }

    // Clamp the (inclusive) rectangle corners to the image.
    const int x0 = std::max(select.x, 0);
    const int y0 = std::max(select.y, 0);
    const int x1 = std::min(select.x + select.width, width - 1);
    const int y1 = std::min(select.y + select.height, height - 1);
    if (x0 > x1 || y0 > y1)
    {
        return; // selection lies entirely outside the image
    }

    const uint16_t* depthData = reinterpret_cast<const uint16_t*>(k4a_image_get_buffer(depthImage));
    unsigned long long sum_depth = 0;
    for (int h = y0; h <= y1; ++h)
    {
        for (int w = x0; w <= x1; ++w)
        {
            sum_depth += depthData[static_cast<size_t>(h) * width + w];
        }
    }

    // Report once, after both loops.
    const long long points_num = static_cast<long long>(x1 - x0 + 1) * (y1 - y0 + 1);
    cout << sum_depth / points_num << endl;
}


int main(int argc, char** argv)
{
const uint32_t deviceCount = k4a_device_get_installed_count();
if (deviceCount == 0)
{
cout << "no azure kinect devices detected!" << endl;
}

k4a_device_configuration_t config = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
config.camera_fps = K4A_FRAMES_PER_SECOND_15;
config.depth_mode = K4A_DEPTH_MODE_NFOV_UNBINNED;
config.color_format = K4A_IMAGE_FORMAT_COLOR_BGRA32;
config.color_resolution = K4A_COLOR_RESOLUTION_720P;
config.synchronized_images_only = true;

cout << "Started opening K4A device..." << endl;
k4a_device_t device = nullptr;
k4a_device_open(0, &device);
k4a_device_start_cameras(device, &config);
cout << "Finished opening K4A device." << endl;

std::vector<Pixel> depthTextureBuffer;

k4a_capture_t capture;

k4a_image_t depthImage;

cv::Mat depthFrame;

namedWindow("kinect depth map master", 1);
setMouseCallback("kinect depth map master", onMouse, 0);

while (1)
{
if (k4a_device_get_capture(device, &capture, K4A_WAIT_INFINITE) == K4A_WAIT_RESULT_SUCCEEDED)
{
{
depthImage = k4a_capture_get_depth_image(capture);

ColorizeDepthImage(depthImage, DepthPixelColorizer::ColorizeBlueToRed, GetDepthModeRange(config.depth_mode), &depthTextureBuffer);

depthFrame = cv::Mat(k4a_image_get_height_pixels(depthImage), k4a_image_get_width_pixels(depthImage), CV_8UC4, depthTextureBuffer.data());

//画出矩形框
rectangle(depthFrame, select, Scalar(0, 0, 255), 1, 8, 0);//能够实时显示在画矩形窗口时的痕迹
ave_depth(select, depthImage);//计算框选区域的平均深度

cv::imshow("kinect depth map master", depthFrame);

k4a_image_release(depthImage);
}

}
if (waitKey(30) == 27 || waitKey(30) == 'q')
{
k4a_capture_release(capture);
k4a_device_close(device);
break;
}
}
return 0;
}

ImageProperties.cpp

#include <cmath>
#include<iostream>

#include "ImageProperties.h"

// Converts every 16-bit sample of a DEPTH16/IR16 image into a colour via
// the supplied visualization function, writing the result into `buffer`
// (resized to one Pixel per source pixel, row-major order).
void sen::ColorizeDepthImage(const k4a::image& depthImage,
DepthPixelVisualizationFunction visualizationFn,
std::pair<uint16_t, uint16_t> expectedValueRange,
std::vector<Pixel>* buffer)
{
    // Only depth-style formats carry 16-bit values we can colorize.
    const k4a_image_format_t imageFormat = depthImage.get_format();
    if (imageFormat != K4A_IMAGE_FORMAT_DEPTH16 && imageFormat != K4A_IMAGE_FORMAT_IR16)
    {
        throw std::logic_error("Attempted to colorize a non-depth image!");
    }

    const size_t pixelCount = static_cast<size_t>(depthImage.get_width_pixels()) *
                              static_cast<size_t>(depthImage.get_height_pixels());
    buffer->resize(pixelCount);

    // A single flat loop over the buffer is equivalent to the original
    // row/column nesting (same row-major traversal).
    const uint16_t* depthData = reinterpret_cast<const uint16_t*>(depthImage.get_buffer());
    for (size_t i = 0; i < pixelCount; ++i)
    {
        (*buffer)[i] = visualizationFn(depthData[i],
                                       expectedValueRange.first,
                                       expectedValueRange.second);
    }
}

DepthPixel.h

#pragma once

#include <algorithm>
#include <cstdint>
#include <limits>

#include "Pixel.h"
#include "ImageProperties.h"

namespace sen
{
// Turns raw depth samples into colour representations suitable for showing
// to humans.
class DepthPixelColorizer
{
public:
    // Maps a depth value onto the blue-to-red part of the HSV hue wheel:
    // `min` renders blue, `max` renders red. A literal zero depth value is
    // rendered as opaque black.
    static inline Pixel ColorizeBlueToRed(const DepthPixel& depthPixel,
        const DepthPixel& min,
        const DepthPixel& max)
    {
        constexpr uint8_t kChannelMax = std::numeric_limits<uint8_t>::max();

        // Opaque black doubles as the "no data" colour.
        Pixel pixel = { uint8_t(0), uint8_t(0), uint8_t(0), kChannelMax };
        if (depthPixel == 0)
        {
            return pixel;
        }

        // Clamp into [min, max], then normalize to [0, 1].
        const uint16_t clamped = std::max(std::min<uint16_t>(depthPixel, max), min);
        float hue = (clamped - min) / static_cast<float>(max - min);

        // Hue is a polar coordinate; purple starts around 2/3, so restrict
        // the span to [0, 2/3] (blue..red) and reflect it so that near
        // values come out blue and far values red.
        constexpr float kRange = 2.f / 3.f;
        hue = kRange - hue * kRange;

        float r = 0.f, g = 0.f, b = 0.f;
        ColorConvertHSVtoRGB(hue, 1.f, 1.f, r, g, b);

        pixel.Red = static_cast<uint8_t>(r * kChannelMax);
        pixel.Green = static_cast<uint8_t>(g * kChannelMax);
        pixel.Blue = static_cast<uint8_t>(b * kChannelMax);
        return pixel;
    }

};
}

Pixel.h

#pragma once
#include <cmath>

// Helper structs/typedefs to cast buffers to
//
namespace sen
{
// One 32-bit pixel in Blue, Green, Red, Alpha byte order — the layout the
// colorized depth buffer is viewed with (CV_8UC4 in main.cpp).
struct Pixel
{
uint8_t Blue;
uint8_t Green;
uint8_t Red;
uint8_t Alpha;
};

// One raw 16-bit sample from a DEPTH16/IR16 image.
// NOTE(review): uint8_t/uint16_t rely on <cstdint> arriving transitively
// (only <cmath> is included above) — consider including it explicitly.
using DepthPixel = uint16_t;
}

ImageProperties.h

#pragma once
#include <utility>
#include <k4a/k4a.hpp>

#include "Pixel.h"

namespace sen
{
// Classic HSV -> RGB conversion. h, s and v are expected in [0, 1]; the
// three out-parameters receive the R, G, B channels, also in [0, 1].
inline void ColorConvertHSVtoRGB(float h, float s, float v, float& out_r, float& out_g, float& out_b)
{
    // Zero saturation collapses to a pure grey of brightness v.
    if (s == 0.0f)
    {
        out_r = out_g = out_b = v;
        return;
    }

    // Wrap hue into [0, 1) and scale onto the six 60-degree sectors.
    h = fmodf(h, 1.0f) / (60.0f / 360.0f);
    const int sector = (int)h;
    const float f = h - (float)sector;
    const float p = v * (1.0f - s);
    const float q = v * (1.0f - s * f);
    const float t = v * (1.0f - s * (1.0f - f));

    // Channel layout per hue sector; anything outside 0..4 (including the
    // negative sectors a negative h produces) uses the last row, matching
    // the original switch's shared `case 5: default:` branch.
    const float table[6][3] = {
        { v, t, p },
        { q, v, p },
        { p, v, t },
        { p, q, v },
        { t, p, v },
        { v, p, q },
    };
    const int idx = (sector >= 0 && sector < 5) ? sector : 5;
    out_r = table[idx][0];
    out_g = table[idx][1];
    out_b = table[idx][2];
}

// Expected 16-bit value window (min, max) for each active depth mode; the
// colorizer clamps and normalizes samples against this range. Passive IR
// (and any unrecognized mode) carries no depth range and throws.
inline std::pair<uint16_t, uint16_t> GetDepthModeRange(const k4a_depth_mode_t depthMode)
{
    if (depthMode == K4A_DEPTH_MODE_NFOV_2X2BINNED)
        return { uint16_t(500), uint16_t(5800) };
    if (depthMode == K4A_DEPTH_MODE_NFOV_UNBINNED)
        return { uint16_t(500), uint16_t(4000) };
    if (depthMode == K4A_DEPTH_MODE_WFOV_2X2BINNED)
        return { uint16_t(250), uint16_t(3000) };
    if (depthMode == K4A_DEPTH_MODE_WFOV_UNBINNED)
        return { uint16_t(250), uint16_t(2500) };

    // K4A_DEPTH_MODE_PASSIVE_IR and anything unknown land here.
    throw std::logic_error("Invalid depth mode!");
}


// Signature of a per-pixel colorizing function: maps one depth sample plus
// the expected (min, max) window onto an output Pixel.
using DepthPixelVisualizationFunction = Pixel(const DepthPixel& value, const DepthPixel& min, const DepthPixel& max);

// Colorizes a DEPTH16/IR16 image into `buffer` using `visualizationFn`.
// Defined in ImageProperties.cpp.
void ColorizeDepthImage(const k4a::image& depthImage,
DepthPixelVisualizationFunction visualizationFn,
std::pair<uint16_t, uint16_t> expectedValueRange,
std::vector<Pixel>* buffer);
}

测试结果:用鼠标在目标图上划定矩形区域,输出距离值

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_#include_07

四、增加TCP数据传输

更新main.cpp文件代码

#include <k4a/k4a.hpp>

#include <fstream>
#include <iostream>
#include <vector>
#include <array>

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui_c.h>


#include "Pixel.h"
#include "DepthPixel.h"
#include "ImageProperties.h"


#include<iostream>
#include<WinSock2.h>
#pragma comment(lib, "ws2_32.lib")
#define CONNECT_NUM_MAX 10




using namespace cv;
using namespace sen;

// State carried over from the mouse-selection build. This TCP version uses
// a fixed rectangle and its setMouseCallback call is commented out, so the
// flags below are currently unused.
//Rect select;
bool mousedown_flag = false; // true while the left mouse button is held down
bool select_flag = false; // true once a selection rectangle is complete
Point origin; // corner where a mouse drag started
Mat frame;




// Returns the mean value over `select` within a 16-bit depth image (also
// printing it, as the interactive version did), or 0 for an empty or fully
// out-of-image selection.
// BUG FIX: the original indexed the buffer with unclamped coordinates,
// reading out of bounds for a rectangle extending past the image, and
// computed the average from inside the innermost loop.
int ave_depth(Rect& select, const k4a_image_t& depthImage)
{
    const int width = k4a_image_get_width_pixels(depthImage);
    const int height = k4a_image_get_height_pixels(depthImage);

    if (select.width <= 0 || select.height <= 0)
    {
        return 0; // empty selection
    }

    // Clamp the (inclusive) rectangle corners to the image.
    const int x0 = std::max(select.x, 0);
    const int y0 = std::max(select.y, 0);
    const int x1 = std::min(select.x + select.width, width - 1);
    const int y1 = std::min(select.y + select.height, height - 1);
    if (x0 > x1 || y0 > y1)
    {
        return 0; // selection lies entirely outside the image
    }

    const uint16_t* depthData = reinterpret_cast<const uint16_t*>(k4a_image_get_buffer(depthImage));
    unsigned long long sum_depth = 0;
    for (int h = y0; h <= y1; ++h)
    {
        for (int w = x0; w <= x1; ++w)
        {
            sum_depth += depthData[static_cast<size_t>(h) * width + w];
        }
    }

    const long long points_num = static_cast<long long>(x1 - x0 + 1) * (y1 - y0 + 1);
    const int frame_depth = static_cast<int>(sum_depth / points_num);
    std::cout << frame_depth << std::endl;
    return frame_depth;
}


int main(int argc, char** argv)
{

//加载套接字库
WSADATA wsaData;
int iRet = 0;
iRet = WSAStartup(MAKEWORD(2, 2), &wsaData);
if (iRet != 0)
{
std::cout << "WSAStartup(MAKEWORD(2, 2), &wsaData) execute failed!" << std::endl;;
return -1;
}
if (2 != LOBYTE(wsaData.wVersion) || 2 != HIBYTE(wsaData.wVersion))
{
WSACleanup();
std::cout << "WSADATA version is not correct!" << std::endl;
return -1;
}

//创建套接字
SOCKET serverSocket = socket(AF_INET, SOCK_STREAM, 0);
if (serverSocket == INVALID_SOCKET)
{
std::cout << "serverSocket = socket(AF_INET, SOCK_STREAM, 0) execute failed!" << std::endl;
return -1;
}

//初始化服务器地址族变量
SOCKADDR_IN addrSrv;
addrSrv.sin_addr.S_un.S_addr = htonl(INADDR_ANY);
addrSrv.sin_family = AF_INET;
addrSrv.sin_port = htons(8888);

//绑定
iRet = ::bind(serverSocket, (SOCKADDR*)&addrSrv, sizeof(SOCKADDR));
if (iRet == SOCKET_ERROR)
{
std::cout << "bind(serverSocket, (SOCKADDR*)&addrSrv, sizeof(SOCKADDR)) execute failed!" << std::endl;
return -1;
}

//监听
iRet = listen(serverSocket, CONNECT_NUM_MAX);
if (iRet == SOCKET_ERROR)
{
std::cout << "listen(serverSocket, 10) execute failed!" << std::endl;
return -1;
}

//等待连接_接收_发送
SOCKADDR_IN clientAddr;
int len = sizeof(SOCKADDR);
SOCKET connSocket = accept(serverSocket, (SOCKADDR*)&clientAddr, &len);
if (connSocket == INVALID_SOCKET)
{
std::cout << "accept(serverSocket, (SOCKADDR*)&clientAddr, &len) execute failed!" << std::endl;
return -1;
}


const uint32_t deviceCount = k4a_device_get_installed_count();
if (deviceCount == 0)
{
std::cout << "no azure kinect devices detected!" << std::endl;
}

k4a_device_configuration_t config = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
config.camera_fps = K4A_FRAMES_PER_SECOND_15;
config.depth_mode = K4A_DEPTH_MODE_NFOV_UNBINNED;
config.color_format = K4A_IMAGE_FORMAT_COLOR_BGRA32;
config.color_resolution = K4A_COLOR_RESOLUTION_720P;
config.synchronized_images_only = true;

std::cout << "Started opening K4A device..." << std::endl;
k4a_device_t device = nullptr;
k4a_device_open(0, &device);
k4a_device_start_cameras(device, &config);
std::cout << "Finished opening K4A device." << std::endl;

std::vector<Pixel> depthTextureBuffer;

k4a_capture_t capture;

k4a_image_t depthImage;

cv::Mat depthFrame;

namedWindow("kinect depth map master", 1);
//setMouseCallback("kinect depth map master", onMouse, 0);
//select = Rect(50, 50, 0, 0);

while (1)
{
if (k4a_device_get_capture(device, &capture, K4A_WAIT_INFINITE) == K4A_WAIT_RESULT_SUCCEEDED)
{
{
depthImage = k4a_capture_get_depth_image(capture);

ColorizeDepthImage(depthImage, DepthPixelColorizer::ColorizeBlueToRed, GetDepthModeRange(config.depth_mode), &depthTextureBuffer);

depthFrame = cv::Mat(k4a_image_get_height_pixels(depthImage), k4a_image_get_width_pixels(depthImage), CV_8UC4, depthTextureBuffer.data());

//画出矩形框
Rect new_select = Rect(depthFrame.cols / 3, depthFrame.rows / 3, depthFrame.cols / 4, depthFrame.rows / 4);
rectangle(depthFrame, new_select, Scalar(0, 0, 255), 1, 8, 0);//能够实时显示在画矩形窗口时的痕迹
int frame_depth=ave_depth(new_select, depthImage);//计算框选区域的平均深度




char sendBuf[100];
sprintf_s(sendBuf, " %d", frame_depth);
send(connSocket, sendBuf, strlen(sendBuf) + 1, 0);

char recvBuf[100];
recv(connSocket, recvBuf, 100, 0);
printf("%s\n", recvBuf);



cv::imshow("kinect depth map master", depthFrame);

k4a_image_release(depthImage);
}

}
if (waitKey(30) == 27 || waitKey(30) == 'q')
{
k4a_capture_release(capture);
k4a_device_close(device);
break;
}
}
closesocket(connSocket);


return 0;
}

使用tcp客户端测试软件

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_json_08

代码修改tcp服务端和客户端

服务端

#include<iostream>
#include<WinSock2.h>
#pragma comment(lib, "ws2_32.lib")
#define CONNECT_NUM_MAX 10

using namespace std;

int main()
{
//加载套接字库
WSADATA wsaData;
int iRet = 0;
iRet = WSAStartup(MAKEWORD(2, 2), &wsaData);
if (iRet != 0)
{
cout << "WSAStartup(MAKEWORD(2, 2), &wsaData) execute failed!" << endl;;
return -1;
}
if (2 != LOBYTE(wsaData.wVersion) || 2 != HIBYTE(wsaData.wVersion))
{
WSACleanup();
cout << "WSADATA version is not correct!" << endl;
return -1;
}

//创建套接字
SOCKET serverSocket = socket(AF_INET, SOCK_STREAM, 0);
if (serverSocket == INVALID_SOCKET)
{
cout << "serverSocket = socket(AF_INET, SOCK_STREAM, 0) execute failed!" << endl;
return -1;
}

//初始化服务器地址族变量
SOCKADDR_IN addrSrv;
addrSrv.sin_addr.S_un.S_addr = htonl(INADDR_ANY);
addrSrv.sin_family = AF_INET;
addrSrv.sin_port = htons(6000);

//绑定
iRet = bind(serverSocket, (SOCKADDR*)&addrSrv, sizeof(SOCKADDR));
if (iRet == SOCKET_ERROR)
{
cout << "bind(serverSocket, (SOCKADDR*)&addrSrv, sizeof(SOCKADDR)) execute failed!" << endl;
return -1;
}

//监听
iRet = listen(serverSocket, CONNECT_NUM_MAX);
if (iRet == SOCKET_ERROR)
{
cout << "listen(serverSocket, 10) execute failed!" << endl;
return -1;
}

//等待连接_接收_发送
SOCKADDR_IN clientAddr;
int len = sizeof(SOCKADDR);
while (1)
{
SOCKET connSocket = accept(serverSocket, (SOCKADDR*)&clientAddr, &len);
if (connSocket == INVALID_SOCKET)
{
cout << "accept(serverSocket, (SOCKADDR*)&clientAddr, &len) execute failed!" << endl;
return -1;
}

char sendBuf[100];
sprintf_s(sendBuf, "Welcome %s", inet_ntoa(clientAddr.sin_addr));
send(connSocket, sendBuf, strlen(sendBuf)+1, 0);

char recvBuf[100];
recv(connSocket, recvBuf, 100, 0);
printf("%s\n", recvBuf);
closesocket(connSocket);
}

system("pause");
return 0;
}

客户端

#include<iostream>
#include <winsock2.h>
using namespace std;

#pragma comment(lib, "ws2_32.lib")
// Minimal TCP demo client: connects to 127.0.0.1:6000, prints the server's
// greeting, sends one message, and exits.
int main()
{
    // Load Winsock and insist on version 2.2.
    WSADATA wsaData;
    int iRet = WSAStartup(MAKEWORD(2, 2), &wsaData);
    if (iRet != 0)
    {
        cout << "WSAStartup(MAKEWORD(2, 2), &wsaData) execute failed!" << endl;
        return -1;
    }
    if (2 != LOBYTE(wsaData.wVersion) || 2 != HIBYTE(wsaData.wVersion))
    {
        WSACleanup();
        cout << "WSADATA version is not correct!" << endl;
        return -1;
    }

    SOCKET clientSocket = socket(AF_INET, SOCK_STREAM, 0);
    if (clientSocket == INVALID_SOCKET)
    {
        cout << "clientSocket = socket(AF_INET, SOCK_STREAM, 0) execute failed!" << endl;
        // BUG FIX: unload Winsock on every early exit after a successful
        // WSAStartup.
        WSACleanup();
        return -1;
    }

    // Server address: loopback, port 6000.
    SOCKADDR_IN srvAddr;
    srvAddr.sin_addr.S_un.S_addr = inet_addr("127.0.0.1");
    srvAddr.sin_family = AF_INET;
    srvAddr.sin_port = htons(6000);

    iRet = connect(clientSocket, (SOCKADDR*)&srvAddr, sizeof(SOCKADDR));
    if (0 != iRet)
    {
        cout << "connect(clientSocket, (SOCKADDR*)&srvAddr, sizeof(SOCKADDR)) execute failed!" << endl;
        closesocket(clientSocket);
        WSACleanup();
        return -1;
    }

    // Receive the server's greeting.
    // BUG FIX: check recv's result and terminate the buffer; the original
    // printed a possibly uninitialized, unterminated buffer.
    char recvBuf[100];
    int received = recv(clientSocket, recvBuf, (int)sizeof(recvBuf) - 1, 0);
    if (received > 0)
    {
        recvBuf[received] = '\0';
        printf("%s\n", recvBuf);
    }

    // Send one message back.
    char sendBuf[100];
    sprintf_s(sendBuf, "Hello, This is client %s", "兔子");
    send(clientSocket, sendBuf, (int)strlen(sendBuf) + 1, 0);

    // Tidy up.
    closesocket(clientSocket);
    WSACleanup();

    system("pause");
    return 0;
}

环境配置

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_深度学习_09

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_json_10

测试过程:开启串口/网络调试软件当做客户端;先开启服务端(vs 执行),再绑定端口,设置自动发送

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_深度学习_11

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_#include_12

测试结果

43、Kinect 深度相机初体验和vs2022测距输出(增加TCP传输)_sed_13

参考:

​https://github.com/Microsoft/Azure-Kinect-Sensor-SDK​

​Azure Kinect DK 文档 | Microsoft Learn​

​c++ Windows Socket实现最简单的C/S网络通信(TCP)​