Connecting a Hikvision Camera to UE

1. Downloads

1.1 Download the SDK from the Hikvision Open Platform (hikvision.com)

Since I develop on Windows, I downloaded "设备网络SDK_Win64 V6.1.9.4_build20220412" (the 64-bit Windows Device Network SDK).

The directory layout after extraction:

1.2 Download FFmpeg for transcoding

I used a UE4 FFmpeg project I found on GitHub: GitHub - whoissunshijia/ue4-ffmpeg: Record game screen and push RTMP in UE4.22

I only used the UFFmpeg plugin under its Plugins directory.

P.S. My UE version does not match the UE version the FFmpeg plugin was written for, so a few function calls differ; just adjust them according to the compile errors.

That's all the downloading that's needed.

2. Project setup

2.1 Add the UFFmpeg plugin to the project's Build.cs:

        PublicDependencyModuleNames.AddRange(new string[] { "Core", "CoreUObject", "Engine", "InputCore", "LevelSequence", "UFFmpeg", });

2.2 Link the .lib files from the library directory of the Hikvision SDK downloaded in step 1

I used GdiPlus.lib, HCCore.lib, HCNetSDK.lib, and PlayCtrl.lib, copied into my own UE project folder.

Reference them in the project's Build.cs:

            PublicAdditionalLibraries.Add("yourSDKPath/HCCore.lib");
            PublicAdditionalLibraries.Add("yourSDKPath/HCNetSDK.lib");
            PublicAdditionalLibraries.Add("yourSDKPath/PlayCtrl.lib");
            PublicAdditionalLibraries.Add("yourSDKPath/GdiPlus.lib");

That completes the Build.cs. The full file looks like this:

// Fill out your copyright notice in the Description page of Project Settings.

using System.IO;
using UnrealBuildTool;

public class UETest_503 : ModuleRules
{
    public UETest_503(ReadOnlyTargetRules Target) : base(Target)
    {
        PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs;

        PublicDependencyModuleNames.AddRange(new string[] { "Core", "CoreUObject", "Engine", "InputCore", "LevelSequence",
            "UFFmpeg",
        });

        PrivateDependencyModuleNames.AddRange(new string[] { });

        // The Hikvision SDK is Windows-only, so only link its import libraries on Win64
        if (Target.Platform == UnrealTargetPlatform.Win64)
        {
            PublicAdditionalLibraries.Add("yourSDKPath/HCCore.lib");
            PublicAdditionalLibraries.Add("yourSDKPath/HCNetSDK.lib");
            PublicAdditionalLibraries.Add("yourSDKPath/PlayCtrl.lib");
            PublicAdditionalLibraries.Add("yourSDKPath/GdiPlus.lib");
        }

        // Uncomment if you are using Slate UI
        // PrivateDependencyModuleNames.AddRange(new string[] { "Slate", "SlateCore" });

        // Uncomment if you are using online features
        // PrivateDependencyModuleNames.Add("OnlineSubsystem");

        // To include OnlineSubsystemSteam, add it to the plugins section in your uproject file with the Enabled attribute set to true
    }
}
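One caveat: these .lib files are only import libraries. At runtime the process also has to be able to load the matching SDK DLLs (HCNetSDK.dll, HCCore.dll, PlayCtrl.dll and the HCNetSDKCom component folder that ships alongside them), for example by copying them next to the editor/packaged executable; otherwise the module fails to load with a missing-DLL error.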

2.3 Include the Hikvision SDK header files.
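I copied the headers from the SDK's include directory into an External folder under my module's Source directory (the folder name is my own choice) and include them with a relative path; the .cpp shown later does exactly this:

    #include "../External/HCNetSDK.h"   // NET_DVR_* device/network API
    #include "../External/PlayM4.h"     // PlayM4_* stream decoding API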

3. Create a UE Actor to display the video stream

3.1 Create HaikangCameraActor, derived from AActor

The flow is: connect to the camera (Init), start pulling the stream (VideoPlay), receive the decoded frame data, convert it to a UTexture2D, and update an Image widget in UMG in real time.

The HaikangCameraActor source is below.

.h

// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "GameFramework/Actor.h"
#include "C:\DocC\uetest_503\Plugins\UFFmpeg\Source\UFFmpeg\Public\FFmpegDirector.h"
#include "HaikangCameraActor.generated.h"
UCLASS()
class UETEST_503_API AHaikangCameraActor : public AActor
{
    GENERATED_BODY()
public:
    // Sets default values for this actor's properties
    AHaikangCameraActor();
protected:
    // Called when the game starts or when spawned
    virtual void BeginPlay() override;
    //virtual void BeginDestroy() override;
    UFUNCTION(BlueprintCallable, Category = "AHaikangCameraActor")
        void PlayCamera();
    UFUNCTION(BlueprintImplementableEvent, Category = "AHaikangCameraActor")
        void ChangeTexture(UTexture2D* input_texture);
    void Init();
    void VideoPlay();
public:
    // Called every frame
    virtual void Tick(float DeltaTime) override;
private:
    long IUserID;        // user handle returned by NET_DVR_Login_V30
    long m_lPlayHandle;  // play handle returned by NET_DVR_RealPlay_V30
    uint32 m_nDevPort;   // device port (8000 by default)
};
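The .cpp also passes each decoded frame from the SDK's decode callback to the game thread through a DelegateSingleton helper (pulled in here via Utils_FunctionLibrary.h); that class is not part of the Actor, so below is a minimal sketch of the shape the calls assume. The member names come from the usage; everything else is an assumption. Note that the decode callback runs on an SDK thread while Tick reads on the game thread, so in a real project you would copy the frame and guard it with a lock.

// Minimal sketch (assumed): a singleton that hands the latest decoded frame to Tick().
class DelegateSingleton
{
public:
    static DelegateSingleton& getInstance()
    {
        static DelegateSingleton Instance;
        return Instance;
    }

    char* pBuf = nullptr;   // latest YV12 frame delivered by the PlayM4 decode callback
    long  nWidth = 0;       // frame width in pixels
    long  nHeight = 0;      // frame height in pixels
    long  lFrameType = 0;   // frame type reported by the SDK (T_YV12 for video)

private:
    DelegateSingleton() = default;
};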

.cpp

// Fill out your copyright notice in the Description page of Project Settings.
#define WIN32_LEAN_AND_MEAN
#include "HaikangCameraActor.h"
#include "../External/HCNetSDK.h"
#include "../External/PlayM4.h"
#include "Misc/MessageDialog.h"
#include "../Utils/Utils_FunctionLibrary.h"
#include <stdio.h>
#include <iostream>

// Sets default values
AHaikangCameraActor::AHaikangCameraActor()
{
    // Set this actor to call Tick() every frame.  You can turn this off to improve performance if you don't need it.
    PrimaryActorTick.bCanEverTick = true;
}

void AHaikangCameraActor::PlayCamera()
{
    Init();
    VideoPlay();
}
void AHaikangCameraActor::Init()
{
    //std::string id;
    //std::string password = "";
    //std::string user = "admin";
    //int port = 8000;

    // Initialize the HCNetSDK before any other SDK call
    bool ret_value = NET_DVR_Init();
    int ret_error_num;
    if (ret_value == false)
    {
        ret_error_num = NET_DVR_GetLastError();
        UE_LOG(LogTemp, Warning, TEXT("error number:%d"), ret_error_num);
        return;
    }
    // Connection timeout 2000 ms, 1 retry
    ret_value = NET_DVR_SetConnectTime(2000, 1);
    if (ret_value == false)
    {
        ret_error_num = NET_DVR_GetLastError();
        UE_LOG(LogTemp, Warning, TEXT("error number:%d"), ret_error_num);
        return;
    }
    // Try to reconnect every 10000 ms if the connection drops
    ret_value = NET_DVR_SetReconnect(10000, true);
    if (ret_value == false)
    {
        ret_error_num = NET_DVR_GetLastError();
        UE_LOG(LogTemp, Warning, TEXT("error number:%d"), ret_error_num);
        return;
    }

    //login_id = NET_DVR_Login_V30(const_cast<char*>(id.c_str()), port, const_cast<char*>(user.c_str()), const_cast<char*>(password.c_str()), &DeviceInfoTmp);
    FString DeviceIp{ "192.168.91.66" };
    FString account{ "admin" };
    FString password{ "" };
    m_nDevPort = 8000;
    NET_DVR_DEVICEINFO_V30 devInfo;
    // Do the TCHAR->ANSI conversions inside the call so the temporary converters stay alive for its duration
    IUserID = NET_DVR_Login_V30(TCHAR_TO_ANSI(*DeviceIp), m_nDevPort, TCHAR_TO_ANSI(*account), TCHAR_TO_ANSI(*password), &devInfo);
    if (IUserID < 0)
    {
        UE_LOG(LogTemp, Warning, TEXT("Login failed"));
    }
    else
    {
        UE_LOG(LogTemp, Warning, TEXT("Login successfully"));
    }
}
#undef UpdateResource

FILE* Videofile = NULL;
char filename[100];
int iPicNum = 0;

// Decode callback: PlayM4 calls this once per decoded frame
void CALLBACK g_DecCBFun(long nPort, char* pBuf, long nSize, FRAME_INFO* pFrameInfo, long nReserved1, long nReserved2)
{
    long lFrameType = pFrameInfo->nType;
    if (lFrameType == T_YV12)
    {
        //if (Videofile == NULL)
        //{
        //    sprintf(filename, "VideoYV12.yuv");
        //    Videofile = fopen(filename, "wb");
        //}
        //fwrite(pBuf, nSize, 1, Videofile);

        // Stash the raw YV12 frame and its dimensions for the game thread to pick up in Tick()
        DelegateSingleton::getInstance().pBuf = pBuf;
        //std::cout << pBuf << std::endl; // debug only: pBuf is raw binary data, not a C string
        DelegateSingleton::getInstance().nWidth = pFrameInfo->nWidth;
        DelegateSingleton::getInstance().nHeight = pFrameInfo->nHeight;
        DelegateSingleton::getInstance().lFrameType = pFrameInfo->nType;
    }
}
static long nGetPort = -1;

// Real-time stream callback: feeds raw stream data from HCNetSDK into the PlayM4 decoder
void CALLBACK g_RealDataCallBack(LONG IRealHandle, DWORD dwDataType, BYTE* pBuffer, DWORD dwBufSize, void* pUser)
{
    DWORD dRet = 0;
    BOOL inData = FALSE;
    switch (dwDataType)
    {
    case NET_DVR_SYSHEAD:
        if (!PlayM4_GetPort(&nGetPort))
        {
            break;
        }
        if (!PlayM4_OpenStream(nGetPort, pBuffer, dwBufSize, 1024 * 1024))
        {
            dRet = PlayM4_GetLastError(nGetPort);
            break;
        }
        // Set the decode callback (decode only, no display)
        if (!PlayM4_SetDecCallBack(nGetPort, g_DecCBFun))
        {
            dRet = PlayM4_GetLastError(nGetPort);
            break;
        }
        // Set the decode callback (decode and display)
        //if (!PlayM4_SetDecCallBackEx(nGetPort, g_DecCBFun, NULL, NULL))
        //{
        //    dRet = PlayM4_GetLastError(nGetPort);
        //    break;
        //}
        // Start video decoding
        if (!PlayM4_Play(nGetPort, NULL))
        {
            dRet = PlayM4_GetLastError(nGetPort);
            break;
        }
        // Start audio decoding (the stream must be a composite stream)
        if (!PlayM4_PlaySound(nGetPort))
        {
            dRet = PlayM4_GetLastError(nGetPort);
            break;
        }
        break;
    case NET_DVR_STREAMDATA:
        inData = PlayM4_InputData(nGetPort, pBuffer, dwBufSize);
        while (!inData)
        {
            Sleep(10);
            inData = PlayM4_InputData(nGetPort, pBuffer, dwBufSize);
            UE_LOG(LogTemp, Warning, TEXT("PlayM4_InputData failed \n"));
        }
        break;
    default:
        inData = PlayM4_InputData(nGetPort, pBuffer, dwBufSize);
        while (!inData)
        {
            Sleep(10);
            inData = PlayM4_InputData(nGetPort, pBuffer, dwBufSize);
            UE_LOG(LogTemp, Warning, TEXT("PlayM4_InputData failed \n"));
        }
        break;
    }
}
void AHaikangCameraActor::VideoPlay()
{
    NET_DVR_CLIENTINFO ClientInfo;
    ClientInfo.lChannel = 1;     // Device channel number
    ClientInfo.hPlayWnd = NULL;  // NULL window: the SDK only fetches the stream, it does not decode or render it
    ClientInfo.lLinkMode = 0;    // Main stream
    ClientInfo.sMultiCastIP = NULL;
    // Start the live preview; stream data arrives through g_RealDataCallBack
    m_lPlayHandle = NET_DVR_RealPlay_V30(IUserID, &ClientInfo, g_RealDataCallBack, NULL, TRUE);
    if (m_lPlayHandle < 0)
    {
        UE_LOG(LogTemp, Warning, TEXT("error m_lPlayHandle:%d"), m_lPlayHandle);
        return;
    }
}

// Called when the game starts or when spawned
void AHaikangCameraActor::BeginPlay()
{
    Super::BeginPlay();
}
// Convert a YV12 frame to BGRA using FFmpeg's swscale
bool YV12ToBGR32_FFmpeg(unsigned char* pYUV, unsigned char* pBGR24, int width, int height)
{
    if (width < 1 || height < 1 || pYUV == NULL || pBGR24 == NULL)
    {
        return false;
    }
    AVPicture pFrameYUV, pFrameBGR;
    avpicture_fill(&pFrameYUV, pYUV, (AVPixelFormat)AV_PIX_FMT_YUV420P, width, height);
    // Swap the U and V planes: YV12 stores V before U, while YUV420P expects U before V
    uint8_t* ptmp = pFrameYUV.data[1];
    pFrameYUV.data[1] = pFrameYUV.data[2];
    pFrameYUV.data[2] = ptmp;
    avpicture_fill(&pFrameBGR, pBGR24, (AVPixelFormat)AV_PIX_FMT_BGRA, width, height);
    struct SwsContext* imgCtx = NULL;
    imgCtx = sws_getContext(width, height, (AVPixelFormat)AV_PIX_FMT_YUV420P, width, height, (AVPixelFormat)AV_PIX_FMT_BGRA, SWS_BILINEAR, 0, 0, 0);
    if (imgCtx != NULL)
    {
        sws_scale(imgCtx, pFrameYUV.data, pFrameYUV.linesize, 0, height, pFrameBGR.data, pFrameBGR.linesize);
        sws_freeContext(imgCtx);
        imgCtx = NULL;
        return true;
    }
    else
    {
        return false;
    }
}
// Called every frame
void AHaikangCameraActor::Tick(float DeltaTime)
{
    Super::Tick(DeltaTime);
    if (DelegateSingleton::getInstance().lFrameType == T_YV12)
    {
        long nWidth = DelegateSingleton::getInstance().nWidth;
        long nHeight = DelegateSingleton::getInstance().nHeight;
        int sizeRGB = nWidth * nHeight * 4;
        char* pBGR32 = new char[sizeRGB];
        memset(pBGR32, 0, sizeRGB);
        // Convert the latest YV12 frame stashed by the decode callback into BGRA
        YV12ToBGR32_FFmpeg((unsigned char*)DelegateSingleton::getInstance().pBuf, (unsigned char*)pBGR32, nWidth, nHeight);
        UTexture2D* texture = UTexture2D::CreateTransient(nWidth, nHeight, PF_B8G8R8A8);
        // Fill in the texture data from the converted frame
        void* TextureData = texture->PlatformData->Mips[0].BulkData.Lock(LOCK_READ_WRITE);
        FMemory::Memcpy(TextureData, pBGR32, sizeRGB);
        texture->PlatformData->Mips[0].BulkData.Unlock();
        delete[] pBGR32; // the texture owns its own copy now, so release the staging buffer
        // Update the rendering resource from data.
        texture->UpdateResource();
        ChangeTexture(texture);
    }
}
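ChangeTexture is a BlueprintImplementableEvent, so the last step of the flow, pushing the texture into the UMG Image, lives in the Actor's Blueprint: handle the ChangeTexture event and call Set Brush from Texture on the Image widget. If you prefer to do it in C++, here is a minimal sketch of the same idea; the UpdateVideoImage helper and the VideoImage widget are assumptions, only UImage::SetBrushFromTexture is the real UMG call.

#include "Components/Image.h"

// Minimal sketch (assumed helper): push the converted frame into a UMG Image widget.
// VideoImage would be a UImage* fetched from your UserWidget, e.g. via a BindWidget property.
void UpdateVideoImage(UImage* VideoImage, UTexture2D* InTexture)
{
    if (VideoImage && InTexture)
    {
        // Replaces the image brush's resource; pass true as the second argument to resize the widget to the texture
        VideoImage->SetBrushFromTexture(InTexture, /*bMatchSize=*/false);
    }
}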