由于公司做了一个展馆项目,甲方要在程序内接入监控视频,而且是接入600个,果断没有头绪,好在ue4给出了官方实例,再开几个线程就行了。废话不多说直接上代码。
先把OpenCV的插件接上去,接其他应用SDK下篇文章介绍。
在Actor使用的方法.
如果使用主线程解析视频的话,进程启动会卡一段时间,一个监控有时候就会卡顿半分钟,六个会更卡,所以果断开了六个线程.
// --- Blueprint-callable camera API (declarations; class header not shown here) ---
// Opens one worker thread per RTSP url in urlAr (see OpenWebCamera below).
UFUNCTION(BlueprintCallable, Category = Webcam)
void OpenWebCamera(TArray< FString> urlAr);
// Opens local capture devices by id, one per existing data handle.
UFUNCTION(BlueprintCallable, Category = Webcam)
void OpenLocalCamera(TArray< int32> ID);
// Releases every open stream (does not stop the worker threads).
UFUNCTION(BlueprintCallable, Category = Webcam)
void CloseCamera();
// Per-stream decode/texture state; owned by this actor, freed in BeginDestroy.
TArray< RSTDataHandle*>Camer1RSTDataHandleArr;
// One receive thread per stream; killed and deleted in BeginDestroy.
TArray<FRunnableThread*> m_RecvThreadArr;
virtual void BeginDestroy()override;
//根据RSTP连接的数量创建线程
gamezaivoid ACamReader::OpenWebCamera(TArray< FString> urlAr)
{
for (int32 i = 0; i < 6; i++)
{
RSTDataHandle*tempSTDataHandle = new RSTDataHandle();
Camer1RSTDataHandleArr.Add(tempSTDataHandle);
FRunnableThread *tempRunnableThread = FRunnableThread::Create(new FReceiveThread(tempSTDataHandle, urlAr[i]), TEXT("RecvThread"), 128 * 1024, TPri_AboveNormal, FPlatformAffinity::GetPoolThreadMask());
m_RecvThreadArr.Add(tempRunnableThread);
}
}
// Open a local capture device for each data handle.
// Fixes: the original indexed IDArr[i] for every handle, reading out of
// bounds when the ID list was shorter than the handle list; we now clamp
// to the shorter of the two.
void ACamReader::OpenLocalCamera(TArray< int32> IDArr)
{
	const int32 Count = FMath::Min(Camer1RSTDataHandleArr.Num(), IDArr.Num());
	for (int32 i = 0; i < Count; i++)
	{
		Camer1RSTDataHandleArr[i]->OpenLocalCamera(IDArr[i]);
	}
}
void ACamReader::CloseCamera()
{
if (Camer1RSTDataHandleArr.Num() > 0)
{
for (int32 i = 0; i < Camer1RSTDataHandleArr.Num(); i++)
{
Camer1RSTDataHandleArr[i]->CloseCamera();
}
}
//
}
// Return the decoded video texture for the stream at Index, or nullptr
// when the index is out of range or no frame has been produced yet.
// Improvement: the original linearly scanned the whole array comparing
// each position against Index; a direct bounds-checked access is O(1)
// and behaves identically.
UTexture2D* ACamReader::GetTexture2D(int Index)
{
	if (Camer1RSTDataHandleArr.IsValidIndex(Index))
	{
		// GetThisUTexture2D() already returns nullptr before the first frame.
		return Camer1RSTDataHandleArr[Index]->GetThisUTexture2D();
	}
	return nullptr;
}
// Collect the current texture of every stream that has produced a frame.
// Fixes: the original never cleared CurrenTextureArr, so every call
// appended again and the returned list grew with duplicates.
TArray<UTexture2D*> ACamReader::GetAllTexture2D()
{
	CurrenTextureArr.Reset();
	for (RSTDataHandle* Handle : Camer1RSTDataHandleArr)
	{
		if (UTexture2D* Texture = Handle->GetThisUTexture2D())
		{
			CurrenTextureArr.Add(Texture);
		}
	}
	return CurrenTextureArr;
}
void AWebcamReader::InitStream()
{
if (Camer1RSTDataHandleArr.Num() > 0)
{
for (int32 i = 0; i < Camer1RSTDataHandleArr.Num(); i++)
{
Camer1RSTDataHandleArr[i]->InitStream();
}
}
}
// Teardown. Fixes the ordering bug that was the likely cause of the
// random in-Editor crashes the author mentions: the original deleted the
// RSTDataHandle objects FIRST, while the receive threads were still
// ticking them — a use-after-free. Threads must be stopped and joined
// before their data handles are freed.
void AWebcamReader::BeginDestroy()
{
	Super::BeginDestroy();
	// 1) Stop and join every receive thread so nothing touches the handles
	//    after this point.
	// NOTE(review): Kill(true) relies on FReceiveThread::Stop() setting
	// `stopping` — confirm Stop() is overridden in the header, otherwise
	// the wait can block forever.
	for (int32 i = 0; i < m_RecvThreadArr.Num(); i++)
	{
		if (m_RecvThreadArr[i])
		{
			m_RecvThreadArr[i]->Kill(true); // true = wait for thread exit
			delete m_RecvThreadArr[i];
			m_RecvThreadArr[i] = nullptr;
		}
	}
	m_RecvThreadArr.Reset();
	// 2) Now it is safe to free the per-stream data handles.
	for (int32 i = 0; i < Camer1RSTDataHandleArr.Num(); i++)
	{
		delete Camer1RSTDataHandleArr[i];
		Camer1RSTDataHandleArr[i] = nullptr;
	}
	Camer1RSTDataHandleArr.Reset();
}
4.下面看一下在线程里干了什么事.
// Receive-thread worker: remembers the data handle it drives and the
// RTSP address it will connect to. No work happens until Run().
FReceiveThread::FReceiveThread(RSTDataHandle* sTDataHandle, FString url)
	: STDataHandle(sTDataHandle)
	, URL(url)
{
}
// Destructor. Fixes an ownership bug: STDataHandle is created and stored
// by ACamReader (Camer1RSTDataHandleArr) and freed in its BeginDestroy,
// so deleting it here as well (as the original did) would double-delete
// the moment this runnable is ever destroyed.
FReceiveThread::~FReceiveThread()
{
	stopping = true;
	STDataHandle = nullptr; // non-owning pointer — just drop the reference
}
// Runs on the new thread before Run(): clear the stop flag so the
// receive loop starts in the running state. Returning true lets the
// thread proceed to Run().
bool FReceiveThread::Init()
{
	stopping = false;
	return true;
}
// Thread body: connect to the camera, then pump frames until asked to stop.
uint32 FReceiveThread::Run()
{
	// Blocking connect — exactly the work we moved off the game thread.
	STDataHandle->OpenWebCamera(URL);
	// Frame pump; the sleep keeps six of these threads from pegging the CPU.
	for (;;)
	{
		if (stopping)
		{
			break;
		}
		STDataHandle->Tick(0.01f);
		FPlatformProcess::Sleep(0.01f);
	}
	return 1;
}
5.线程里干的事很简单吧,下面看一下数据类里面的数据怎么处理的.
// Fill out your copyright notice in the Description page of Project Settings.
#include "RSTDataHandle.h"
// Start with everything closed: no texture, no open stream, zero-sized
// video, default 320x240 resize target.
RSTDataHandle::RSTDataHandle()
{
	VideoTexture = nullptr;
	isStreamOpen = false;
	CameraID = 0;
	ShouldResize = false;
	VideoSize = FVector2D(0.f, 0.f);
	ResizeDeminsions = FVector2D(320.f, 240.f);
	stream = cv::VideoCapture();
	frame = cv::Mat();
	// Guards game-thread access to the texture pointer.
	Mutex = new FCriticalSection();
}
// Release owned resources. Fixes: the original empty destructor leaked
// the FCriticalSection allocated in the constructor.
RSTDataHandle::~RSTDataHandle()
{
	delete Mutex;
	Mutex = nullptr;
	// NOTE(review): VideoUpdateTextureRegion (new'd in InitStream) is still
	// leaked; an in-flight render command may reference it, so freeing it
	// safely needs a render-thread fence — confirm before deleting here.
}
// Simple enough — this is all it does; everything else comes from the
// official UE plugin sample, we just call into it.
// One update step, run on the receive thread: grab the next frame,
// post-process it, upload it to the texture, then fire the frame hook.
// The call order matters — UpdateTexture() consumes what UpdateFrame()
// (and DoProcessing()) produced this tick.
void RSTDataHandle::Tick(float DeltaTime)
{
	UpdateFrame();
	DoProcessing();
	UpdateTexture();
	OnNextVideoFrame();
}
// Hook invoked after each frame has been pushed to the texture.
// Intentionally empty here — extend to react to new frames.
void RSTDataHandle::OnNextVideoFrame()
{
}
// Pull the next frame from OpenCV into `frame`.
// Fixes: the original ignored the result of stream.read(); on a hiccup
// read() returns false and can leave `frame` empty, and cv::resize on an
// empty Mat throws — now we skip the resize in that case.
void RSTDataHandle::UpdateFrame()
{
	if (stream.isOpened())
	{
		if (stream.read(frame) && !frame.empty())
		{
			if (ShouldResize)
			{
				cv::resize(frame, frame, size);
			}
		}
	}
	else {
		// Stream gone — stop UpdateTexture() from consuming stale data.
		isStreamOpen = false;
	}
}
// Per-frame processing hook between capture and texture upload.
// Intentionally empty here — extend to filter/annotate `frame`.
void RSTDataHandle::DoProcessing()
{
}
// Convert the BGR OpenCV frame into the BGRA Data array and queue the
// upload to VideoTexture.
// Fixes: guards against a frame whose dimensions no longer match
// VideoSize/Data (e.g. ShouldResize toggled after InitStream), which made
// the copy below read past the Mat's buffer.
void RSTDataHandle::UpdateTexture()
{
	if (isStreamOpen && frame.data)
	{
		if (frame.cols != (int)VideoSize.X || frame.rows != (int)VideoSize.Y)
		{
			return; // size mismatch — skip this frame rather than read OOB
		}
		// Copy Mat data (BGR, 3 bytes/px, assumed continuous — TODO confirm
		// frame.isContinuous()) into the BGRA array; alpha was pre-filled
		// with 255 by InitStream().
		for (int y = 0; y < VideoSize.Y; y++)
		{
			for (int x = 0; x < VideoSize.X; x++)
			{
				int i = x + (y * VideoSize.X);
				Data[i].B = frame.data[i * 3 + 0];
				Data[i].G = frame.data[i * 3 + 1];
				Data[i].R = frame.data[i * 3 + 2];
			}
		}
		// Queue the pixel upload on the render thread (pitch = 4 bytes/px).
		UpdateTextureRegions(VideoTexture, (int32)0, (uint32)1, VideoUpdateTextureRegion, (uint32)(4 * VideoSize.X), (uint32)4, (uint8*)Data.GetData(), false);
	}
}
// Connect to the RTSP url, retrying until the stream opens.
// Fixes: the original retried by calling itself recursively, which
// overflows the stack when a camera stays offline; this loops instead
// and sleeps between attempts so the worker thread does not spin. The
// failure log also said "stream.isOpened()", which was misleading.
void RSTDataHandle::OpenWebCamera(FString url)
{
	for (;;)
	{
		stream = cv::VideoCapture(TCHAR2STRING(url.GetCharArray().GetData()));
		if (stream.isOpened())
		{
			InitStream();
			UE_LOG(LogTemp, Warning, TEXT("stream.OpenSucess"));
			return;
		}
		UE_LOG(LogTemp, Warning, TEXT("stream open failed, retrying"));
		FPlatformProcess::Sleep(1.0f); // back off before the next attempt
	}
}
// Open a local capture device by index and set up the texture pipeline.
void RSTDataHandle::OpenLocalCamera(int32 ID)
{
	CameraID = ID;
	stream.open(CameraID);
	// InitStream() checks stream.isOpened() itself, so this is a no-op
	// when the device failed to open.
	InitStream();
}
void RSTDataHandle::CloseCamera()
{
if (stream.isOpened()) {
stream.release();
}
}
// Locked read of the current texture pointer; returns nullptr before the
// first frame has initialised the texture.
UTexture2D* RSTDataHandle::GetThisUTexture2D()
{
	FScopeLock ScopeLock(Mutex);
	UTexture2D* Current = VideoTexture;
	return Current;
}
// One-time stream setup: learn the native resolution from the first
// frame, create the transient texture and the BGRA back-buffer.
// Fixes: the original did not check the first grab succeeded, so a dead
// stream led to UTexture2D::CreateTransient(0, 0); it also never checked
// CreateTransient's return before dereferencing it.
void RSTDataHandle::InitStream()
{
	if (stream.isOpened() && !isStreamOpen)
	{
		// Grab one frame to learn the native resolution.
		UpdateFrame();
		if (!frame.data || frame.cols <= 0 || frame.rows <= 0)
		{
			return; // nothing decoded yet — retry on a later call
		}
		isStreamOpen = true;
		VideoSize = FVector2D(frame.cols, frame.rows);
		size = cv::Size(ResizeDeminsions.X, ResizeDeminsions.Y);
		VideoTexture = UTexture2D::CreateTransient(VideoSize.X, VideoSize.Y);
		if (!VideoTexture)
		{
			isStreamOpen = false;
			return;
		}
		VideoTexture->UpdateResource();
		VideoUpdateTextureRegion = new FUpdateTextureRegion2D(0, 0, 0, 0, VideoSize.X, VideoSize.Y);
		// BGRA back-buffer; alpha stays 255, colour channels filled per frame.
		Data.Init(FColor(0, 0, 0, 255), VideoSize.X * VideoSize.Y);
		// Push the first frame through the normal pipeline.
		DoProcessing();
		UpdateTexture();
		OnNextVideoFrame();
	}
}
// Asynchronously copies SrcData into the given texture regions on the
// render thread (the pattern from the stock UE4 wiki/webcam sample).
// @param Texture    target texture; skipped when its RHI resource is absent
// @param MipIndex   destination mip
// @param NumRegions number of entries in Regions
// @param Regions    region descriptors; must stay alive until the command runs
// @param SrcPitch   bytes per source row (here 4 * width)
// @param SrcBpp     bytes per pixel (here 4, BGRA)
// @param SrcData    pixel bytes; must stay alive until the command runs
// @param bFreeData  when true, the render thread frees Regions and SrcData
//                   with FMemory::Free (callers here pass false — the data
//                   is owned by the RSTDataHandle).
void RSTDataHandle::UpdateTextureRegions(UTexture2D* Texture, int32 MipIndex, uint32 NumRegions, FUpdateTextureRegion2D* Regions, uint32 SrcPitch, uint32 SrcBpp, uint8* SrcData, bool bFreeData)
{
	if (Texture->Resource)
	{
		// Heap-allocated snapshot of the parameters, handed to the render
		// thread; freed at the end of the render command below.
		struct FUpdateTextureRegionsData
		{
			FTexture2DResource* Texture2DResource;
			int32 MipIndex;
			uint32 NumRegions;
			FUpdateTextureRegion2D* Regions;
			uint32 SrcPitch;
			uint32 SrcBpp;
			uint8* SrcData;
		};
		FUpdateTextureRegionsData* RegionData = new FUpdateTextureRegionsData;
		RegionData->Texture2DResource = (FTexture2DResource*)Texture->Resource;
		RegionData->MipIndex = MipIndex;
		RegionData->NumRegions = NumRegions;
		RegionData->Regions = Regions;
		RegionData->SrcPitch = SrcPitch;
		RegionData->SrcBpp = SrcBpp;
		RegionData->SrcData = SrcData;
		// Legacy (pre-4.22) render-command macro: the block below runs on
		// the render thread with RegionData/bFreeData captured by value.
		ENQUEUE_UNIQUE_RENDER_COMMAND_TWOPARAMETER(
		UpdateTextureRegionsData,
		FUpdateTextureRegionsData*, RegionData, RegionData,
		bool, bFreeData, bFreeData,
		{
			for (uint32 RegionIndex = 0; RegionIndex < RegionData->NumRegions; ++RegionIndex)
			{
				// Skip mips that are currently streamed out.
				int32 CurrentFirstMip = RegionData->Texture2DResource->GetCurrentFirstMip();
				if (RegionData->MipIndex >= CurrentFirstMip)
				{
					RHIUpdateTexture2D(
					RegionData->Texture2DResource->GetTexture2DRHI(),
					RegionData->MipIndex - CurrentFirstMip,
					RegionData->Regions[RegionIndex],
					RegionData->SrcPitch,
					RegionData->SrcData
					// Offset into the source buffer for this region.
					+ RegionData->Regions[RegionIndex].SrcY * RegionData->SrcPitch
					+ RegionData->Regions[RegionIndex].SrcX * RegionData->SrcBpp
					);
				}
			}
			if (bFreeData)
			{
				FMemory::Free(RegionData->Regions);
				FMemory::Free(RegionData->SrcData);
			}
			delete RegionData;
		});
	}
}
6.接监控也并不难吧,只是里面的细节比较多.这里就不上监控的图细聊了.
7.还有如果监控那边设置的分辨率过高,用的是主码流、码率过高的话,接多个监控CPU直接100%,电脑会很卡的.当时测试一个监控,后来测试6个,电脑卡的直接要关机了,以为自己的代码有问题,纠结了很久才知道是监控设置的问题.如果不要求视频是高清的,同时接六个一点都不比VLC效果差.还有一个坑:一个监控可能会有5路或者6路的并发取流限制.