Create a C++ project and add the required modules in the project's Build.cs. The "OpenCV" and "OpenCVHelper" modules come from the engine's built-in OpenCV plugin, so that plugin must also be enabled (Edit > Plugins):

// Copyright Epic Games, Inc. All Rights Reserved.

using UnrealBuildTool;

public class OpencvT : ModuleRules
{
	public OpencvT(ReadOnlyTargetRules Target) : base(Target)
	{
		PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs;
	
		PublicDependencyModuleNames.AddRange(new string[] 
		{ 
			"Core", 
			"CoreUObject", 
			"Engine", 
			"InputCore", 
			"EnhancedInput",
			"OpenCV",
			"OpenCVHelper",
			"UMG",
			"Slate",
			"SlateCore",
			"ImageWrapper" 
		});

		PrivateDependencyModuleNames.AddRange(new string[] {  });

	}
}

In the editor, create a C++ class derived from UserWidget and a Widget Blueprint based on it (BP_userwidget). Add an Image widget to BP_userwidget and bind it in the header with BindWidget; no extra code is needed in the .cpp. Note that the Image widget in the blueprint must have exactly the same name as the bound property (cv_Image), otherwise the binding fails and the widget blueprint will not compile.

// Fill out your copyright notice in the Description page of Project Settings.

#pragma once

#include "CoreMinimal.h"
#include "Blueprint/UserWidget.h"
#include "Components/Image.h"
#include "MyUserWidget.generated.h"

/**
 * 
 */
UCLASS()
class OPENCVT_API UMyUserWidget : public UUserWidget
{
	GENERATED_BODY()

public:
	// Bound to the Image widget in BP_userwidget; the widget in the blueprint
	// must be named exactly cv_Image for the BindWidget meta tag to resolve it.
	UPROPERTY(BlueprintReadWrite, meta = (BindWidget))
	UImage* cv_Image;
};
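
For completeness, the matching MyUserWidget.cpp can stay exactly as the class wizard generates it; nothing beyond the include is required, since BindWidget does the wiring:

// MyUserWidget.cpp — no extra code required; the BindWidget meta tag above
// links cv_Image to the widget of the same name in BP_userwidget.
#include "MyUserWidget.h"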

Create an Actor C++ class. Include the necessary headers, create an instance of the widget blueprint (the BP UI), cast it to the UserWidget C++ class so the bound Image can be set, and add the widget to the viewport. Place the actor in the level and run.

Note: if the data loaded by OpenCV is empty or not in the expected format, passing it on unchecked will crash the engine, so always validate it first.

// Fill out your copyright notice in the Description page of Project Settings.

#pragma once

#include "CoreMinimal.h"
#include "GameFramework/Actor.h"
#include "MyOpenCVClass.generated.h"

UCLASS()
class OPENCVT_API AMyOpenCVClass : public AActor
{
	GENERATED_BODY()

public:
	// Sets default values for this actor's properties
	AMyOpenCVClass();

protected:
	// Called when the game starts or when spawned
	virtual void BeginPlay() override;

public:
	// Called every frame
	virtual void Tick(float DeltaTime) override;

	UPROPERTY(EditAnywhere)
	UMaterialInterface* BaseMaterial;

	UPROPERTY(BlueprintReadOnly,VisibleAnywhere)
	UTexture2D* test2d;

	// Assign the BP_userwidget blueprint to NewWidgetBlueprint in the editor
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "UI")
	TSubclassOf<UUserWidget> NewWidgetBlueprint;
};
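
The video and camera variants of the .cpp shown further down also override EndPlay to release the cv::VideoCapture. When using those versions, declare the override in the public section of this header as well:

	// Needed only by the video/camera variants below, which release the
	// cv::VideoCapture when play ends.
	virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;
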
#include "OpencvT/Public/MyOpenCVClass.h"

#if WITH_OPENCV
#include "PreOpenCVHeaders.h"
#include "opencv2/opencv.hpp"
#include "PostOpenCVHeaders.h"
#endif

// Header needed for UpdateResource()
#include "ImageWrapperHelper.h"

#include "MyUserWidget.h"

// Sets default values
AMyOpenCVClass::AMyOpenCVClass()
{
	PrimaryActorTick.bCanEverTick = true;
}

// Called when the game starts or when spawned
void AMyOpenCVClass::BeginPlay()
{
	Super::BeginPlay();

	FString filepath = TEXT("d:/abc.png");
	UE_LOG(LogTemp, Warning, TEXT("Trying to load image from: %s"), *filepath);

	// Read the image with OpenCV (imread returns BGR by default)
	cv::Mat img = cv::imread(TCHAR_TO_UTF8(*filepath));
	if (img.empty())
	{
		UE_LOG(LogTemp, Warning, TEXT("Failed to load image from: %s"), *filepath);
		return;
	}
	UE_LOG(LogTemp, Warning, TEXT("Successfully loaded image from: %s"), *filepath);

	// Convert the BGR image to BGRA
	cv::Mat imgBGRA;
	cv::cvtColor(img, imgBGRA, cv::COLOR_BGR2BGRA);

	// Create the UTexture2D
	UTexture2D* Texture2D = UTexture2D::CreateTransient(imgBGRA.cols, imgBGRA.rows, PF_B8G8R8A8);
	if (!Texture2D)
	{
		UE_LOG(LogTemp, Warning, TEXT("texture CreateTransient failed"));
		return;
	}

	// Copy the cv::Mat data into the UTexture2D row by row, respecting the destination pitch
	FTexture2DMipMap& MipMap = Texture2D->GetPlatformData()->Mips[0];
	void* TextureData = MipMap.BulkData.Lock(LOCK_READ_WRITE);
	int32 SrcPitch = imgBGRA.cols * imgBGRA.elemSize();
	int32 DstPitch = MipMap.SizeX * 4;

	for (int32 Row = 0; Row < imgBGRA.rows; ++Row)
	{
		FMemory::Memcpy((uint8*)TextureData + Row * DstPitch, imgBGRA.ptr(Row), SrcPitch);
	}

	MipMap.BulkData.Unlock();
	Texture2D->UpdateResource();

	// Hand the texture off to the UI widget
	test2d = Texture2D;

	// Make sure test2d is valid
	if (test2d)
	{
		UE_LOG(LogTemp, Warning, TEXT("test2d is valid, ready to set in UI"));
	}
	else
	{
		UE_LOG(LogTemp, Warning, TEXT("test2d is not valid"));
	}

	// Create the UI widget and set its image
	UUserWidget* bpwidget = CreateWidget<UUserWidget>(GetWorld(), NewWidgetBlueprint);
	if (!bpwidget)
	{
		UE_LOG(LogTemp, Warning, TEXT("Failed to create widget; is NewWidgetBlueprint assigned on the actor?"));
		return;
	}

	UMyUserWidget* MyUserWidget = Cast<UMyUserWidget>(bpwidget);
	if (MyUserWidget && MyUserWidget->cv_Image)
	{
		MyUserWidget->cv_Image->SetBrushFromTexture(test2d);
		MyUserWidget->cv_Image->Brush.ImageSize = FVector2D(test2d->GetSizeX(), test2d->GetSizeY());
	}

	bpwidget->AddToViewport();
}

// Called every frame
void AMyOpenCVClass::Tick(float DeltaTime)
{
	Super::Tick(DeltaTime);
}
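
The conversion step above assumes imread returned a 3-channel BGR image. If the source file might be grayscale or already carry an alpha channel, a small helper along these lines (the ToBGRA name is illustrative, not part of the original code) keeps the row copy into the PF_B8G8R8A8 texture from reading the wrong number of bytes per pixel:

// Illustrative sketch: normalize whatever imread/VideoCapture produced
// to 4-channel BGRA before copying it into a PF_B8G8R8A8 texture.
static cv::Mat ToBGRA(const cv::Mat& Src)
{
	cv::Mat Dst;
	switch (Src.channels())
	{
	case 1: cv::cvtColor(Src, Dst, cv::COLOR_GRAY2BGRA); break; // grayscale source
	case 3: cv::cvtColor(Src, Dst, cv::COLOR_BGR2BGRA); break;  // default imread result
	case 4: Dst = Src; break;                                   // already has alpha
	default: break; // unexpected layout: return an empty Mat so the caller can bail out
	}
	return Dst;
}

In BeginPlay, the cv::cvtColor(img, imgBGRA, cv::COLOR_BGR2BGRA) call would then become cv::Mat imgBGRA = ToBGRA(img); followed by an imgBGRA.empty() check.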

Reading and playing an MP4 with OpenCV: the UI part stays the same; only the OpenCV setup and the Tick logic change (and, as noted after the header above, the EndPlay override must be declared in the actor header).

#include "OpencvT/Public/MyOpenCVClass.h"

#if WITH_OPENCV
#include "PreOpenCVHeaders.h"
#include "opencv2/opencv.hpp"
#include "PostOpenCVHeaders.h"
#endif

#include "ImageWrapperHelper.h"
#include "MyUserWidget.h"

// Constructor: set default values
AMyOpenCVClass::AMyOpenCVClass()
{
	PrimaryActorTick.bCanEverTick = true;
}

// OpenCV capture state. Kept at file scope to keep the example short;
// in a real project these would be members of AMyOpenCVClass.
cv::VideoCapture VideoCapture;
float FrameTime;
bool bIsVideoPlaying = false;
float AccumulatedTime = 0.0f;

cv::Mat Frame;

// Called when the game starts or when spawned
void AMyOpenCVClass::BeginPlay()
{
	Super::BeginPlay();

	// Path to the video file
	FString filepath = TEXT("d:/JSC.mp4");
	UE_LOG(LogTemp, Warning, TEXT("Trying to load video file: %s"), *filepath);

	// Open the video with OpenCV
	VideoCapture.open(TCHAR_TO_UTF8(*filepath));
	if (!VideoCapture.isOpened())
	{
		UE_LOG(LogTemp, Warning, TEXT("Failed to open video file: %s"), *filepath);
		return;
	}
	UE_LOG(LogTemp, Warning, TEXT("Successfully opened video file: %s"), *filepath);

	// Query the frame rate from the video file
	double FPS = VideoCapture.get(cv::CAP_PROP_FPS);
	if (FPS <= 0)
	{
		UE_LOG(LogTemp, Warning, TEXT("Could not query the frame rate, falling back to 30 FPS"));
		FPS = 30.0; // fall back to 30 FPS if the query fails
	}
	FrameTime = 1.0f / FPS;
	bIsVideoPlaying = true;
	UE_LOG(LogTemp, Warning, TEXT("Video frame rate: %f FPS"), FPS);

	// Initialize the texture that will receive the decoded frames
	int width = static_cast<int>(VideoCapture.get(cv::CAP_PROP_FRAME_WIDTH));
	int height = static_cast<int>(VideoCapture.get(cv::CAP_PROP_FRAME_HEIGHT));

	UTexture2D* Texture2D = UTexture2D::CreateTransient(width, height, PF_B8G8R8A8);
	if (!Texture2D)
	{
		UE_LOG(LogTemp, Warning, TEXT("Texture CreateTransient failed"));
		return;
	}

	test2d = Texture2D;

	// Create the UI widget and set its image
	UUserWidget* bpwidget = CreateWidget<UUserWidget>(GetWorld(), NewWidgetBlueprint);
	if (!bpwidget)
	{
		UE_LOG(LogTemp, Warning, TEXT("Failed to create widget; is NewWidgetBlueprint assigned on the actor?"));
		return;
	}

	UMyUserWidget* MyUserWidget = Cast<UMyUserWidget>(bpwidget);
	if (MyUserWidget && MyUserWidget->cv_Image)
	{
		MyUserWidget->cv_Image->SetBrushFromTexture(test2d);
		MyUserWidget->cv_Image->Brush.ImageSize = FVector2D(test2d->GetSizeX(), test2d->GetSizeY());
	}

	bpwidget->AddToViewport();
}

// Called every frame
void AMyOpenCVClass::Tick(float DeltaTime)
{
	Super::Tick(DeltaTime);

	if (!VideoCapture.isOpened() || !bIsVideoPlaying)
	{
		return;
	}

	// Accumulate elapsed time
	AccumulatedTime += DeltaTime;

	// Check whether it is time for the next video frame
	if (AccumulatedTime >= FrameTime)
	{
		if (VideoCapture.read(Frame))
		{
			// Convert the BGR frame to BGRA
			cv::Mat FrameBGRA;
			cv::cvtColor(Frame, FrameBGRA, cv::COLOR_BGR2BGRA);

			// Copy the cv::Mat data into the UTexture2D row by row, respecting the destination pitch
			FTexture2DMipMap& MipMap = test2d->GetPlatformData()->Mips[0];
			void* TextureData = MipMap.BulkData.Lock(LOCK_READ_WRITE);
			int32 SrcPitch = FrameBGRA.cols * FrameBGRA.elemSize();
			int32 DstPitch = MipMap.SizeX * 4;

			for (int32 Row = 0; Row < FrameBGRA.rows; ++Row)
			{
				FMemory::Memcpy(static_cast<uint8*>(TextureData) + Row * DstPitch, FrameBGRA.ptr(Row), SrcPitch);
			}

			MipMap.BulkData.Unlock();
			test2d->UpdateResource();
		}

		// Subtract one frame interval, keeping the remainder to stay in sync
		AccumulatedTime -= FrameTime;
	}
}

// Called when the actor is destroyed or game ends
void AMyOpenCVClass::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
	Super::EndPlay(EndPlayReason);

	// Stop video capture and release its resources
	if (VideoCapture.isOpened())
	{
		VideoCapture.release();
		UE_LOG(LogTemp, Warning, TEXT("VideoCapture released successfully"));
	}

	bIsVideoPlaying = false;
}
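
As written, Tick simply stops producing new frames once VideoCapture.read fails at the end of the file. If the clip should loop, one option (a sketch, not part of the original article) is to rewind the capture when a read fails:

// Inside Tick, in place of the plain if (VideoCapture.read(Frame)) test:
if (!VideoCapture.read(Frame))
{
	// End of file: seek back to the first frame and try once more.
	VideoCapture.set(cv::CAP_PROP_POS_FRAMES, 0);
	if (!VideoCapture.read(Frame))
	{
		bIsVideoPlaying = false; // give up if the file cannot be re-read
		return;
	}
}
// ...then convert Frame to BGRA and copy it into test2d exactly as above...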

Capturing from a camera with OpenCV (the structure is the same as the video version; only the open call differs):

#include "OpencvT/Public/MyOpenCVClass.h"

#if WITH_OPENCV
#include "PreOpenCVHeaders.h"
#include "opencv2/opencv.hpp"
#include "PostOpenCVHeaders.h"
#endif

#include "ImageWrapperHelper.h"
#include "MyUserWidget.h"

// Constructor: set default values
AMyOpenCVClass::AMyOpenCVClass()
{
	PrimaryActorTick.bCanEverTick = true;
}

// OpenCV capture state. Kept at file scope to keep the example short;
// in a real project these would be members of AMyOpenCVClass.
cv::VideoCapture VideoCapture;
float FrameTime;
bool bIsVideoPlaying = false;
float AccumulatedTime = 0.0f;

cv::Mat Frame;

// Called when the game starts or when spawned
void AMyOpenCVClass::BeginPlay()
{
	Super::BeginPlay();

	// Open the camera (index 0 is usually the default device)
	int CameraIndex = 0;
	UE_LOG(LogTemp, Warning, TEXT("Trying to open camera: %d"), CameraIndex);

	// Open the camera with OpenCV
	VideoCapture.open(CameraIndex);
	if (!VideoCapture.isOpened())
	{
		UE_LOG(LogTemp, Warning, TEXT("Failed to open camera: %d"), CameraIndex);
		return;
	}
	UE_LOG(LogTemp, Warning, TEXT("Successfully opened camera: %d"), CameraIndex);

	// Query the camera frame rate (note: some cameras do not report it)
	double FPS = VideoCapture.get(cv::CAP_PROP_FPS);
	if (FPS <= 0)
	{
		UE_LOG(LogTemp, Warning, TEXT("Could not query the camera frame rate, falling back to 30 FPS"));
		FPS = 30.0; // fall back to 30 FPS if the query fails
	}
	FrameTime = 1.0f / FPS;
	bIsVideoPlaying = true;
	UE_LOG(LogTemp, Warning, TEXT("Camera frame rate: %f FPS"), FPS);

	// Initialize the texture that will receive the camera frames
	int width = static_cast<int>(VideoCapture.get(cv::CAP_PROP_FRAME_WIDTH));
	int height = static_cast<int>(VideoCapture.get(cv::CAP_PROP_FRAME_HEIGHT));

	UTexture2D* Texture2D = UTexture2D::CreateTransient(width, height, PF_B8G8R8A8);
	if (!Texture2D)
	{
		UE_LOG(LogTemp, Warning, TEXT("Texture CreateTransient failed"));
		return;
	}

	test2d = Texture2D;

	// Create the UI widget and set its image
	UUserWidget* bpwidget = CreateWidget<UUserWidget>(GetWorld(), NewWidgetBlueprint);
	if (!bpwidget)
	{
		UE_LOG(LogTemp, Warning, TEXT("Failed to create widget; is NewWidgetBlueprint assigned on the actor?"));
		return;
	}

	UMyUserWidget* MyUserWidget = Cast<UMyUserWidget>(bpwidget);
	if (MyUserWidget && MyUserWidget->cv_Image)
	{
		MyUserWidget->cv_Image->SetBrushFromTexture(test2d);
		MyUserWidget->cv_Image->Brush.ImageSize = FVector2D(test2d->GetSizeX(), test2d->GetSizeY());
	}

	bpwidget->AddToViewport();
}

// Called every frame
void AMyOpenCVClass::Tick(float DeltaTime)
{
	Super::Tick(DeltaTime);

	if (!VideoCapture.isOpened() || !bIsVideoPlaying)
	{
		return;
	}

	// Accumulate elapsed time
	AccumulatedTime += DeltaTime;

	// Check whether it is time for the next camera frame
	if (AccumulatedTime >= FrameTime)
	{
		if (VideoCapture.read(Frame))
		{
			// Convert the BGR frame to BGRA
			cv::Mat FrameBGRA;
			cv::cvtColor(Frame, FrameBGRA, cv::COLOR_BGR2BGRA);

			// Copy the cv::Mat data into the UTexture2D row by row, respecting the destination pitch
			FTexture2DMipMap& MipMap = test2d->GetPlatformData()->Mips[0];
			void* TextureData = MipMap.BulkData.Lock(LOCK_READ_WRITE);
			int32 SrcPitch = FrameBGRA.cols * FrameBGRA.elemSize();
			int32 DstPitch = MipMap.SizeX * 4;

			for (int32 Row = 0; Row < FrameBGRA.rows; ++Row)
			{
				FMemory::Memcpy(static_cast<uint8*>(TextureData) + Row * DstPitch, FrameBGRA.ptr(Row), SrcPitch);
			}

			MipMap.BulkData.Unlock();
			test2d->UpdateResource();
		}

		// Subtract one frame interval, keeping the remainder to stay in sync
		AccumulatedTime -= FrameTime;
	}
}

// Called when the actor is destroyed or game ends
void AMyOpenCVClass::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
	Super::EndPlay(EndPlayReason);

	// Stop camera capture and release its resources
	if (VideoCapture.isOpened())
	{
		UE_LOG(LogTemp, Warning, TEXT("Attempting to release VideoCapture..."));
		VideoCapture.release();
		UE_LOG(LogTemp, Warning, TEXT("VideoCapture released successfully"));
	}
	else
	{
		UE_LOG(LogTemp, Warning, TEXT("VideoCapture was not open when EndPlay was called."));
	}

	// Clean up other resources, such as the texture used by the UI widget
	if (test2d)
	{
		// Release any remaining texture resources here
		UE_LOG(LogTemp, Warning, TEXT("Releasing texture resources..."));
		test2d->ReleaseResource();
		test2d = nullptr;
	}

	bIsVideoPlaying = false;
}
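
Cameras usually open at whatever resolution the driver defaults to. If a specific size is wanted, it can be requested right after VideoCapture.open and before CreateTransient; the actual values should still be read back, because drivers are free to pick a different supported mode (a hedged sketch; 1280x720 is only an example):

// Ask the driver for a capture size; it may substitute the closest supported mode.
VideoCapture.set(cv::CAP_PROP_FRAME_WIDTH, 1280.0);
VideoCapture.set(cv::CAP_PROP_FRAME_HEIGHT, 720.0);

// Always size the texture from what the camera actually delivers,
// otherwise the row-by-row copy can overrun the mip data.
int width = static_cast<int>(VideoCapture.get(cv::CAP_PROP_FRAME_WIDTH));
int height = static_cast<int>(VideoCapture.get(cv::CAP_PROP_FRAME_HEIGHT));
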
When ncnn is used in the same source file, the Windows headers it pulls in define an UpdateResource macro that clashes with UTexture2D::UpdateResource() (the same conflict shows up around the ReleaseResource() cleanup above). Add #undef UpdateResource right after the ncnn include to remove the conflicting macro definition:

#include "ncnn/net.h"
// Remove the conflicting Windows macro definition
#undef UpdateResource
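
In context, the top of the actor .cpp would then look roughly like this (a sketch assuming the ncnn headers are reachable as ncnn/net.h, as in the snippet above); the #undef must come after every header that might pull in the Windows macro and before any call to UpdateResource():

#include "OpencvT/Public/MyOpenCVClass.h"

#if WITH_OPENCV
#include "PreOpenCVHeaders.h"
#include "opencv2/opencv.hpp"
#include "PostOpenCVHeaders.h"
#endif

// ncnn can (indirectly) include Windows headers that define an UpdateResource
// macro, which would otherwise break calls like test2d->UpdateResource().
#include "ncnn/net.h"
#undef UpdateResource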
