518 lines
18 KiB
C++
518 lines
18 KiB
C++
|
|
#include "ANSOPENVINOCL.h"
|
||
|
|
#include "Utility.h"
|
||
|
|
namespace ANSCENTER
|
||
|
|
{
|
||
|
|
bool OPENVINOCL::OptimizeModel(bool fp16, std::string& optimizedModelFolder) {
|
||
|
|
std::lock_guard<std::recursive_mutex> lock(_mutex);
|
||
|
|
if (!ANSODBase::OptimizeModel(fp16, optimizedModelFolder)) {
|
||
|
|
return false;
|
||
|
|
}
|
||
|
|
if (FileExist(_modelFilePath)) {
|
||
|
|
std::string modelName = GetFileNameWithoutExtension(_modelFilePath);
|
||
|
|
std::string binaryModelName = modelName + ".bin";
|
||
|
|
std::string modelFolder = GetParentFolder(_modelFilePath);
|
||
|
|
std::string optimizedModelPath = CreateFilePath(modelFolder, binaryModelName);
|
||
|
|
if (FileExist(optimizedModelPath)) {
|
||
|
|
this->_logger.LogDebug("OPENVINOCL::OptimizeModel", "This model is optimized. No need other optimization.", __FILE__, __LINE__);
|
||
|
|
optimizedModelFolder = modelFolder;
|
||
|
|
return true;
|
||
|
|
}
|
||
|
|
else {
|
||
|
|
this->_logger.LogFatal("OPENVINOCL::OptimizeModel", "This model can not be optimized.", __FILE__, __LINE__);
|
||
|
|
optimizedModelFolder = modelFolder;
|
||
|
|
return false;
|
||
|
|
}
|
||
|
|
}
|
||
|
|
else {
|
||
|
|
this->_logger.LogFatal("OPENVINOCL::OptimizeModel", "This model is not exist. Please check the model path again.", __FILE__, __LINE__);
|
||
|
|
optimizedModelFolder = "";
|
||
|
|
return false;
|
||
|
|
}
|
||
|
|
}
|
||
|
|
// Loads an OpenVINO classification model from a (possibly password-protected)
// zip archive. The base class extracts the archive and populates members such
// as _modelFolder / _modelConfigFile / _modelFilePath (presumably — set
// outside this block; verify against ANSODBase::LoadModel). This override
// then resolves the class list and input shape, and compiles the model.
// Returns true on success, false on any failure or exception (logged).
bool OPENVINOCL::LoadModel(const std::string& modelZipFilePath, const std::string& modelZipPassword) {
    std::lock_guard<std::recursive_mutex> lock(_mutex);
    try {
        // Base class unpacks the archive; nothing else to do if it fails.
        bool result = ANSODBase::LoadModel(modelZipFilePath, modelZipPassword);
        if (!result) return false;
        // 0. Check if the configuration file exist
        if (FileExist(_modelConfigFile)) {
            // New-style package: config file supplies classes and input shape.
            ModelType modelType;
            std::vector<int> inputShape;
            _classes = ANSUtilityHelper::GetConfigFileContent(_modelConfigFile, modelType, inputShape);
            if (inputShape.size() == 2) {
                // Only override the defaults with positive (valid) dimensions.
                if (inputShape[0] > 0)_modelConfig.inpHeight = inputShape[0];
                if (inputShape[1] > 0)_modelConfig.inpWidth = inputShape[1];
            }
        }
        else {// This is old version of model zip file
            // Legacy layout: fixed file names inside the extracted folder.
            std::string onnxfile = CreateFilePath(_modelFolder, "train_last.xml");//yolov8n.xml
            if (std::filesystem::exists(onnxfile)) {
                _modelFilePath = onnxfile;
                _classFilePath = CreateFilePath(_modelFolder, "classes.names");
                this->_logger.LogDebug("OPENVINOCL::Initialize. Loading OpenVINO weight", _modelFilePath, __FILE__, __LINE__);
            }
            else {
                this->_logger.LogError("OPENVINOCL::Initialize. Model file is not exist", _modelFilePath, __FILE__, __LINE__);
                return false;
            }
            // Fall back to an embedded class list when the names file is
            // missing or unreadable.
            std::ifstream isValidFileName(_classFilePath);
            if (!isValidFileName)
            {
                this->_logger.LogDebug("OPENVINOCL::Initialize. Load classes from string", _classFilePath, __FILE__, __LINE__);
                LoadClassesFromString();
            }
            else {
                this->_logger.LogDebug("OPENVINOCL::Initialize. Load classes from file", _classFilePath, __FILE__, __LINE__);
                LoadClassesFromFile();
            }
        }
        // Load Model from Here
        // NOTE(review): InitialModel() swallows its own exceptions, so
        // _isInitialized may be set to true even when model compilation
        // failed — confirm whether that is intended.
        InitialModel();
        _isInitialized = true;
        return true;
    }
    catch (std::exception& e) {
        this->_logger.LogFatal("OPENVINOCL::LoadModel", e.what(), __FILE__, __LINE__);
        return false;
    }
}
|
||
|
|
// Loads an OpenVINO classification model from an already-extracted folder.
// modelName defaults to "train_last" when empty; className names the class
// list file inside the folder. labelMap receives a comma-separated list of
// class names. Returns true on success, false on failure or exception.
bool OPENVINOCL::LoadModelFromFolder(std::string licenseKey, ModelConfig modelConfig, std::string modelName, std::string className, const std::string& modelFolder, std::string& labelMap) {
    std::lock_guard<std::recursive_mutex> lock(_mutex);
    try {
        // Base class validates the license and records the folder
        // (presumably filling _modelFolder/_modelConfigFile — verify).
        bool result = ANSODBase::LoadModelFromFolder(licenseKey, modelConfig, modelName, className, modelFolder, labelMap);
        if (!result) return false;
        // Default model base name for legacy packages.
        std::string _modelName = modelName;
        if (_modelName.empty()) {
            _modelName = "train_last";
        }
        std::string modelFullName = _modelName + ".xml";
        // Parsing for YOLO only here
        // Force classification defaults; 224x224 is the assumed network
        // input and may be overridden by the config file below.
        _modelConfig = modelConfig;
        _modelConfig.detectionType = ANSCENTER::DetectionType::CLASSIFICATION;
        _modelConfig.modelType = ModelType::OPENVINO;
        _modelConfig.inpHeight = 224;
        _modelConfig.inpWidth = 224;
        // Clamp suspiciously low thresholds up to a sane default.
        if (_modelConfig.modelMNSThreshold < 0.2)
            _modelConfig.modelMNSThreshold = 0.5;
        if (_modelConfig.modelConfThreshold < 0.2)
            _modelConfig.modelConfThreshold = 0.5;
        // 0. Check if the configuration file exist
        if (FileExist(_modelConfigFile)) {
            // New-style package: config file supplies classes and input shape.
            ModelType modelType;
            std::vector<int> inputShape;
            _classes = ANSUtilityHelper::GetConfigFileContent(_modelConfigFile, modelType, inputShape);
            if (inputShape.size() == 2) {
                // Only override the 224x224 defaults with positive values.
                if (inputShape[0] > 0)_modelConfig.inpHeight = inputShape[0];
                if (inputShape[1] > 0)_modelConfig.inpWidth = inputShape[1];
            }
        }
        else {// This is old version of model zip file
            std::string onnxfile = CreateFilePath(_modelFolder, modelFullName);//yolov8n.xml
            if (std::filesystem::exists(onnxfile)) {
                _modelFilePath = onnxfile;
                _classFilePath = CreateFilePath(_modelFolder, className);
                this->_logger.LogDebug("OPENVINOCL::Initialize. Loading OpenVINO weight", _modelFilePath, __FILE__, __LINE__);
            }
            else {
                this->_logger.LogError("OPENVINOCL::Initialize. Model file is not exist", _modelFilePath, __FILE__, __LINE__);
                return false;
            }
            // Fall back to an embedded class list when the names file is
            // missing or unreadable.
            std::ifstream isValidFileName(_classFilePath);
            if (!isValidFileName)
            {
                this->_logger.LogDebug("OPENVINOCL::Initialize. Load classes from string", _classFilePath, __FILE__, __LINE__);
                LoadClassesFromString();
            }
            else {
                this->_logger.LogDebug("OPENVINOCL::Initialize. Load classes from file", _classFilePath, __FILE__, __LINE__);
                LoadClassesFromFile();
            }
        }

        // 1. Load labelMap and engine
        labelMap.clear();
        if (!_classes.empty())
            labelMap = VectorToCommaSeparatedString(_classes);

        // Load Model from Here
        // NOTE(review): InitialModel() swallows its own exceptions, so
        // _isInitialized may be set to true even when model compilation
        // failed — confirm whether that is intended.
        InitialModel();
        _isInitialized = true;
        return true;
    }
    catch (std::exception& e) {
        this->_logger.LogFatal("OPENVINOCL::LoadModel", e.what(), __FILE__, __LINE__);
        return false;
    }
}
|
||
|
|
// Pads the input image to a square (top-left aligned, black background)
// and guarantees a 3-channel BGR result for the network.
//
// @param source Input image; may be 1-channel (grayscale), 3-channel (BGR),
//               or 4-channel (BGRA). Returns an empty Mat when source is empty.
// @return Square CV_8UC3 image of side max(cols, rows).
cv::Mat OPENVINOCL::PreProcessing(const cv::Mat& source) {
    if (source.empty()) {
        std::cerr << "Error: Empty image provided to PreProcessing" << std::endl;
        return cv::Mat();
    }

    // Normalize the channel count to 3 (BGR).
    cv::Mat processedImage;
    switch (source.channels()) {
    case 1:
        cv::cvtColor(source, processedImage, cv::COLOR_GRAY2BGR);
        break;
    case 4:
        // Fix: a 4-channel (BGRA) input previously reached copyTo() against
        // the CV_8UC3 destination ROI below and failed at runtime; drop the
        // alpha channel instead.
        cv::cvtColor(source, processedImage, cv::COLOR_BGRA2BGR);
        break;
    default:
        processedImage = source;
        break;
    }

    const int col = processedImage.cols;
    const int row = processedImage.rows;
    const int maxSize = std::max(col, row);

    // Create a square padded image with a black background and copy the
    // original image into its top-left corner.
    cv::Mat result = cv::Mat::zeros(maxSize, maxSize, CV_8UC3);
    processedImage.copyTo(result(cv::Rect(0, 0, col, row)));

    return result;
}
|
||
|
|
// Initializes the classifier from a zipped model package: validates the
// license and extracts the archive (base class), resolves classes and input
// shape, and compiles the OpenVINO model. labelMap receives a
// comma-separated class list. Returns true on success, false otherwise.
bool OPENVINOCL::Initialize(std::string licenseKey, ModelConfig modelConfig, const std::string& modelZipFilePath, const std::string& modelZipPassword, std::string& labelMap) {
    std::lock_guard<std::recursive_mutex> lock(_mutex);
    try {
        // Retained for the (commented-out) debug log below; currently unused.
        std::string openVINOVersion = ov::get_openvino_version().buildNumber;
        //this->_logger.LogDebug("OPENVINOCL::Initialize. OpenVINO version", openVINOVersion, __FILE__, __LINE__);
        bool result = ANSODBase::Initialize(licenseKey, modelConfig, modelZipFilePath, modelZipPassword, labelMap);
        if (!result) return false;
        // Parsing for YOLO only here
        // Force classification defaults; 224x224 is the assumed network
        // input and may be overridden by the config file below.
        _modelConfig = modelConfig;
        _modelConfig.detectionType = ANSCENTER::DetectionType::CLASSIFICATION;
        _modelConfig.modelType = ModelType::OPENVINO;
        _modelConfig.inpHeight = 224;
        _modelConfig.inpWidth = 224;
        // Clamp suspiciously low thresholds up to a sane default.
        if (_modelConfig.modelMNSThreshold < 0.2)
            _modelConfig.modelMNSThreshold = 0.5;
        if (_modelConfig.modelConfThreshold < 0.2)
            _modelConfig.modelConfThreshold = 0.5;
        // 0. Check if the configuration file exist
        if (FileExist(_modelConfigFile)) {
            // New-style package: config file supplies classes and input shape.
            ModelType modelType;
            std::vector<int> inputShape;
            _classes = ANSUtilityHelper::GetConfigFileContent(_modelConfigFile, modelType, inputShape);
            if (inputShape.size() == 2) {
                // Only override the 224x224 defaults with positive values.
                if (inputShape[0] > 0)_modelConfig.inpHeight = inputShape[0];
                if (inputShape[1] > 0)_modelConfig.inpWidth = inputShape[1];
            }
        }
        else {// This is old version of model zip file
            // Legacy layout: fixed file names inside the extracted folder.
            std::string onnxfile = CreateFilePath(_modelFolder, "train_last.xml");//yolov8n.xml
            if (std::filesystem::exists(onnxfile)) {
                _modelFilePath = onnxfile;
                _classFilePath = CreateFilePath(_modelFolder, "classes.names");
                this->_logger.LogDebug("OPENVINOCL::Initialize. Loading OpenVINO weight", _modelFilePath, __FILE__, __LINE__);
            }
            else {
                this->_logger.LogError("OPENVINOCL::Initialize. Model file is not exist", _modelFilePath, __FILE__, __LINE__);
                return false;
            }
            // Fall back to an embedded class list when the names file is
            // missing or unreadable.
            std::ifstream isValidFileName(_classFilePath);
            if (!isValidFileName)
            {
                this->_logger.LogDebug("OPENVINOCL::Initialize. Load classes from string", _classFilePath, __FILE__, __LINE__);
                LoadClassesFromString();
            }
            else {
                this->_logger.LogDebug("OPENVINOCL::Initialize. Load classes from file", _classFilePath, __FILE__, __LINE__);
                LoadClassesFromFile();
            }
        }

        // 1. Load labelMap and engine
        labelMap.clear();
        if (!_classes.empty())
            labelMap = VectorToCommaSeparatedString(_classes);

        // Load Model from Here
        // NOTE(review): InitialModel() swallows its own exceptions, so
        // _isInitialized may be set to true even when model compilation
        // failed — confirm whether that is intended.
        InitialModel();
        _isInitialized = true;
        return true;

    }
    catch (std::exception& e) {
        this->_logger.LogFatal("OPENVINOCL::Initialize", e.what(), __FILE__, __LINE__);
        return false;
    }
}
|
||
|
|
// Convenience overload: runs inference tagging results with the default
// camera identifier.
std::vector<Object> OPENVINOCL::RunInference(const cv::Mat& input) {
    static const std::string defaultCameraId = "CustomCam";
    return RunInference(input, defaultCameraId);
}
|
||
|
|
// Runs single-image classification. Returns a one-element vector holding the
// top-scoring class (with a full-image bounding box), or an empty vector on
// any validation failure or exception.
std::vector<Object> OPENVINOCL::RunInference(const cv::Mat& input,const std::string& camera_id)
{
    std::lock_guard<std::recursive_mutex> lock(_mutex);

    // Early validation
    if (!_licenseValid) {
        _logger.LogError("OPENVINOCL::RunInference", "Invalid License",
            __FILE__, __LINE__);
        return {};
    }

    if (!_isInitialized) {
        _logger.LogError("OPENVINOCL::RunInference", "Model is not initialized",
            __FILE__, __LINE__);
        return {};
    }

    // Reject degenerate inputs (images smaller than 20px per side).
    if (input.empty() || input.cols < 20 || input.rows < 20) {
        _logger.LogError("OPENVINOCL::RunInference", "Input image is invalid",
            __FILE__, __LINE__);
        return {};
    }

    try {
        // Step 1: Resize directly to target size (no clone needed!)
        // NOTE(review): 224x224 is hard-coded here rather than taken from
        // _modelConfig.inpWidth/inpHeight — confirm this matches the model.
        cv::Mat resized;
        cv::resize(input, resized, cv::Size(224, 224), 0, 0, cv::INTER_LINEAR);

        // Step 2: Preprocessing
        cv::Mat letterbox_img = PreProcessing(resized);

        // Step 3: Create blob (scaled to [0,1], BGR->RGB swap enabled)
        cv::Mat blob = cv::dnn::blobFromImage(letterbox_img, 1.0 / 255.0,
            cv::Size(224, 224), cv::Scalar(), true);

        // Step 4: Set input tensor
        // NOTE(review): the tensor wraps blob's buffer with the model's
        // reported shape — assumes the model input is 1x3x224x224; verify.
        auto input_port = compiled_model_.input();
        ov::Tensor input_tensor(input_port.get_element_type(),
            input_port.get_shape(), blob.ptr(0));
        inference_request_.set_input_tensor(input_tensor);

        // Step 5: Run inference
        inference_request_.start_async();
        inference_request_.wait();

        // Step 6: Get output
        auto output = inference_request_.get_output_tensor(0);
        auto output_shape = output.get_shape();

        if (output_shape.size() < 2) {
            _logger.LogError("OPENVINOCL::RunInference", "Invalid output shape",
                __FILE__, __LINE__);
            return {};
        }

        // Step 7: Find max class (no vector copy needed!)
        // Assumes a float32 output tensor laid out [1, num_classes].
        const float* output_buffer = output.data<const float>();
        const size_t num_classes = output_shape[1];

        auto max_ptr = std::max_element(output_buffer, output_buffer + num_classes);
        const int class_id = static_cast<int>(std::distance(output_buffer, max_ptr));
        const float score = *max_ptr;

        // Step 8: Create result object
        Object clsResult;
        clsResult.classId = class_id;
        clsResult.confidence = score;
        clsResult.cameraId = camera_id;

        // Set class name; out-of-range ids fall back to the last known class.
        const int classNameSize = static_cast<int>(_classes.size());
        if (!_classes.empty()) {
            clsResult.className = (class_id < classNameSize)
                ? _classes[class_id]
                : _classes.back();
        }
        else {
            clsResult.className = "Unknown";
        }

        // Set bounding box (full image for classification)
        clsResult.box = cv::Rect(0, 0, input.cols, input.rows);

        // Set polygon
        clsResult.polygon = ANSUtilityHelper::RectToNormalizedPolygon(
            clsResult.box, input.cols, input.rows
        );

        // Return single-element vector
        return { std::move(clsResult) };

    }
    catch (const std::exception& e) {
        _logger.LogFatal("OPENVINOCL::RunInference", e.what(), __FILE__, __LINE__);
        return {};
    }
    catch (...) {
        _logger.LogFatal("OPENVINOCL::RunInference", "Unknown exception",
            __FILE__, __LINE__);
        return {};
    }
}
|
||
|
|
// Destructor: best-effort removal of the extracted model folder from disk.
// A destructor must never throw, so every exception is contained here.
OPENVINOCL::~OPENVINOCL() {
    try {
        if (FolderExist(_modelFolder)) {
            if (!DeleteFolder(_modelFolder)) {
                this->_logger.LogError("OPENVINOCL::~OPENVINOCL", "Failed to delete OpenVINO Models", __FILE__, __LINE__);
            }
        }
    }
    // Fix: catch by const reference and drop the unused variable name
    // (was `std::exception& e` with `e` never read — compiler warning).
    catch (const std::exception&) {
        this->_logger.LogError("OPENVINOCL::~OPENVINOCL()", "Failed to release OPENVINO Models", __FILE__, __LINE__);
    }
    // Fix: also contain non-std exceptions so the destructor cannot throw
    // (an escaping exception from a destructor terminates the program).
    catch (...) {
        this->_logger.LogError("OPENVINOCL::~OPENVINOCL()", "Failed to release OPENVINO Models", __FILE__, __LINE__);
    }
}
|
||
|
|
bool OPENVINOCL::Destroy() {
|
||
|
|
try {
|
||
|
|
if (FolderExist(_modelFolder)) {
|
||
|
|
if (!DeleteFolder(_modelFolder)) {
|
||
|
|
this->_logger.LogError("OPENVINOCL::Destroy", "Failed to delete OpenVINO Models", __FILE__, __LINE__);
|
||
|
|
}
|
||
|
|
}
|
||
|
|
return true;
|
||
|
|
}
|
||
|
|
catch (std::exception& e) {
|
||
|
|
this->_logger.LogError("OPENVINOCL::Destroy()", "Failed to release OPENVINO Models", __FILE__, __LINE__);
|
||
|
|
return false;
|
||
|
|
}
|
||
|
|
}
|
||
|
|
//private
|
||
|
|
void OPENVINOCL::InitialModel() {
|
||
|
|
try {
|
||
|
|
// Step 1: Initialize OpenVINO Runtime Core
|
||
|
|
ov::Core core;
|
||
|
|
|
||
|
|
// Step 2: Load Model
|
||
|
|
// Get available devices
|
||
|
|
std::vector<std::string> available_devices = core.get_available_devices();
|
||
|
|
bool device_found = false;
|
||
|
|
|
||
|
|
// Search for NPU
|
||
|
|
auto it = std::find(available_devices.begin(), available_devices.end(), "NPU");
|
||
|
|
if (it != available_devices.end()) {
|
||
|
|
core.set_property("NPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY));
|
||
|
|
core.set_property("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY));
|
||
|
|
compiled_model_ = core.compile_model(_modelFilePath, "AUTO:NPU,GPU");
|
||
|
|
device_found = true;
|
||
|
|
//this->_logger.LogDebug("OPENVINOCL::InitialModel", "Using NPU for inference.", __FILE__, __LINE__);
|
||
|
|
}
|
||
|
|
|
||
|
|
// If NPU not found, search for GPU
|
||
|
|
if (!device_found) {
|
||
|
|
it = std::find(available_devices.begin(), available_devices.end(), "GPU");
|
||
|
|
if (it != available_devices.end()) {
|
||
|
|
core.set_property("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY));
|
||
|
|
compiled_model_ = core.compile_model(_modelFilePath, "GPU");
|
||
|
|
device_found = true;
|
||
|
|
//this->_logger.LogDebug("OPENVINOCL::InitialModel", "Using GPU for inference.", __FILE__, __LINE__);
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
// If GPU not found, search for GPU.0
|
||
|
|
if (!device_found) {
|
||
|
|
it = std::find(available_devices.begin(), available_devices.end(), "GPU.0");
|
||
|
|
if (it != available_devices.end()) {
|
||
|
|
core.set_property("GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY));
|
||
|
|
compiled_model_ = core.compile_model(_modelFilePath, "GPU");
|
||
|
|
device_found = true;
|
||
|
|
//this->_logger.LogDebug("OPENVINOCL::InitialModel", "Using GPU for inference.", __FILE__, __LINE__);
|
||
|
|
}
|
||
|
|
}
|
||
|
|
// If neither NPU nor GPU found, default to CPU
|
||
|
|
if (!device_found) {
|
||
|
|
core.set_property("CPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY));
|
||
|
|
compiled_model_ = core.compile_model(_modelFilePath, "CPU");
|
||
|
|
//this->_logger.LogDebug("OPENVINOCL::InitialModel", "Using CPU for inference.", __FILE__, __LINE__);
|
||
|
|
}
|
||
|
|
|
||
|
|
// Step 3: Create Inference Request
|
||
|
|
inference_request_ = compiled_model_.create_infer_request();
|
||
|
|
|
||
|
|
}
|
||
|
|
catch (const std::exception& e) {
|
||
|
|
this->_logger.LogFatal("OPENVINOCL::InitialModel", e.what(), __FILE__, __LINE__);
|
||
|
|
}
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
|
||
|
|
//std::vector<Object> OPENVINOCL::RunInference(const cv::Mat& input, const std::string& camera_id) {
|
||
|
|
// std::lock_guard<std::recursive_mutex> lock(_mutex);
|
||
|
|
// std::vector<Object> outputs;
|
||
|
|
|
||
|
|
// if (!_licenseValid) {
|
||
|
|
// this->_logger.LogError("OPENVINOCL::RunInference", "Invalid License", __FILE__, __LINE__);
|
||
|
|
// return outputs;
|
||
|
|
// }
|
||
|
|
// if (!_isInitialized) {
|
||
|
|
// this->_logger.LogError("OPENVINOCL::RunInference", "Model is not initialized", __FILE__, __LINE__);
|
||
|
|
// return outputs;
|
||
|
|
// }
|
||
|
|
|
||
|
|
// try {
|
||
|
|
// // Step 0: Prepare input
|
||
|
|
// if (input.empty()) {
|
||
|
|
// this->_logger.LogError("OPENVINOCL::RunInference", "Input image is empty", __FILE__, __LINE__);
|
||
|
|
// return outputs;
|
||
|
|
// }
|
||
|
|
// if ((input.cols < 20) || (input.rows < 20)) return outputs;
|
||
|
|
// cv::Mat img = input.clone();
|
||
|
|
// cv::resize(img, img, cv::Size(224, 224));
|
||
|
|
// cv::Mat letterbox_img = PreProcessing(img);
|
||
|
|
// cv::Mat blob = cv::dnn::blobFromImage(letterbox_img, 1.0 / 255.0, cv::Size(224, 224), cv::Scalar(), true);
|
||
|
|
|
||
|
|
// // Step 1: Feed blob to the network
|
||
|
|
// auto input_port = compiled_model_.input();
|
||
|
|
// ov::Tensor input_tensor(input_port.get_element_type(), input_port.get_shape(), blob.ptr(0));
|
||
|
|
// inference_request_.set_input_tensor(input_tensor);
|
||
|
|
|
||
|
|
// // Step 3: Start inference
|
||
|
|
// inference_request_.start_async();
|
||
|
|
// inference_request_.wait();
|
||
|
|
|
||
|
|
// // Step 4: Get output
|
||
|
|
// auto output = inference_request_.get_output_tensor(0);
|
||
|
|
// auto output_shape = output.get_shape();
|
||
|
|
|
||
|
|
// if (output_shape.size() < 2) {
|
||
|
|
// this->_logger.LogError("OPENVINOCL::RunInference", "Invalid output shape", __FILE__, __LINE__);
|
||
|
|
// return outputs;
|
||
|
|
// }
|
||
|
|
|
||
|
|
// // Step 5: Post-processing
|
||
|
|
// const float* output_buffer = output.data<const float>();
|
||
|
|
// std::vector<float> result(output_buffer, output_buffer + output_shape[1]);
|
||
|
|
|
||
|
|
// auto max_idx = std::max_element(result.begin(), result.end());
|
||
|
|
// int class_id = static_cast<int>(std::distance(result.begin(), max_idx));
|
||
|
|
// float score = *max_idx;
|
||
|
|
// int classNameSize = static_cast<int>(_classes.size());
|
||
|
|
// Object clsResult;
|
||
|
|
// clsResult.classId = class_id;
|
||
|
|
// //clsResult.className = (class_id < _classes.size()) ? _classes[class_id] : "";
|
||
|
|
// if (!_classes.empty()) {
|
||
|
|
// if (clsResult.classId < classNameSize) {
|
||
|
|
// clsResult.className = _classes[clsResult.classId];
|
||
|
|
// }
|
||
|
|
// else {
|
||
|
|
// clsResult.className = _classes[classNameSize - 1]; // Use last valid class name if out of range
|
||
|
|
// }
|
||
|
|
// }
|
||
|
|
// else {
|
||
|
|
// clsResult.className = "Unknown"; // Fallback if _classes is empty
|
||
|
|
// }
|
||
|
|
|
||
|
|
// clsResult.confidence = score;
|
||
|
|
// if (input.cols > 20 && input.rows > 20) {
|
||
|
|
// clsResult.box = cv::Rect(10, 10, input.cols - 20, input.rows - 20);
|
||
|
|
// }
|
||
|
|
// else {
|
||
|
|
// clsResult.box = cv::Rect(0, 0, input.cols, input.rows);
|
||
|
|
// }
|
||
|
|
// clsResult.polygon = ANSUtilityHelper::RectToNormalizedPolygon(clsResult.box, input.cols, input.rows);
|
||
|
|
// clsResult.cameraId = camera_id;
|
||
|
|
// outputs.push_back(clsResult);
|
||
|
|
|
||
|
|
// return outputs;
|
||
|
|
// }
|
||
|
|
// catch (const std::exception& e) {
|
||
|
|
// this->_logger.LogFatal("OPENVINOCL::RunInference", e.what(), __FILE__, __LINE__);
|
||
|
|
// }
|
||
|
|
// catch (...) {
|
||
|
|
// this->_logger.LogFatal("OPENVINOCL::RunInference", "Unknown exception occurred", __FILE__, __LINE__);
|
||
|
|
// }
|
||
|
|
|
||
|
|
// return outputs;
|
||
|
|
//}
|