// ANSCORE/modules/ANSLPR/ANSLPR_RT.cpp
// ANSLPR runtime: two-stage automatic licence plate recognition (ALPR)
// built on ONNX Runtime (YOLOv5 plate detectors) with an optional
// plate-type classifier stage.
#include "ANSLPR_RT.h"
namespace ANSCENTER {
ANSALPR_RT::ANSALPR_RT() {
    // Fresh instance: no model sessions yet. An id of 0 is the
    // "no model loaded" sentinel used throughout this class.
    _globalViewId = 0;
    _focusedOnLPRId = 0;
    _platesTypesClassifierId = 0;
    // The licence only becomes valid after Initialize() runs CheckLicense().
    _licenseValid = false;
}
// Destructor: releases every ONNX Runtime session/environment this
// instance created (same cleanup path as Destroy()).
ANSALPR_RT::~ANSALPR_RT() {
CloseReferences();
}
// Public teardown entry point: releases all model sessions.
// Always returns true; individual close failures are only logged
// inside CloseReferences().
bool ANSALPR_RT::Destroy() {
CloseReferences();
return true;
}
bool ANSALPR_RT::CloseReferences() {
    // Releases the two YOLO detector sessions and the plate-type classifier
    // session owned by this instance, resetting each id to 0 on success.
    // Returns true when the cleanup sequence completed; false only when an
    // exception escaped one of the close calls.
    try {
        // BUGFIX: an id of 0 means "never initialised / already closed".
        // The close helpers report failure for unknown ids, so closing an
        // id of 0 produced spurious LogFatal entries (e.g. when the object
        // was destroyed without Initialize() ever being called). Skip them.
        if (_globalViewId != 0) {
            if (CloseDetector(_globalViewId)) {
                _globalViewId = 0;
            }
            else {
                this->_logger->LogFatal("ANSALPR_RT::CloseReferences", "Cannot close global view model reference", __FILE__, __LINE__);
            }
        }
        if (_focusedOnLPRId != 0) {
            if (CloseDetector(_focusedOnLPRId)) {
                _focusedOnLPRId = 0;
            }
            else {
                this->_logger->LogFatal("ANSALPR_RT::CloseReferences", "Cannot close focused on LRP model reference", __FILE__, __LINE__);
            }
        }
        if (_platesTypesClassifierId != 0) {
            if (ClosePlatesTypesClassifier(_platesTypesClassifierId)) {
                _platesTypesClassifierId = 0;
            }
            else {
                this->_logger->LogFatal("ANSALPR_RT::CloseReferences", "Cannot close plate type classifer model reference", __FILE__, __LINE__);
            }
        }
        return true;
    }
    catch (std::exception& e) {
        this->_logger->LogFatal("ANSALPR_RT::CloseReferences", e.what(), __FILE__, __LINE__);
        return false;
    }
}
bool ANSALPR_RT::Initialize(const std::string& licenseKey, const std::string& modelZipFilePath, const std::string& modelZipPassword) {
    // Validates the licence, extracts the password-protected model archive
    // and loads the three ONNX models (two YOLO plate detectors plus the
    // plate-type classifier). On success the member session ids
    // (_globalViewId, _focusedOnLPRId, _platesTypesClassifierId) are set
    // and true is returned; any failure returns false after logging.
    try {
        _licenseKey = licenseKey;
        _licenseValid = false;
        CheckLicense(); // sets _licenseValid as a side effect
        if (!_licenseValid) {
            this->_logger->LogError("ANSALPR::Initialize.", "License is not valid.", __FILE__, __LINE__);
            return false;
        }
        // 0. The archive must exist before any extraction attempt.
        if (!FileExist(modelZipFilePath)) {
            this->_logger->LogFatal("ANSALPR::Initialize", "Model zip file is not exist", __FILE__, __LINE__);
            // BUGFIX: previously fell through and attempted extraction anyway.
            return false;
        }
        // 1. Unzip the archive into _modelFolder, trying the caller-supplied
        // password first and then the built-in defaults.
        std::vector<std::string> passwordArray;
        if (!modelZipPassword.empty()) passwordArray.push_back(modelZipPassword);
        passwordArray.push_back("AnsDemoModels20@!");
        passwordArray.push_back("Sh7O7nUe7vJ/417W0gWX+dSdfcP9hUqtf/fEqJGqxYL3PedvHubJag==");
        passwordArray.push_back("3LHxGrjQ7kKDJBD9MX86H96mtKLJaZcTYXrYRdQgW8BKGt7enZHYMg==");
        std::string modelName = GetFileNameWithoutExtension(modelZipFilePath);
        for (const std::string& password : passwordArray) {
            if (ExtractPasswordProtectedZip(modelZipFilePath, password, modelName, _modelFolder, false))
                break; // first password that works wins
        }
        // 2. A missing output folder means extraction failed
        // (corrupt archive or no password matched).
        if (!FolderExist(_modelFolder)) {
            this->_logger->LogError("ANSFDBase::Initialize. Output model folder is not exist", _modelFolder, __FILE__, __LINE__);
            return false;
        }
        // 3. Resolve the individual model files.
        // NOTE(review): the "_global..." member is assigned the
        // focused_on_lp.onnx file and the "_focusedOnLPR..." member the
        // global_view.onnx file — this looks swapped, but is preserved here
        // because the pipeline may depend on it; confirm against the model
        // repository documentation before changing.
        _globalModelFileName = CreateFilePath(_modelFolder, "anslpr_alpr_focused_on_lp.onnx");
        if (!FileExist(_globalModelFileName)) {
            this->_logger->LogError("ANSALPR::Initialize. Global view model does not exist", _globalModelFileName, __FILE__, __LINE__);
            return false;
        }
        _focusedOnLPRModelFileName = CreateFilePath(_modelFolder, "anslpr_alpr_global_view.onnx");
        if (!FileExist(_focusedOnLPRModelFileName)) {
            this->_logger->LogError("ANSALPR::Initialize. Focused On LRP model does not exist", _focusedOnLPRModelFileName, __FILE__, __LINE__);
            return false;
        }
        _platesTypesClassifierFileName = CreateFilePath(_modelFolder, "plates_types_7.onnx");
        if (!FileExist(_platesTypesClassifierFileName)) {
            this->_logger->LogError("ANSALPR::Initialize.LRP classifier model does not exist", _platesTypesClassifierFileName, __FILE__, __LINE__);
            return false;
        }
        _platesTypesLabelsFileName = CreateFilePath(_modelFolder, "plates_types_7.txt");
        if (!FileExist(_platesTypesLabelsFileName)) {
            this->_logger->LogError("ANSALPR::Initialize.LRP classifier label model does not exist", _platesTypesLabelsFileName, __FILE__, __LINE__);
            return false;
        }
        // 4. Load the models. Each Init* call returns a session id (> 0)
        // or 0 on failure. Both YOLO models go through InitYoloDetector.
        _globalViewId = InitYoloDetector(static_cast<unsigned int>(_globalModelFileName.size()), _globalModelFileName.c_str());
        if (_globalViewId <= 0) {
            this->_logger->LogError("ANSALPR::Initialize.", "Global view model cannot be loaded", __FILE__, __LINE__);
            return false;
        }
        _focusedOnLPRId = InitYoloDetector(static_cast<unsigned int>(_focusedOnLPRModelFileName.size()), _focusedOnLPRModelFileName.c_str());
        if (_focusedOnLPRId <= 0) {
            this->_logger->LogError("ANSALPR::Initialize.", "Focused on LPR model cannot be loaded", __FILE__, __LINE__);
            return false;
        }
        _platesTypesClassifierId = InitPlatesClassifer(static_cast<unsigned int>(_platesTypesClassifierFileName.size()), _platesTypesClassifierFileName.c_str(),
            static_cast<unsigned int>(_platesTypesLabelsFileName.size()), _platesTypesLabelsFileName.c_str());
        if (_platesTypesClassifierId <= 0) {
            this->_logger->LogError("ANSALPR::Initialize.", "Plate type classifier model cannot be loaded", __FILE__, __LINE__);
            return false;
        }
        return true;
    }
    catch (std::exception& e) {
        this->_logger->LogFatal("ANSALPR::Initialize", e.what(), __FILE__, __LINE__);
        return false;
    }
}
bool ANSALPR_RT::Inference(const cv::Mat& input, std::string& lprResult)
{
    // Runs the full two-stage ALPR pipeline (plus plate-type classifier)
    // over the whole frame. lprResult receives a JSON document with a
    // "results" array (empty when nothing was detected); returns true
    // when a plate was read.
    if (!_licenseValid) {
        lprResult = "";
        return false;
    }
    try {
        const size_t lpn_len = 15;
        // BUGFIX: the plate-text buffer was heap-allocated with new[] and
        // never freed (leaked on every call). A fixed stack buffer suffices:
        // the detection helper null-terminates within lpn_len characters.
        char lpn[lpn_len + 1] = { 0 };
        std::vector<ALPRObject> output;
        cv::Rect bbox;
        bool detected = TwoStageLPRPlatesTypeDetection(
            input.cols,           // width of image
            input.rows,           // height of image
            input.channels(),     // 1 (grey), 3 (RGB) or 4 (RGBA)
            input.data,
            input.step,           // row stride of the pixel buffer
            _globalViewId,
            _focusedOnLPRId,      // id of the second-stage detector
            _platesTypesClassifierId, // id of the plate-type classifier
            lpn_len,
            lpn, bbox);
        if (detected) {
            ALPRObject result;
            result.classId = 0;
            result.className = lpn;
            result.confidence = 1.0;
            result.box = bbox;
            output.push_back(result);
        }
        lprResult = VectorDetectionToJsonString(output);
        return detected;
    }
    catch (std::exception& e) {
        // BUGFIX: log context previously said "ANSALPR::CheckStatus".
        this->_logger->LogFatal("ANSALPR::Inference", e.what(), __FILE__, __LINE__);
        lprResult = "";
        return false;
    }
}
bool ANSALPR_RT::Inference(const cv::Mat& input, const std::vector<cv::Rect>& Bbox, std::string& lprResult) {
    // Runs plate recognition either inside each caller-supplied region
    // (when Bbox is non-empty) or over the whole frame. lprResult receives
    // a JSON "results" array with boxes translated back to frame
    // coordinates; the return value is the detection flag of the last
    // region processed (unchanged contract from the original code).
    if (!_licenseValid) {
        lprResult = "";
        return false;
    }
    try {
        const size_t lpn_len = 15;
        // BUGFIX: was heap-allocated with new[] and leaked on every call.
        char lpn[lpn_len + 1] = { 0 };
        std::vector<ALPRObject> output;
        bool detected = false;
        if (!Bbox.empty()) {
            cv::Mat frame = input.clone();
            const cv::Rect frameRect(0, 0, frame.cols, frame.rows);
            // BUGFIX: the original iterated with a mutable iterator over a
            // const vector, which does not compile; use a range-for.
            for (const cv::Rect& roi : Bbox) {
                // BUGFIX: clamp the ROI to the frame so an out-of-bounds
                // rectangle cannot make cv::Mat::operator() throw.
                cv::Rect objectPos = roi & frameRect;
                if (objectPos.width <= 0 || objectPos.height <= 0) continue;
                cv::Mat croppedObject = frame(objectPos);
                cv::Rect bbox;
                detected = TwoStageLPRPlatesTypeDetection(
                    croppedObject.cols,        // width of crop
                    croppedObject.rows,        // height of crop
                    croppedObject.channels(),  // 1 (grey), 3 (RGB) or 4 (RGBA)
                    croppedObject.data,
                    croppedObject.step,        // row stride (crop is a view)
                    _globalViewId,
                    _focusedOnLPRId,           // id of second-stage detector
                    _platesTypesClassifierId,  // id of plate-type classifier
                    lpn_len,
                    lpn, bbox);
                if (detected) {
                    ALPRObject result;
                    result.classId = 0;
                    result.className = lpn;
                    result.confidence = 1.0;
                    // Translate the crop-local box back to frame coordinates.
                    result.box.x = bbox.x + objectPos.x;
                    result.box.y = bbox.y + objectPos.y;
                    result.box.width = bbox.width;
                    result.box.height = bbox.height;
                    output.push_back(result);
                }
            }
        }
        else {
            cv::Rect bbox;
            detected = TwoStageLPRPlatesTypeDetection(
                input.cols,
                input.rows,
                input.channels(),
                input.data,
                input.step,
                _globalViewId,
                _focusedOnLPRId,
                _platesTypesClassifierId,
                lpn_len,
                lpn, bbox);
            if (detected) {
                ALPRObject result;
                result.classId = 0;
                result.className = lpn;
                result.confidence = 1.0;
                result.box = bbox;
                output.push_back(result);
            }
        }
        lprResult = VectorDetectionToJsonString(output);
        return detected;
    }
    catch (std::exception& e) {
        // BUGFIX: log context previously said "ANSALPR::CheckStatus".
        this->_logger->LogFatal("ANSALPR::Inference", e.what(), __FILE__, __LINE__);
        lprResult = "";
        return false;
    }
}
// Private:
std::string ANSALPR_RT::VectorDetectionToJsonString(const std::vector<ALPRObject>& dets) {
boost::property_tree::ptree root;
boost::property_tree::ptree detectedObjects;
for (int i = 0; i < dets.size(); i++) {
boost::property_tree::ptree detectedNode;
detectedNode.put("class_id", dets[i].classId);
detectedNode.put("class_name", dets[i].className);
detectedNode.put("prob", dets[i].confidence);
detectedNode.put("x", dets[i].box.x);
detectedNode.put("y", dets[i].box.y);
detectedNode.put("width", dets[i].box.width);
detectedNode.put("height", dets[i].box.height);
detectedNode.put("mask", "");//Todo: convert masks to mask with comma seperated dets[i].mask);
detectedNode.put("extra_info", "");
// we might add masks into this using comma seperated string
detectedObjects.push_back(std::make_pair("", detectedNode));
}
root.add_child("results", detectedObjects);
std::ostringstream stream;
boost::property_tree::write_json(stream, root, false);
std::string trackingResult = stream.str();
return trackingResult;
}
// Logs the message carried by a failed ONNX Runtime C-API call and
// terminates the process.
// NOTE(review): exit(1) kills the whole host application on any ORT
// error — consider propagating the failure instead; confirm no caller
// relies on the hard exit before changing this.
void ANSALPR_RT::CheckStatus(OrtStatus* status) {
try {
// A null OrtStatus* means success; anything else carries an error message.
if (status != nullptr) {
const char* msg = g_ort->GetErrorMessage(status);
this->_logger->LogError("ANSALPR::CheckStatus", msg, __FILE__, __LINE__);
// The status object must be released before exiting to avoid an ORT leak.
g_ort->ReleaseStatus(status);
exit(1);
}
}
catch (std::exception& e) {
this->_logger->LogFatal("ANSALPR::CheckStatus", e.what(), __FILE__, __LINE__);
}
}
std::list<Yolov5_alpr_onxx_detector*>::const_iterator ANSALPR_RT::GetDetector(unsigned int id, const std::list<Yolov5_alpr_onxx_detector*>& detectors,
    const std::list<unsigned int>& detectors_ids)
{
    // The two lists run in parallel: entry k of detectors_ids is the id of
    // entry k of detectors. Walk both in lock-step and return the detector
    // iterator paired with the requested id, or detectors.end() if the id
    // is unknown.
    assert(detectors_ids.size() == detectors.size());
    auto it_det = detectors.begin();
    auto it_id = detectors_ids.begin();
    for (; it_det != detectors.end() && it_id != detectors_ids.end(); ++it_det, ++it_id) {
        if (*it_id == id) {
            return it_det;
        }
    }
    return detectors.end();
}
std::list<Plates_types_classifier*>::const_iterator ANSALPR_RT::GetPlatesTypesClassifier(unsigned int id, const std::list<Plates_types_classifier*>& plates_types_classifiers,
    const std::list<unsigned int>& plates_types_classifiers_ids) {
    // Same parallel-list lookup as GetDetector, but for the plate-type
    // classifier bookkeeping. Returns plates_types_classifiers.end() when
    // the id is not registered.
    assert(plates_types_classifiers_ids.size() == plates_types_classifiers.size());
    auto it_cls = plates_types_classifiers.begin();
    auto it_id = plates_types_classifiers_ids.begin();
    for (; it_cls != plates_types_classifiers.end() && it_id != plates_types_classifiers_ids.end(); ++it_cls, ++it_id) {
        if (*it_id == id) {
            return it_cls;
        }
    }
    return plates_types_classifiers.end();
}
unsigned int ANSALPR_RT::GetNewId(const std::list<unsigned int>& detectors_ids) {
    // Returns the next free session id: max(existing ids) + 1, or 1 for an
    // empty list. Only the maximum is needed, so use max_element instead of
    // minmax_element (which also computes the unused minimum).
    if (detectors_ids.empty()) {
        return 1;
    }
    return *std::max_element(detectors_ids.begin(), detectors_ids.end()) + 1;
}
bool ANSALPR_RT::CloseDetector(unsigned int id, std::list<Ort::Env*>& _envs, std::list<Ort::SessionOptions*>& _lsessionOptions, std::list<Yolov5_alpr_onxx_detector*>& _detectors,
    std::list<unsigned int>& _detectors_ids) {
    // The four lists run in parallel: entry k of each list belongs to the
    // same session. Find the entry whose id matches, free the detector,
    // its session options and its environment, drop all four bookkeeping
    // entries, and report success. Returns false if the id is unknown.
    assert(_detectors_ids.size() == _detectors.size()
        && _detectors_ids.size() == _envs.size()
        && _detectors_ids.size() == _lsessionOptions.size());
    auto it = _detectors.begin();
    auto it_id = _detectors_ids.begin();
    auto it_opts = _lsessionOptions.begin();
    auto it_env = _envs.begin();
    while (it != _detectors.end() && it_id != _detectors_ids.end()
        && it_env != _envs.end() && it_opts != _lsessionOptions.end()) {
        if (*it_id != id) {
            ++it;
            ++it_id;
            ++it_opts;
            ++it_env;
            continue;
        }
        // Found: release the ONNX objects (delete on nullptr is a no-op),
        // then remove the bookkeeping entries.
        delete *it;
        delete *it_opts;
        delete *it_env;
        _envs.erase(it_env);
        _lsessionOptions.erase(it_opts);
        _detectors.erase(it);
        _detectors_ids.erase(it_id);
        return true;
    }
    return false;
}
bool ANSALPR_RT::CloseDetector(unsigned int id, std::list<Ort::Env*>& _envs, std::list<Ort::SessionOptions*>& _lsessionOptions, std::list<Plates_types_classifier*>& _detectors,
    std::list<unsigned int>& _detectors_ids) {
    // Overload for the plate-type classifier bookkeeping; the four lists
    // run in parallel (entry k of each belongs to one session). Frees the
    // classifier, its session options and environment for the matching id
    // and erases all four entries. Returns false when the id is unknown.
    assert(_detectors_ids.size() == _detectors.size()
        && _detectors_ids.size() == _envs.size()
        && _detectors_ids.size() == _lsessionOptions.size());
    auto it = _detectors.begin();
    auto it_id = _detectors_ids.begin();
    auto it_opts = _lsessionOptions.begin();
    auto it_env = _envs.begin();
    while (it != _detectors.end() && it_id != _detectors_ids.end()
        && it_env != _envs.end() && it_opts != _lsessionOptions.end()) {
        if (*it_id != id) {
            ++it;
            ++it_id;
            ++it_opts;
            ++it_env;
            continue;
        }
        // Found: release the ONNX objects (delete on nullptr is a no-op),
        // then remove the bookkeeping entries.
        delete *it;
        delete *it_opts;
        delete *it_env;
        _envs.erase(it_env);
        _lsessionOptions.erase(it_opts);
        _detectors.erase(it);
        _detectors_ids.erase(it_id);
        return true;
    }
    return false;
}
// Private interface
unsigned int ANSALPR_RT::InitYoloDetector(unsigned int len, const char* model_file)
{
assert(detectors_ids.size() == detectors.size());
const std::string model_filename(model_file, len);
if (!model_filename.size() || !std::filesystem::exists(model_filename)
|| !std::filesystem::is_regular_file(model_filename)
)
{
this->_logger->LogError("ANSALPR::InitYoloDetector. Model file is not regular file.", model_filename, __FILE__, __LINE__);
return 0;
}
//step 2 declare an onnx runtime environment
std::string instanceName{ "image-classification-inference" };
// https://github.com/microsoft/onnxruntime/blob/rel-1.6.0/include/onnxruntime/core/session/onnxruntime_c_api.h#L123
Ort::Env* penv = new Ort::Env(OrtLoggingLevel::ORT_LOGGING_LEVEL_WARNING, instanceName.c_str());
if (penv != nullptr) {
//step 3 declare options for the runtime environment
Ort::SessionOptions* psessionOptions = new Ort::SessionOptions();
if (psessionOptions != nullptr) {
psessionOptions->SetIntraOpNumThreads(1);
// Sets graph optimization level
// Available levels are
// ORT_DISABLE_ALL -> To disable all optimizations
// ORT_ENABLE_BASIC -> To enable basic optimizations (Such as redundant node
// removals) ORT_ENABLE_EXTENDED -> To enable extended optimizations
// (Includes level 1 + more complex optimizations like node fusions)
// ORT_ENABLE_ALL -> To Enable All possible optimizations
psessionOptions->SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_EXTENDED);
#ifdef ANSLPR_USE_CUDA
// Optionally add more execution providers via session_options
// E.g. for CUDA include cuda_provider_factory.h and uncomment the following line:
// nullptr for Status* indicates success
OrtStatus* status = OrtSessionOptionsAppendExecutionProvider_CUDA(*psessionOptions, 0);
//or status =nullptr; //if you don t have CUDA
if (status == nullptr) {
#endif //ANSLPR_USE_CUDA
Yolov5_alpr_onxx_detector* onnx_net = nullptr;
#ifdef _WIN32
//step 4 declare an onnx session (ie model), by giving references to the runtime environment, session options and file path to the model
std::wstring widestr = std::wstring(model_filename.begin(), model_filename.end());
onnx_net = new Yolov5_alpr_onxx_detector(*penv, widestr.c_str(), *psessionOptions);
#else
onnx_net = new Yolov5_alpr_onxx_detector(*penv, model_filename.c_str(), *psessionOptions);
#endif
if (onnx_net != nullptr && penv != nullptr && psessionOptions != nullptr) {
std::unique_lock<std::mutex> lck(mtxMutex, std::defer_lock);
lck.lock();
detectors_envs.push_back(penv);
l_detectors_sessionOptions.push_back(psessionOptions);
detectors.push_back(onnx_net);
unsigned int id = GetNewId(detectors_ids);
detectors_ids.push_back(id);
lck.unlock();
return id;
}
else {
this->_logger->LogError("ANSALPR::InitYoloDetector. Error while creating onnxruntime session with file", model_filename.c_str(), __FILE__, __LINE__);
return 0;
}
#ifdef ANSLPR_USE_CUDA
}
else {
CheckStatus(status);
this->_logger->LogError("ANSALPR::InitYoloDetector.", "Cuda error", __FILE__, __LINE__);
return 0;
}
#endif //ANSLPR_USE_CUDA
}
else {
this->_logger->LogError("ANSALPR::InitYoloDetector.", "Error while creating SessionOptions", __FILE__, __LINE__);
return 0;
}
}
else {
this->_logger->LogError("ANSALPR::InitYoloDetector.", "Error while creating while creating session environment (Ort::Env)", __FILE__, __LINE__);
return 0;
}
}
unsigned int ANSALPR_RT::InitPlatesClassifer(unsigned int len_models_filename, const char* model_file, unsigned int len_labels_filename, const char* labels_file)
{
assert(plates_types_classifier_ids.size() == plates_types_classifiers.size());
const std::string model_filename(model_file, len_models_filename);
const std::string labels_filename(labels_file, len_labels_filename);
if (!model_filename.size() || !std::filesystem::exists(model_filename)
|| !std::filesystem::is_regular_file(model_filename)
|| !labels_filename.size() || !std::filesystem::exists(labels_filename)
|| !std::filesystem::is_regular_file(labels_filename)
)
{
this->_logger->LogDebug("ANSALPR::InitPlatesClassifer. Model file is not regular file.", model_filename, __FILE__, __LINE__);
return 0;
}
//step 2 declare an onnx runtime environment
std::string instanceName{ "image-classification-inference" };
// https://github.com/microsoft/onnxruntime/blob/rel-1.6.0/include/onnxruntime/core/session/onnxruntime_c_api.h#L123
Ort::Env* penv = new Ort::Env(OrtLoggingLevel::ORT_LOGGING_LEVEL_WARNING, instanceName.c_str());
if (penv != nullptr) {
//step 3 declare options for the runtime environment
Ort::SessionOptions* psessionOptions = new Ort::SessionOptions();
if (psessionOptions != nullptr) {
psessionOptions->SetIntraOpNumThreads(1);
// Sets graph optimization level
// Available levels are
// ORT_DISABLE_ALL -> To disable all optimizations
// ORT_ENABLE_BASIC -> To enable basic optimizations (Such as redundant node
// removals) ORT_ENABLE_EXTENDED -> To enable extended optimizations
// (Includes level 1 + more complex optimizations like node fusions)
// ORT_ENABLE_ALL -> To Enable All possible optimizations
psessionOptions->SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_EXTENDED);
#ifdef ANSLPR_USE_CUDA
// Optionally add more execution providers via session_options
// E.g. for CUDA include cuda_provider_factory.h and uncomment the following line:
// nullptr for Status* indicates success
OrtStatus* status = OrtSessionOptionsAppendExecutionProvider_CUDA(*psessionOptions, 0);
//or status =nullptr; //if you don t have CUDA
if (status == nullptr) {
#endif //ANSLPR_USE_CUDA
Plates_types_classifier* onnx_net = nullptr;
#ifdef _WIN32
//step 4 declare an onnx session (ie model), by giving references to the runtime environment, session options and file path to the model
std::wstring widestr = std::wstring(model_filename.begin(), model_filename.end());
onnx_net = new Plates_types_classifier(*penv, widestr.c_str(), *psessionOptions, labels_filename);
#else
onnx_net = new Plates_types_classifier(*penv, model_filename.c_str(), *psessionOptions, labels_filename);
#endif
if (onnx_net != nullptr && penv != nullptr && psessionOptions != nullptr) {
std::unique_lock<std::mutex> lck(mtxMutex, std::defer_lock);
lck.lock();
plates_types_envs.push_back(penv);
l_plates_types_classifier_sessionOptions.push_back(psessionOptions);
plates_types_classifiers.push_back(onnx_net);
unsigned int id = GetNewId(plates_types_classifier_ids);
plates_types_classifier_ids.push_back(id);
lck.unlock();
return id;
}
else {
this->_logger->LogError("ANSALPR::InitPlatesClassifer. Error while creating onnxruntime session with file.", model_filename, __FILE__, __LINE__);
return 0;
}
#ifdef ANSLPR_USE_CUDA
}
else {
CheckStatus(status);
this->_logger->LogError("ANSALPR::InitPlatesClassifer.", "Cuda error", __FILE__, __LINE__);
return 0;
}
#endif //ANSLPR_USE_CUDA
}
else {
this->_logger->LogError("ANSALPR::InitPlatesClassifer.", "Error while creating SessionOptions", __FILE__, __LINE__);
return 0;
}
}
else {
this->_logger->LogError("ANSALPR::InitPlatesClassifer.", "Error while creating session environment (Ort::Env)", __FILE__, __LINE__);
return 0;
}
}
bool ANSALPR_RT::TwoStageALPR(const int width,//width of image
    const int height,//height of image
    const int pixOpt,// pixel type: 1 (8 bpp grey), 3 (RGB 24 bpp) or 4 (RGBA 32 bpp)
    void* pbData, unsigned int step,// source pixel buffer and its row stride
    unsigned int id_global_view,
    unsigned int id_focused_on_lp,
    unsigned int lpn_len,
    char* lpn, cv::Rect& bbox)
{
    // Two-stage plate recognition without the plate-type classifier: the
    // global-view detector finds plate regions, the focused detector reads
    // the characters. lpn receives a null-terminated plate string (at most
    // lpn_len - 1 characters, null-padded to lpn_len); bbox receives the
    // representative ROI. Returns true when a non-empty string was read.
    if ((pixOpt != 1) && (pixOpt != 3) && (pixOpt != 4) || height <= 0 || width <= 0 || pbData == nullptr) {
        this->_logger->LogError("ANSALPR::TwoStageLPR.", "Condition on image (pixOpt != 1) && (pixOpt != 3) && (pixOpt != 4) || height <= 0 || width <= 0 || pbData == nullptr not met", __FILE__, __LINE__);
        return false;
    }
    // BUGFIX: with lpn_len == 0 the copy loop's unsigned "lpn_len - 1"
    // wrapped to UINT_MAX and wrote far past the output buffer; a null lpn
    // dereferenced. Reject both up front.
    if (lpn == nullptr || lpn_len == 0) {
        this->_logger->LogError("ANSALPR::TwoStageLPR.", "Output buffer lpn is null or lpn_len is zero", __FILE__, __LINE__);
        return false;
    }
    // Wrap the caller's pixel buffer without copying.
    cv::Mat destMat;
    if (pixOpt == 1)
    {
        destMat = cv::Mat(height, width, CV_8UC1, pbData, step);
    }
    if (pixOpt == 3)
    {
        destMat = cv::Mat(height, width, CV_8UC3, pbData, step);
    }
    if (pixOpt == 4)
    {
        destMat = cv::Mat(height, width, CV_8UC4, pbData, step);
    }
    std::list<Yolov5_alpr_onxx_detector*>::const_iterator it_global_view = GetDetector(id_global_view, detectors, detectors_ids);
    if (it_global_view == detectors.end()) {
        this->_logger->LogError("ANSALPR::TwoStageLPR.", "id_global_view does not point to a valid detector", __FILE__, __LINE__);
        return false;
    }
    std::list<Yolov5_alpr_onxx_detector*>::const_iterator it_focused_on_lp = GetDetector(id_focused_on_lp, detectors, detectors_ids);
    std::string lpn_str;
    std::list<cv::Rect> ROIs;
    std::unique_lock<std::mutex> lck(mtxMutex, std::defer_lock);
    if (it_focused_on_lp != detectors.end()) {
        lck.lock();
        ROIs = (*it_global_view)->TwoStage_LPR(*(*it_focused_on_lp), destMat, lpn_str);
        lck.unlock();
    }
    else {
        // Fallback preserved from the original: run the global-view model
        // for both stages when the focused detector id is invalid.
        this->_logger->LogError("ANSALPR::TwoStageLPR.", "id_focused_on_lp does not point to a valid detector", __FILE__, __LINE__);
        lck.lock();
        ROIs = (*it_global_view)->TwoStage_LPR(*(*it_global_view), destMat, lpn_str);
        lck.unlock();
    }
    // Copy at most lpn_len - 1 characters and null-pad the remainder
    // (unsigned index fixes the signed/unsigned comparison).
    unsigned int i = 0;
    for (std::string::const_iterator it_lpn = lpn_str.begin(); it_lpn != lpn_str.end() && i < lpn_len - 1; ++it_lpn, ++i) {
        lpn[i] = *it_lpn;
    }
    for (; i < lpn_len; ++i) {
        lpn[i] = '\0';
    }
    bbox = GetGlobalROI(ROIs);
    return (lpn_str.length() > 0);
}
cv::Rect ANSALPR_RT::GetGlobalROI(std::list<cv::Rect> ROIs) {
    // Collapses the detection ROI list to a single representative box.
    // Current policy: the first ROI in detection order; an empty list
    // yields a default-constructed (all-zero) rectangle.
    // (An earlier bounding-box-union implementation was abandoned here;
    // see version control history if it needs to be revived.)
    if (ROIs.empty()) {
        return cv::Rect();
    }
    return ROIs.front();
}
bool ANSALPR_RT::TwoStageLPRPlatesTypeDetection(const int width,//width of image
const int height,//height of image i.e. the specified dimensions of the image
const int pixOpt,// pixel type : 1 (8 bpp greyscale image) 3 (RGB 24 bpp image) or 4 (RGBA 32 bpp image)
void* pbData, unsigned int step,// source image bytes buffer
unsigned int id_global_view,
unsigned int id_focused_on_lp,
unsigned int id_plates_types_classifier,
unsigned int lpn_len, char* lpn, cv::Rect& bbox)
{
if ((pixOpt != 1) && (pixOpt != 3) && (pixOpt != 4) || height <= 0 || width <= 0 || pbData == nullptr) {
this->_logger->LogError("ANSALPR::TwoStageLPRPlatesTypeDetection.", "Condition on image (pixOpt != 1) && (pixOpt != 3) && (pixOpt != 4) || height <= 0 || width <= 0 || pbData == nullptr not met", __FILE__, __LINE__);
return false;
}
else {
cv::Mat destMat;
if (pixOpt == 1)
{
destMat = cv::Mat(height, width, CV_8UC1, pbData, step);
}
if (pixOpt == 3)
{
destMat = cv::Mat(height, width, CV_8UC3, pbData, step);
}
if (pixOpt == 4)
{
destMat = cv::Mat(height, width, CV_8UC4, pbData, step);
}
std::list<Plates_types_classifier*>::const_iterator it_plates_types_classifier = GetPlatesTypesClassifier(id_plates_types_classifier,
plates_types_classifiers, plates_types_classifier_ids);
if (it_plates_types_classifier != plates_types_classifiers.end()) {
std::list<Yolov5_alpr_onxx_detector*>::const_iterator it_global_view = GetDetector(id_global_view, detectors, detectors_ids);
if (it_global_view != detectors.end()) {
std::list<Yolov5_alpr_onxx_detector*>::const_iterator it_focused_on_lp = GetDetector(id_focused_on_lp, detectors, detectors_ids);
std::string lpn_str;
std::list<cv::Rect> ROIs;
std::unique_lock<std::mutex> lck(mtxMutex, std::defer_lock);
if (it_focused_on_lp != detectors.end()) {
lck.lock();
//for normal plates
ROIs = (*it_global_view)->TwoStageLPR(*(*it_focused_on_lp), *(*it_plates_types_classifier), destMat, lpn_str);
//for small plates
lck.unlock();
}
else {
this->_logger->LogError("ANSALPR::TwoStageLPRPlatesTypeDetection.", "id_focused_on_lp does not point to a valid detector", __FILE__, __LINE__);
lck.lock();
//for normal plates
ROIs = (*it_global_view)->TwoStageLPR(*(*it_global_view), *(*it_plates_types_classifier), destMat, lpn_str);
//for small plates
lck.unlock();
}
std::string::const_iterator it_lpn(lpn_str.begin());
int i = 0;
while (it_lpn != lpn_str.end() && i < lpn_len - 1) {
lpn[i] = *it_lpn;
i++; it_lpn++;
}
while (i < lpn_len) {
lpn[i] = '\0';
i++;
}
bbox = GetGlobalROI(ROIs);
return (lpn_str.length() > 0);
}
else {
this->_logger->LogError("ANSALPR::TwoStageLPRPlatesTypeDetection.", "id_global_view does not point to a valid detector", __FILE__, __LINE__);
return false;
}
}
else {
this->_logger->LogError("ANSALPR::TwoStageLPRPlatesTypeDetection.", "id_plates_types_classifier does not point to a valid detector", __FILE__, __LINE__);
return TwoStageALPR(width,//width of image
height,//height of image i.e. the specified dimensions of the image
pixOpt,// pixel type : 1 (8 bpp greyscale image) 3 (RGB 24 bpp image) or 4 (RGBA 32 bpp image)
pbData, step,// source image bytes buffer
id_global_view, id_focused_on_lp, lpn_len, lpn, bbox);
}
}
}
bool ANSALPR_RT::CloseDetector(unsigned int id)
{
    // Public close for one YOLO detector session: delegates to the 5-arg
    // overload under the bookkeeping lock. RAII lock_guard replaces the
    // manual lock()/unlock() pair.
    assert(detectors_ids.size() == detectors.size());
    std::lock_guard<std::mutex> lck(mtxMutex);
    return CloseDetector(id, detectors_envs, l_detectors_sessionOptions, detectors, detectors_ids);
}
bool ANSALPR_RT::ClosePlatesTypesClassifier(unsigned int id)
{
    // Public close for one plate-type classifier session: delegates to the
    // 5-arg CloseDetector overload under the bookkeeping lock. RAII
    // lock_guard replaces the manual lock()/unlock() pair.
    assert(plates_types_classifier_ids.size() == plates_types_classifiers.size());
    std::lock_guard<std::mutex> lck(mtxMutex);
    return CloseDetector(id, plates_types_envs, l_plates_types_classifier_sessionOptions, plates_types_classifiers, plates_types_classifier_ids);
}
}