Fix mutex lock issues: guard model-loading state with ModelLoadingGuard after each lock acquisition, and replace the duplicated validate-under-lock blocks in RunInference/RunInferencesBatch with a shared PreInferenceCheck helper

This commit is contained in:
2026-04-13 19:48:32 +10:00
parent 56a8f09adf
commit 844d7396b2
30 changed files with 445 additions and 575 deletions

View File

@@ -58,6 +58,7 @@ namespace ANSCENTER
}
bool TENSORRTCL::LoadModel(const std::string& modelZipFilePath, const std::string& modelZipPassword) {
std::lock_guard<std::recursive_mutex> lock(_mutex);
ModelLoadingGuard mlg(_modelLoading);
try {
bool result = ANSODBase::LoadModel(modelZipFilePath, modelZipPassword);
if (!result) return false;
@@ -152,6 +153,7 @@ namespace ANSCENTER
}
bool TENSORRTCL::LoadModelFromFolder(std::string licenseKey, ModelConfig modelConfig, std::string modelName, std::string className, const std::string& modelFolder, std::string& labelMap) {
std::lock_guard<std::recursive_mutex> lock(_mutex);
ModelLoadingGuard mlg(_modelLoading);
try {
bool result = ANSODBase::LoadModelFromFolder(licenseKey, modelConfig, modelName, className, modelFolder, labelMap);
if (!result) return false;
@@ -257,6 +259,7 @@ namespace ANSCENTER
const bool engineAlreadyLoaded = _modelLoadValid && _isInitialized && m_trtEngine != nullptr;
_modelLoadValid = false;
std::lock_guard<std::recursive_mutex> lock(_mutex);
ModelLoadingGuard mlg(_modelLoading);
try {
bool result = ANSODBase::Initialize(licenseKey, modelConfig, modelZipFilePath, modelZipPassword, labelMap);
if (!result) return false;
@@ -360,33 +363,7 @@ namespace ANSCENTER
std::vector<Object> TENSORRTCL::RunInference(const cv::Mat& inputImgBGR,const std::string& camera_id)
{
// Validate state under brief lock
{
std::lock_guard<std::recursive_mutex> lock(_mutex);
if (!_modelLoadValid) {
_logger.LogError("TENSORRTCL::RunInference",
"Cannot load the TensorRT model. Please check if it exists",
__FILE__, __LINE__);
return {};
}
if (!_licenseValid) {
_logger.LogError("TENSORRTCL::RunInference",
"Runtime license is not valid or expired. Please contact ANSCENTER",
__FILE__, __LINE__);
return {};
}
if (!_isInitialized) {
_logger.LogError("TENSORRTCL::RunInference",
"Model is not initialized",
__FILE__, __LINE__);
return {};
}
if (inputImgBGR.empty() || inputImgBGR.cols < 5 || inputImgBGR.rows < 5) {
return {};
}
}
if (!PreInferenceCheck("TENSORRTCL::RunInference")) return {};
try {
return DetectObjects(inputImgBGR, camera_id);
}
@@ -397,28 +374,7 @@ namespace ANSCENTER
}
std::vector<std::vector<Object>> TENSORRTCL::RunInferencesBatch(const std::vector<cv::Mat>& inputs, const std::string& camera_id) {
// Validate state under brief lock
{
std::lock_guard<std::recursive_mutex> lock(_mutex);
if (!_modelLoadValid) {
this->_logger.LogFatal("TENSORRTCL::RunInferencesBatch",
"Cannot load the TensorRT model. Please check if it exists", __FILE__, __LINE__);
return {};
}
if (!_licenseValid) {
this->_logger.LogFatal("TENSORRTCL::RunInferencesBatch",
"Runtime license is not valid or expired. Please contact ANSCENTER", __FILE__, __LINE__);
return {};
}
if (!_isInitialized) {
this->_logger.LogFatal("TENSORRTCL::RunInferencesBatch",
"Engine not initialized", __FILE__, __LINE__);
return {};
}
if (inputs.empty()) return {};
}
if (!PreInferenceCheck("TENSORRTCL::RunInferencesBatch")) return {};
try {
return DetectObjectsBatch(inputs, camera_id);
}