Fix ALPR batch inference and a memory leak on failed handle creation

This commit is contained in:
2026-04-15 09:23:05 +10:00
parent 7778f8c214
commit b05c49ad93
9 changed files with 686 additions and 83 deletions

View File

@@ -85,6 +85,19 @@ public:
OCRHandleGuard& operator=(const OCRHandleGuard&) = delete;
};
// RAII guard — sets the per-thread current GPU frame pointer and always
// clears it on scope exit, even if the wrapped inference call throws.
// Without this, a throwing RunInference leaves tl_currentGpuFrame pointing
// at a GpuFrameData that may be freed before the next call on this thread,
// causing use-after-free or stale NV12 data on subsequent frames.
class GpuFrameScope {
public:
explicit GpuFrameScope(GpuFrameData* f) { tl_currentGpuFrame() = f; }
~GpuFrameScope() { tl_currentGpuFrame() = nullptr; }
GpuFrameScope(const GpuFrameScope&) = delete;
GpuFrameScope& operator=(const GpuFrameScope&) = delete;
};
BOOL APIENTRY DllMain(HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
@@ -265,13 +278,40 @@ extern "C" ANSOCR_API int CreateANSOCRHandleEx(ANSCENTER::ANSOCRBase** Handle,
if (useDilation == 1)modelConfig.useDilation = true;
modelConfig.limitSideLen = limitSideLen;
int result = (*Handle)->Initialize(licenseKey, modelConfig, modelFilePath, modelFileZipPassword, engineMode);
if (!result) {
// Initialize failed — tear down the engine we just registered so
// the caller, who sees a 0 return and (typically) does not call
// ReleaseANSOCRHandle, does not leak the engine + registry entry.
if (UnregisterOCRHandle(*Handle)) {
try { (*Handle)->Destroy(); } catch (...) {}
delete *Handle;
}
*Handle = nullptr;
return 0;
}
return result;
}
}
catch (std::exception& e) {
// Partially-constructed engine may already be registered — unwind it
// so the leak does not accumulate across repeated failed Create calls.
if (Handle && *Handle) {
if (UnregisterOCRHandle(*Handle)) {
try { (*Handle)->Destroy(); } catch (...) {}
delete *Handle;
}
*Handle = nullptr;
}
return 0;
}
catch (...) {
if (Handle && *Handle) {
if (UnregisterOCRHandle(*Handle)) {
try { (*Handle)->Destroy(); } catch (...) {}
delete *Handle;
}
*Handle = nullptr;
}
return 0;
}
}
@@ -804,10 +844,11 @@ extern "C" ANSOCR_API int RunInferenceComplete_LV(
if (originalWidth == 0 || originalHeight == 0) return -2;
tl_currentGpuFrame() = gpuFrame;
std::vector<ANSCENTER::OCRObject> outputs = engine->RunInference(localImage, cameraId);
tl_currentGpuFrame() = nullptr;
std::vector<ANSCENTER::OCRObject> outputs;
{
GpuFrameScope _gfs(gpuFrame);
outputs = engine->RunInference(localImage, cameraId);
}
bool getJpeg = (getJpegString == 1);
std::string stImage;
@@ -897,36 +938,37 @@ extern "C" ANSOCR_API int RunInferencesComplete_LV(ANSCENTER::ANSOCRBase** Handl
const double scaleFactor = (maxImageSize > 0) ? static_cast<double>(originalWidth) / maxImageSize : 1.0;
tl_currentGpuFrame() = gpuFrame;
if (bBox.empty()) {
objectDetectionResults = engine->RunInference(localImage, cameraId);
}
else {
for (const auto& rect : bBox) {
cv::Rect scaledRect;
scaledRect.x = static_cast<int>(rect.x * scaleFactor);
scaledRect.y = static_cast<int>(rect.y * scaleFactor);
scaledRect.width = static_cast<int>(rect.width * scaleFactor);
scaledRect.height = static_cast<int>(rect.height * scaleFactor);
{
GpuFrameScope _gfs(gpuFrame);
if (bBox.empty()) {
objectDetectionResults = engine->RunInference(localImage, cameraId);
}
else {
for (const auto& rect : bBox) {
cv::Rect scaledRect;
scaledRect.x = static_cast<int>(rect.x * scaleFactor);
scaledRect.y = static_cast<int>(rect.y * scaleFactor);
scaledRect.width = static_cast<int>(rect.width * scaleFactor);
scaledRect.height = static_cast<int>(rect.height * scaleFactor);
scaledRect &= cv::Rect(0, 0, originalWidth, originalHeight);
if (scaledRect.width <= 0 || scaledRect.height <= 0)
continue;
scaledRect &= cv::Rect(0, 0, originalWidth, originalHeight);
if (scaledRect.width <= 0 || scaledRect.height <= 0)
continue;
const cv::Mat croppedImage = localImage(scaledRect);
std::vector<ANSCENTER::OCRObject> croppedDetectionResults = engine->RunInference(croppedImage, cameraId);
const cv::Mat croppedImage = localImage(scaledRect);
std::vector<ANSCENTER::OCRObject> croppedDetectionResults = engine->RunInference(croppedImage, cameraId);
for (auto& obj : croppedDetectionResults) {
obj.box.x = (obj.box.x + scaledRect.x) / scaleFactor;
obj.box.y = (obj.box.y + scaledRect.y) / scaleFactor;
obj.box.width /= scaleFactor;
obj.box.height /= scaleFactor;
for (auto& obj : croppedDetectionResults) {
obj.box.x = (obj.box.x + scaledRect.x) / scaleFactor;
obj.box.y = (obj.box.y + scaledRect.y) / scaleFactor;
obj.box.width /= scaleFactor;
obj.box.height /= scaleFactor;
objectDetectionResults.push_back(std::move(obj));
objectDetectionResults.push_back(std::move(obj));
}
}
}
}
tl_currentGpuFrame() = nullptr;
std::string stDetectionResult = ANSCENTER::ANSOCRUtility::OCRDetectionToJsonString(objectDetectionResults);
if (stDetectionResult.empty()) return 0;
@@ -1087,9 +1129,11 @@ extern "C" ANSOCR_API int RunInferenceComplete_LV_V2(
if (originalWidth == 0 || originalHeight == 0) return -2;
tl_currentGpuFrame() = gpuFrame;
std::vector<ANSCENTER::OCRObject> outputs = engine->RunInference(localImage, cameraId);
tl_currentGpuFrame() = nullptr;
std::vector<ANSCENTER::OCRObject> outputs;
{
GpuFrameScope _gfs(gpuFrame);
outputs = engine->RunInference(localImage, cameraId);
}
bool getJpeg = (getJpegString == 1);
std::string stImage;
@@ -1181,36 +1225,37 @@ extern "C" ANSOCR_API int RunInferencesComplete_LV_V2(uint64_t handleVal, cv::Ma
const double scaleFactor = (maxImageSize > 0) ? static_cast<double>(originalWidth) / maxImageSize : 1.0;
tl_currentGpuFrame() = gpuFrame;
if (bBox.empty()) {
objectDetectionResults = engine->RunInference(localImage, cameraId);
}
else {
for (const auto& rect : bBox) {
cv::Rect scaledRect;
scaledRect.x = static_cast<int>(rect.x * scaleFactor);
scaledRect.y = static_cast<int>(rect.y * scaleFactor);
scaledRect.width = static_cast<int>(rect.width * scaleFactor);
scaledRect.height = static_cast<int>(rect.height * scaleFactor);
{
GpuFrameScope _gfs(gpuFrame);
if (bBox.empty()) {
objectDetectionResults = engine->RunInference(localImage, cameraId);
}
else {
for (const auto& rect : bBox) {
cv::Rect scaledRect;
scaledRect.x = static_cast<int>(rect.x * scaleFactor);
scaledRect.y = static_cast<int>(rect.y * scaleFactor);
scaledRect.width = static_cast<int>(rect.width * scaleFactor);
scaledRect.height = static_cast<int>(rect.height * scaleFactor);
scaledRect &= cv::Rect(0, 0, originalWidth, originalHeight);
if (scaledRect.width <= 0 || scaledRect.height <= 0)
continue;
scaledRect &= cv::Rect(0, 0, originalWidth, originalHeight);
if (scaledRect.width <= 0 || scaledRect.height <= 0)
continue;
const cv::Mat croppedImage = localImage(scaledRect);
std::vector<ANSCENTER::OCRObject> croppedDetectionResults = engine->RunInference(croppedImage, cameraId);
const cv::Mat croppedImage = localImage(scaledRect);
std::vector<ANSCENTER::OCRObject> croppedDetectionResults = engine->RunInference(croppedImage, cameraId);
for (auto& obj : croppedDetectionResults) {
obj.box.x = (obj.box.x + scaledRect.x) / scaleFactor;
obj.box.y = (obj.box.y + scaledRect.y) / scaleFactor;
obj.box.width /= scaleFactor;
obj.box.height /= scaleFactor;
for (auto& obj : croppedDetectionResults) {
obj.box.x = (obj.box.x + scaledRect.x) / scaleFactor;
obj.box.y = (obj.box.y + scaledRect.y) / scaleFactor;
obj.box.width /= scaleFactor;
obj.box.height /= scaleFactor;
objectDetectionResults.push_back(std::move(obj));
objectDetectionResults.push_back(std::move(obj));
}
}
}
}
tl_currentGpuFrame() = nullptr;
std::string stDetectionResult = ANSCENTER::ANSOCRUtility::OCRDetectionToJsonString(objectDetectionResults);
if (stDetectionResult.empty()) return 0;