// Source file: ANSCORE/modules/ANSFR/dllmain.cpp
// (Web-viewer listing header removed; C++ source begins below.)
// dllmain.cpp : Defines the entry point for the DLL application.
#include "pch.h"
#include "ANSFR.h"
#include "NV12PreprocessHelper.h" // tl_currentGpuFrame()
#include "ANSGpuFrameRegistry.h" // gpu_frame_lookup(cv::Mat*)
#include <opencv2/imgcodecs.hpp>
#include "ANSOVFaceDetector.h"
#include "SCRFDFaceDetector.h"
#include "FaceNet.h"
#include "ANSFaceRecognizer.h"
#include "ANSLibsLoader.h"
#include "engine/TRTEngineCache.h"
#include "engine/EnginePoolManager.h"
#include <memory>
#include <climits>
#include <unordered_map>
#include <condition_variable>
#include <cstdint>
#include <atomic>
// Each DLL that instantiates Engine<T> templates needs its own definition
// of g_forceNoPool (referenced by EngineBuildLoadNetwork.inl).
// ANSODEngine exports its own; ANSFR needs a local copy.
// atomic<bool> so it can be toggled from any thread without a data race.
std::atomic<bool> g_forceNoPool{false};
#include <boost/uuid/uuid.hpp>
#include <boost/uuid/uuid_generators.hpp>
#include <boost/uuid/uuid_io.hpp>
// DebugView: filter on "[ANSFR]" — gated by ANSCORE_DEBUGVIEW in ANSLicense.h.
// Handle registry with refcount — prevents use-after-free when
// ReleaseANSRFHandle is called while inference is still running.
// destructionStarted: set by the first Unregister caller; blocks new Acquires
// and makes subsequent Unregister calls return false without deleting.
// Prevents double-free when Release is raced on the same handle.
struct FREntry { int refcount; bool destructionStarted; };
// Meyers-singleton accessors: construct-on-first-use sidesteps static
// initialization/destruction order problems inside a DLL. The map and its
// entries are only touched while FRHandleRegistryMutex() is held.
static std::unordered_map<ANSCENTER::ANSFacialRecognition*, FREntry>& FRHandleRegistry() {
    static std::unordered_map<ANSCENTER::ANSFacialRecognition*, FREntry> s;
    return s;
}
// Mutex guarding FRHandleRegistry(); also the lock used with the CV below.
static std::mutex& FRHandleRegistryMutex() {
    static std::mutex m;
    return m;
}
// Notified by ReleaseFRHandleRef when a refcount drops to zero so that a
// waiting UnregisterFRHandle can proceed with deletion.
static std::condition_variable& FRHandleRegistryCV() {
    static std::condition_variable cv;
    return cv;
}
// Adds a freshly created engine to the registry with refcount = 1 (the
// creator's reference) and destruction not yet started.
static void RegisterFRHandle(ANSCENTER::ANSFacialRecognition* h) {
    std::lock_guard<std::mutex> lk(FRHandleRegistryMutex());
    auto& registry = FRHandleRegistry();
    registry[h] = FREntry{ 1, false };
    ANS_DBG("ANSFR","Register: handle=%p (uint=%llu) registrySize=%zu",
        (void*)h, (unsigned long long)(uintptr_t)h, registry.size());
}
// Takes an extra reference on h for the duration of one inference call.
// Returns h on success; nullptr when the handle is unknown (stale/freed) or
// already being torn down. Every successful Acquire must be paired with
// ReleaseFRHandleRef — use FRHandleGuard to guarantee that.
static ANSCENTER::ANSFacialRecognition* AcquireFRHandle(ANSCENTER::ANSFacialRecognition* h) {
    std::lock_guard<std::mutex> lk(FRHandleRegistryMutex());
    auto it = FRHandleRegistry().find(h);
    if (it == FRHandleRegistry().end()) {
        ANS_DBG("ANSFR","Acquire FAIL: handle=%p (uint=%llu) NOT in registry. registrySize=%zu",
            (void*)h, (unsigned long long)(uintptr_t)h, FRHandleRegistry().size());
        // Dump every live entry so stale-handle bugs can be triaged from
        // the DebugView log alone.
        size_t i = 0;
        for (auto& kv : FRHandleRegistry()) {
            ANS_DBG("ANSFR"," registry[%zu] = %p (uint=%llu) refcount=%d destructionStarted=%d",
                i++, (void*)kv.first, (unsigned long long)(uintptr_t)kv.first,
                kv.second.refcount, kv.second.destructionStarted ? 1 : 0);
        }
        return nullptr;
    }
    if (it->second.destructionStarted) {
        // Unregister already claimed this handle; refuse new work on it.
        ANS_DBG("ANSFR","Acquire FAIL: handle=%p is being destroyed (destructionStarted=true)", (void*)h);
        return nullptr;
    }
    it->second.refcount++;
    ANS_DBG("ANSFR","Acquire OK: handle=%p refcount=%d", (void*)h, it->second.refcount);
    return h;
}
// Drops one reference previously taken by AcquireFRHandle. Never deletes:
// deletion is exclusively UnregisterFRHandle's job, so this always returns
// false. When the count reaches zero it wakes any waiting Unregister.
static bool ReleaseFRHandleRef(ANSCENTER::ANSFacialRecognition* h) {
    std::lock_guard<std::mutex> lk(FRHandleRegistryMutex());
    auto& registry = FRHandleRegistry();
    const auto entry = registry.find(h);
    if (entry == registry.end()) return false;
    if (--entry->second.refcount <= 0) {
        FRHandleRegistryCV().notify_all();
    }
    return false; // Only Unregister deletes.
}
// Begins destruction of h: marks the entry, drops the owner's reference,
// then waits (up to 30 s) for in-flight Acquire references to drain.
// Returns true iff the caller now owns the Destroy()/delete; false when the
// handle is unknown or another thread already started destruction.
static bool UnregisterFRHandle(ANSCENTER::ANSFacialRecognition* h) {
    std::unique_lock<std::mutex> lk(FRHandleRegistryMutex());
    auto it = FRHandleRegistry().find(h);
    if (it == FRHandleRegistry().end()) {
        ANS_DBG("ANSFR","Unregister: handle=%p NOT in registry (already gone)", (void*)h);
        return false;
    }
    if (it->second.destructionStarted) {
        ANS_DBG("ANSFR","Unregister: handle=%p already being destroyed by another thread, returning false", (void*)h);
        return false; // Another thread already owns the delete.
    }
    ANS_DBG("ANSFR","Unregister: handle=%p starting (refcount before=%d)", (void*)h, it->second.refcount);
    // Block any new Acquires, then release the owner's own reference.
    it->second.destructionStarted = true;
    it->second.refcount--;
    // The predicate re-finds the entry on every wake-up because the mutex is
    // released while waiting; "entry gone" also counts as done.
    bool ok = FRHandleRegistryCV().wait_for(lk, std::chrono::seconds(30), [&]() {
        auto it2 = FRHandleRegistry().find(h);
        return it2 == FRHandleRegistry().end() || it2->second.refcount <= 0;
    });
    if (!ok) {
        // Timed out: proceed anyway (best effort), but leave a loud trace.
        ANS_DBG("ANSFR","WARNING: Unregister timed out waiting for in-flight inference on handle=%p", (void*)h);
        OutputDebugStringA("WARNING: UnregisterFRHandle timed out waiting for in-flight inference\n");
    }
    FRHandleRegistry().erase(h);
    return true;
}
// RAII guard — ensures ReleaseFRHandleRef is always called, preventing
// refcount leaks that would cause UnregisterFRHandle to deadlock.
class FRHandleGuard {
ANSCENTER::ANSFacialRecognition* engine;
public:
explicit FRHandleGuard(ANSCENTER::ANSFacialRecognition* e) : engine(e) {}
~FRHandleGuard() { if (engine) ReleaseFRHandleRef(engine); }
ANSCENTER::ANSFacialRecognition* get() const { return engine; }
explicit operator bool() const { return engine != nullptr; }
FRHandleGuard(const FRHandleGuard&) = delete;
FRHandleGuard& operator=(const FRHandleGuard&) = delete;
};
// Determine maxSlotsPerGpu based on GPU topology:
// non-NVIDIA (AMD/Intel/CPU) → 1 (no TensorRT pool, never grows)
// 1 NVIDIA GPU → 1 (single slot, no round-robin needed)
// >1 GPU, VRAM<24GB → 1 (round-robin: 1 slot per GPU)
// >1 GPU, VRAM≥24GB → -1 (elastic: on-demand slot growth)
//
// IMPORTANT: Must be gated on CheckHardwareInformation() first — calling
// cudaGetDeviceCount/cudaSetDevice/cudaMemGetInfo on non-NVIDIA hardware
// wakes up the CUDA runtime unnecessarily and, combined with DirectML on
// AMD, has been observed to trigger amdkmdag instability. Return 1 early
// on anything that isn't a detected NVIDIA GPU so the TRT pool is never
// exercised on those machines.
static int GetPoolMaxSlotsPerGpu() {
static int s_result = INT_MIN;
static std::mutex s_mutex;
std::lock_guard<std::mutex> lk(s_mutex);
if (s_result != INT_MIN) return s_result;
const ANSCENTER::EngineType detected =
ANSCENTER::ANSLicenseHelper::CheckHardwareInformation();
if (detected != ANSCENTER::EngineType::NVIDIA_GPU) {
s_result = 1;
std::cout << "Info [FR GPU]: engineType=" << static_cast<int>(detected)
<< " — not NVIDIA, TRT pool disabled (slot=1), skipping CUDA probe"
<< std::endl;
return s_result;
}
int gpuCount = 0;
cudaGetDeviceCount(&gpuCount);
if (gpuCount <= 1) {
s_result = 1;
std::cout << "Info [FR GPU]: Single GPU — pool mode: 1 slot, no round-robin" << std::endl;
return s_result;
}
constexpr size_t kLargeVramBytes = 24ULL * 1024 * 1024 * 1024; // 24 GB
size_t totalMem = 0, freeMem = 0;
cudaSetDevice(0);
cudaMemGetInfo(&freeMem, &totalMem);
if (totalMem >= kLargeVramBytes) {
s_result = -1;
std::cout << "Info [FR GPU]: " << gpuCount << " GPUs, VRAM >= 24 GB — pool mode: elastic" << std::endl;
} else {
s_result = 1;
std::cout << "Info [FR GPU]: " << gpuCount << " GPUs, VRAM < 24 GB — pool mode: round-robin" << std::endl;
}
return s_result;
}
// DLL entry point. Pins the module on process attach; on process detach,
// performs best-effort cleanup of any handles the host leaked — but only
// when the detach is a dynamic FreeLibrary (see lpReserved check below).
BOOL APIENTRY DllMain( HMODULE hModule,
                       DWORD ul_reason_for_call,
                       LPVOID lpReserved
                     ) noexcept
{
    switch (ul_reason_for_call)
    {
    case DLL_PROCESS_ATTACH:
        // Pin the DLL so it is never unmapped while idle-timer threads are
        // still running. During LabVIEW shutdown the CLR/COM teardown can
        // unload DLLs before all threads exit → crash at unmapped code.
        {
            HMODULE hSelf = nullptr;
            GetModuleHandleExW(
                GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS |
                GET_MODULE_HANDLE_EX_FLAG_PIN,
                reinterpret_cast<LPCWSTR>(&DllMain),
                &hSelf);
        }
        break;
    case DLL_THREAD_ATTACH:
    case DLL_THREAD_DETACH:
        break;
    case DLL_PROCESS_DETACH:
        // lpReserved != nullptr means process termination, not FreeLibrary.
        // ExitProcess: OS killed worker threads, CUDA context is dead.
        // Set flag so Engine/Pool destructors skip CUDA cleanup.
        if (lpReserved != nullptr) {
            g_processExiting().store(true, std::memory_order_relaxed);
            break;
        }
        // Dynamic FreeLibrary — threads are still alive, safe to clean up.
        try {
            // Snapshot + clear under the lock, then destroy outside it so
            // destructors never run while holding the registry mutex.
            std::vector<ANSCENTER::ANSFacialRecognition*> leakedHandles;
            {
                std::lock_guard<std::mutex> lk(FRHandleRegistryMutex());
                for (auto& [h, _] : FRHandleRegistry())
                    leakedHandles.push_back(h);
                FRHandleRegistry().clear();
            }
            for (auto* h : leakedHandles) {
                try { h->Destroy(); delete h; } catch (...) {}
            }
            try { EnginePoolManager<float>::instance().clearAll(); } catch (...) {}
            try { TRTEngineCache::instance().clearAll(); } catch (...) {}
        } catch (...) {}
        break;
    }
    return TRUE;
}
// Helper: safely copy a std::string into a LabVIEW LStrHandle.
// Returns 1 on success, 0 on failure (empty string, null handle, or
// allocation error from DSSetHandleSize).
static int CopyToLStrHandle(LStrHandle handle, const std::string& str) noexcept {
    if (handle == nullptr || str.empty()) return 0;
    const auto byteCount = static_cast<int32>(str.length());
    // Grow the handle to hold the length prefix plus the payload bytes.
    if (DSSetHandleSize(handle, sizeof(int32) + byteCount * sizeof(uChar)) != noErr) {
        return 0;
    }
    (*handle)->cnt = byteCount;
    memcpy((*handle)->str, str.data(), static_cast<size_t>(byteCount));
    return 1;
}
// Creates and initializes one ANSFacialRecognition instance.
// On success *Handle receives the new, registered instance and the
// Initialize() result (>= 0) is returned; on failure *Handle is nulled and
// a negative code is returned.
extern "C" ANSFR_API int CreateANSRFHandle(ANSCENTER::ANSFacialRecognition** Handle,
    const char* licenseKey,
    const char* configFilePath,
    const char* databaseFilePath,
    const char* recogniserFilePath,
    const char* detectorFilePath,
    int precisionType,
    float knownPersonThreshold,
    int enableAgeGender,
    int enableFaceEmotions,
    int enableHeadPose,
    int minFaceSize,
    float faceDetectorThreshold,
    int enableFaceLiveness,
    int enableAntiSpoofing)
{
    ANS_DBG("ANSFR","CreateANSRFHandle called: HandlePtr=%p, *Handle(in)=%p, precision=%d, knownThr=%f, ageGender=%d, emotions=%d, headPose=%d, minFace=%d, faceThr=%f, liveness=%d, antiSpoof=%d, configFile=%s, dbFile=%s",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), precisionType, knownPersonThreshold,
        enableAgeGender, enableFaceEmotions, enableHeadPose, minFaceSize, faceDetectorThreshold,
        enableFaceLiveness, enableAntiSpoofing,
        configFilePath ? configFilePath : "(null)", databaseFilePath ? databaseFilePath : "(null)");
    try {
        // Ensure all shared DLLs (OpenCV, OpenVINO, TRT, ORT) are pre-loaded
        ANSCENTER::ANSLibsLoader::Initialize();
        if (!Handle || !licenseKey || !configFilePath || !databaseFilePath || !recogniserFilePath) {
            ANS_DBG("ANSFR","CreateANSRFHandle FAIL: required pointer is null");
            return -1;
        }
        // Log the detected vendor path so field triage between NVIDIA / AMD /
        // Intel / CPU machines is trivial from the debug log. Mirrors the
        // vendorTag logging already in ANSLPR_OD::LoadEngine and ANSOCR
        // CreateANSOCRHandleEx.
        {
            ANSCENTER::EngineType detected =
                ANSCENTER::ANSLicenseHelper::CheckHardwareInformation();
            const char* vendorTag =
                detected == ANSCENTER::EngineType::NVIDIA_GPU ? "NVIDIA_GPU (TensorRT + CUDA preproc, SCRFD face detector)" :
                detected == ANSCENTER::EngineType::AMD_GPU ? "AMD_GPU (ONNX Runtime / DirectML, OV face detector, NV12/CUDA DISABLED)" :
                detected == ANSCENTER::EngineType::OPENVINO_GPU ? "OPENVINO_GPU (OpenVINO, OV face detector, NV12/CUDA DISABLED)" :
                "CPU (ONNX Runtime / OpenVINO CPU, NV12/CUDA DISABLED)";
            char buf[224];
            snprintf(buf, sizeof(buf),
                "[ANSFR] CreateANSRFHandle: detected engineType=%d [%s]\n",
                static_cast<int>(detected), vendorTag);
            OutputDebugStringA(buf);
            std::cout << buf;
        }
        // Pure constructor: ignore *Handle(in). LabVIEW's CLF Node marshalling
        // reuses the same temp buffer per call site, so *Handle(in) often holds
        // leftover bytes from the previous Create's output even when the actual
        // LabVIEW wire is a different, freshly-allocated instance. Inspecting
        // *Handle(in) and destroying what we "see" tears down legitimate
        // parallel instances. (Same reasoning as CreateANSAWSHandle.)
        // Trade-off: a true double-Create on the same wire leaks the prior
        // handle -- caller's bug; the alternative is far worse.
        *Handle = nullptr;
        // std::unique_ptr ensures automatic cleanup if Initialize() throws
        auto ptr = std::make_unique<ANSCENTER::ANSFacialRecognition>();
        ANS_DBG("ANSFR","CreateANSRFHandle: allocated handle=%p (uint=%llu), calling Initialize...",
            (void*)ptr.get(), (unsigned long long)(uintptr_t)ptr.get());
        const bool _enableFaceLiveness = (enableFaceLiveness == 1);
        const bool _enableAntiSpoofing = (enableAntiSpoofing == 1);
        // Pool sizing must be decided before Initialize loads any engines.
        ptr->SetMaxSlotsPerGpu(GetPoolMaxSlotsPerGpu());
        int result = ptr->Initialize(licenseKey,
            configFilePath,
            databaseFilePath,
            recogniserFilePath,
            detectorFilePath ? detectorFilePath : "",
            precisionType,
            knownPersonThreshold,
            enableAgeGender,
            enableFaceEmotions,
            enableHeadPose,
            minFaceSize,
            faceDetectorThreshold,
            _enableFaceLiveness,
            _enableAntiSpoofing);
        if (result < 0) {
            ANS_DBG("ANSFR","CreateANSRFHandle FAIL: Initialize returned %d, handle being freed", result);
            *Handle = nullptr;
            return result; // unique_ptr frees the instance on scope exit
        }
        // Transfer ownership to caller on success
        *Handle = ptr.release();
        RegisterFRHandle(*Handle);
        ANS_DBG("ANSFR","CreateANSRFHandle OK: handle=%p (uint=%llu) result=%d",
            (void*)*Handle, (unsigned long long)(uintptr_t)*Handle, result);
        return result;
    }
    catch (const std::exception& e) {
        ANS_DBG("ANSFR","CreateANSRFHandle EXCEPTION (std::exception): %s", e.what());
        return -1;
    }
    // Fix: non-std exceptions must not escape an extern "C" boundary into
    // the LabVIEW host — swallow them like the other exports in this file.
    catch (...) {
        ANS_DBG("ANSFR","CreateANSRFHandle EXCEPTION (unknown)");
        return -1;
    }
}
// Loads the inference engines for an already-created handle.
// Returns 1 when LoadEngine() succeeds, 0 when it reports failure,
// -1 on null arguments or exception.
extern "C" ANSFR_API int LoadANSRFEngine(ANSCENTER::ANSFacialRecognition** Handle) {
    ANS_DBG("ANSFR","LoadANSRFEngine: HandlePtr=%p, *Handle=%p", (void*)Handle, (void*)(Handle ? *Handle : nullptr));
    try {
        if (!Handle || !*Handle) return -1;
        bool result = (*Handle)->LoadEngine();
        return result ? 1 : 0;
    }
    catch (const std::exception& e) {
        ANS_DBG("ANSFR","LoadANSRFEngine EXCEPTION: %s", e.what());
        return -1;
    }
    // Fix: also stop non-std exceptions at the extern "C" boundary,
    // matching the catch-all style of the other exports in this file.
    catch (...) {
        ANS_DBG("ANSFR","LoadANSRFEngine EXCEPTION (unknown)");
        return -1;
    }
}
// C++ side of Release. Coordinates with the registry so that the delete
// happens exactly once, and only after in-flight inference has drained
// (UnregisterFRHandle blocks until refs are released). Returns 1 on any
// "handle is gone" outcome, 0 only on an exception during teardown.
static int ReleaseANSRFHandle_Impl(ANSCENTER::ANSFacialRecognition** Handle) {
    try {
        if (!Handle || !*Handle) {
            ANS_DBG("ANSFR","Release: HandlePtr or *Handle is null, no-op");
            return 1;
        }
        ANSCENTER::ANSFacialRecognition* h = *Handle;
        ANS_DBG("ANSFR","Release called: handle=%p (uint=%llu)", (void*)h, (unsigned long long)(uintptr_t)h);
        if (!UnregisterFRHandle(h)) {
            // Lost the race (or handle was never registered) — another
            // thread owns/owned the delete; just clear the caller's copy.
            ANS_DBG("ANSFR","Release: Unregister returned false (already gone or being destroyed by another thread), handle=%p", (void*)h);
            *Handle = nullptr;
            return 1; // Not in registry — already freed
        }
        // This thread won the destruction race: tear down and free.
        h->Destroy();
        delete h;
        *Handle = nullptr;
        // NOTE: h is dangling here — it is logged only as a pointer value;
        // registry size is read without the lock (debug trace only).
        ANS_DBG("ANSFR","Release OK: handle=%p deleted, registry now has %zu entries",
            (void*)h, FRHandleRegistry().size());
        return 1;
    }
    catch (...) {
        ANS_DBG("ANSFR","Release EXCEPTION (unknown)");
        if (Handle) *Handle = nullptr;
        return 0;
    }
}
// Exported Release. SEH (__try/__except) cannot live in a function that
// also has C++ objects needing unwinding, hence the split: all C++ work is
// in ReleaseANSRFHandle_Impl and this wrapper only traps hardware faults
// (e.g. an access violation from a wild pointer passed by the host).
extern "C" ANSFR_API int ReleaseANSRFHandle(ANSCENTER::ANSFacialRecognition** Handle) {
    __try {
        return ReleaseANSRFHandle_Impl(Handle);
    }
    __except (EXCEPTION_EXECUTE_HANDLER) {
        ANS_DBG("ANSFR","ReleaseANSRFHandle: SEH exception caught");
        if (Handle) *Handle = nullptr;
        return 0;
    }
}
// Full inference (detect + recognize + enabled attributes) on a JPEG buffer.
// Returns the result set serialized to JSON, or "" on any failure.
extern "C" ANSFR_API std::string RunANSRFInference(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength) {
    ANS_DBG("ANSFR","RunANSRFInference: HandlePtr=%p, *Handle=%p, bufLen=%u", (void*)Handle, (void*)(Handle ? *Handle : nullptr), bufferLength);
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0) return "";
    // Hold a registry ref for the whole call so a concurrent Release waits.
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat decoded = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (decoded.empty()) return "";
        auto* engine = guard.get();
        const std::vector<ANSCENTER::FaceResultObject> faces = engine->Inference(decoded);
        decoded.release();
        return engine->FaceObjectsToJsonString(faces);
    }
    catch (...) {
        return "";
    }
}
// Same as RunANSRFInference but takes a raw 8-bit 3-channel pixel buffer
// of width x height instead of an encoded JPEG.
extern "C" ANSFR_API std::string RunANSRFInferenceBinary(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_bytes, unsigned int width, unsigned int height) {
    ANS_DBG("ANSFR","RunANSRFInferenceBinary: HandlePtr=%p, *Handle=%p, %ux%u", (void*)Handle, (void*)(Handle ? *Handle : nullptr), width, height);
    if (!Handle || !*Handle || !jpeg_bytes || width == 0 || height == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        // Deep-copy so the engine never reads the caller's buffer after return.
        cv::Mat borrowed(height, width, CV_8UC3, jpeg_bytes);
        cv::Mat frame = borrowed.clone();
        if (frame.empty()) return "";
        auto* engine = guard.get();
        const std::vector<ANSCENTER::FaceResultObject> faces = engine->Inference(frame);
        frame.release();
        return engine->FaceObjectsToJsonString(faces);
    }
    catch (...) {
        return "";
    }
}
// Recognition-only pass (Recognize) on a JPEG buffer; JSON or "" on failure.
extern "C" ANSFR_API std::string RunANSRFRecognition(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength) {
    ANS_DBG("ANSFR","RunANSRFRecognition: HandlePtr=%p, *Handle=%p, bufLen=%u", (void*)Handle, (void*)(Handle ? *Handle : nullptr), bufferLength);
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat decoded = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (decoded.empty()) return "";
        auto* engine = guard.get();
        const std::vector<ANSCENTER::FaceResultObject> faces = engine->Recognize(decoded);
        decoded.release();
        return engine->FaceObjectsToJsonString(faces);
    }
    catch (...) {
        return "";
    }
}
// Recognition-only pass on a raw 8-bit 3-channel buffer; JSON or "".
extern "C" ANSFR_API std::string RunANSRFRecognitionBinary(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_bytes, unsigned int width, unsigned int height) {
    ANS_DBG("ANSFR","RunANSRFRecognitionBinary: HandlePtr=%p, *Handle=%p, %ux%u", (void*)Handle, (void*)(Handle ? *Handle : nullptr), width, height);
    if (!Handle || !*Handle || !jpeg_bytes || width == 0 || height == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat borrowed(height, width, CV_8UC3, jpeg_bytes);
        cv::Mat frame = borrowed.clone();
        if (frame.empty()) return "";
        auto* engine = guard.get();
        const std::vector<ANSCENTER::FaceResultObject> faces = engine->Recognize(frame);
        frame.release();
        return engine->FaceObjectsToJsonString(faces);
    }
    catch (...) {
        return "";
    }
}
// Detection-only pass (Detect) on a raw 8-bit 3-channel buffer; JSON or "".
extern "C" ANSFR_API std::string RunANSRFDetectorBinary(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_bytes, unsigned int width, unsigned int height) {
    ANS_DBG("ANSFR","RunANSRFDetectorBinary: HandlePtr=%p, *Handle=%p, %ux%u", (void*)Handle, (void*)(Handle ? *Handle : nullptr), width, height);
    if (!Handle || !*Handle || !jpeg_bytes || width == 0 || height == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat borrowed(height, width, CV_8UC3, jpeg_bytes);
        cv::Mat frame = borrowed.clone();
        if (frame.empty()) return "";
        auto* engine = guard.get();
        const std::vector<ANSCENTER::FaceResultObject> faces = engine->Detect(frame);
        frame.release();
        return engine->FaceObjectsToJsonString(faces);
    }
    catch (...) {
        return "";
    }
}
// Detection-only pass (Detect) on a JPEG buffer; JSON or "" on failure.
extern "C" ANSFR_API std::string RunANSRFDetector(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength) {
    ANS_DBG("ANSFR","RunANSRFDetector: HandlePtr=%p, *Handle=%p, bufLen=%u", (void*)Handle, (void*)(Handle ? *Handle : nullptr), bufferLength);
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat decoded = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (decoded.empty()) return "";
        auto* engine = guard.get();
        const std::vector<ANSCENTER::FaceResultObject> faces = engine->Detect(decoded);
        decoded.release();
        return engine->FaceObjectsToJsonString(faces);
    }
    catch (...) {
        return "";
    }
}
//// For LabVIEW API
// LabVIEW wrapper: runs full inference and writes the JSON into an
// LStrHandle. Returns CopyToLStrHandle's result (1 ok / 0 fail) or -1 on
// bad arguments or exception.
extern "C" ANSFR_API int RunInference_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunInference_LV: HandlePtr=%p, *Handle=%p, bufLen=%u", (void*)Handle, (void*)(Handle ? *Handle : nullptr), bufferLength);
    try {
        const bool argsValid = Handle && *Handle && jpeg_string && bufferLength != 0 && detectionResult;
        if (!argsValid) return -1;
        const std::string json = RunANSRFInference(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (const std::exception& e) {
        ANS_DBG("ANSFR","RunInference_LV EXCEPTION: %s", e.what());
        return -1;
    }
}
// LabVIEW wrapper with a camera id. Returns -1 on bad args/exception,
// -3 when the handle could not be acquired (stale or being destroyed),
// 0 when the JPEG fails to decode, otherwise CopyToLStrHandle's result.
extern "C" ANSFR_API int RunInferenceWithCamId_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunInferenceWithCamId_LV: HandlePtr=%p, *Handle=%p, bufLen=%u, cam=%s",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), bufferLength, cameraId ? cameraId : "(null)");
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    try {
        cv::Mat decoded = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (decoded.empty()) return 0;
        auto* engine = guard.get();
        const std::vector<ANSCENTER::FaceResultObject> faces = engine->Inference(decoded, cameraId);
        decoded.release();
        return CopyToLStrHandle(detectionResult, engine->FaceObjectsToJsonString(faces));
    }
    catch (...) {
        return -1;
    }
}
// LabVIEW wrapper: detection-only pass, JSON written into detectionResult.
// Returns CopyToLStrHandle's result (1 ok / 0 fail) or -1 on bad args or
// exception.
extern "C" ANSFR_API int RunDetector_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult)
{
    ANS_DBG("ANSFR","RunDetector_LV: HandlePtr=%p, *Handle=%p, bufLen=%u", (void*)Handle, (void*)(Handle ? *Handle : nullptr), bufferLength);
    try {
        const bool argsValid = Handle && *Handle && jpeg_string && bufferLength != 0 && detectionResult;
        if (!argsValid) return -1;
        const std::string json = RunANSRFDetector(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (const std::exception& e) {
        ANS_DBG("ANSFR","RunDetector_LV EXCEPTION: %s", e.what());
        return -1;
    }
}
// LabVIEW wrapper: detection-only pass with camera id. Return codes match
// RunInferenceWithCamId_LV (-1 args/exception, -3 acquire failed, 0 decode
// failed, else CopyToLStrHandle result).
extern "C" ANSFR_API int RunDetectorWithCamId_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunDetectorWithCamId_LV: HandlePtr=%p, *Handle=%p, bufLen=%u, cam=%s",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), bufferLength, cameraId ? cameraId : "(null)");
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    try {
        cv::Mat decoded = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (decoded.empty()) return 0;
        auto* engine = guard.get();
        const std::vector<ANSCENTER::FaceResultObject> faces = engine->Detect(decoded, cameraId);
        decoded.release();
        return CopyToLStrHandle(detectionResult, engine->FaceObjectsToJsonString(faces));
    }
    catch (...) {
        return -1;
    }
}
// LabVIEW wrapper: recognition-only pass, JSON written into detectionResult.
// Returns CopyToLStrHandle's result or -1 on bad args or exception.
extern "C" ANSFR_API int RunRecognition_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunRecognition_LV: HandlePtr=%p, *Handle=%p, bufLen=%u", (void*)Handle, (void*)(Handle ? *Handle : nullptr), bufferLength);
    try {
        const bool argsValid = Handle && *Handle && jpeg_string && bufferLength != 0 && detectionResult;
        if (!argsValid) return -1;
        const std::string json = RunANSRFRecognition(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (const std::exception& e) {
        ANS_DBG("ANSFR","RunRecognition_LV EXCEPTION: %s", e.what());
        return -1;
    }
}
// LabVIEW wrapper: recognition-only pass with camera id. Return codes match
// RunInferenceWithCamId_LV.
extern "C" ANSFR_API int RunRecognitionWithCamId_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunRecognitionWithCamId_LV: HandlePtr=%p, *Handle=%p, bufLen=%u, cam=%s",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), bufferLength, cameraId ? cameraId : "(null)");
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    try {
        cv::Mat decoded = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (decoded.empty()) return 0;
        auto* engine = guard.get();
        const std::vector<ANSCENTER::FaceResultObject> faces = engine->Recognize(decoded, cameraId);
        decoded.release();
        return CopyToLStrHandle(detectionResult, engine->FaceObjectsToJsonString(faces));
    }
    catch (...) {
        return -1;
    }
}
// LabVIEW wrapper around the raw face detector (FaceDetect → Object list,
// serialized via FaceToJsonString). Return codes match the other *_LV
// camera-id wrappers.
extern "C" ANSFR_API int RunFaceDetection_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunFaceDetection_LV: HandlePtr=%p, *Handle=%p, bufLen=%u, cam=%s",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), bufferLength, cameraId ? cameraId : "(null)");
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    try {
        cv::Mat decoded = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (decoded.empty()) return 0;
        auto* engine = guard.get();
        const std::vector<ANSCENTER::Object> detections = engine->FaceDetect(decoded, cameraId);
        decoded.release();
        return CopyToLStrHandle(detectionResult, engine->FaceToJsonString(detections));
    }
    catch (...) {
        return -1;
    }
}
// Raw face detector on an 8-bit 3-channel buffer; camera id is fixed to
// "0000". Returns the Object list as JSON, or "" on any failure.
extern "C" ANSFR_API std::string RunANSRFFaceDetector(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_bytes, unsigned int width, unsigned int height)
{
    ANS_DBG("ANSFR","RunANSRFFaceDetector: HandlePtr=%p, *Handle=%p, %ux%u", (void*)Handle, (void*)(Handle ? *Handle : nullptr), width, height);
    if (!Handle || !*Handle || !jpeg_bytes || width == 0 || height == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat borrowed(height, width, CV_8UC3, jpeg_bytes);
        cv::Mat frame = borrowed.clone();
        if (frame.empty()) return "";
        auto* engine = guard.get();
        const std::vector<ANSCENTER::Object> detections = engine->FaceDetect(frame, "0000");
        frame.release();
        return engine->FaceToJsonString(detections);
    }
    catch (...) {
        return "";
    }
}
// User management
// Inserts a user record (code + display name, NUL-terminated strings as
// supplied by the caller). Returns the engine's result code, or -1 on bad
// arguments or exception.
extern "C" ANSFR_API int InsertUser(ANSCENTER::ANSFacialRecognition** Handle, const char* userCode, const char* userName) {
    ANS_DBG("ANSFR","InsertUser: HandlePtr=%p, *Handle=%p, userCode=%s",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userCode ? userCode : "(null)");
    try {
        if (!Handle || !*Handle || !userCode || !userName) return -1;
        return (*Handle)->InsertUser(userCode, userName);
    }
    catch (const std::exception& e) {
        ANS_DBG("ANSFR","InsertUser EXCEPTION: %s", e.what());
        return -1;
    }
    // Fix: also block non-std exceptions at the extern "C" boundary,
    // consistent with the catch-all style of the Run* exports.
    catch (...) {
        return -1;
    }
}
// Helper: repair mixed-encoding LabVIEW LStrHandle to clean UTF-16LE.
// LabVIEW text controls may produce a mix of UTF-16LE pairs, embedded UTF-8
// multi-byte sequences, and lone space bytes (0x20 without 0x00 high byte).
// This normalizes everything to proper UTF-16LE pairs.
// Input: BOM-stripped raw bytes. Output: clean UTF-16LE vector.
static std::vector<unsigned char> RepairLabVIEWUTF16LE_Local(const unsigned char* data, int len) {
    std::vector<unsigned char> out;
    if (!data || len <= 0) return out;
    out.reserve(static_cast<size_t>(len) + 32);
    // Append one UTF-16 code unit in little-endian byte order.
    auto putCodeUnit = [&out](uint16_t u) {
        out.push_back(static_cast<unsigned char>(u & 0xFF));
        out.push_back(static_cast<unsigned char>((u >> 8) & 0xFF));
    };
    int i = 0;
    while (i < len) {
        const unsigned char lead = data[i];
        // 1. Detect embedded UTF-8 multi-byte sequences.
        // 2-byte UTF-8: C2-DF followed by a continuation byte.
        if (lead >= 0xC2 && lead <= 0xDF && i + 1 < len) {
            const unsigned char c1 = data[i + 1];
            if ((c1 & 0xC0) == 0x80) {
                putCodeUnit(static_cast<uint16_t>(((lead & 0x1F) << 6) | (c1 & 0x3F)));
                i += 2;
                continue;
            }
        }
        // 3-byte UTF-8: E0-EF + two continuation bytes (reject overlong
        // encodings and surrogate code points).
        if (lead >= 0xE0 && lead <= 0xEF && i + 2 < len) {
            const unsigned char c1 = data[i + 1], c2 = data[i + 2];
            if ((c1 & 0xC0) == 0x80 && (c2 & 0xC0) == 0x80) {
                const uint32_t cp = (static_cast<uint32_t>(lead & 0x0F) << 12)
                                  | (static_cast<uint32_t>(c1 & 0x3F) << 6)
                                  | (c2 & 0x3F);
                if (cp >= 0x0800 && (cp < 0xD800 || cp > 0xDFFF)) {
                    putCodeUnit(static_cast<uint16_t>(cp));
                    i += 3;
                    continue;
                }
            }
        }
        // 4-byte UTF-8: F0-F4 + three continuation bytes → surrogate pair.
        if (lead >= 0xF0 && lead <= 0xF4 && i + 3 < len) {
            const unsigned char c1 = data[i + 1], c2 = data[i + 2], c3 = data[i + 3];
            if ((c1 & 0xC0) == 0x80 && (c2 & 0xC0) == 0x80 && (c3 & 0xC0) == 0x80) {
                uint32_t cp = (static_cast<uint32_t>(lead & 0x07) << 18)
                            | (static_cast<uint32_t>(c1 & 0x3F) << 12)
                            | (static_cast<uint32_t>(c2 & 0x3F) << 6)
                            | (c3 & 0x3F);
                if (cp >= 0x10000 && cp <= 0x10FFFF) {
                    cp -= 0x10000;
                    putCodeUnit(static_cast<uint16_t>(0xD800 + (cp >> 10)));
                    putCodeUnit(static_cast<uint16_t>(0xDC00 + (cp & 0x3FF)));
                    i += 4;
                    continue;
                }
            }
        }
        const bool hasPair = (i + 1 < len);
        if (hasPair && data[i + 1] == 0x00) {
            // 2. Normal UTF-16LE pair (low byte + 0x00 high byte).
            out.push_back(lead);
            out.push_back(0x00);
            i += 2;
        } else if (lead == 0x20) {
            // 3. Lone space byte — LabVIEW dropped the 0x00 high byte.
            // (The "next byte is not 0x00" part of the original condition is
            // already implied by the branch above having failed.)
            out.push_back(0x20);
            out.push_back(0x00);
            i += 1;
        } else if (hasPair) {
            // 4. Non-ASCII UTF-16LE pair — pass both bytes through.
            out.push_back(lead);
            out.push_back(data[i + 1]);
            i += 2;
        } else {
            // 5. Trailing odd byte — skip.
            ++i;
        }
    }
    return out;
}
// Helper: convert LStrHandle (mixed UTF-8/UTF-16LE or system codepage) to UTF-8 string
// Encoding detection is heuristic: a BOM or any embedded 0x00 byte selects
// the UTF-16LE path; otherwise the bytes are taken as UTF-8 if they validate
// (with at least one multi-byte sequence), else as the system ANSI codepage.
static std::string LStrHandleToUTF8(LStrHandle handle) {
    if (!handle) return "";
    int byteLen = (*handle)->cnt;
    if (byteLen <= 0) return "";
    const unsigned char* data = reinterpret_cast<const unsigned char*>((*handle)->str);
    // Check for BOM or 0x00 bytes → UTF-16LE (possibly mixed with UTF-8)
    bool isUtf16le = false;
    if (byteLen >= 2 && data[0] == 0xFF && data[1] == 0xFE) isUtf16le = true;
    if (!isUtf16le) {
        for (int i = 0; i < byteLen; i++) {
            if (data[i] == 0x00) { isUtf16le = true; break; }
        }
    }
    if (isUtf16le) {
        const unsigned char* convData = data;
        int convLen = byteLen;
        // Strip the FF FE BOM before repair/conversion.
        if (convLen >= 2 && convData[0] == 0xFF && convData[1] == 0xFE) { convData += 2; convLen -= 2; }
        if (convLen <= 0) return "";
        // Repair mixed encoding (UTF-8 islands, lone spaces) → clean UTF-16LE
        auto repaired = RepairLabVIEWUTF16LE_Local(convData, convLen);
#ifdef _WIN32
        // Windows: convert the repaired UTF-16LE buffer to UTF-8.
        int wideLen = static_cast<int>(repaired.size()) / 2;
        const wchar_t* wideStr = reinterpret_cast<const wchar_t*>(repaired.data());
        int utf8Len = WideCharToMultiByte(CP_UTF8, 0, wideStr, wideLen, nullptr, 0, nullptr, nullptr);
        if (utf8Len > 0) {
            std::string utf8(utf8Len, 0);
            WideCharToMultiByte(CP_UTF8, 0, wideStr, wideLen, &utf8[0], utf8Len, nullptr, nullptr);
            return utf8;
        }
#endif
        // Non-Windows build or conversion failure: return raw repaired bytes.
        return std::string(reinterpret_cast<const char*>(repaired.data()), repaired.size());
    } else {
        // No 0x00 bytes — try UTF-8 first, fall back to system codepage.
        // IsValidUTF8: check if bytes form valid UTF-8 with at least one multi-byte sequence.
        auto IsValidUTF8 = [](const unsigned char* d, int l) -> bool {
            bool hasMulti = false;
            for (int j = 0; j < l; ) {
                unsigned char c = d[j];
                if (c <= 0x7F) { j++; }
                else if (c >= 0xC2 && c <= 0xDF) {
                    if (j + 1 >= l || (d[j + 1] & 0xC0) != 0x80) return false;
                    hasMulti = true; j += 2;
                } else if (c >= 0xE0 && c <= 0xEF) {
                    if (j + 2 >= l || (d[j + 1] & 0xC0) != 0x80 || (d[j + 2] & 0xC0) != 0x80) return false;
                    hasMulti = true; j += 3;
                } else if (c >= 0xF0 && c <= 0xF4) {
                    if (j + 3 >= l || (d[j + 1] & 0xC0) != 0x80 || (d[j + 2] & 0xC0) != 0x80 || (d[j + 3] & 0xC0) != 0x80) return false;
                    hasMulti = true; j += 4;
                } else { return false; }
            }
            return hasMulti;
        };
        if (IsValidUTF8(data, byteLen)) {
            // Already UTF-8: pass the bytes through unchanged.
            return std::string(reinterpret_cast<const char*>(data), byteLen);
        }
#ifdef _WIN32
        // Pure ASCII or ANSI text: system codepage → wide → UTF-8.
        int wideLen = MultiByteToWideChar(CP_ACP, 0, reinterpret_cast<const char*>(data), byteLen, nullptr, 0);
        if (wideLen > 0) {
            std::wstring wideStr(wideLen, 0);
            MultiByteToWideChar(CP_ACP, 0, reinterpret_cast<const char*>(data), byteLen, &wideStr[0], wideLen);
            int utf8Len = WideCharToMultiByte(CP_UTF8, 0, wideStr.c_str(), wideLen, nullptr, 0, nullptr, nullptr);
            if (utf8Len > 0) {
                std::string utf8(utf8Len, 0);
                WideCharToMultiByte(CP_UTF8, 0, wideStr.c_str(), wideLen, &utf8[0], utf8Len, nullptr, nullptr);
                return utf8;
            }
        }
#endif
        // Last resort: pass the raw bytes through unchanged.
        return std::string(reinterpret_cast<const char*>(data), byteLen);
    }
}
// Inserts a new user whose display name arrives as a LabVIEW string handle.
// The name is normalized to UTF-8 via LStrHandleToUTF8 before reaching the
// engine. Returns the engine's InsertUser result, or -1 on bad args / empty
// name / exception.
extern "C" ANSFR_API int InsertUser_LV(ANSCENTER::ANSFacialRecognition** Handle, const char* userCode, LStrHandle userName) {
    ANS_DBG("ANSFR","InsertUser_LV: HandlePtr=%p, *Handle=%p, userCode=%s",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userCode ? userCode : "(null)");
    if (!Handle || !*Handle || !userCode || !userName) return -1;
    try {
        std::string utf8Name = LStrHandleToUTF8(userName);
        return utf8Name.empty() ? -1 : (*Handle)->InsertUser(userCode, utf8Name);
    }
    catch (const std::exception&) { return -1; }
}
// Updates an existing user; the display name arrives as a LabVIEW string
// handle and is normalized to UTF-8 first. Returns the engine's UpdateUser
// result, or -1 on bad args / empty name / exception.
extern "C" ANSFR_API int UpdateUser_LV(ANSCENTER::ANSFacialRecognition** Handle, int userId, const char* userCode, LStrHandle userName) {
    ANS_DBG("ANSFR","UpdateUser_LV: HandlePtr=%p, *Handle=%p, userId=%d, userCode=%s",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userId, userCode ? userCode : "(null)");
    if (!Handle || !*Handle || !userCode || !userName) return -1;
    try {
        std::string utf8Name = LStrHandleToUTF8(userName);
        return utf8Name.empty() ? -1 : (*Handle)->UpdateUser(userId, userCode, utf8Name);
    }
    catch (const std::exception&) { return -1; }
}
2026-03-28 16:54:11 +11:00
// Updates an existing user with a plain (already UTF-8/ASCII) name.
// Returns the engine's UpdateUser result, or -1 on bad args / exception.
extern "C" ANSFR_API int UpdateUser(ANSCENTER::ANSFacialRecognition** Handle, int userId, const char* userCode, const char* userName) {
    ANS_DBG("ANSFR","UpdateUser: HandlePtr=%p, *Handle=%p, userId=%d, userCode=%s",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userId, userCode ? userCode : "(null)");
    if (!Handle || !*Handle || !userCode || !userName) return -1;
    try {
        return (*Handle)->UpdateUser(userId, userCode, userName);
    }
    catch (const std::exception&) { return -1; }
}
// Deletes a single user by id. Returns the engine's DeleteUser result,
// or -1 on invalid handle / exception.
extern "C" ANSFR_API int DeleteUser(ANSCENTER::ANSFacialRecognition** Handle, int userId) {
    ANS_DBG("ANSFR","DeleteUser: HandlePtr=%p, *Handle=%p, userId=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userId);
    if (!Handle || !*Handle) return -1;
    try {
        return (*Handle)->DeleteUser(userId);
    }
    catch (const std::exception&) { return -1; }
}
// Deletes a batch of users given as a C array of ids. Returns the engine's
// DeleteUsers result, or -1 on bad args / exception.
extern "C" ANSFR_API int DeleteUsers(ANSCENTER::ANSFacialRecognition** Handle, int* userIds, int count) {
    ANS_DBG("ANSFR","DeleteUsers: HandlePtr=%p, *Handle=%p, count=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), count);
    if (!Handle || !*Handle || !userIds || count <= 0) return -1;
    try {
        // Copy the caller's raw array into the vector the engine API expects.
        std::vector<int> idList(userIds, userIds + count);
        return (*Handle)->DeleteUsers(idList);
    }
    catch (const std::exception&) { return -1; }
}
// Decodes a JPEG buffer and enrolls the face for the given user.
// Returns the engine's InsertFace result, 0 if the JPEG fails to decode,
// or -1 on bad args / exception.
extern "C" ANSFR_API int InsertFace(ANSCENTER::ANSFacialRecognition** Handle, int userId, unsigned char* jpeg_string, unsigned int bufferLength) {
    ANS_DBG("ANSFR","InsertFace: HandlePtr=%p, *Handle=%p, userId=%d, bufLen=%u",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userId, bufferLength);
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0) return -1;
    try {
        cv::Mat decoded = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (decoded.empty()) return 0; // not a decodable image
        return (*Handle)->InsertFace(userId, decoded);
    }
    catch (const std::exception&) { return -1; }
}
// Decodes a JPEG buffer, enrolls every detected face for the user, and
// writes the new face ids into faceIdsStr as a ";"-separated list.
// Returns CopyToLStrHandle's status, 0 on decode failure, -1 on bad args
// or exception.
extern "C" ANSFR_API int InsertFaces(ANSCENTER::ANSFacialRecognition** Handle, int userId, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle faceIdsStr) {
    ANS_DBG("ANSFR","InsertFaces: HandlePtr=%p, *Handle=%p, userId=%d, bufLen=%u",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userId, bufferLength);
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !faceIdsStr) return -1;
    try {
        cv::Mat decoded = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (decoded.empty()) return 0;
        std::vector<int> faceIds = (*Handle)->InsertMultipleFaces(userId, decoded);
        // Join ids with ';' (no trailing separator).
        std::string joined;
        for (int id : faceIds) {
            if (!joined.empty()) joined += ";";
            joined += std::to_string(id);
        }
        return CopyToLStrHandle(faceIdsStr, joined);
    }
    catch (const std::exception&) { return -1; }
}
// Decodes a JPEG buffer and runs the engine's face-quality/embedding check.
// Returns the engine's CheckFace result, 0 on decode failure, -1 on bad
// args / exception.
extern "C" ANSFR_API int CheckFaceEmbedding(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength) {
    ANS_DBG("ANSFR","CheckFaceEmbedding: HandlePtr=%p, *Handle=%p, bufLen=%u",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), bufferLength);
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0) return -1;
    try {
        cv::Mat decoded = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (decoded.empty()) return 0;
        return (*Handle)->CheckFace(decoded);
    }
    catch (const std::exception&) { return -1; }
}
// Enrolls a face from RAW pixel data. NOTE(review): despite the parameter
// name, jpeg_bytes is interpreted as tightly-packed BGR24 pixels
// (height x width x 3), not JPEG — the buffer is wrapped directly in a
// cv::Mat and cloned so the engine owns its own copy.
// Returns the engine's InsertFace result, 0 on an empty frame, -1 on bad
// args / exception.
extern "C" ANSFR_API int InsertFaceBinary(ANSCENTER::ANSFacialRecognition** Handle, int userId, unsigned char* jpeg_bytes, unsigned int width, unsigned int height) {
    ANS_DBG("ANSFR","InsertFaceBinary: HandlePtr=%p, *Handle=%p, userId=%d, %ux%u",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userId, width, height);
    if (!Handle || !*Handle || !jpeg_bytes || width == 0 || height == 0) return -1;
    try {
        cv::Mat frame = cv::Mat(height, width, CV_8UC3, jpeg_bytes).clone(); // deep copy — caller keeps its buffer
        if (frame.empty()) return 0;
        return (*Handle)->InsertFace(userId, frame);
    }
    catch (const std::exception&) { return -1; }
}
// Deletes a single enrolled face by id. Returns the engine's DeleteFace
// result, or -1 on invalid handle / exception.
extern "C" ANSFR_API int DeleteFace(ANSCENTER::ANSFacialRecognition** Handle, int faceId) {
    ANS_DBG("ANSFR","DeleteFace: HandlePtr=%p, *Handle=%p, faceId=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), faceId);
    if (!Handle || !*Handle) return -1;
    try {
        return (*Handle)->DeleteFace(faceId);
    }
    catch (const std::exception&) { return -1; }
}
// Reloads the engine's user/face database. Returns 1 on success, 0 on
// failure, -1 on invalid handle / exception.
extern "C" ANSFR_API int Reload(ANSCENTER::ANSFacialRecognition** Handle) {
    ANS_DBG("ANSFR","Reload: HandlePtr=%p, *Handle=%p", (void*)Handle, (void*)(Handle ? *Handle : nullptr));
    if (!Handle || !*Handle) return -1;
    try {
        return (*Handle)->Reload() ? 1 : 0;
    }
    catch (const std::exception&) { return -1; }
}
// New management API
// Fetches one user record as a string and writes it to the LabVIEW handle.
// Returns CopyToLStrHandle's status, or -1 on bad args / exception.
extern "C" ANSFR_API int GetUser(ANSCENTER::ANSFacialRecognition** Handle, int userId, LStrHandle userRecord) {
    ANS_DBG("ANSFR","GetUser: HandlePtr=%p, *Handle=%p, userId=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userId);
    if (!Handle || !*Handle || !userRecord) return -1;
    try {
        std::string record;
        (*Handle)->GetUser(userId, record);
        return CopyToLStrHandle(userRecord, record);
    }
    catch (const std::exception&) { return -1; }
}
// Fetches all user records as one string and writes it to the LabVIEW
// handle. The id list the engine also produces is discarded here.
// Returns CopyToLStrHandle's status, or -1 on bad args / exception.
extern "C" ANSFR_API int GetUsers(ANSCENTER::ANSFacialRecognition** Handle, LStrHandle userRecords) {
    ANS_DBG("ANSFR","GetUsers: HandlePtr=%p, *Handle=%p", (void*)Handle, (void*)(Handle ? *Handle : nullptr));
    if (!Handle || !*Handle || !userRecords) return -1;
    try {
        std::string records;
        std::vector<int> ids; // required by the API signature; unused by this wrapper
        (*Handle)->GetUsers(records, ids);
        return CopyToLStrHandle(userRecords, records);
    }
    catch (const std::exception&) { return -1; }
}
// Fetches one face record as a string and writes it to the LabVIEW handle.
// Returns CopyToLStrHandle's status, or -1 on bad args / exception.
extern "C" ANSFR_API int GetFace(ANSCENTER::ANSFacialRecognition** Handle, int faceId, LStrHandle faceRecord) {
    ANS_DBG("ANSFR","GetFace: HandlePtr=%p, *Handle=%p, faceId=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), faceId);
    if (!Handle || !*Handle || !faceRecord) return -1;
    try {
        std::string record;
        (*Handle)->GetFace(faceId, record);
        return CopyToLStrHandle(faceRecord, record);
    }
    catch (const std::exception&) { return -1; }
}
// Fetches all face records for one user and writes them to the LabVIEW
// handle. Returns CopyToLStrHandle's status, or -1 on bad args / exception.
extern "C" ANSFR_API int GetFaces(ANSCENTER::ANSFacialRecognition** Handle, int userId, LStrHandle faceRecords) {
    ANS_DBG("ANSFR","GetFaces: HandlePtr=%p, *Handle=%p, userId=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userId);
    if (!Handle || !*Handle || !faceRecords) return -1;
    try {
        std::string records;
        (*Handle)->GetFaces(userId, records);
        return CopyToLStrHandle(faceRecords, records);
    }
    catch (const std::exception&) { return -1; }
}
// Deletes every enrolled face belonging to the given user. Returns the
// engine's DeleteFacesByUser result, or -1 on invalid handle / exception.
extern "C" ANSFR_API int DeleteFacesByUser(ANSCENTER::ANSFacialRecognition** Handle, int userId) {
    ANS_DBG("ANSFR","DeleteFacesByUser: HandlePtr=%p, *Handle=%p, userId=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userId);
    if (!Handle || !*Handle) return -1;
    try {
        return (*Handle)->DeleteFacesByUser(userId);
    }
    catch (const std::exception&) { return -1; }
}
// For testing only
// Test-only variant of GetUser returning via std::string& (not callable
// from LabVIEW). Returns 1 on success, -1 on invalid handle / exception.
extern "C" ANSFR_API int GetUserString(ANSCENTER::ANSFacialRecognition** Handle, int userId, std::string& userRecord) {
    ANS_DBG("ANSFR","GetUserString: HandlePtr=%p, *Handle=%p, userId=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userId);
    if (!Handle || !*Handle) return -1;
    try {
        (*Handle)->GetUser(userId, userRecord);
        return 1;
    }
    catch (const std::exception&) { return -1; }
}
// Test-only variant of GetUsers returning via C++ out-params.
// Returns 1 on success, -1 on invalid handle / exception.
extern "C" ANSFR_API int GetUsersString(ANSCENTER::ANSFacialRecognition** Handle, std::string& userRecords, std::vector<int>& userIds) {
    ANS_DBG("ANSFR","GetUsersString: HandlePtr=%p, *Handle=%p", (void*)Handle, (void*)(Handle ? *Handle : nullptr));
    if (!Handle || !*Handle) return -1;
    try {
        (*Handle)->GetUsers(userRecords, userIds);
        return 1;
    }
    catch (const std::exception&) { return -1; }
}
// Test-only variant of GetFace returning via std::string&.
// Returns 1 on success, -1 on invalid handle / exception.
extern "C" ANSFR_API int GetFaceString(ANSCENTER::ANSFacialRecognition** Handle, int faceId, std::string& faceRecord) {
    ANS_DBG("ANSFR","GetFaceString: HandlePtr=%p, *Handle=%p, faceId=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), faceId);
    if (!Handle || !*Handle) return -1;
    try {
        (*Handle)->GetFace(faceId, faceRecord);
        return 1;
    }
    catch (const std::exception&) { return -1; }
}
// Test-only variant of GetFaces returning via std::string&.
// Returns 1 on success, -1 on invalid handle / exception.
extern "C" ANSFR_API int GetFacesString(ANSCENTER::ANSFacialRecognition** Handle, int userId, std::string& faceRecords) {
    ANS_DBG("ANSFR","GetFacesString: HandlePtr=%p, *Handle=%p, userId=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), userId);
    if (!Handle || !*Handle) return -1;
    try {
        (*Handle)->GetFaces(userId, faceRecords);
        return 1;
    }
    catch (const std::exception&) { return -1; }
}
// Computes a sharpness score for a JPEG-encoded image: the variance of the
// Laplacian of its grayscale version (higher = sharper, lower = blurrier).
// Returns -1 on null/empty input, a failed decode, or any exception.
extern "C" ANSFR_API double BlurCalculation(unsigned char* jpeg_string, unsigned int bufferLength) {
    ANS_DBG("ANSFR","BlurCalculation: bufLen=%u", bufferLength);
    try {
        if (!jpeg_string || bufferLength == 0) return -1;
        cv::Mat image = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        // FIX: guard against a failed decode explicitly. Previously an empty
        // Mat fell through to cvtColor, which threw and was mapped to -1 via
        // the catch block — this makes the error path explicit and cheap.
        if (image.empty()) return -1;
        cv::Mat gray;
        cv::cvtColor(image, gray, cv::COLOR_BGR2GRAY);
        cv::Mat laplacian;
        cv::Laplacian(gray, laplacian, CV_64F);
        cv::Scalar mean, stddev;
        cv::meanStdDev(laplacian, mean, stddev);
        // Variance = stddev^2 — the classic "variance of Laplacian" metric.
        // (cv::Mat frees itself; the old manual release() calls were redundant.)
        return stddev.val[0] * stddev.val[0];
    }
    catch (const std::exception&) {
        return -1;
    }
}
2026-03-31 14:10:21 +11:00
// Unicode conversion utilities for LabVIEW wrapper classes
// Converts a UTF-8 C string (or a string containing JSON-style "\uXXXX"
// escapes) into UTF-16LE bytes inside a LabVIEW string handle, optionally
// prefixed with an FF FE BOM.
// Returns 1 on success, 0 on empty input or non-Windows build,
// -1 on bad args / exception, -2 if the handle cannot be resized.
extern "C" ANSFR_API int ANSFR_ConvertUTF8ToUTF16LE(const char* utf8Str, LStrHandle result, int includeBOM) {
    ANS_DBG("ANSFR","ANSFR_ConvertUTF8ToUTF16LE: includeBOM=%d, len=%d",
        includeBOM, utf8Str ? (int)strlen(utf8Str) : -1);
    try {
        if (!utf8Str || !result) return -1;
        const int len = (int)strlen(utf8Str);
        if (len == 0) return 0;
        const char bom[2] = { '\xFF', '\xFE' };
        // Copies `count` raw bytes into the LabVIEW handle after resizing it.
        auto storeBytes = [&](const char* bytes, int count) -> int {
            MgErr err = DSSetHandleSize(result, sizeof(int32) + count * sizeof(uChar));
            if (err != noErr) return -2;
            (*result)->cnt = count;
            memcpy((*result)->str, bytes, count);
            return 1;
        };
        // Detect "\uXXXX" escape sequences; if any are present, expand them
        // directly instead of running a codepage conversion.
        bool hasEscapes = false;
        for (int i = 0; i + 1 < len; i++) {
            if (utf8Str[i] == '\\' && utf8Str[i + 1] == 'u') { hasEscapes = true; break; }
        }
        if (hasEscapes) {
            std::string payload;
            if (includeBOM) payload.assign(bom, 2);
            payload.reserve(len * 2 + 2);
            int i = 0;
            while (i < len) {
                if (i + 5 < len && utf8Str[i] == '\\' && utf8Str[i + 1] == 'u') {
                    // Decode the 4 hex digits into one UTF-16 code unit,
                    // emitted low byte first (little-endian).
                    char hex[5] = { utf8Str[i + 2], utf8Str[i + 3], utf8Str[i + 4], utf8Str[i + 5], 0 };
                    uint16_t unit = (uint16_t)strtoul(hex, nullptr, 16);
                    payload += static_cast<char>(unit & 0xFF);
                    payload += static_cast<char>((unit >> 8) & 0xFF);
                    i += 6;
                } else {
                    // Ordinary byte: widen by appending a zero high byte.
                    payload += utf8Str[i];
                    payload += '\0';
                    i++;
                }
            }
            return storeBytes(payload.data(), (int)payload.size());
        }
#ifdef _WIN32
        int wideLen = MultiByteToWideChar(CP_UTF8, 0, utf8Str, len, nullptr, 0);
        if (wideLen <= 0) return 0;
        std::wstring wide(wideLen, 0);
        MultiByteToWideChar(CP_UTF8, 0, utf8Str, len, &wide[0], wideLen);
        // Assemble optional BOM + UTF-16LE payload, then write it in one go.
        std::string payload;
        if (includeBOM) payload.assign(bom, 2);
        payload.append(reinterpret_cast<const char*>(wide.data()), wideLen * (int)sizeof(wchar_t));
        return storeBytes(payload.data(), (int)payload.size());
#else
        return 0;
#endif
    }
    catch (...) { return -1; }
}
// Converts a UTF-16LE byte buffer into UTF-8 inside a LabVIEW handle.
// Pure-ASCII UTF-16 (every high byte zero) is narrowed directly without the
// Win32 conversion APIs. NOTE(review): a leading FF FE BOM is not stripped,
// so it becomes EF BB BF in the UTF-8 output — confirm intent with callers.
// Returns 1 on success, 0 on conversion failure or non-Windows build,
// -1 on bad args / exception, -2 if the handle cannot be resized.
extern "C" ANSFR_API int ANSFR_ConvertUTF16LEToUTF8(const unsigned char* utf16leBytes, int byteLen, LStrHandle result) {
    ANS_DBG("ANSFR","ANSFR_ConvertUTF16LEToUTF8: byteLen=%d", byteLen);
    try {
        if (!utf16leBytes || byteLen <= 0 || !result) return -1;
        // Copies `count` raw bytes into the LabVIEW handle after resizing it.
        auto storeBytes = [&](const char* bytes, int count) -> int {
            MgErr err = DSSetHandleSize(result, sizeof(int32) + count * sizeof(uChar));
            if (err != noErr) return -2;
            (*result)->cnt = count;
            memcpy((*result)->str, bytes, count);
            return 1;
        };
        if (byteLen >= 2 && byteLen % 2 == 0) {
            // Fast path: every high (odd-index) byte zero means plain ASCII.
            bool allAscii = true;
            for (int i = 1; i < byteLen; i += 2) {
                if (utf16leBytes[i] != 0x00) { allAscii = false; break; }
            }
            if (allAscii) {
                std::string narrow;
                narrow.reserve(byteLen / 2);
                for (int i = 0; i < byteLen; i += 2) narrow += (char)utf16leBytes[i];
                return storeBytes(narrow.data(), (int)narrow.size());
            }
        }
#ifdef _WIN32
        // General path: WideCharToMultiByte. An odd trailing byte is dropped
        // by the integer division.
        int wideLen = byteLen / (int)sizeof(wchar_t);
        const wchar_t* wide = reinterpret_cast<const wchar_t*>(utf16leBytes);
        int utf8Len = WideCharToMultiByte(CP_UTF8, 0, wide, wideLen, nullptr, 0, nullptr, nullptr);
        if (utf8Len <= 0) return 0;
        std::string utf8(utf8Len, 0);
        WideCharToMultiByte(CP_UTF8, 0, wide, wideLen, &utf8[0], utf8Len, nullptr, nullptr);
        return storeBytes(utf8.data(), utf8Len);
#else
        return 0;
#endif
    }
    catch (...) { return -1; }
}
2026-03-28 16:54:11 +11:00
// Pushes detector/recognizer tuning parameters into the engine. The int
// flags use LabVIEW convention: exactly 1 means true, anything else false.
// Returns 1 on success, 0 on failure, -1 on invalid handle / exception.
extern "C" ANSFR_API int UpdateParameters(ANSCENTER::ANSFacialRecognition** Handle, float knownPersonThreshold, int enableAgeGender, int enableFaceEmotions, int enableHeadPose, int minFaceSize, float faceDetectorThreshold, int enableFaceliveness, int antiSpoof, int removeFakeFaces) {
    ANS_DBG("ANSFR","UpdateParameters: HandlePtr=%p, *Handle=%p, knownThr=%f, ageGender=%d, emotions=%d, headPose=%d, minFace=%d, faceThr=%f, liveness=%d, antiSpoof=%d, removeFake=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), knownPersonThreshold,
        enableAgeGender, enableFaceEmotions, enableHeadPose, minFaceSize,
        faceDetectorThreshold, enableFaceliveness, antiSpoof, removeFakeFaces);
    if (!Handle || !*Handle) return -1;
    try {
        const bool ok = (*Handle)->UpdateParameters(
            knownPersonThreshold,
            enableAgeGender == 1,
            enableFaceEmotions == 1,
            enableHeadPose == 1,
            minFaceSize,
            faceDetectorThreshold,
            enableFaceliveness == 1,
            antiSpoof == 1,
            removeFakeFaces == 1);
        return ok ? 1 : 0;
    }
    catch (const std::exception&) { return -1; }
}
// Reads the current engine parameters and serializes them into faceParams
// as ";"-separated fields in this fixed order:
//   knownPersonThreshold; ageGender; emotions; headPose; minFaceSize;
//   detectorThreshold; liveness; antiSpoof; removeFakeFaces
// (boolean flags encoded as 0/1). Exported name keeps the historical
// "GetParamters" spelling for ABI compatibility.
// Returns CopyToLStrHandle's status, or -1 on bad args / exception.
extern "C" ANSFR_API int GetParamters(ANSCENTER::ANSFacialRecognition** Handle, LStrHandle faceParams) {
    ANS_DBG("ANSFR","GetParamters: HandlePtr=%p, *Handle=%p", (void*)Handle, (void*)(Handle ? *Handle : nullptr));
    if (!Handle || !*Handle || !faceParams) return -1;
    try {
        float knownThr = 0.0f;
        float detectThr = 0.0f;
        int minFace = 0;
        bool ageGender = false, emotions = false, headPose = false;
        bool liveness = false, spoof = false, removeFake = false;
        // Engine fills every out-param (locals are zero-initialized defensively).
        (*Handle)->GetFaceParameters(knownThr, ageGender, emotions, headPose, minFace, detectThr, liveness, spoof, removeFake);
        const std::string serialized =
            std::to_string(knownThr) + ";" +
            std::to_string(ageGender ? 1 : 0) + ";" +
            std::to_string(emotions ? 1 : 0) + ";" +
            std::to_string(headPose ? 1 : 0) + ";" +
            std::to_string(minFace) + ";" +
            std::to_string(detectThr) + ";" +
            std::to_string(liveness ? 1 : 0) + ";" +
            std::to_string(spoof ? 1 : 0) + ";" +
            std::to_string(removeFake ? 1 : 0);
        return CopyToLStrHandle(faceParams, serialized);
    }
    catch (const std::exception&) { return -1; }
}
// Configures the engine's face-queue (size, known-face quota, on/off flag
// where exactly 1 means enabled). Returns 1 on success, 0 on failure,
// -1 on invalid handle / exception.
extern "C" ANSFR_API int UpdateFaceQueue(ANSCENTER::ANSFacialRecognition** Handle, int queueSize, int numKnownFaceInQueue, int enableFaceQueue) {
    ANS_DBG("ANSFR","UpdateFaceQueue: HandlePtr=%p, *Handle=%p, queueSize=%d, numKnown=%d, enable=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr), queueSize, numKnownFaceInQueue, enableFaceQueue);
    if (!Handle || !*Handle) return -1;
    try {
        const bool ok = (*Handle)->UpdateFaceQueue(queueSize, numKnownFaceInQueue, enableFaceQueue == 1);
        return ok ? 1 : 0;
    }
    catch (const std::exception&) { return -1; }
}
// Reads the face-queue configuration and serializes it into faceQueue as
// "queueSize;numKnownFaceInQueue;enabled" (enabled encoded as 0/1).
// Returns CopyToLStrHandle's status, or -1 on bad args / exception.
extern "C" ANSFR_API int GetFaceQueue(ANSCENTER::ANSFacialRecognition** Handle, LStrHandle faceQueue) {
    ANS_DBG("ANSFR","GetFaceQueue: HandlePtr=%p, *Handle=%p", (void*)Handle, (void*)(Handle ? *Handle : nullptr));
    if (!Handle || !*Handle || !faceQueue) return -1;
    try {
        int queueSize = 0;
        int knownInQueue = 0;
        bool queueEnabled = false;
        (*Handle)->GetFaceQueue(queueSize, knownInQueue, queueEnabled);
        const std::string serialized =
            std::to_string(queueSize) + ";" +
            std::to_string(knownInQueue) + ";" +
            std::to_string(queueEnabled ? 1 : 0);
        return CopyToLStrHandle(faceQueue, serialized);
    }
    catch (const std::exception&) { return -1; }
}
// Runs the full facial-recognition pipeline (detection + recognition +
// enabled attribute models) on an in-memory cv::Mat supplied by the
// streaming layer. Writes results as JSON into detectionResult; when
// getJpegString == 1 also writes a quality-50 JPEG of the (optionally
// resized) frame into imageStr. jpegImageSize > 0 and < frame width
// requests a proportional downscale; boxes are rescaled/clamped to match.
// Returns: 1 = OK, 0 = inference/encode/handle-write failure,
// -1 = null cameraId/detectionResult, -2 = bad image or handle,
// -3 = handle is being destroyed (acquire refused).
extern "C" ANSFR_API int RunInferenceComplete_LV(
    ANSCENTER::ANSFacialRecognition** Handle,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    ANS_DBG("ANSFR","RunInferenceComplete_LV: HandlePtr=%p, *Handle=%p, cam=%s, getJpeg=%d, jpegSize=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr),
        cameraId ? cameraId : "(null)", getJpegString, jpegImageSize);
    // Validate inputs
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!Handle || !(*Handle)) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        // FIX: clear the thread-local GPU frame on EVERY exit path. The old
        // code only reset it after a successful Inference(), so an exception
        // from Inference() (or the zero-dimension early return below) left
        // stale GPU-frame state visible to unrelated later calls on this
        // thread. RAII guarantees the reset regardless of how we leave.
        struct TlGpuFrameReset {
            ~TlGpuFrameReset() { tl_currentGpuFrame() = nullptr; }
        } tlGpuFrameReset;
        // Clone image for thread safety - streaming thread may swap the pointer
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::FaceResultObject> outputs = engine->Inference(localImage, cameraId);
        // All processing below is thread-local (no shared state)
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < originalWidth);
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        if (resizeNeeded) {
            // Proportional downscale; scale boxes by the same ratio and clamp
            // them so they stay inside the resized image.
            newWidth = jpegImageSize;
            newHeight = static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth));
            float ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // Clamp to image bounds
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Convert to JPEG if requested (quality 50 keeps LabVIEW payloads small)
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result
        std::string stDetectionResult = engine->FaceObjectsToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        // Write JPEG to LabVIEW string handle if requested
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}
// Runs detection only (no recognition) on an in-memory cv::Mat. Writes the
// detections as JSON into detectionResult; when getJpegString == 1 also
// writes a quality-50 JPEG of the (optionally resized) frame into imageStr.
// jpegImageSize > 0 and < frame width requests a proportional downscale;
// boxes are rescaled/clamped to match the emitted image.
// Returns: 1 = OK, 0 = detect/encode/handle-write failure,
// -1 = null cameraId/detectionResult, -2 = bad image or handle,
// -3 = handle is being destroyed (acquire refused).
extern "C" ANSFR_API int RunFaceDetectionComplete_LV(
    ANSCENTER::ANSFacialRecognition** Handle,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    ANS_DBG("ANSFR","RunFaceDetectionComplete_LV: HandlePtr=%p, *Handle=%p, cam=%s, getJpeg=%d, jpegSize=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr),
        cameraId ? cameraId : "(null)", getJpegString, jpegImageSize);
    // Validate inputs
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!Handle || !(*Handle)) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        // FIX: clear the thread-local GPU frame on EVERY exit path. The old
        // code only reset it after a successful FaceDetect(), so an exception
        // (or the zero-dimension early return below) leaked stale GPU-frame
        // state into unrelated later calls on this thread.
        struct TlGpuFrameReset {
            ~TlGpuFrameReset() { tl_currentGpuFrame() = nullptr; }
        } tlGpuFrameReset;
        // Clone image for thread safety - streaming thread may swap the pointer
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::Object> outputs = engine->FaceDetect(localImage, cameraId);
        // All processing below is thread-local (no shared state)
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < originalWidth);
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        if (resizeNeeded) {
            // Proportional downscale; scale boxes by the same ratio and clamp
            // them so they stay inside the resized image.
            newWidth = jpegImageSize;
            newHeight = static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth));
            float ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // Clamp to image bounds
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Convert to JPEG if requested (quality 50 keeps LabVIEW payloads small)
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result
        std::string stDetectionResult = engine->FaceToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        // Write JPEG to LabVIEW string handle if requested
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}
// Runs recognition on an in-memory cv::Mat (engine->Recognize path).
// Writes results as JSON into detectionResult; when getJpegString == 1
// also writes a quality-50 JPEG of the (optionally resized) frame into
// imageStr. jpegImageSize > 0 and < frame width requests a proportional
// downscale; boxes are rescaled/clamped to match the emitted image.
// Returns: 1 = OK, 0 = recognize/encode/handle-write failure,
// -1 = null cameraId/detectionResult, -2 = bad image or handle,
// -3 = handle is being destroyed (acquire refused).
extern "C" ANSFR_API int RunFaceRecogniserComplete_LV(
    ANSCENTER::ANSFacialRecognition** Handle,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    ANS_DBG("ANSFR","RunFaceRecogniserComplete_LV: HandlePtr=%p, *Handle=%p, cam=%s, getJpeg=%d, jpegSize=%d",
        (void*)Handle, (void*)(Handle ? *Handle : nullptr),
        cameraId ? cameraId : "(null)", getJpegString, jpegImageSize);
    // Validate inputs
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!Handle || !(*Handle)) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        // FIX: clear the thread-local GPU frame on EVERY exit path. The old
        // code only reset it after a successful Recognize(), so an exception
        // (or the zero-dimension early return below) leaked stale GPU-frame
        // state into unrelated later calls on this thread.
        struct TlGpuFrameReset {
            ~TlGpuFrameReset() { tl_currentGpuFrame() = nullptr; }
        } tlGpuFrameReset;
        // Clone image for thread safety - streaming thread may swap the pointer
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::FaceResultObject> outputs = engine->Recognize(localImage, cameraId);
        // All processing below is thread-local (no shared state)
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < originalWidth);
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        if (resizeNeeded) {
            // Proportional downscale; scale boxes by the same ratio and clamp
            // them so they stay inside the resized image.
            newWidth = jpegImageSize;
            newHeight = static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth));
            float ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // Clamp to image bounds
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Convert to JPEG if requested (quality 50 keeps LabVIEW payloads small)
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result
        std::string stDetectionResult = engine->FaceObjectsToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        // Write JPEG to LabVIEW string handle if requested
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}
// ============================================================================
// V2 API — accepts uint64_t handle by value (eliminates LabVIEW buffer reuse bug)
// ============================================================================
// Shared prologue for the V2 wrappers that delegate to the RunANSRF* helpers:
// casts the raw uint64_t handle to the engine pointer and exposes it through
// a local ANSFacialRecognition** named `Handle` (the shape the V1 helpers take).
// NOTE(review): returns 0 — not -1 like the other V2 arg checks — when the
// handle is null; callers of this macro inherit that return-code convention.
// NOTE(review): this macro does NOT take an FRHandleGuard refcount; presumably
// the RunANSRF* helpers acquire it internally — TODO confirm.
#define FR_V2_HANDLE_SETUP(handleVal) \
ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal); \
if (_v2Direct == nullptr) return 0; \
ANSCENTER::ANSFacialRecognition* _v2Arr[1] = { _v2Direct }; \
ANSCENTER::ANSFacialRecognition** Handle = &_v2Arr[0];
/// @brief V2 LabVIEW entry point: decode a JPEG buffer and run the full
///        face-recognition inference pipeline via the shared RunANSRFInference helper.
/// @param handleVal       engine handle passed by value (avoids LabVIEW buffer reuse bug)
/// @param jpeg_string     pointer to an in-memory JPEG
/// @param bufferLength    byte length of jpeg_string
/// @param detectionResult LabVIEW string handle receiving the JSON result
/// @return CopyToLStrHandle result on success; 0 for a null handle (macro
///         convention); -1 for bad arguments or any exception.
extern "C" ANSFR_API int RunInference_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunInference_LV_V2: handleVal=%llu handle=%p, bufLen=%u",
        (unsigned long long)handleVal, (void*)reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal), bufferLength);

    FR_V2_HANDLE_SETUP(handleVal);
    try {
        if (!jpeg_string || bufferLength == 0 || !detectionResult) return -1;
        std::string st = RunANSRFInference(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, st);
    }
    catch (const std::exception& e) {
        ANS_DBG("ANSFR","RunInference_LV_V2 EXCEPTION: %s", e.what());

        return -1;
    }
    catch (...) {
        // BUGFIX: previously missing — a non-std exception escaping through the
        // extern "C" boundary into LabVIEW is undefined behavior / crash.
        // Matches the catch-all already present in the other V2 entry points.
        return -1;
    }
}
/// @brief V2 LabVIEW entry point: decode a JPEG buffer and run full inference,
///        tagging results with the supplied camera id.
/// @return CopyToLStrHandle result on success; 0 if the JPEG fails to decode;
///         -1 for bad handle/arguments or an exception; -3 if the handle is
///         being destroyed (guard acquisition failed).
extern "C" ANSFR_API int RunInferenceWithCamId_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunInferenceWithCamId_LV_V2: handleVal=%llu handle=%p, bufLen=%u, cam=%s",
        (unsigned long long)handleVal, (void*)reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal),
        bufferLength, cameraId ? cameraId : "(null)");

    // Resolve the raw handle and validate every caller-supplied pointer/size.
    auto* rawHandle = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (rawHandle == nullptr) return -1;
    if (!jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;

    // Refcounted acquisition: blocks handle destruction while inference runs.
    FRHandleGuard lease(AcquireFRHandle(rawHandle));
    if (!lease) return -3;

    try {
        // Wrap the JPEG bytes (no copy) and decode to a BGR frame.
        cv::Mat decoded = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (decoded.empty()) return 0;

        auto results = lease.get()->Inference(decoded, cameraId);
        decoded.release();  // frame no longer needed once inference is done
        std::string json = lease.get()->FaceObjectsToJsonString(results);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (...) {
        return -1;
    }
}
/// @brief V2 LabVIEW entry point: decode a JPEG buffer and run face detection
///        only, via the shared RunANSRFDetector helper.
/// @return CopyToLStrHandle result on success; 0 for a null handle (macro
///         convention); -1 for bad arguments or any exception.
extern "C" ANSFR_API int RunDetector_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunDetector_LV_V2: handleVal=%llu handle=%p, bufLen=%u",
        (unsigned long long)handleVal, (void*)reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal), bufferLength);

    FR_V2_HANDLE_SETUP(handleVal);
    try {
        if (!jpeg_string || bufferLength == 0 || !detectionResult) return -1;
        std::string st = RunANSRFDetector(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, st);
    }
    catch (const std::exception& e) {
        ANS_DBG("ANSFR","RunDetector_LV_V2 EXCEPTION: %s", e.what());

        return -1;
    }
    catch (...) {
        // BUGFIX: previously missing — a non-std exception escaping through the
        // extern "C" boundary into LabVIEW is undefined behavior / crash.
        return -1;
    }
}
/// @brief V2 LabVIEW entry point: decode a JPEG buffer and run face detection
///        only (no recognition), tagging results with the supplied camera id.
/// @return CopyToLStrHandle result on success; 0 if the JPEG fails to decode;
///         -1 for bad handle/arguments or an exception; -3 if the handle is
///         being destroyed (guard acquisition failed).
extern "C" ANSFR_API int RunDetectorWithCamId_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunDetectorWithCamId_LV_V2: handleVal=%llu handle=%p, bufLen=%u, cam=%s",
        (unsigned long long)handleVal, (void*)reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal),
        bufferLength, cameraId ? cameraId : "(null)");

    // Validate the handle and all caller-supplied pointers before acquiring.
    auto* rawHandle = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (rawHandle == nullptr) return -1;
    if (!jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;

    // Refcounted acquisition: blocks handle destruction while detection runs.
    FRHandleGuard lease(AcquireFRHandle(rawHandle));
    if (!lease) return -3;

    try {
        cv::Mat decoded = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (decoded.empty()) return 0;

        auto detections = lease.get()->Detect(decoded, cameraId);
        decoded.release();  // frame no longer needed once detection is done
        std::string json = lease.get()->FaceObjectsToJsonString(detections);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (...) {
        return -1;
    }
}
/// @brief V2 LabVIEW entry point: decode a JPEG buffer and run recognition via
///        the shared RunANSRFRecognition helper.
/// @return CopyToLStrHandle result on success; 0 for a null handle (macro
///         convention); -1 for bad arguments or any exception.
extern "C" ANSFR_API int RunRecognition_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunRecognition_LV_V2: handleVal=%llu handle=%p, bufLen=%u",
        (unsigned long long)handleVal, (void*)reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal), bufferLength);

    FR_V2_HANDLE_SETUP(handleVal);
    try {
        if (!jpeg_string || bufferLength == 0 || !detectionResult) return -1;
        std::string st = RunANSRFRecognition(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, st);
    }
    catch (const std::exception& e) {
        ANS_DBG("ANSFR","RunRecognition_LV_V2 EXCEPTION: %s", e.what());

        return -1;
    }
    catch (...) {
        // BUGFIX: previously missing — a non-std exception escaping through the
        // extern "C" boundary into LabVIEW is undefined behavior / crash.
        return -1;
    }
}
/// @brief V2 LabVIEW entry point: decode a JPEG buffer and run face recognition,
///        tagging results with the supplied camera id.
/// @return CopyToLStrHandle result on success; 0 if the JPEG fails to decode;
///         -1 for bad handle/arguments or an exception; -3 if the handle is
///         being destroyed (guard acquisition failed).
extern "C" ANSFR_API int RunRecognitionWithCamId_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunRecognitionWithCamId_LV_V2: handleVal=%llu handle=%p, bufLen=%u, cam=%s",
        (unsigned long long)handleVal, (void*)reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal),
        bufferLength, cameraId ? cameraId : "(null)");

    // Validate the handle and all caller-supplied pointers before acquiring.
    auto* rawHandle = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (rawHandle == nullptr) return -1;
    if (!jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;

    // Refcounted acquisition: blocks handle destruction while recognition runs.
    FRHandleGuard lease(AcquireFRHandle(rawHandle));
    if (!lease) return -3;

    try {
        cv::Mat decoded = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (decoded.empty()) return 0;

        auto recognised = lease.get()->Recognize(decoded, cameraId);
        decoded.release();  // frame no longer needed once recognition is done
        std::string json = lease.get()->FaceObjectsToJsonString(recognised);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (...) {
        return -1;
    }
}
/// @brief V2 LabVIEW entry point: decode a JPEG buffer and run the raw face
///        detector (returns ANSCENTER::Object boxes, serialized with
///        FaceToJsonString rather than FaceObjectsToJsonString).
/// @return CopyToLStrHandle result on success; 0 if the JPEG fails to decode;
///         -1 for bad handle/arguments or an exception; -3 if the handle is
///         being destroyed (guard acquisition failed).
extern "C" ANSFR_API int RunFaceDetection_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANS_DBG("ANSFR","RunFaceDetection_LV_V2: handleVal=%llu handle=%p, bufLen=%u, cam=%s",
        (unsigned long long)handleVal, (void*)reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal),
        bufferLength, cameraId ? cameraId : "(null)");

    // Validate the handle and all caller-supplied pointers before acquiring.
    auto* rawHandle = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (rawHandle == nullptr) return -1;
    if (!jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;

    // Refcounted acquisition: blocks handle destruction while detection runs.
    FRHandleGuard lease(AcquireFRHandle(rawHandle));
    if (!lease) return -3;

    try {
        cv::Mat decoded = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (decoded.empty()) return 0;

        std::vector<ANSCENTER::Object> faces = lease.get()->FaceDetect(decoded, cameraId);
        decoded.release();  // frame no longer needed once detection is done
        std::string json = lease.get()->FaceToJsonString(faces);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (...) {
        return -1;
    }
}
/// @brief V2 LabVIEW entry point: run full face inference on a cv::Mat owned by
///        the caller, optionally returning a (downscaled) 50%-quality JPEG of
///        the frame alongside the JSON detections. Box coordinates are scaled
///        and clamped to match the JPEG that is returned.
/// @param handleVal       engine handle passed by value
/// @param cvImage         pointer-to-pointer to the caller's cv::Mat frame
/// @param cameraId        camera identifier forwarded to the engine
/// @param getJpegString   1 = also encode and return the frame as JPEG
/// @param jpegImageSize   target JPEG width; <=0 or >= frame width means no resize
/// @param detectionResult LabVIEW string handle receiving the JSON result
/// @param imageStr        LabVIEW string handle receiving the JPEG (if requested)
/// @return 1 success; 0 recoverable failure (copy-out failed or exception);
///         -1 bad handle/args; -2 empty image; -3 handle being destroyed.
extern "C" ANSFR_API int RunInferenceComplete_LV_V2(
    uint64_t handleVal,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    ANS_DBG("ANSFR","RunInferenceComplete_LV_V2: handleVal=%llu handle=%p, cam=%s, getJpeg=%d, jpegSize=%d",
        (unsigned long long)handleVal, (void*)reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal),
        cameraId ? cameraId : "(null)", getJpegString, jpegImageSize);

    ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (_v2Direct == nullptr) return -1;
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(_v2Direct));
    if (!guard) return -3;
    auto* engine = guard.get();
    // BUGFIX: RAII clearer guarantees the thread-local GPU-frame pointer is
    // reset on EVERY exit path. Previously the early "return -2" below and the
    // catch handlers left a stale pointer in tl_currentGpuFrame(), which could
    // dangle into the next inference call on this thread.
    struct GpuFrameClear {
        ~GpuFrameClear() { tl_currentGpuFrame() = nullptr; }
    } gpuFrameClear;
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::FaceResultObject> outputs = engine->Inference(localImage, cameraId);
        tl_currentGpuFrame() = nullptr;  // release as soon as inference is done
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        int maxImageSize = originalWidth;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < maxImageSize);
        float ratio = 1.0f;
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        if (resizeNeeded) {
            // Scale boxes into the resized JPEG's coordinate space, preserving
            // aspect ratio, then clamp so every box stays inside the image.
            newWidth = jpegImageSize;
            newHeight = static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth));
            ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // No resize: only clamp boxes to the original image bounds.
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Encode the (possibly resized) frame to JPEG if the caller asked for it.
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result to the LabVIEW handle.
        std::string stDetectionResult = engine->FaceObjectsToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        // Write JPEG to the LabVIEW string handle if requested.
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}
/// @brief V2 LabVIEW entry point: run the raw face detector on a cv::Mat owned
///        by the caller, optionally returning a (downscaled) 50%-quality JPEG
///        of the frame alongside the JSON detections. Box coordinates are
///        scaled and clamped to match the JPEG that is returned.
/// @param handleVal       engine handle passed by value
/// @param cvImage         pointer-to-pointer to the caller's cv::Mat frame
/// @param cameraId        camera identifier forwarded to the engine
/// @param getJpegString   1 = also encode and return the frame as JPEG
/// @param jpegImageSize   target JPEG width; <=0 or >= frame width means no resize
/// @param detectionResult LabVIEW string handle receiving the JSON result
/// @param imageStr        LabVIEW string handle receiving the JPEG (if requested)
/// @return 1 success; 0 recoverable failure (copy-out failed or exception);
///         -1 bad handle/args; -2 empty image; -3 handle being destroyed.
extern "C" ANSFR_API int RunFaceDetectionComplete_LV_V2(
    uint64_t handleVal,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    ANS_DBG("ANSFR","RunFaceDetectionComplete_LV_V2: handleVal=%llu handle=%p, cam=%s, getJpeg=%d, jpegSize=%d",
        (unsigned long long)handleVal, (void*)reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal),
        cameraId ? cameraId : "(null)", getJpegString, jpegImageSize);

    ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (_v2Direct == nullptr) return -1;
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(_v2Direct));
    if (!guard) return -3;
    auto* engine = guard.get();
    // BUGFIX: RAII clearer guarantees the thread-local GPU-frame pointer is
    // reset on EVERY exit path. Previously the early "return -2" below and the
    // catch handlers left a stale pointer in tl_currentGpuFrame(), which could
    // dangle into the next call on this thread.
    struct GpuFrameClear {
        ~GpuFrameClear() { tl_currentGpuFrame() = nullptr; }
    } gpuFrameClear;
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::Object> outputs = engine->FaceDetect(localImage, cameraId);
        tl_currentGpuFrame() = nullptr;  // release as soon as detection is done
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        int maxImageSize = originalWidth;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < maxImageSize);
        float ratio = 1.0f;
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        if (resizeNeeded) {
            // Scale boxes into the resized JPEG's coordinate space, preserving
            // aspect ratio, then clamp so every box stays inside the image.
            newWidth = jpegImageSize;
            newHeight = static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth));
            ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // No resize: only clamp boxes to the original image bounds.
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Encode the (possibly resized) frame to JPEG if the caller asked for it.
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result to the LabVIEW handle.
        std::string stDetectionResult = engine->FaceToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        // Write JPEG to the LabVIEW string handle if requested.
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}
/// @brief V2 LabVIEW entry point: run face recognition on a cv::Mat owned by
///        the caller, optionally returning a (downscaled) 50%-quality JPEG of
///        the frame alongside the JSON results. Box coordinates are scaled and
///        clamped to match the JPEG that is returned.
/// @param handleVal       engine handle passed by value
/// @param cvImage         pointer-to-pointer to the caller's cv::Mat frame
/// @param cameraId        camera identifier forwarded to the engine
/// @param getJpegString   1 = also encode and return the frame as JPEG
/// @param jpegImageSize   target JPEG width; <=0 or >= frame width means no resize
/// @param detectionResult LabVIEW string handle receiving the JSON result
/// @param imageStr        LabVIEW string handle receiving the JPEG (if requested)
/// @return 1 success; 0 recoverable failure (copy-out failed or exception);
///         -1 bad handle/args; -2 empty image; -3 handle being destroyed.
extern "C" ANSFR_API int RunFaceRecogniserComplete_LV_V2(
    uint64_t handleVal,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    ANS_DBG("ANSFR","RunFaceRecogniserComplete_LV_V2: handleVal=%llu handle=%p, cam=%s, getJpeg=%d, jpegSize=%d",
        (unsigned long long)handleVal, (void*)reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal),
        cameraId ? cameraId : "(null)", getJpegString, jpegImageSize);

    ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (_v2Direct == nullptr) return -1;
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(_v2Direct));
    if (!guard) return -3;
    auto* engine = guard.get();
    // BUGFIX: RAII clearer guarantees the thread-local GPU-frame pointer is
    // reset on EVERY exit path. Previously the early "return -2" below and the
    // catch handlers left a stale pointer in tl_currentGpuFrame(), which could
    // dangle into the next call on this thread.
    struct GpuFrameClear {
        ~GpuFrameClear() { tl_currentGpuFrame() = nullptr; }
    } gpuFrameClear;
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::FaceResultObject> outputs = engine->Recognize(localImage, cameraId);
        tl_currentGpuFrame() = nullptr;  // release as soon as recognition is done
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        int maxImageSize = originalWidth;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < maxImageSize);
        float ratio = 1.0f;
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        if (resizeNeeded) {
            // Scale boxes into the resized JPEG's coordinate space, preserving
            // aspect ratio, then clamp so every box stays inside the image.
            newWidth = jpegImageSize;
            newHeight = static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth));
            ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // No resize: only clamp boxes to the original image bounds.
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Encode the (possibly resized) frame to JPEG if the caller asked for it.
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result to the LabVIEW handle.
        std::string stDetectionResult = engine->FaceObjectsToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        // Write JPEG to the LabVIEW string handle if requested.
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}