Files
ANSCORE/modules/ANSFR/dllmain.cpp
Tuan Nghia Nguyen c1b919ec47 Fix setting GPU behaviour:
Condition	maxSlotsPerGpu	Behavior
OptimizeModelStr	0	Bypass: non-shared temporary engine
1 GPU	1	Single slot, no round-robin
>1 GPU, VRAM < 24 GB	1	Round-robin: 1 slot per GPU
>1 GPU, VRAM >= 24 GB	-1	Elastic: on-demand slot growth
2026-03-30 09:59:09 +11:00

1534 lines
59 KiB
C++

// dllmain.cpp : Defines the entry point for the DLL application.
#include "pch.h"
#include "ANSFR.h"
#include "NV12PreprocessHelper.h" // tl_currentGpuFrame()
#include "ANSGpuFrameRegistry.h" // gpu_frame_lookup(cv::Mat*)
#include <opencv2/imgcodecs.hpp>
#include "ANSOVFaceDetector.h"
#include "SCRFDFaceDetector.h"
#include "FaceNet.h"
#include "ANSFaceRecognizer.h"
#include "ANSLibsLoader.h"
#include "engine/TRTEngineCache.h"
#include "engine/EnginePoolManager.h"
#include <memory>
#include <climits>
#include <unordered_map>
#include <condition_variable>
#include <cstdint>
#include <atomic>
// Each DLL that instantiates Engine<T> templates needs its own definition
// of g_forceNoPool (referenced by EngineBuildLoadNetwork.inl).
// ANSODEngine exports its own; ANSFR needs a local copy.
// Default false: engine pooling is enabled unless explicitly disabled.
std::atomic<bool> g_forceNoPool{false};
#include <boost/uuid/uuid.hpp>
#include <boost/uuid/uuid_generators.hpp>
#include <boost/uuid/uuid_io.hpp>
// Refcounted registry of live engine handles — prevents use-after-free when
// ReleaseANSRFHandle races with an in-flight inference call.
// Meyers singleton: constructed on first use, safe across static init order.
static std::unordered_map<ANSCENTER::ANSFacialRecognition*, int>& FRHandleRegistry() {
    static std::unordered_map<ANSCENTER::ANSFacialRecognition*, int> registry;
    return registry;
}
// Mutex guarding all access to FRHandleRegistry() (lazy singleton).
static std::mutex& FRHandleRegistryMutex() {
    static std::mutex registryMutex;
    return registryMutex;
}
// Condition variable used by UnregisterFRHandle to wait for in-flight
// references to drain (signalled from ReleaseFRHandleRef).
static std::condition_variable& FRHandleRegistryCV() {
    static std::condition_variable registryCv;
    return registryCv;
}
static void RegisterFRHandle(ANSCENTER::ANSFacialRecognition* h) {
std::lock_guard<std::mutex> lk(FRHandleRegistryMutex());
FRHandleRegistry()[h] = 1;
}
// Take one extra reference on a registered handle.
// Returns the handle on success; nullptr if it is not (or no longer)
// registered, in which case the caller must not touch the engine.
static ANSCENTER::ANSFacialRecognition* AcquireFRHandle(ANSCENTER::ANSFacialRecognition* handle) {
    std::lock_guard<std::mutex> lock(FRHandleRegistryMutex());
    auto entry = FRHandleRegistry().find(handle);
    if (entry == FRHandleRegistry().end()) {
        return nullptr;
    }
    ++entry->second;
    return handle;
}
static bool ReleaseFRHandleRef(ANSCENTER::ANSFacialRecognition* h) {
std::lock_guard<std::mutex> lk(FRHandleRegistryMutex());
auto it = FRHandleRegistry().find(h);
if (it == FRHandleRegistry().end()) return false;
it->second--;
if (it->second <= 0) {
FRHandleRegistry().erase(it);
FRHandleRegistryCV().notify_all();
return true;
}
return false;
}
// Remove the owner's reference and wait (up to 30 s) for every in-flight
// reference (taken via AcquireFRHandle) to drain before the caller destroys
// the engine. Returns false if the handle was never registered; true
// otherwise — including on timeout, where the entry is erased anyway and a
// warning is logged (the caller then deletes while an inference may still be
// running; this is a deliberate last-resort trade-off).
static bool UnregisterFRHandle(ANSCENTER::ANSFacialRecognition* h) {
std::unique_lock<std::mutex> lk(FRHandleRegistryMutex());
auto it = FRHandleRegistry().find(h);
if (it == FRHandleRegistry().end()) return false;
// Drop the owner's reference; in-flight calls may still hold others.
it->second--;
// Wait until the entry is gone or its count drops to zero. The predicate
// runs under the lock, so the erase below is atomic with the final check —
// no new AcquireFRHandle can slip in between.
bool ok = FRHandleRegistryCV().wait_for(lk, std::chrono::seconds(30), [&]() {
auto it2 = FRHandleRegistry().find(h);
return it2 == FRHandleRegistry().end() || it2->second <= 0;
});
if (!ok) {
OutputDebugStringA("WARNING: UnregisterFRHandle timed out waiting for in-flight inference\n");
}
// No-op if ReleaseFRHandleRef already erased the entry.
FRHandleRegistry().erase(h);
return true;
}
// RAII guard for a reference obtained from AcquireFRHandle. Releases the
// reference on scope exit so a refcount can never leak and deadlock
// UnregisterFRHandle's drain wait. A null engine means acquisition failed.
class FRHandleGuard {
public:
    explicit FRHandleGuard(ANSCENTER::ANSFacialRecognition* acquired) : engine(acquired) {}
    ~FRHandleGuard() {
        if (engine != nullptr) {
            ReleaseFRHandleRef(engine);
        }
    }
    // Non-copyable: exactly one release per acquire.
    FRHandleGuard(const FRHandleGuard&) = delete;
    FRHandleGuard& operator=(const FRHandleGuard&) = delete;
    ANSCENTER::ANSFacialRecognition* get() const { return engine; }
    explicit operator bool() const { return engine != nullptr; }
private:
    ANSCENTER::ANSFacialRecognition* engine;
};
// Determine maxSlotsPerGpu based on GPU topology (computed once, cached):
// <=1 GPU               → 1  (single slot, no round-robin needed)
// >1 GPU, VRAM < 24 GB  → 1  (round-robin: 1 slot per GPU)
// >1 GPU, VRAM ≥ 24 GB  → -1 (elastic: on-demand slot growth)
// NOTE(review): only device 0's total VRAM is sampled and assumed
// representative of all GPUs — confirm for heterogeneous multi-GPU setups.
// NOTE(review): cudaSetDevice(0) changes this thread's current CUDA device
// and is not restored — verify no caller depends on a prior selection.
static int GetPoolMaxSlotsPerGpu() {
static int s_result = INT_MIN; // INT_MIN = "not yet computed" sentinel
static std::mutex s_mutex;
std::lock_guard<std::mutex> lk(s_mutex); // serialize first-time detection
if (s_result != INT_MIN) return s_result; // cached result
int gpuCount = 0;
cudaGetDeviceCount(&gpuCount); // error return ignored; gpuCount stays 0 on failure
if (gpuCount <= 1) {
s_result = 1;
std::cout << "Info [FR GPU]: Single GPU — pool mode: 1 slot, no round-robin" << std::endl;
return s_result;
}
constexpr size_t kLargeVramBytes = 24ULL * 1024 * 1024 * 1024; // 24 GB
size_t totalMem = 0, freeMem = 0;
cudaSetDevice(0);
cudaMemGetInfo(&freeMem, &totalMem); // totalMem stays 0 if the query fails → round-robin branch
if (totalMem >= kLargeVramBytes) {
s_result = -1;
std::cout << "Info [FR GPU]: " << gpuCount << " GPUs, VRAM >= 24 GB — pool mode: elastic" << std::endl;
} else {
s_result = 1;
std::cout << "Info [FR GPU]: " << gpuCount << " GPUs, VRAM < 24 GB — pool mode: round-robin" << std::endl;
}
return s_result;
}
// DLL entry point: pins the module on attach and performs best-effort
// cleanup of leaked engine handles and shared engine pools on dynamic
// unload. Keep this minimal — it runs under the OS loader lock.
BOOL APIENTRY DllMain( HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
) noexcept
{
switch (ul_reason_for_call)
{
case DLL_PROCESS_ATTACH:
// Pin the DLL so it is never unmapped while idle-timer threads are
// still running. During LabVIEW shutdown the CLR/COM teardown can
// unload DLLs before all threads exit → crash at unmapped code.
{
HMODULE hSelf = nullptr;
// PIN keeps the module resident until process exit; failure is
// intentionally ignored (best effort).
GetModuleHandleExW(
GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS |
GET_MODULE_HANDLE_EX_FLAG_PIN,
reinterpret_cast<LPCWSTR>(&DllMain),
&hSelf);
}
break;
case DLL_THREAD_ATTACH:
case DLL_THREAD_DETACH:
break;
case DLL_PROCESS_DETACH:
// ExitProcess: OS killed worker threads, CUDA context is dead.
// Set flag so Engine/Pool destructors skip CUDA cleanup.
// (lpReserved != nullptr distinguishes process exit from FreeLibrary.)
if (lpReserved != nullptr) {
g_processExiting().store(true, std::memory_order_relaxed);
break;
}
// Dynamic FreeLibrary — threads are still alive, safe to clean up.
try {
// Collect leaked handles under the lock, destroy them outside it.
std::vector<ANSCENTER::ANSFacialRecognition*> leakedHandles;
{
std::lock_guard<std::mutex> lk(FRHandleRegistryMutex());
for (auto& [h, _] : FRHandleRegistry())
leakedHandles.push_back(h);
FRHandleRegistry().clear();
}
for (auto* h : leakedHandles) {
try { h->Destroy(); delete h; } catch (...) {}
}
// Tear down shared pools/caches after the handles that used them.
try { EnginePoolManager<float>::instance().clearAll(); } catch (...) {}
try { TRTEngineCache::instance().clearAll(); } catch (...) {}
} catch (...) {}
break;
}
return TRUE;
}
// Copy `str` into a pre-allocated LabVIEW string handle.
// Returns 1 on success; 0 when the string is empty, the handle is null,
// or LabVIEW fails to resize the handle.
static int CopyToLStrHandle(LStrHandle handle, const std::string& str) noexcept {
    if (handle == nullptr || str.empty()) {
        return 0;
    }
    const int32 length = static_cast<int32>(str.length());
    // Flattened LabVIEW string layout: 4-byte count prefix + payload bytes.
    if (DSSetHandleSize(handle, sizeof(int32) + length * sizeof(uChar)) != noErr) {
        return 0;
    }
    (*handle)->cnt = length;
    memcpy((*handle)->str, str.data(), static_cast<size_t>(length));
    return 1;
}
// Create and initialize a facial-recognition engine.
// On success: writes the engine into *Handle, registers it in the refcount
// registry, and returns Initialize()'s non-negative result.
// On failure: returns a negative value and leaves *Handle null.
// If *Handle already holds a registered engine it is destroyed first, so
// repeated calls from LabVIEW do not leak.
extern "C" ANSFR_API int CreateANSRFHandle(ANSCENTER::ANSFacialRecognition** Handle,
const char* licenseKey,
const char* configFilePath,
const char* databaseFilePath,
const char* recogniserFilePath,
const char* detectorFilePath,
int precisionType,
float knownPersonThreshold,
int enableAgeGender,
int enableFaceEmotions,
int enableHeadPose,
int minFaceSize,
float faceDetectorThreshold,
int enableFaceLiveness,
int enableAntiSpoofing)
{
    try {
        // Ensure all shared DLLs (OpenCV, OpenVINO, TRT, ORT) are pre-loaded
        ANSCENTER::ANSLibsLoader::Initialize();
        if (!Handle || !licenseKey || !configFilePath || !databaseFilePath || !recogniserFilePath) return -1;
        // Release existing handle if called twice (prevents leak from LabVIEW)
        if (*Handle) {
            if (UnregisterFRHandle(*Handle)) {
                (*Handle)->Destroy();
                delete *Handle;
            }
            *Handle = nullptr;
        }
        // std::unique_ptr ensures automatic cleanup if Initialize() throws
        auto ptr = std::make_unique<ANSCENTER::ANSFacialRecognition>();
        const bool _enableFaceLiveness = (enableFaceLiveness == 1);
        const bool _enableAntiSpoofing = (enableAntiSpoofing == 1);
        ptr->SetMaxSlotsPerGpu(GetPoolMaxSlotsPerGpu());
        int result = ptr->Initialize(licenseKey,
                                     configFilePath,
                                     databaseFilePath,
                                     recogniserFilePath,
                                     detectorFilePath ? detectorFilePath : "",
                                     precisionType,
                                     knownPersonThreshold,
                                     enableAgeGender,
                                     enableFaceEmotions,
                                     enableHeadPose,
                                     minFaceSize,
                                     faceDetectorThreshold,
                                     _enableFaceLiveness,
                                     _enableAntiSpoofing);
        if (result < 0) {
            *Handle = nullptr;
            return result;
        }
        // Transfer ownership to caller on success
        *Handle = ptr.release();
        RegisterFRHandle(*Handle);
        return result;
    }
    catch (const std::exception&) {
        return -1;
    }
    catch (...) {
        // FIX: never let a non-std exception escape across the C ABI
        // boundary into LabVIEW — that is undefined behaviour.
        return -1;
    }
}
// Load the engine's inference backends for an already-created handle.
// Returns 1 on success, 0 on failure, -1 on bad handle or exception.
extern "C" ANSFR_API int LoadANSRFEngine(ANSCENTER::ANSFacialRecognition** Handle) {
    try {
        if (!Handle || !*Handle) return -1;
        return (*Handle)->LoadEngine() ? 1 : 0;
    }
    catch (const std::exception&) {
        return -1;
    }
    catch (...) {
        // FIX: keep non-std exceptions from crossing the C ABI boundary.
        return -1;
    }
}
// C++ half of the release path: drain in-flight inference via
// UnregisterFRHandle, then destroy the engine. Always nulls *Handle.
// Returns 1 on success (or when already freed), 0 if an exception occurred.
static int ReleaseANSRFHandle_Impl(ANSCENTER::ANSFacialRecognition** Handle) {
    try {
        if (Handle == nullptr || *Handle == nullptr) return 1;
        const bool wasRegistered = UnregisterFRHandle(*Handle);
        if (!wasRegistered) {
            // Not in registry — already freed elsewhere; just clear the slot.
            *Handle = nullptr;
            return 1;
        }
        (*Handle)->Destroy();
        delete *Handle;
        *Handle = nullptr;
        return 1;
    }
    catch (...) {
        if (Handle) *Handle = nullptr;
        return 0;
    }
}
// Exported release entry point. Wraps the C++ implementation in SEH so a
// hardware fault (e.g. access violation through a stale handle) is turned
// into a 0 return instead of crashing the LabVIEW host. The work lives in
// _Impl because MSVC forbids __try in a function with C++ objects that
// require unwinding.
extern "C" ANSFR_API int ReleaseANSRFHandle(ANSCENTER::ANSFacialRecognition** Handle) {
__try {
return ReleaseANSRFHandle_Impl(Handle);
}
__except (EXCEPTION_EXECUTE_HANDLER) {
return 0;
}
}
// Decode a JPEG buffer and run the full inference pipeline.
// Returns the face results as JSON, or "" on any failure.
extern "C" ANSFR_API std::string RunANSRFInference(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0) return "";
    // Hold a registry reference so ReleaseANSRFHandle cannot free the engine
    // while we are using it.
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat frame = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (frame.empty()) return "";
        auto faces = guard.get()->Inference(frame);
        frame.release();
        return guard.get()->FaceObjectsToJsonString(faces);
    }
    catch (...) { return ""; }
}
// Run the full inference pipeline on a raw BGR (CV_8UC3) pixel buffer.
// Returns the face results as JSON, or "" on any failure.
extern "C" ANSFR_API std::string RunANSRFInferenceBinary(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_bytes, unsigned int width, unsigned int height) {
    if (!Handle || !*Handle || !jpeg_bytes || width == 0 || height == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        // Clone: detach from the caller-owned buffer before inference.
        cv::Mat frame = cv::Mat(height, width, CV_8UC3, jpeg_bytes).clone();
        if (frame.empty()) return "";
        auto faces = guard.get()->Inference(frame);
        frame.release();
        return guard.get()->FaceObjectsToJsonString(faces);
    }
    catch (...) { return ""; }
}
// Decode a JPEG buffer and run recognition only.
// Returns the face results as JSON, or "" on any failure.
extern "C" ANSFR_API std::string RunANSRFRecognition(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat frame = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (frame.empty()) return "";
        auto faces = guard.get()->Recognize(frame);
        frame.release();
        return guard.get()->FaceObjectsToJsonString(faces);
    }
    catch (...) { return ""; }
}
// Run recognition on a raw BGR (CV_8UC3) pixel buffer.
// Returns the face results as JSON, or "" on any failure.
extern "C" ANSFR_API std::string RunANSRFRecognitionBinary(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_bytes, unsigned int width, unsigned int height) {
    if (!Handle || !*Handle || !jpeg_bytes || width == 0 || height == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat frame = cv::Mat(height, width, CV_8UC3, jpeg_bytes).clone();
        if (frame.empty()) return "";
        auto faces = guard.get()->Recognize(frame);
        frame.release();
        return guard.get()->FaceObjectsToJsonString(faces);
    }
    catch (...) { return ""; }
}
// Run face detection on a raw BGR (CV_8UC3) pixel buffer.
// Returns the face results as JSON, or "" on any failure.
extern "C" ANSFR_API std::string RunANSRFDetectorBinary(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_bytes, unsigned int width, unsigned int height) {
    if (!Handle || !*Handle || !jpeg_bytes || width == 0 || height == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat frame = cv::Mat(height, width, CV_8UC3, jpeg_bytes).clone();
        if (frame.empty()) return "";
        auto faces = guard.get()->Detect(frame);
        frame.release();
        return guard.get()->FaceObjectsToJsonString(faces);
    }
    catch (...) { return ""; }
}
// Decode a JPEG buffer and run face detection only.
// Returns the face results as JSON, or "" on any failure.
extern "C" ANSFR_API std::string RunANSRFDetector(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat frame = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (frame.empty()) return "";
        auto faces = guard.get()->Detect(frame);
        frame.release();
        return guard.get()->FaceObjectsToJsonString(faces);
    }
    catch (...) { return ""; }
}
//// For LabVIEW API
// LabVIEW wrapper: inference on a JPEG buffer, JSON written into a LabVIEW
// string handle. Returns CopyToLStrHandle's 1/0, or -1 on bad args/exception.
extern "C" ANSFR_API int RunInference_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !detectionResult) return -1;
    try {
        const std::string json = RunANSRFInference(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// LabVIEW wrapper: per-camera inference on a JPEG buffer.
// Returns CopyToLStrHandle's 1/0, 0 on undecodable image, -1 on bad
// args/exception, -3 when the handle is being released.
extern "C" ANSFR_API int RunInferenceWithCamId_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat frame = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (frame.empty()) return 0;
        auto faces = guard.get()->Inference(frame, cameraId);
        frame.release();
        const std::string json = guard.get()->FaceObjectsToJsonString(faces);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (...) { return -1; }
}
// LabVIEW wrapper: face detection on a JPEG buffer, JSON into a LabVIEW
// string handle. Returns CopyToLStrHandle's 1/0, -1 on bad args/exception.
extern "C" ANSFR_API int RunDetector_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult)
{
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !detectionResult) return -1;
    try {
        const std::string json = RunANSRFDetector(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// LabVIEW wrapper: per-camera face detection on a JPEG buffer.
// Returns CopyToLStrHandle's 1/0, 0 on undecodable image, -1 on bad
// args/exception, -3 when the handle is being released.
extern "C" ANSFR_API int RunDetectorWithCamId_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat frame = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (frame.empty()) return 0;
        auto faces = guard.get()->Detect(frame, cameraId);
        frame.release();
        const std::string json = guard.get()->FaceObjectsToJsonString(faces);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (...) { return -1; }
}
// LabVIEW wrapper: recognition on a JPEG buffer, JSON into a LabVIEW
// string handle. Returns CopyToLStrHandle's 1/0, -1 on bad args/exception.
extern "C" ANSFR_API int RunRecognition_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !detectionResult) return -1;
    try {
        const std::string json = RunANSRFRecognition(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// LabVIEW wrapper: per-camera recognition on a JPEG buffer.
// Returns CopyToLStrHandle's 1/0, 0 on undecodable image, -1 on bad
// args/exception, -3 when the handle is being released.
extern "C" ANSFR_API int RunRecognitionWithCamId_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat frame = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (frame.empty()) return 0;
        auto faces = guard.get()->Recognize(frame, cameraId);
        frame.release();
        const std::string json = guard.get()->FaceObjectsToJsonString(faces);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (...) { return -1; }
}
// LabVIEW wrapper: raw face detection (FaceDetect → Object list) on a JPEG
// buffer. Returns CopyToLStrHandle's 1/0, 0 on undecodable image, -1 on bad
// args/exception, -3 when the handle is being released.
extern "C" ANSFR_API int RunFaceDetection_LV(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat frame = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (frame.empty()) return 0;
        std::vector<ANSCENTER::Object> detections = guard.get()->FaceDetect(frame, cameraId);
        frame.release();
        const std::string json = guard.get()->FaceToJsonString(detections);
        return CopyToLStrHandle(detectionResult, json);
    }
    catch (...) { return -1; }
}
// Raw face detection on a BGR (CV_8UC3) pixel buffer with a fixed dummy
// camera id ("0000"). Returns the detections as JSON, or "" on failure.
extern "C" ANSFR_API std::string RunANSRFFaceDetector(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_bytes, unsigned int width, unsigned int height)
{
    if (!Handle || !*Handle || !jpeg_bytes || width == 0 || height == 0) return "";
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return "";
    try {
        cv::Mat frame = cv::Mat(height, width, CV_8UC3, jpeg_bytes).clone();
        if (frame.empty()) return "";
        std::vector<ANSCENTER::Object> detections = guard.get()->FaceDetect(frame, "0000");
        frame.release();
        return guard.get()->FaceToJsonString(detections);
    }
    catch (...) { return ""; }
}
// User management
// Insert a new user record. Returns the engine's result, -1 on error.
extern "C" ANSFR_API int InsertUser(ANSCENTER::ANSFacialRecognition** Handle, const char* userCode, const char* userName) {
    if (!Handle || !*Handle || !userCode || !userName) return -1;
    try {
        return (*Handle)->InsertUser(userCode, userName);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Update an existing user's code/name. Returns the engine's result, -1 on error.
extern "C" ANSFR_API int UpdateUser(ANSCENTER::ANSFacialRecognition** Handle, int userId, const char* userCode, const char* userName) {
    if (!Handle || !*Handle || !userCode || !userName) return -1;
    try {
        return (*Handle)->UpdateUser(userId, userCode, userName);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Delete one user by id. Returns the engine's result, -1 on error.
extern "C" ANSFR_API int DeleteUser(ANSCENTER::ANSFacialRecognition** Handle, int userId) {
    if (!Handle || !*Handle) return -1;
    try {
        return (*Handle)->DeleteUser(userId);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Delete a batch of users from a C array of ids. Returns the engine's
// result, -1 on bad arguments or exception.
extern "C" ANSFR_API int DeleteUsers(ANSCENTER::ANSFacialRecognition** Handle, int* userIds, int count) {
    if (!Handle || !*Handle || !userIds || count <= 0) return -1;
    try {
        const std::vector<int> ids(userIds, userIds + count);
        return (*Handle)->DeleteUsers(ids);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Decode a JPEG buffer and enrol the (single) face for the given user.
// Returns the engine's result, 0 if the image cannot be decoded, -1 on error.
extern "C" ANSFR_API int InsertFace(ANSCENTER::ANSFacialRecognition** Handle, int userId, unsigned char* jpeg_string, unsigned int bufferLength) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0) return -1;
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat frame = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (frame.empty()) return 0;
        return (*Handle)->InsertFace(userId, frame);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Decode a JPEG buffer, enrol every face found for the user, and write the
// new face ids (semicolon-separated) into a LabVIEW string handle.
// Returns CopyToLStrHandle's 1/0, 0 on undecodable image, -1 on error.
extern "C" ANSFR_API int InsertFaces(ANSCENTER::ANSFacialRecognition** Handle, int userId, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle faceIdsStr) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0 || !faceIdsStr) return -1;
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat frame = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (frame.empty()) return 0;
        const std::vector<int> faceIds = (*Handle)->InsertMultipleFaces(userId, frame);
        std::string joined;
        for (int id : faceIds) {
            if (!joined.empty()) joined += ";";
            joined += std::to_string(id);
        }
        return CopyToLStrHandle(faceIdsStr, joined);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Check whether the JPEG buffer contains a usable face for enrolment.
// Returns the engine's result, 0 if the image cannot be decoded, -1 on error.
extern "C" ANSFR_API int CheckFaceEmbedding(ANSCENTER::ANSFacialRecognition** Handle, unsigned char* jpeg_string, unsigned int bufferLength) {
    if (!Handle || !*Handle || !jpeg_string || bufferLength == 0) return -1;
    try {
        cv::Mat encoded(1, bufferLength, CV_8UC1, jpeg_string);
        cv::Mat frame = cv::imdecode(encoded, cv::IMREAD_COLOR);
        if (frame.empty()) return 0;
        return (*Handle)->CheckFace(frame);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Enrol a face from a raw BGR (CV_8UC3) pixel buffer.
// Returns the engine's result, 0 on empty image, -1 on error.
extern "C" ANSFR_API int InsertFaceBinary(ANSCENTER::ANSFacialRecognition** Handle, int userId, unsigned char* jpeg_bytes, unsigned int width, unsigned int height) {
    if (!Handle || !*Handle || !jpeg_bytes || width == 0 || height == 0) return -1;
    try {
        // Clone: detach from the caller-owned buffer before the engine uses it.
        cv::Mat frame = cv::Mat(height, width, CV_8UC3, jpeg_bytes).clone();
        if (frame.empty()) return 0;
        return (*Handle)->InsertFace(userId, frame);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Delete one enrolled face by id. Returns the engine's result, -1 on error.
extern "C" ANSFR_API int DeleteFace(ANSCENTER::ANSFacialRecognition** Handle, int faceId) {
    if (!Handle || !*Handle) return -1;
    try {
        return (*Handle)->DeleteFace(faceId);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Reload the face database into the engine. Returns 1 on success,
// 0 on failure, -1 on bad handle or exception.
extern "C" ANSFR_API int Reload(ANSCENTER::ANSFacialRecognition** Handle) {
    if (!Handle || !*Handle) return -1;
    try {
        return (*Handle)->Reload() ? 1 : 0;
    }
    catch (const std::exception&) {
        return -1;
    }
}
// New management API
// Fetch one user record as a string into a LabVIEW handle.
// Returns CopyToLStrHandle's 1/0, -1 on bad args or exception.
extern "C" ANSFR_API int GetUser(ANSCENTER::ANSFacialRecognition** Handle, int userId, LStrHandle userRecord) {
    if (!Handle || !*Handle || !userRecord) return -1;
    try {
        std::string record;
        (*Handle)->GetUser(userId, record);
        return CopyToLStrHandle(userRecord, record);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Fetch all user records as a string into a LabVIEW handle (the parallel
// id list the engine also produces is discarded here).
// Returns CopyToLStrHandle's 1/0, -1 on bad args or exception.
extern "C" ANSFR_API int GetUsers(ANSCENTER::ANSFacialRecognition** Handle, LStrHandle userRecords) {
    if (!Handle || !*Handle || !userRecords) return -1;
    try {
        std::string records;
        std::vector<int> ids;
        (*Handle)->GetUsers(records, ids);
        return CopyToLStrHandle(userRecords, records);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Fetch one face record as a string into a LabVIEW handle.
// Returns CopyToLStrHandle's 1/0, -1 on bad args or exception.
extern "C" ANSFR_API int GetFace(ANSCENTER::ANSFacialRecognition** Handle, int faceId, LStrHandle faceRecord) {
    if (!Handle || !*Handle || !faceRecord) return -1;
    try {
        std::string record;
        (*Handle)->GetFace(faceId, record);
        return CopyToLStrHandle(faceRecord, record);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Fetch all face records of one user as a string into a LabVIEW handle.
// Returns CopyToLStrHandle's 1/0, -1 on bad args or exception.
extern "C" ANSFR_API int GetFaces(ANSCENTER::ANSFacialRecognition** Handle, int userId, LStrHandle faceRecords) {
    if (!Handle || !*Handle || !faceRecords) return -1;
    try {
        std::string records;
        (*Handle)->GetFaces(userId, records);
        return CopyToLStrHandle(faceRecords, records);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Delete every enrolled face belonging to one user.
// Returns the engine's result, -1 on error.
extern "C" ANSFR_API int DeleteFacesByUser(ANSCENTER::ANSFacialRecognition** Handle, int userId) {
    if (!Handle || !*Handle) return -1;
    try {
        return (*Handle)->DeleteFacesByUser(userId);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// For testing only
// Test-only variant: fetch one user record into a std::string out-param.
// Returns 1 on success, -1 on bad handle or exception.
extern "C" ANSFR_API int GetUserString(ANSCENTER::ANSFacialRecognition** Handle, int userId, std::string& userRecord) {
    if (!Handle || !*Handle) return -1;
    try {
        (*Handle)->GetUser(userId, userRecord);
        return 1;
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Test-only variant: fetch all user records and their ids into out-params.
// Returns 1 on success, -1 on bad handle or exception.
extern "C" ANSFR_API int GetUsersString(ANSCENTER::ANSFacialRecognition** Handle, std::string& userRecords, std::vector<int>& userIds) {
    if (!Handle || !*Handle) return -1;
    try {
        (*Handle)->GetUsers(userRecords, userIds);
        return 1;
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Test-only variant: fetch one face record into a std::string out-param.
// Returns 1 on success, -1 on bad handle or exception.
extern "C" ANSFR_API int GetFaceString(ANSCENTER::ANSFacialRecognition** Handle, int faceId, std::string& faceRecord) {
    if (!Handle || !*Handle) return -1;
    try {
        (*Handle)->GetFace(faceId, faceRecord);
        return 1;
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Test-only variant: fetch one user's face records into a std::string.
// Returns 1 on success, -1 on bad handle or exception.
extern "C" ANSFR_API int GetFacesString(ANSCENTER::ANSFacialRecognition** Handle, int userId, std::string& faceRecords) {
    if (!Handle || !*Handle) return -1;
    try {
        (*Handle)->GetFaces(userId, faceRecords);
        return 1;
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Estimate image sharpness as the variance of the Laplacian — a standard
// blur metric (low variance => blurry image).
// Returns the variance, or -1 on bad arguments / undecodable image / error.
extern "C" ANSFR_API double BlurCalculation(unsigned char* jpeg_string, unsigned int bufferLength) {
    try {
        if (!jpeg_string || bufferLength == 0) return -1;
        cv::Mat image = cv::imdecode(cv::Mat(1, bufferLength, CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        // FIX: check the decode result explicitly. Previously an empty Mat
        // fell through to cvtColor, which threw and was reported as -1 via
        // the catch — exceptions were being used as control flow.
        if (image.empty()) return -1;
        cv::Mat gray;
        cv::cvtColor(image, gray, cv::COLOR_BGR2GRAY);
        cv::Mat laplacian;
        cv::Laplacian(gray, laplacian, CV_64F);
        cv::Scalar mean, stddev;
        cv::meanStdDev(laplacian, mean, stddev);
        return stddev.val[0] * stddev.val[0]; // variance = stddev^2
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Update the engine's runtime face parameters. Integer flags are treated as
// booleans (exactly 1 means enabled). Returns 1 on success, 0 on failure,
// -1 on bad handle or exception.
extern "C" ANSFR_API int UpdateParameters(ANSCENTER::ANSFacialRecognition** Handle, float knownPersonThreshold, int enableAgeGender, int enableFaceEmotions, int enableHeadPose, int minFaceSize, float faceDetectorThreshold, int enableFaceliveness, int antiSpoof, int removeFakeFaces) {
    if (!Handle || !*Handle) return -1;
    try {
        const bool ageGenderOn   = (enableAgeGender == 1);
        const bool emotionsOn    = (enableFaceEmotions == 1);
        const bool headPoseOn    = (enableHeadPose == 1);
        const bool livenessOn    = (enableFaceliveness == 1);
        const bool antiSpoofOn   = (antiSpoof == 1);
        const bool removeFakesOn = (removeFakeFaces == 1);
        const bool ok = (*Handle)->UpdateParameters(knownPersonThreshold, ageGenderOn, emotionsOn, headPoseOn, minFaceSize, faceDetectorThreshold, livenessOn, antiSpoofOn, removeFakesOn);
        return ok ? 1 : 0;
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Fetch the engine's current face parameters and serialize them into a
// LabVIEW string as semicolon-separated fields:
//   threshold;ageGender;emotions;headPose;minFaceSize;detThreshold;
//   liveness;antiSpoof;removeFakes   (booleans as 0/1)
// Returns CopyToLStrHandle's 1/0, -1 on bad args or exception.
// NOTE: the exported name "GetParamters" (sic) is kept for ABI compatibility.
extern "C" ANSFR_API int GetParamters(ANSCENTER::ANSFacialRecognition** Handle, LStrHandle faceParams) {
    try {
        if (!Handle || !*Handle || !faceParams) return -1;
        // FIX: initialize every out-param. They were previously passed to
        // GetFaceParameters uninitialized and then formatted — reading an
        // indeterminate float/bool is undefined behaviour if the engine
        // leaves any of them unset.
        float knownPersonThreshold = 0.0f;
        float faceDetectionThreshold = 0.0f;
        int minFaceSize = 0;
        bool enableAgeGender = false;
        bool enableEmotions = false;
        bool enableHeadPose = false;
        bool enableFaceLiveness = false;
        bool enableAntiSpoof = false;
        bool removeFakeFaces = false;
        (*Handle)->GetFaceParameters(knownPersonThreshold, enableAgeGender, enableEmotions, enableHeadPose, minFaceSize, faceDetectionThreshold, enableFaceLiveness, enableAntiSpoof, removeFakeFaces);
        const std::string st =
            std::to_string(knownPersonThreshold) + ";" +
            std::to_string(enableAgeGender ? 1 : 0) + ";" +
            std::to_string(enableEmotions ? 1 : 0) + ";" +
            std::to_string(enableHeadPose ? 1 : 0) + ";" +
            std::to_string(minFaceSize) + ";" +
            std::to_string(faceDetectionThreshold) + ";" +
            std::to_string(enableFaceLiveness ? 1 : 0) + ";" +
            std::to_string(enableAntiSpoof ? 1 : 0) + ";" +
            std::to_string(removeFakeFaces ? 1 : 0);
        return CopyToLStrHandle(faceParams, st);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Configure the face-queue smoothing feature (queue size, required known
// faces, on/off). Returns 1 on success, 0 on failure, -1 on error.
extern "C" ANSFR_API int UpdateFaceQueue(ANSCENTER::ANSFacialRecognition** Handle, int queueSize, int numKnownFaceInQueue, int enableFaceQueue) {
    if (!Handle || !*Handle) return -1;
    try {
        const bool queueEnabled = (enableFaceQueue == 1);
        return (*Handle)->UpdateFaceQueue(queueSize, numKnownFaceInQueue, queueEnabled) ? 1 : 0;
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Fetch the face-queue configuration into a LabVIEW string as
// "queueSize;numKnownFaces;enabled(0/1)".
// Returns CopyToLStrHandle's 1/0, -1 on bad args or exception.
extern "C" ANSFR_API int GetFaceQueue(ANSCENTER::ANSFacialRecognition** Handle, LStrHandle faceQueue) {
    if (!Handle || !*Handle || !faceQueue) return -1;
    try {
        int queueSize;
        int knownFacesInQueue;
        bool queueEnabled = false;
        (*Handle)->GetFaceQueue(queueSize, knownFacesInQueue, queueEnabled);
        const std::string st =
            std::to_string(queueSize) + ";" +
            std::to_string(knownFacesInQueue) + ";" +
            std::to_string(queueEnabled ? 1 : 0);
        return CopyToLStrHandle(faceQueue, st);
    }
    catch (const std::exception&) {
        return -1;
    }
}
// Full LabVIEW inference entry point operating on a cv::Mat** supplied by
// the streaming layer: runs per-camera inference, scales/clamps boxes, and
// optionally returns a (resized) JPEG of the frame.
// Returns 1 on success, 0 on processing/copy failure, -1 on bad string
// args, -2 on bad image/handle, -3 when the handle is being released.
extern "C" ANSFR_API int RunInferenceComplete_LV(
    ANSCENTER::ANSFacialRecognition** Handle,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    // Validate inputs
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!Handle || !(*Handle)) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    auto* engine = guard.get();
    // FIX: clear the thread-local GPU-frame pointer on EVERY exit path.
    // Previously the early `return -2` below and the exception paths left
    // tl_currentGpuFrame() pointing at a stale frame, leaking it into the
    // next unrelated inference call on this thread.
    struct TlGpuFrameReset {
        ~TlGpuFrameReset() { tl_currentGpuFrame() = nullptr; }
    } tlGpuFrameReset;
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        // Clone image for thread safety - streaming thread may swap the pointer
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::FaceResultObject> outputs = engine->Inference(localImage, cameraId);
        tl_currentGpuFrame() = nullptr; // eager clear; the scope guard covers abnormal exits
        // All processing below is thread-local (no shared state)
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        int maxImageSize = originalWidth;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < maxImageSize);
        float ratio = 1.0f;
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        // Scale bounding boxes to the resized image, clamping inside bounds
        if (resizeNeeded) {
            newWidth = jpegImageSize;
            newHeight = static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth));
            ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // Clamp to original image bounds
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Convert to JPEG if requested (quality 50, optionally downscaled)
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result
        std::string stDetectionResult = engine->FaceObjectsToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        // Write JPEG bytes to the LabVIEW string handle if requested
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}
// Runs face detection on a shared cv::Mat frame and writes the detections
// (JSON) — and optionally a resized JPEG of the frame — to LabVIEW handles.
// Params: Handle = engine handle (LabVIEW buffer), cvImage = frame pointer
// (cloned before use because the streaming thread may swap it), cameraId =
// camera identifier forwarded to the detector, getJpegString = 1 to also
// return the frame as JPEG, jpegImageSize = target JPEG width (<=0 or >=
// frame width means no resize), detectionResult/imageStr = output handles.
// Returns 1 on success, 0 on processing failure or exception, -1 on bad
// string arguments, -2 on bad image/handle, -3 if the handle is being released.
extern "C" ANSFR_API int RunFaceDetectionComplete_LV(
    ANSCENTER::ANSFacialRecognition** Handle,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    // Validate inputs
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!Handle || !(*Handle)) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        // RAII: clear the thread-local GPU frame on EVERY exit path.
        // Previously an exception inside FaceDetect (or the early -2 return
        // below) left a stale pointer that leaked into the next call on this
        // thread.
        struct GpuFrameReset {
            ~GpuFrameReset() { tl_currentGpuFrame() = nullptr; }
        } gpuFrameReset;
        // Clone image for thread safety - streaming thread may swap the pointer
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::Object> outputs = engine->FaceDetect(localImage, cameraId);
        tl_currentGpuFrame() = nullptr;  // clear promptly; the RAII guard covers error paths
        // All processing below is thread-local (no shared state)
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        int maxImageSize = originalWidth;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < maxImageSize);
        float ratio = 1.0f;
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        // Scale bounding boxes if resizing
        if (resizeNeeded) {
            newWidth = jpegImageSize;
            // Preserve aspect ratio; clamp to >= 1 so cv::resize never
            // receives a zero-height target for extreme aspect ratios.
            newHeight = std::max(1, static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth)));
            ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // Clamp to image bounds
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Convert to JPEG if requested
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result
        std::string stDetectionResult = engine->FaceToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        // Write JPEG to LabVIEW string handle if requested
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}
// Runs face recognition on a shared cv::Mat frame and writes the results
// (JSON) — and optionally a resized JPEG of the frame — to LabVIEW handles.
// Params mirror RunFaceDetectionComplete_LV; this variant calls
// engine->Recognize and serializes FaceResultObject records.
// Returns 1 on success, 0 on processing failure or exception, -1 on bad
// string arguments, -2 on bad image/handle, -3 if the handle is being released.
extern "C" ANSFR_API int RunFaceRecogniserComplete_LV(
    ANSCENTER::ANSFacialRecognition** Handle,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    // Validate inputs
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!Handle || !(*Handle)) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(*Handle));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        // RAII: clear the thread-local GPU frame on EVERY exit path.
        // Previously an exception inside Recognize (or the early -2 return
        // below) left a stale pointer that leaked into the next call on this
        // thread.
        struct GpuFrameReset {
            ~GpuFrameReset() { tl_currentGpuFrame() = nullptr; }
        } gpuFrameReset;
        // Clone image for thread safety - streaming thread may swap the pointer
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::FaceResultObject> outputs = engine->Recognize(localImage, cameraId);
        tl_currentGpuFrame() = nullptr;  // clear promptly; the RAII guard covers error paths
        // All processing below is thread-local (no shared state)
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        int maxImageSize = originalWidth;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < maxImageSize);
        float ratio = 1.0f;
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        // Scale bounding boxes if resizing
        if (resizeNeeded) {
            newWidth = jpegImageSize;
            // Preserve aspect ratio; clamp to >= 1 so cv::resize never
            // receives a zero-height target for extreme aspect ratios.
            newHeight = std::max(1, static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth)));
            ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // Clamp to image bounds
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Convert to JPEG if requested
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result
        std::string stDetectionResult = engine->FaceObjectsToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        // Write JPEG to LabVIEW string handle if requested
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}
// ============================================================================
// V2 API — accepts uint64_t handle by value (eliminates LabVIEW buffer reuse bug)
// ============================================================================
// Reconstructs the ANSFacialRecognition** double-pointer shape expected by the
// legacy helpers (RunANSRFInference / RunANSRFDetector / RunANSRFRecognition)
// from a by-value uint64_t handle. The one-element stack array keeps the
// double pointer valid for the duration of the wrapper call.
// NOTE(review): this macro returns 0 on a null handle while the hand-written
// V2 wrappers return -1 — presumably callers depend on the existing codes, so
// the inconsistency is documented here rather than changed.
#define FR_V2_HANDLE_SETUP(handleVal) \
ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal); \
if (_v2Direct == nullptr) return 0; \
ANSCENTER::ANSFacialRecognition* _v2Arr[1] = { _v2Direct }; \
ANSCENTER::ANSFacialRecognition** Handle = &_v2Arr[0];
// V2 inference entry point: decodes the JPEG buffer, runs the full pipeline
// via RunANSRFInference, and writes the JSON result to detectionResult.
// Returns CopyToLStrHandle's status on success, -1 on bad args or exception,
// 0 on a null handle (macro behavior).
extern "C" ANSFR_API int RunInference_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult) {
    FR_V2_HANDLE_SETUP(handleVal);
    try {
        if (!jpeg_string || bufferLength == 0 || !detectionResult) return -1;
        std::string st = RunANSRFInference(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, st);
    }
    catch (const std::exception&) {
        return -1;
    }
    catch (...) {
        // Never let a non-std exception escape across the C ABI into LabVIEW.
        return -1;
    }
}
// V2 inference with camera id: decodes the JPEG buffer and runs the full
// recognition pipeline for the given camera.
// Returns CopyToLStrHandle's status on success, 0 if the JPEG fails to
// decode, -1 on bad args or exception, -3 if the handle is being released.
extern "C" ANSFR_API int RunInferenceWithCamId_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (_v2Direct == nullptr) return -1;
    if (!jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    // cv::Mat dimensions are int — reject buffers whose length would
    // overflow the unsigned->int narrowing below.
    if (bufferLength > static_cast<unsigned int>(INT_MAX)) return -1;
    FRHandleGuard guard(AcquireFRHandle(_v2Direct));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        cv::Mat frame = cv::imdecode(cv::Mat(1, static_cast<int>(bufferLength), CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (frame.empty()) return 0;
        std::vector<ANSCENTER::FaceResultObject> outputs = engine->Inference(frame, cameraId);
        frame.release();
        std::string st = engine->FaceObjectsToJsonString(outputs);
        return CopyToLStrHandle(detectionResult, st);
    }
    catch (...) { return -1; }
}
// V2 detector entry point: decodes the JPEG buffer, runs detection only via
// RunANSRFDetector, and writes the JSON result to detectionResult.
// Returns CopyToLStrHandle's status on success, -1 on bad args or exception,
// 0 on a null handle (macro behavior).
extern "C" ANSFR_API int RunDetector_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult) {
    FR_V2_HANDLE_SETUP(handleVal);
    try {
        if (!jpeg_string || bufferLength == 0 || !detectionResult) return -1;
        std::string st = RunANSRFDetector(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, st);
    }
    catch (const std::exception&) {
        return -1;
    }
    catch (...) {
        // Never let a non-std exception escape across the C ABI into LabVIEW.
        return -1;
    }
}
// V2 detector with camera id: decodes the JPEG buffer and runs detection
// (engine->Detect) for the given camera.
// Returns CopyToLStrHandle's status on success, 0 if the JPEG fails to
// decode, -1 on bad args or exception, -3 if the handle is being released.
extern "C" ANSFR_API int RunDetectorWithCamId_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (_v2Direct == nullptr) return -1;
    if (!jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    // cv::Mat dimensions are int — reject buffers whose length would
    // overflow the unsigned->int narrowing below.
    if (bufferLength > static_cast<unsigned int>(INT_MAX)) return -1;
    FRHandleGuard guard(AcquireFRHandle(_v2Direct));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        cv::Mat frame = cv::imdecode(cv::Mat(1, static_cast<int>(bufferLength), CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (frame.empty()) return 0;
        std::vector<ANSCENTER::FaceResultObject> outputs = engine->Detect(frame, cameraId);
        frame.release();
        std::string st = engine->FaceObjectsToJsonString(outputs);
        return CopyToLStrHandle(detectionResult, st);
    }
    catch (...) { return -1; }
}
// V2 recognition entry point: decodes the JPEG buffer, runs recognition via
// RunANSRFRecognition, and writes the JSON result to detectionResult.
// Returns CopyToLStrHandle's status on success, -1 on bad args or exception,
// 0 on a null handle (macro behavior).
extern "C" ANSFR_API int RunRecognition_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, LStrHandle detectionResult) {
    FR_V2_HANDLE_SETUP(handleVal);
    try {
        if (!jpeg_string || bufferLength == 0 || !detectionResult) return -1;
        std::string st = RunANSRFRecognition(Handle, jpeg_string, bufferLength);
        return CopyToLStrHandle(detectionResult, st);
    }
    catch (const std::exception&) {
        return -1;
    }
    catch (...) {
        // Never let a non-std exception escape across the C ABI into LabVIEW.
        return -1;
    }
}
// V2 recognition with camera id: decodes the JPEG buffer and runs
// recognition (engine->Recognize) for the given camera.
// Returns CopyToLStrHandle's status on success, 0 if the JPEG fails to
// decode, -1 on bad args or exception, -3 if the handle is being released.
extern "C" ANSFR_API int RunRecognitionWithCamId_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (_v2Direct == nullptr) return -1;
    if (!jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    // cv::Mat dimensions are int — reject buffers whose length would
    // overflow the unsigned->int narrowing below.
    if (bufferLength > static_cast<unsigned int>(INT_MAX)) return -1;
    FRHandleGuard guard(AcquireFRHandle(_v2Direct));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        cv::Mat frame = cv::imdecode(cv::Mat(1, static_cast<int>(bufferLength), CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (frame.empty()) return 0;
        std::vector<ANSCENTER::FaceResultObject> outputs = engine->Recognize(frame, cameraId);
        frame.release();
        std::string st = engine->FaceObjectsToJsonString(outputs);
        return CopyToLStrHandle(detectionResult, st);
    }
    catch (...) { return -1; }
}
// V2 face detection with camera id: decodes the JPEG buffer and runs
// engine->FaceDetect, serializing the Object list via FaceToJsonString.
// Returns CopyToLStrHandle's status on success, 0 if the JPEG fails to
// decode, -1 on bad args or exception, -3 if the handle is being released.
extern "C" ANSFR_API int RunFaceDetection_LV_V2(uint64_t handleVal, unsigned char* jpeg_string, unsigned int bufferLength, const char* cameraId, LStrHandle detectionResult) {
    ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (_v2Direct == nullptr) return -1;
    if (!jpeg_string || bufferLength == 0 || !cameraId || !detectionResult) return -1;
    // cv::Mat dimensions are int — reject buffers whose length would
    // overflow the unsigned->int narrowing below.
    if (bufferLength > static_cast<unsigned int>(INT_MAX)) return -1;
    FRHandleGuard guard(AcquireFRHandle(_v2Direct));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        cv::Mat frame = cv::imdecode(cv::Mat(1, static_cast<int>(bufferLength), CV_8UC1, jpeg_string), cv::IMREAD_COLOR);
        if (frame.empty()) return 0;
        std::vector<ANSCENTER::Object> outputs = engine->FaceDetect(frame, cameraId);
        frame.release();
        std::string st = engine->FaceToJsonString(outputs);
        return CopyToLStrHandle(detectionResult, st);
    }
    catch (...) { return -1; }
}
// V2 full-pipeline variant of RunInferenceComplete_LV: takes the handle by
// value (uint64_t), runs engine->Inference on a cloned frame, scales/clamps
// boxes, and optionally returns a resized JPEG of the frame.
// Returns 1 on success, 0 on processing failure or exception, -1 on bad
// handle/args, -2 on bad image, -3 if the handle is being released.
extern "C" ANSFR_API int RunInferenceComplete_LV_V2(
    uint64_t handleVal,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (_v2Direct == nullptr) return -1;
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(_v2Direct));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        // RAII: clear the thread-local GPU frame on EVERY exit path.
        // Previously an exception inside Inference (or the early -2 return
        // below) left a stale pointer that leaked into the next call on this
        // thread.
        struct GpuFrameReset {
            ~GpuFrameReset() { tl_currentGpuFrame() = nullptr; }
        } gpuFrameReset;
        // Clone image for thread safety - streaming thread may swap the pointer
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::FaceResultObject> outputs = engine->Inference(localImage, cameraId);
        tl_currentGpuFrame() = nullptr;  // clear promptly; the RAII guard covers error paths
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        int maxImageSize = originalWidth;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < maxImageSize);
        float ratio = 1.0f;
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        // Scale bounding boxes if resizing
        if (resizeNeeded) {
            newWidth = jpegImageSize;
            // Preserve aspect ratio; clamp to >= 1 so cv::resize never
            // receives a zero-height target for extreme aspect ratios.
            newHeight = std::max(1, static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth)));
            ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // Clamp to image bounds
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Convert to JPEG if requested
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result
        std::string stDetectionResult = engine->FaceObjectsToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}
// V2 detection-only variant of RunFaceDetectionComplete_LV: takes the handle
// by value (uint64_t), runs engine->FaceDetect on a cloned frame, scales/
// clamps boxes, and optionally returns a resized JPEG of the frame.
// Returns 1 on success, 0 on processing failure or exception, -1 on bad
// handle/args, -2 on bad image, -3 if the handle is being released.
extern "C" ANSFR_API int RunFaceDetectionComplete_LV_V2(
    uint64_t handleVal,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (_v2Direct == nullptr) return -1;
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(_v2Direct));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        // RAII: clear the thread-local GPU frame on EVERY exit path.
        // Previously an exception inside FaceDetect (or the early -2 return
        // below) left a stale pointer that leaked into the next call on this
        // thread.
        struct GpuFrameReset {
            ~GpuFrameReset() { tl_currentGpuFrame() = nullptr; }
        } gpuFrameReset;
        // Clone image for thread safety - streaming thread may swap the pointer
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::Object> outputs = engine->FaceDetect(localImage, cameraId);
        tl_currentGpuFrame() = nullptr;  // clear promptly; the RAII guard covers error paths
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        int maxImageSize = originalWidth;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < maxImageSize);
        float ratio = 1.0f;
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        // Scale bounding boxes if resizing
        if (resizeNeeded) {
            newWidth = jpegImageSize;
            // Preserve aspect ratio; clamp to >= 1 so cv::resize never
            // receives a zero-height target for extreme aspect ratios.
            newHeight = std::max(1, static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth)));
            ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // Clamp to image bounds
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Convert to JPEG if requested
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result
        std::string stDetectionResult = engine->FaceToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}
// V2 recognition variant of RunFaceRecogniserComplete_LV: takes the handle
// by value (uint64_t), runs engine->Recognize on a cloned frame, scales/
// clamps boxes, and optionally returns a resized JPEG of the frame.
// Returns 1 on success, 0 on processing failure or exception, -1 on bad
// handle/args, -2 on bad image, -3 if the handle is being released.
extern "C" ANSFR_API int RunFaceRecogniserComplete_LV_V2(
    uint64_t handleVal,
    cv::Mat** cvImage,
    const char* cameraId,
    int getJpegString,
    int jpegImageSize,
    LStrHandle detectionResult,
    LStrHandle imageStr)
{
    ANSCENTER::ANSFacialRecognition* _v2Direct = reinterpret_cast<ANSCENTER::ANSFacialRecognition*>(handleVal);
    if (_v2Direct == nullptr) return -1;
    if (!cvImage || !(*cvImage) || (*cvImage)->empty()) return -2;
    if (!cameraId || !detectionResult) return -1;
    FRHandleGuard guard(AcquireFRHandle(_v2Direct));
    if (!guard) return -3;
    auto* engine = guard.get();
    try {
        // Lookup NV12 frame data BEFORE cloning (clone creates new cv::Mat*)
        tl_currentGpuFrame() = ANSGpuFrameRegistry::instance().lookup(*cvImage);
        // RAII: clear the thread-local GPU frame on EVERY exit path.
        // Previously an exception inside Recognize (or the early -2 return
        // below) left a stale pointer that leaked into the next call on this
        // thread.
        struct GpuFrameReset {
            ~GpuFrameReset() { tl_currentGpuFrame() = nullptr; }
        } gpuFrameReset;
        // Clone image for thread safety - streaming thread may swap the pointer
        cv::Mat localImage = (**cvImage).clone();
        int originalWidth = localImage.cols;
        int originalHeight = localImage.rows;
        if (originalWidth == 0 || originalHeight == 0) return -2;
        std::vector<ANSCENTER::FaceResultObject> outputs = engine->Recognize(localImage, cameraId);
        tl_currentGpuFrame() = nullptr;  // clear promptly; the RAII guard covers error paths
        bool getJpeg = (getJpegString == 1);
        std::string stImage;
        int maxImageSize = originalWidth;
        bool resizeNeeded = (jpegImageSize > 0) && (jpegImageSize < maxImageSize);
        float ratio = 1.0f;
        int newWidth = originalWidth;
        int newHeight = originalHeight;
        // Scale bounding boxes if resizing
        if (resizeNeeded) {
            newWidth = jpegImageSize;
            // Preserve aspect ratio; clamp to >= 1 so cv::resize never
            // receives a zero-height target for extreme aspect ratios.
            newHeight = std::max(1, static_cast<int>(std::round(newWidth * static_cast<double>(originalHeight) / originalWidth)));
            ratio = static_cast<float>(newWidth) / originalWidth;
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x * ratio), newWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y * ratio), newHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width * ratio), newWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height * ratio), newHeight - obj.box.y));
            }
        }
        else {
            // Clamp to image bounds
            for (auto& obj : outputs) {
                obj.box.x = std::max(0, std::min(static_cast<int>(obj.box.x), originalWidth - 1));
                obj.box.y = std::max(0, std::min(static_cast<int>(obj.box.y), originalHeight - 1));
                obj.box.width = std::max(1, std::min(static_cast<int>(obj.box.width), originalWidth - obj.box.x));
                obj.box.height = std::max(1, std::min(static_cast<int>(obj.box.height), originalHeight - obj.box.y));
            }
        }
        // Convert to JPEG if requested
        if (getJpeg) {
            cv::Mat processedImage = localImage;
            if (resizeNeeded) {
                cv::resize(localImage, processedImage, cv::Size(newWidth, newHeight), 0, 0, cv::INTER_AREA);
            }
            std::vector<uchar> buf;
            if (cv::imencode(".jpg", processedImage, buf, { cv::IMWRITE_JPEG_QUALITY, 50 })) {
                stImage.assign(buf.begin(), buf.end());
            }
        }
        // Convert to JSON and write detection result
        std::string stDetectionResult = engine->FaceObjectsToJsonString(outputs);
        if (!CopyToLStrHandle(detectionResult, stDetectionResult)) return 0;
        if (getJpeg) {
            if (!CopyToLStrHandle(imageStr, stImage)) return 0;
        }
        return 1;
    }
    catch (const std::exception&) {
        return 0;
    }
    catch (...) {
        return 0;
    }
}