// ANSCORE/tests/ANSFR-UnitTest/ANSFR-UnitTest.cpp
// Manual unit/driver tests for the ANS Facial Recognition library.
#include <iostream>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/opencv.hpp>
#include <opencv2/dnn.hpp>
#include "boost/property_tree/ptree.hpp"
#include "boost/property_tree/json_parser.hpp"
#include "boost/foreach.hpp"
#include "boost/optional.hpp"
#include <fstream>
#include <sstream>
#include <iostream>
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/objdetect.hpp>
#include <map>
#include <vector>
#include <string>
#include <chrono>
#include <thread>
#include <iomanip>
#include "ANSFR.h"
#include "fastdeploy/vision.h"
#include "ANSFilePlayer.h"
#include <opencv2/dnn_superres.hpp>
#include <nvjpeg.h>
using namespace cv;
using namespace dnn;
using namespace std;
/// Reads `attribute` from the property tree, falling back to `defaultValue`
/// when the key is absent. Note: if the key exists but cannot be converted
/// to T, pt.get<T>() still throws (same contract as before).
template<typename T>
T GetOptionalValue(const boost::property_tree::ptree& pt, std::string attribute, T defaultValue) {
    return pt.count(attribute) ? pt.get<T>(attribute) : defaultValue;
}
/// Reads `key` from the property tree, returning a value-initialized T
/// (0 for numerics, empty string, ...) when the key is missing or not
/// convertible.
/// FIX: `T ret;` left primitives uninitialized on the miss path, so callers
/// received indeterminate ints/floats — value-initialize instead.
template <typename T>
T GetData(const boost::property_tree::ptree& pt, const std::string& key)
{
    T ret{};
    if (boost::optional<T> data = pt.get_optional<T>(key))
    {
        ret = data.get();
    }
    return ret;
}
// Minimal bitmap descriptor consumed by EncodeToJpegData.
// NOTE(review): EncodeToJpegData treats `buffer` as interleaved BGR with a
// pitch of 3*width bytes (it passes NVJPEG_INPUT_BGRI); nvJPEG expects the
// pixel buffer in device-accessible memory — confirm who allocates it.
typedef struct {
int width;
int height;
unsigned char* buffer;
unsigned long data_size;
} my_bitmap_type;
std::vector<unsigned char> EncodeToJpegData(const my_bitmap_type* image)
{
nvjpegHandle_t nv_handle;
nvjpegEncoderState_t nv_enc_state;
nvjpegEncoderParams_t nv_enc_params;
cudaStream_t stream = NULL;
nvjpegStatus_t er;
nvjpegCreateSimple(&nv_handle);
nvjpegEncoderStateCreate(nv_handle, &nv_enc_state, stream);
nvjpegEncoderParamsCreate(nv_handle, &nv_enc_params, stream);
nvjpegEncoderParamsSetSamplingFactors(nv_enc_params, NVJPEG_CSS_444, stream);
nvjpegImage_t nv_image;
nv_image.channel[0] = image->buffer;
nv_image.pitch[0] = 3 * image->width;
er = nvjpegEncodeImage(nv_handle, nv_enc_state, nv_enc_params, &nv_image,
NVJPEG_INPUT_BGRI, image->width, image->height, stream);
size_t length = 0;
nvjpegEncodeRetrieveBitstream(nv_handle, nv_enc_state, NULL, &length, stream);
cudaStreamSynchronize(stream);
std::vector<unsigned char> jpeg(length);
nvjpegEncodeRetrieveBitstream(nv_handle, nv_enc_state, jpeg.data(), &length, 0);
nvjpegEncoderParamsDestroy(nv_enc_params);
nvjpegEncoderStateDestroy(nv_enc_state);
nvjpegDestroy(nv_handle);
return jpeg;
}
/// Creates an FR handle and registers two demo users ("Tien", "Tuan"),
/// printing the inserted id for each, then releases the handle.
int InsertUsers(std::string databaseFilePath, std::string recognizerFilePath, std::string detectorFilePath) {
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle;
    std::string licenseKey = "";
    int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(), configFilePath, databaseFilePath.c_str(), recognizerFilePath.c_str(), detectorFilePath.c_str(), 1);
    std::cout << "Insert users.\n";
    const std::pair<std::string, std::string> demoUsers[] = {
        { "0001", "Tien" },
        { "0002", "Tuan" },
    };
    for (const auto& u : demoUsers) {
        int userid = InsertUser(&infHandle, u.first.c_str(), u.second.c_str());
        std::cout << "user id:" << userid << " has been inserted" << std::endl;
    }
    std::cout << "End of inserting users.\n";
    ReleaseANSRFHandle(&infHandle);
    return 0;
}
int InsertFaces(std::string databaseFilePath, std::string recognizerFilePath, std::string detectorFilePath,std::string imageFolder) {
const char* configFilePath = "";
ANSCENTER::ANSFacialRecognition* infHandle;
std::string licenseKey = "";
int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(), configFilePath, databaseFilePath.c_str(), recognizerFilePath.c_str(), detectorFilePath.c_str(),1);
int loadEngineResult = LoadANSRFEngine(&infHandle);
std::cout << "Load Engine Result:" << loadEngineResult;
int userId = 1;
unsigned int bufferLength = 0;
//string imagePath = "C:\\Programs\\DemoAssets\\NV\\Ng Viet Xuan Nhi.png";// "C:\\Programs\\DemoAssets\\Images\\Hoang.jpg";;// imageFolder + "\\SuperImages\\Tom.jpg";
//string imagePath = "C:\\Programs\\DemoAssets\\NV\\OtherIssues\\Tuan_Anh\\TA5.jpg";// "C:\\Programs\\DemoAssets\\Images\\Hoang.jpg";;// imageFolder + "\\SuperImages\\Tom.jpg";
string imagePath = "C:\\Programs\\DemoAssets\\NV\\No\\canread.png";// "C:\\Programs\\DemoAssets\\Images\\Hoang.jpg";;// imageFolder + "\\SuperImages\\Tom.jpg";
cv::Mat frame = cv::imread(imagePath, cv::IMREAD_COLOR);
unsigned char* jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
int height = frame.rows;
int width = frame.cols;
InsertFaceBinary(& infHandle, userId, jpeg_string,width,height);
delete jpeg_string;
/*
imagePath = imageFolder+"\\SuperImages\\Tom1.jpg";
frame = cv::imread(imagePath, cv::IMREAD_COLOR);
jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
height = frame.rows;
width = frame.cols;
InsertFaceBinary(&infHandle, userId, jpeg_string, width, height);
delete jpeg_string;
userId = 2;
imagePath = imageFolder+"\\SuperImages\\Jil.jpg";
frame = cv::imread(imagePath, cv::IMREAD_COLOR);
jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
height = frame.rows;
width = frame.cols;
InsertFaceBinary(&infHandle, userId, jpeg_string, width, height);
delete jpeg_string;
imagePath = imageFolder+"\\SuperImages\\Jil1.jpg";
frame = cv::imread(imagePath, cv::IMREAD_COLOR);
jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
height = frame.rows;
width = frame.cols;
InsertFaceBinary(&infHandle, userId, jpeg_string, width, height);
delete jpeg_string;
imagePath = imageFolder+"\\SuperImages\\Jil2.jpg";
frame = cv::imread(imagePath, cv::IMREAD_COLOR);
jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
height = frame.rows;
width = frame.cols;
InsertFaceBinary(&infHandle, userId, jpeg_string, width, height);
delete jpeg_string;*/
std::cout << "End of inserting faces.\n";
ReleaseANSRFHandle(&infHandle);
return 0;
}
/// Runs the full detect+recognize pipeline on a single image, draws the
/// parsed results and shows them in a window until a key is pressed.
/// FIX: cv::format("%s", class_name) passed a std::string through a
/// printf-style vararg, which is undefined behavior — now uses c_str().
int RunInferenceTest(std::string databaseFilePath, std::string recognizerFilePath, std::string facedetectorPath, std::string imagePath) {
    boost::property_tree::ptree root;
    boost::property_tree::ptree detectionObjects;
    boost::property_tree::ptree pt;
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle;
    std::string licenseKey = "";
    std::string detectorFilePath = facedetectorPath;
    int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(),
        configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        detectorFilePath.c_str(), 0.65, 1, 1, 1);
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "Load Engine Result:" << loadEngineResult;
    Reload(&infHandle);
    unsigned int bufferLength = 0;
    cv::Mat frame = cv::imread(imagePath, cv::IMREAD_COLOR);
    unsigned char* jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
    int height = frame.rows;
    int width = frame.cols;
    string detectionResult = RunANSRFInferenceBinary(&infHandle, jpeg_string, width, height);
    // NOTE(review): presumably new[]-allocated — confirm whether delete[] is required.
    delete jpeg_string;
    std::cout << "Result:" << detectionResult;
    if (!detectionResult.empty()) {
        pt.clear();
        std::stringstream ss;
        ss << detectionResult;
        boost::property_tree::read_json(ss, pt);
        BOOST_FOREACH(const boost::property_tree::ptree::value_type& child, pt.get_child("results"))
        {
            const boost::property_tree::ptree& result = child.second;
            const auto class_id = GetData<int>(result, "user_id");
            const auto class_name = GetData<std::string>(result, "user_name");
            const auto x = GetData<float>(result, "x");
            const auto y = GetData<float>(result, "y");
            const auto width = GetData<float>(result, "width");
            const auto height = GetData<float>(result, "height");
            cv::rectangle(frame, cv::Rect(x, y, width, height), 123, 2);
            cv::putText(frame, cv::format("%s:%d", class_name.c_str(), class_id), cv::Point(x, y - 5),
                0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
        }
    }
    cv::imshow("ANS Face detection", frame);
    if (cv::waitKey(0) == 27) // wait for 'esc'
    {
        std::cout << "End of inserting faces.\n";
    }
    std::cout << "End of inserting faces.\n";
    ReleaseANSRFHandle(&infHandle);
    cudaDeviceReset();
    //ReleaseLoggers();
    return 0;
}
int TestManagementAPI(std::string databaseFilePath, std::string recognizerFilePath) {
const char* configFilePath = "";
ANSCENTER::ANSFacialRecognition* infHandle;
std::string licenseKey = "";
std::string detectorFilePath = "";
int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(), configFilePath, databaseFilePath.c_str(), recognizerFilePath.c_str(), detectorFilePath.c_str(),1);
std::vector<int> userIds;
Reload(&infHandle);
string userRecords;
GetUsersString(&infHandle, userRecords,userIds);
std::cout << "User records: " << userRecords << std::endl;
string userRecord;
GetUserString(&infHandle, 1, userRecord);
std::cout << "User record: " << userRecord << std::endl;
string faceRecord;
GetFaceString(& infHandle, 1, faceRecord);
std::cout << "Face record: " << faceRecord << std::endl;
string faceRecords;
GetFacesString(&infHandle, 1, faceRecords);
std::cout << "Face records: " << faceRecords << std::endl;
ReleaseANSRFHandle(&infHandle);
return 0;
}
/// Runs only the face detector (no recognition) on one image and displays
/// the parsed boxes.
/// FIX: cv::format("%s", class_name) passed a std::string through a
/// printf-style vararg (undefined behavior) — now uses c_str().
int RunDetectTest(std::string databaseFilePath, std::string recognizerFilePath, std::string imagePath) {
    const char* configFilePath = "";
    boost::property_tree::ptree root;
    boost::property_tree::ptree detectionObjects;
    boost::property_tree::ptree pt;
    ANSCENTER::ANSFacialRecognition* infHandle;
    std::string licenseKey = "";
    std::string detectorFilePath = "";
    int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(), configFilePath, databaseFilePath.c_str(), recognizerFilePath.c_str(), detectorFilePath.c_str(), 1);
    unsigned int bufferLength = 0;
    cv::Mat frame = cv::imread(imagePath, cv::IMREAD_COLOR);
    unsigned char* jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
    int height = frame.rows;
    int width = frame.cols;
    string detectionResult = RunANSRFDetectorBinary(&infHandle, jpeg_string, width, height);
    std::cout << "Result:" << detectionResult;
    if (!detectionResult.empty()) {
        pt.clear();
        std::stringstream ss;
        ss << detectionResult;
        boost::property_tree::read_json(ss, pt);
        BOOST_FOREACH(const boost::property_tree::ptree::value_type& child, pt.get_child("results"))
        {
            const boost::property_tree::ptree& result = child.second;
            const auto class_id = GetData<int>(result, "user_id");
            const auto class_name = GetData<string>(result, "user_name");
            const auto x = GetData<float>(result, "x");
            const auto y = GetData<float>(result, "y");
            const auto width = GetData<float>(result, "width");
            const auto height = GetData<float>(result, "height");
            cv::rectangle(frame, cv::Rect(x, y, width, height), 123, 2);
            cv::putText(frame, cv::format("%s:%d", class_name.c_str(), class_id), cv::Point(x, y - 5),
                0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
        }
    }
    cv::imshow("ANS Face detection", frame);
    if (cv::waitKey(0) == 27) // wait for 'esc'
    {
        std::cout << "End of inserting faces.\n";
    }
    // NOTE(review): presumably new[]-allocated — confirm whether delete[] is required.
    delete jpeg_string;
    ReleaseANSRFHandle(&infHandle);
    return 0;
}
/// Deletes every user currently stored in the database (and, implicitly,
/// their faces) by enumerating user ids and deleting one by one.
int DeleteFaces(std::string databaseFilePath, std::string recognizerFilePath) {
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle;
    std::string licenseKey = "";
    std::string detectorFilePath = "";
    int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(), configFilePath, databaseFilePath.c_str(), recognizerFilePath.c_str(), detectorFilePath.c_str(), 1);
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::vector<int> userIds;
    std::string userRecords;
    GetUsersString(&infHandle, userRecords, userIds);
    for (int id : userIds) {
        DeleteUser(&infHandle, id);
    }
    std::cout << "End of deleting faces.\n";
    ReleaseANSRFHandle(&infHandle);
    return 0;
}
/// Runs inference against a database with no enrolled users, to verify the
/// "unknown face" path; draws whatever results come back.
/// FIX: cv::format("%s", class_name) passed a std::string through a
/// printf-style vararg (undefined behavior) — now uses c_str().
int TestANSVISNoUserCase(std::string databaseFilePath, std::string recognizerFilePath, std::string detectorFilePath, std::string imagePath) {
    boost::property_tree::ptree root;
    boost::property_tree::ptree detectionObjects;
    boost::property_tree::ptree pt;
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle;
    std::string licenseKey = "";
    int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(), configFilePath, databaseFilePath.c_str(), recognizerFilePath.c_str(), detectorFilePath.c_str(), 1);
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "Load Engine Result:" << loadEngineResult;
    Reload(&infHandle);
    unsigned int bufferLength = 0;
    cv::Mat frame = cv::imread(imagePath, cv::IMREAD_COLOR);
    unsigned char* jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
    int height = frame.rows;
    int width = frame.cols;
    string detectionResult = RunANSRFInferenceBinary(&infHandle, jpeg_string, width, height);
    std::cout << "Result:" << detectionResult;
    if (!detectionResult.empty()) {
        pt.clear();
        std::stringstream ss;
        ss << detectionResult;
        boost::property_tree::read_json(ss, pt);
        BOOST_FOREACH(const boost::property_tree::ptree::value_type& child, pt.get_child("results"))
        {
            const boost::property_tree::ptree& result = child.second;
            const auto class_id = GetData<int>(result, "user_id");
            const auto class_name = GetData<std::string>(result, "user_name");
            const auto x = GetData<float>(result, "x");
            const auto y = GetData<float>(result, "y");
            const auto width = GetData<float>(result, "width");
            const auto height = GetData<float>(result, "height");
            cv::rectangle(frame, cv::Rect(x, y, width, height), 123, 2);
            cv::putText(frame, cv::format("%s:%d", class_name.c_str(), class_id), cv::Point(x, y - 5),
                0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
        }
    }
    cv::imshow("ANS Face detection", frame);
    if (cv::waitKey(0) == 27) // wait for 'esc'
    {
        std::cout << "End of inserting faces.\n";
    }
    // NOTE(review): presumably new[]-allocated — confirm whether delete[] is required.
    delete jpeg_string;
    std::cout << "End of inserting faces.\n";
    ReleaseANSRFHandle(&infHandle);
    return 0;
}
/// End-to-end driver using local demo assets; currently only the
/// InsertFaces step is active (the other steps are kept commented for
/// manual toggling).
/// FIX: function is declared `int` but had no return statement, which is
/// undefined behavior — now returns 0.
int TestCompleteFR() {
    std::filesystem::path currentPath = "C:\\Programs\\DemoAssets";
    std::string databaseFilePath = currentPath.string() + "\\ANSFRDB\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\ANS_FaceRecognizer_v1.0.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\ANS_GenericFD(GPU)_v1.0.zip";
    std::string imagePath = "C:\\Programs\\DemoAssets\\Images\\TestFaces\\SuperImages\\FaceRecordings\\1\\0001_138x138.jpg";
    std::string imageFolder = "C:\\Programs\\DemoAssets\\Images\\TestFaces";
    //TestANSVISNoUserCase(databaseFilePath, recognizerFilePath, facedetectorFilePath, imagePath);
    //InsertUsers(databaseFilePath, recognizerFilePath, facedetectorFilePath);
    InsertFaces(databaseFilePath, recognizerFilePath, facedetectorFilePath, imageFolder);
    //RunInferenceTest(databaseFilePath, recognizerFilePath, facedetectorFilePath, imagePath);
    //DeleteFaces(databaseFilePath, recognizerFilePath);
    std::cout << "End of program.\n";
    return 0;
}
/// Runs a single-image inference against the installed ANSVIS Server
/// database and models.
/// FIX: function is declared `int` but had no return statement, which is
/// undefined behavior — now returns 0.
int TestFaceRecognition() {
    std::string databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANS_FaceRecognizer_v1.0.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\ANS_GenericFD(GPU)_v1.0.zip";
    std::string imagePath = "C:\\Programs\\DemoAssets\\TestImages\\SuperImages\\Jack.jpg";
    RunInferenceTest(databaseFilePath, recognizerFilePath, facedetectorFilePath, imagePath);
    std::cout << "End of program.\n";
    return 0;
}
/// Registers the four demo users (Tom, Jil, Mark, Jack) in the face DB,
/// printing the id returned for each insert.
int InsertAlex(std::string databaseFilePath, std::string recognizerFilePath, std::string detectorFilePath) {
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle;
    std::string licenseKey = "";
    int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(), configFilePath, databaseFilePath.c_str(), recognizerFilePath.c_str(), detectorFilePath.c_str(), 1);
    std::cout << "Insert users.\n";
    const std::pair<std::string, std::string> demoUsers[] = {
        { "0001", "Tom" },
        { "0002", "Jil" },
        { "0003", "Mark" },
        { "0004", "Jack" },
    };
    for (const auto& u : demoUsers) {
        int userid = InsertUser(&infHandle, u.first.c_str(), u.second.c_str());
        std::cout << "user id:" << userid << " has been inserted" << std::endl;
    }
    std::cout << "End of inserting users.\n";
    ReleaseANSRFHandle(&infHandle);
    return 0;
}
int InsertAlexFaces(std::string databaseFilePath, std::string recognizerFilePath, std::string detectorFilePath, std::string imageFolder) {
const char* configFilePath = "";
ANSCENTER::ANSFacialRecognition* infHandle;
std::string licenseKey = "";
int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(), configFilePath, databaseFilePath.c_str(), recognizerFilePath.c_str(), detectorFilePath.c_str(), 1);
int loadEngineResult = LoadANSRFEngine(&infHandle);
std::cout << "Load Engine Result:" << loadEngineResult;
int userId = 1;
unsigned int bufferLength = 0;
string imagePath = "C:\\Programs\\DemoAssets\\TestImages\\SuperImages\\Tom.jpg";
cv::Mat frame = cv::imread(imagePath, cv::IMREAD_COLOR);
unsigned char* jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
int height = frame.rows;
int width = frame.cols;
InsertFaceBinary(&infHandle, userId, jpeg_string, width, height);
delete jpeg_string;
userId =2;
imagePath = "C:\\Programs\\DemoAssets\\TestImages\\SuperImages\\Jil.jpg";
frame = cv::imread(imagePath, cv::IMREAD_COLOR);
jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
height = frame.rows;
width = frame.cols;
InsertFaceBinary(&infHandle, userId, jpeg_string, width, height);
delete jpeg_string;
userId = 3;
imagePath = "C:\\Programs\\DemoAssets\\TestImages\\SuperImages\\Mark.jpg";
frame = cv::imread(imagePath, cv::IMREAD_COLOR);
jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
height = frame.rows;
width = frame.cols;
InsertFaceBinary(&infHandle, userId, jpeg_string, width, height);
delete jpeg_string;
userId = 4;
imagePath = "C:\\Programs\\DemoAssets\\TestImages\\SuperImages\\Jack.jpg";
frame = cv::imread(imagePath, cv::IMREAD_COLOR);
jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
height = frame.rows;
width = frame.cols;
InsertFaceBinary(&infHandle, userId, jpeg_string, width, height);
delete jpeg_string;
std::cout << "End of inserting faces.\n";
ReleaseANSRFHandle(&infHandle);
return 0;
}
/// Runs inference with the demo-asset models but against the live ANSVIS
/// Server database (databaseFilePath is overwritten to the server path).
/// FIX: function is declared `int` but had no return statement, which is
/// undefined behavior — now returns 0.
int TestFromActualDB() {
    std::filesystem::path currentPath = "C:\\Programs\\DemoAssets";
    std::string databaseFilePath = currentPath.string() + "\\ANSFRDB\\ANSFR.db";
    std::string recognizerFilePath = currentPath.string() + "\\ANSAIModels\\ANS_FaceRecognizer_v1.0.zip";
    std::string facedetectorFilePath = currentPath.string() + "\\ANSAIModels\\ANS_GenericFD(GPU)_v1.0.zip";
    databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string imagePath = "C:\\Programs\\DemoAssets\\TestImages\\SuperImages\\Jack.jpg";
    RunInferenceTest(databaseFilePath, recognizerFilePath, facedetectorFilePath, imagePath);
    std::cout << "End of program.\n";
    return 0;
}
/// Full enroll-then-recognize cycle for the Alex demo data set: insert the
/// four users, insert one face each, then run inference on Jil's image.
int TestAlexRecognition() {
    const std::filesystem::path assetRoot = "C:\\Programs\\DemoAssets";
    const std::string databaseFilePath = assetRoot.string() + "\\ANSFRDB\\ANSFR.db";
    const std::string recognizerFilePath = assetRoot.string() + "\\ANSAIModels\\ANS_FaceRecognizer_v1.0.zip";
    const std::string facedetectorFilePath = assetRoot.string() + "\\ANSAIModels\\ANS_GenericFD(GPU)_v1.0.zip";
    const std::string imagePath = "C:\\Programs\\DemoAssets\\TestImages\\SuperImages\\Jil.jpg";
    const std::string imageFolder = "C:\\Programs\\DemoAssets\\TestImages\\SuperImages";
    InsertAlex(databaseFilePath, recognizerFilePath, facedetectorFilePath);
    InsertAlexFaces(databaseFilePath, recognizerFilePath, facedetectorFilePath, imageFolder);
    RunInferenceTest(databaseFilePath, recognizerFilePath, facedetectorFilePath, imagePath);
    std::cout << "End of program.\n";
    return 0;
}
/// Streams a video file through the recognizer, timing each frame's
/// inference and overlaying user/similarity results until EOF or Esc.
/// FIX: cv::format("%s:...", class_name, ...) passed a std::string through a
/// printf-style vararg (undefined behavior) — now uses c_str().
int FaceRecognitionBenchmark() {
    boost::property_tree::ptree root;
    boost::property_tree::ptree detectionObjects;
    boost::property_tree::ptree pt;
    std::filesystem::path currentPath = "C:\\Programs\\DemoAssets";
    std::string databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANS_FaceRecognizer_v1.0.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\ANS_GenericFD(GPU)_v1.0.zip";
    std::string videoFilePath = currentPath.string() + "\\Videos\\BMIP.mp4";
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle;
    std::string licenseKey = "";
    std::string detectorFilePath = facedetectorFilePath;
    int result = CreateANSRFHandle(&infHandle,
        licenseKey.c_str(),
        configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        detectorFilePath.c_str(),
        1,
        0.25, 1, 1, 1);
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "Load Engine Result:" << loadEngineResult;
    Reload(&infHandle);
    cv::VideoCapture capture(videoFilePath);
    if (!capture.isOpened()) {
        printf("could not read this video file...\n");
        return -1;
    }
    while (true)
    {
        cv::Mat frame;
        if (!capture.read(frame)) // stop at end of video
        {
            std::cout << "\n Cannot read the video file. please check your video.\n";
            break;
        }
        unsigned int bufferLength = 0;
        unsigned char* jpeg_string = ANSCENTER::ANSUtilityHelper::CVMatToBytes(frame, bufferLength);
        int height = frame.rows;
        int width = frame.cols;
        auto start = std::chrono::system_clock::now();
        string detectionResult = RunANSRFInferenceBinary(&infHandle, jpeg_string, width, height);
        // NOTE(review): presumably new[]-allocated — confirm whether delete[] is required.
        delete jpeg_string;
        if (!detectionResult.empty()) {
            pt.clear();
            std::stringstream ss;
            ss << detectionResult;
            boost::property_tree::read_json(ss, pt);
            BOOST_FOREACH(const boost::property_tree::ptree::value_type& child, pt.get_child("results"))
            {
                const boost::property_tree::ptree& result = child.second;
                const auto class_id = GetData<int>(result, "user_id");
                const auto class_name = GetData<std::string>(result, "user_name");
                const auto x = GetData<float>(result, "x");
                const auto y = GetData<float>(result, "y");
                const auto width = GetData<float>(result, "width");
                const auto height = GetData<float>(result, "height");
                const auto sim = GetData<float>(result, "similarity");
                cv::rectangle(frame, cv::Rect(x, y, width, height), 123, 2);
                cv::putText(frame, cv::format("%s:%d-%f", class_name.c_str(), class_id, sim), cv::Point(x, y - 5),
                    0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
            }
        }
        auto end = std::chrono::system_clock::now();
        auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
        std::cout << "Time:" << static_cast<long long int>(elapsed.count()) << "; Result" << detectionResult << std::endl;
        cv::imshow("ANS Object Tracking", frame);
        if (cv::waitKey(30) == 27) // 'esc' exits
        {
            break;
        }
    }
    capture.release();
    cv::destroyAllWindows();
    ReleaseANSRFHandle(&infHandle);
    std::cout << "End of program.\n";
    return 0;
}
/// Runs inference over every regular file in `folder`, printing each file's
/// JSON result and drawing boxes (frames are annotated but never shown).
/// FIX: cv::format("%s", class_name) passed a std::string through a
/// printf-style vararg (undefined behavior) — now uses c_str().
int RunInferenceTests(std::string databaseFilePath, std::string recognizerFilePath, std::string facedetectorPath, std::string folder) {
    boost::property_tree::ptree root;
    boost::property_tree::ptree detectionObjects;
    boost::property_tree::ptree pt;
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle;
    std::string licenseKey = "";
    std::string detectorFilePath = facedetectorPath;
    int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(),
        configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        detectorFilePath.c_str(), 0.65, 1, 1, 1);
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "Load Engine Result:" << loadEngineResult;
    Reload(&infHandle);
    unsigned int bufferLength = 0;
    for (const auto& entry : std::filesystem::directory_iterator(folder)) {
        if (entry.is_regular_file()) {
            std::string filePath = entry.path().string();
            std::cout << "Image File:" << filePath << std::endl;
            cv::Mat frame = cv::imread(filePath, cv::IMREAD_COLOR);
            unsigned char* jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
            int height = frame.rows;
            int width = frame.cols;
            string detectionResult = RunANSRFInferenceBinary(&infHandle, jpeg_string, width, height);
            std::cout << "Result:" << detectionResult;
            if (!detectionResult.empty()) {
                pt.clear();
                std::stringstream ss;
                ss << detectionResult;
                boost::property_tree::read_json(ss, pt);
                BOOST_FOREACH(const boost::property_tree::ptree::value_type& child, pt.get_child("results"))
                {
                    const boost::property_tree::ptree& result = child.second;
                    const auto class_id = GetData<int>(result, "user_id");
                    const auto class_name = GetData<std::string>(result, "user_name");
                    const auto x = GetData<float>(result, "x");
                    const auto y = GetData<float>(result, "y");
                    const auto width = GetData<float>(result, "width");
                    const auto height = GetData<float>(result, "height");
                    cv::rectangle(frame, cv::Rect(x, y, width, height), 123, 2);
                    cv::putText(frame, cv::format("%s:%d", class_name.c_str(), class_id), cv::Point(x, y - 5),
                        0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
                }
            }
            // NOTE(review): presumably new[]-allocated — confirm whether delete[] is required.
            delete jpeg_string;
        }
    }
    std::cout << "End of inserting faces.\n";
    ReleaseANSRFHandle(&infHandle);
    return 0;
}
/// Stress test: runs the folder-wide inference pass ten times over the
/// demo face images.
int StressTest() {
    const std::filesystem::path assetRoot = "C:\\Programs\\DemoAssets";
    const std::string databaseFilePath = assetRoot.string() + "\\ANSFRDB\\ANSFR.db";
    const std::string recognizerFilePath = assetRoot.string() + "\\ANSAIModels\\ANS_FaceRecognizer_v1.0.zip";
    const std::string facedetectorFilePath = assetRoot.string() + "\\ANSAIModels\\ANS_GenericFD(GPU)_v1.0.zip";
    const std::string imageFolder = assetRoot.string() + "\\TestImages\\FacialRecognition\\Faces";
    for (int iteration = 0; iteration < 10; ++iteration) {
        RunInferenceTests(databaseFilePath, recognizerFilePath, facedetectorFilePath, imageFolder);
    }
    std::cout << "End of program.\n";
    return 0;
}
int ExampleOfSuperResolitionon() {
// Initialize the super-resolution model
cv::dnn_superres::DnnSuperResImpl sr;
// Load the pre-trained model
std::string modelPath = "ESPCN_x4.pb"; // Replace with the path to your model file
try {
sr.readModel(modelPath);
}
catch (const cv::Exception& e) {
std::cerr << "Error loading model: " << e.what() << std::endl;
return -1;
}
// Set the model and scaling factor
sr.setModel("espcn", 4); // "espcn" model with a 4x scaling factor
// Load the input image
cv::Mat image = cv::imread("input_image.jpg"); // Replace with your image file path
if (image.empty()) {
std::cerr << "Could not open or find the image." << std::endl;
return -1;
}
// Upscale the image
cv::Mat upscaledImage;
sr.upsample(image, upscaledImage);
// Display the original and upscaled images
cv::imshow("Original Image", image);
cv::imshow("Upscaled Image", upscaledImage);
cv::waitKey(0);
return 0;
}
/// Loops a video through the full ANSVIS pipeline (age/gender, emotion,
/// head pose, liveness, anti-spoofing all enabled), restarting the video at
/// EOF, overlaying results until Esc is pressed.
/// FIXES:
///  - removed a web-scrape timestamp line that had been pasted into the
///    middle of the function body (would not compile);
///  - cv::format("%s:...", class_name, ...) passed a std::string through a
///    printf-style vararg (undefined behavior) — now uses c_str().
int ANSVISTest() {
    boost::property_tree::ptree root;
    boost::property_tree::ptree detectionObjects;
    boost::property_tree::ptree pt;
    std::string databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANS_FaceRecognizer_v1.1.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFDET\\ANS_GenericFD(GPU)_v1.0.zip";
    std::string videoFilePath = "E:\\Programs\\DemoAssets\\Videos\\classroom.mp4";
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle;
    std::string licenseKey = "";
    int enalbleHeadPose = 1;
    int enableFaceLiveness = 1;
    int enableAgeGender = 1;
    int enableEmotion = 1;
    int enableAntispoofing = 1;
    int precision = 0;
    std::string detectorFilePath = facedetectorFilePath;
    int result = CreateANSRFHandle(&infHandle,
        licenseKey.c_str(),
        configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        detectorFilePath.c_str(),
        precision,
        0.25, enableAgeGender, enableEmotion, enalbleHeadPose, 30, 0.55, enableFaceLiveness, enableAntispoofing);
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "Load Engine Result:" << loadEngineResult;
    Reload(&infHandle);
    cv::VideoCapture capture(videoFilePath);
    if (!capture.isOpened()) {
        printf("could not read this video file...\n");
        return -1;
    }
    int index = 0;
    while (true)
    {
        cv::Mat frame;
        if (!capture.read(frame)) // loop the video on EOF
        {
            std::cout << "\n Cannot read the video file. Restarting...\n";
            capture.set(cv::CAP_PROP_POS_FRAMES, 0);
            continue;
        }
        index++;
        //if (index == 200)Reload(&infHandle);
        unsigned int bufferLength = 0;
        unsigned char* jpeg_string = ANSCENTER::ANSUtilityHelper::CVMatToBytes(frame, bufferLength);
        int height = frame.rows;
        int width = frame.cols;
        auto start = std::chrono::system_clock::now();
        string detectionResult = RunANSRFInferenceBinary(&infHandle, jpeg_string, width, height);
        auto end = std::chrono::system_clock::now();
        auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
        std::cout << "Time:" << static_cast<long long int>(elapsed.count()) << "; Result" << detectionResult << std::endl;
        // NOTE(review): presumably new[]-allocated — confirm whether delete[] is required.
        delete jpeg_string;
        if (!detectionResult.empty()) {
            pt.clear();
            std::stringstream ss;
            ss << detectionResult;
            // NOTE(review): index is incremented a second time here, so frames
            // with detections count twice — looks unintentional; confirm.
            index++;
            std::cout << "Detected Index:" << index << std::endl;
            boost::property_tree::read_json(ss, pt);
            BOOST_FOREACH(const boost::property_tree::ptree::value_type& child, pt.get_child("results"))
            {
                const boost::property_tree::ptree& result = child.second;
                const auto class_id = GetData<int>(result, "user_id");
                const auto class_name = GetData<std::string>(result, "user_name");
                const auto x = GetData<float>(result, "x");
                const auto y = GetData<float>(result, "y");
                const auto width = GetData<float>(result, "width");
                const auto height = GetData<float>(result, "height");
                const auto sim = GetData<float>(result, "similarity");
                cv::rectangle(frame, cv::Rect(x, y, width, height), 123, 2);
                cv::putText(frame, cv::format("%s:%d-%f", class_name.c_str(), class_id, sim), cv::Point(x, y - 5),
                    0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
            }
        }
        cv::imshow("ANS Object Tracking", frame);
        if (cv::waitKey(30) == 27) // 'esc' exits
        {
            break;
        }
    }
    capture.release();
    cv::destroyAllWindows();
    ReleaseANSRFHandle(&infHandle);
    std::cout << "End of program.\n";
    return 0;
}
// ANSVISTestCPU — CPU-only variant of ANSVISTest for Intel CPU PCs without NVIDIA GPU.
// Uses CPU face detector model and forces precision=0 (FP32) for ONNX Runtime / OpenVINO.
//
// Flow: create FR handle -> load engine -> reload DB -> run inference on up to 200
// video frames (drawing recognized faces) -> print a timing/detection summary.
// Returns 0 on success; -1 handle creation failed, -2 engine load failed,
// -3 video open failed, -4 no frames processed.
int ANSVISTestCPU() {
    boost::property_tree::ptree pt;
    std::string databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANS_FaceRecognizer_v1.1.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFDET\\ANS_GenericFD(CPU)_v1.0.zip";
    std::string videoFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\classroom.mp4";
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle = nullptr;
    std::string licenseKey = "";
    // All attribute models enabled: this test exercises the full CPU pipeline.
    int enableHeadPose = 1;
    int enableFaceLiveness = 1;
    int enableAgeGender = 1;
    int enableEmotion = 1;
    int enableAntispoofing = 1;
    int precision = 0; // FP32
    std::cout << "=== ANSVISTestCPU ===" << std::endl;
    std::cout << "Database: " << databaseFilePath << std::endl;
    std::cout << "Recognizer: " << recognizerFilePath << std::endl;
    std::cout << "FaceDetector: " << facedetectorFilePath << std::endl;
    std::cout << "Video: " << videoFilePath << std::endl;
    // Step 1: Create handle
    std::cout << "[CPU Test] Step 1: Creating handle..." << std::endl;
    int result = CreateANSRFHandle(&infHandle,
        licenseKey.c_str(),
        configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        facedetectorFilePath.c_str(),
        precision,
        0.25, enableAgeGender, enableEmotion, enableHeadPose, 30, 0.55, enableFaceLiveness, enableAntispoofing);
    std::cout << "[CPU Test] CreateANSRFHandle result: " << result << std::endl;
    if (result < 0) {
        std::cerr << "[CPU Test] FAILED: CreateANSRFHandle returned " << result << std::endl;
        return -1;
    }
    // Step 2: Load engine
    std::cout << "[CPU Test] Step 2: Loading engine..." << std::endl;
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "[CPU Test] LoadANSRFEngine result: " << loadEngineResult << std::endl;
    if (loadEngineResult != 1) {
        std::cerr << "[CPU Test] FAILED: LoadANSRFEngine returned " << loadEngineResult << std::endl;
        ReleaseANSRFHandle(&infHandle);
        return -2;
    }
    // Step 3: Reload database
    std::cout << "[CPU Test] Step 3: Reloading database..." << std::endl;
    Reload(&infHandle);
    // Step 4: Open video
    std::cout << "[CPU Test] Step 4: Opening video..." << std::endl;
    cv::VideoCapture capture(videoFilePath);
    if (!capture.isOpened()) {
        std::cerr << "[CPU Test] FAILED: Could not open video: " << videoFilePath << std::endl;
        ReleaseANSRFHandle(&infHandle);
        return -3;
    }
    int totalFrames = static_cast<int>(capture.get(cv::CAP_PROP_FRAME_COUNT));
    std::cout << "[CPU Test] Video opened: " << totalFrames << " frames" << std::endl;
    // Step 5: Run inference on video frames
    std::cout << "[CPU Test] Step 5: Running inference..." << std::endl;
    int frameIndex = 0;
    int totalDetections = 0;
    double totalInferenceMs = 0.0;
    int maxFrames = 200; // Process up to 200 frames for the test
    while (frameIndex < maxFrames) {
        cv::Mat frame;
        if (!capture.read(frame)) {
            std::cout << "[CPU Test] End of video at frame " << frameIndex << std::endl;
            break;
        }
        frameIndex++;
        unsigned int bufferLength = 0;
        unsigned char* jpeg_string = ANSCENTER::ANSUtilityHelper::CVMatToBytes(frame, bufferLength);
        int height = frame.rows;
        int width = frame.cols;
        // steady_clock is monotonic — immune to NTP/system-time adjustments
        // (system_clock is not, which skews interval measurements).
        auto start = std::chrono::steady_clock::now();
        string detectionResult = RunANSRFInferenceBinary(&infHandle, jpeg_string, width, height);
        auto end = std::chrono::steady_clock::now();
        auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
        totalInferenceMs += static_cast<double>(elapsed.count());
        delete[] jpeg_string;
        if (!detectionResult.empty()) {
            try {
                pt.clear();
                std::stringstream ss;
                ss << detectionResult;
                boost::property_tree::read_json(ss, pt);
                int detCount = 0;
                BOOST_FOREACH(const boost::property_tree::ptree::value_type& child, pt.get_child("results")) {
                    const boost::property_tree::ptree& r = child.second;
                    const auto class_id = GetData<int>(r, "user_id");
                    const auto class_name = GetData<std::string>(r, "user_name");
                    const auto x = GetData<float>(r, "x");
                    const auto y = GetData<float>(r, "y");
                    const auto w = GetData<float>(r, "width");
                    const auto h = GetData<float>(r, "height");
                    const auto sim = GetData<float>(r, "similarity");
                    detCount++;
                    cv::rectangle(frame, cv::Rect(x, y, w, h), cv::Scalar(0, 255, 0), 2);
                    cv::putText(frame, cv::format("%s:%d-%.2f", class_name.c_str(), class_id, sim),
                        cv::Point(x, y - 5), 0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
                }
                totalDetections += detCount;
            }
            catch (const std::exception& e) {
                std::cerr << "[CPU Test] JSON parse error at frame " << frameIndex << ": " << e.what() << std::endl;
            }
        }
        // Show every 10th frame progress
        if (frameIndex % 10 == 0) {
            std::cout << "[CPU Test] Frame " << frameIndex << "/" << maxFrames
                << " | Time: " << elapsed.count() << "ms"
                << " | Detections: " << totalDetections << std::endl;
        }
        cv::imshow("ANS CPU Test", frame);
        if (cv::waitKey(1) == 27) {
            std::cout << "[CPU Test] ESC pressed, stopping." << std::endl;
            break;
        }
    }
    // Step 6: Print summary
    double avgMs = (frameIndex > 0) ? (totalInferenceMs / frameIndex) : 0.0;
    std::cout << "\n=== CPU Test Summary ===" << std::endl;
    std::cout << "Frames processed: " << frameIndex << std::endl;
    std::cout << "Total detections: " << totalDetections << std::endl;
    std::cout << "Avg inference: " << avgMs << " ms/frame" << std::endl;
    std::cout << "Total time: " << totalInferenceMs << " ms" << std::endl;
    if (frameIndex == 0) {
        std::cerr << "[CPU Test] FAILED: No frames processed" << std::endl;
    } else {
        std::cout << "[CPU Test] PASSED" << std::endl;
    }
    // Cleanup
    capture.release();
    cv::destroyAllWindows();
    ReleaseANSRFHandle(&infHandle);
    std::cout << "[CPU Test] Done." << std::endl;
    return (frameIndex > 0) ? 0 : -4;
}
// ANSVISTestCPU_Lightweight — Detection + Recognition only, no attribute models.
// Measures baseline face detection + recognition speed on Intel CPU/iGPU.
//
// Flow mirrors ANSVISTestCPU but with all attribute models disabled, and the
// per-frame progress line additionally reports a running average and FPS.
// Returns 0 on success; -1 handle creation failed, -2 engine load failed,
// -3 video open failed, -4 no frames processed.
int ANSVISTestCPU_Lightweight() {
    boost::property_tree::ptree pt;
    std::string databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANS_FaceRecognizer_v1.1.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFDET\\ANS_GenericFD(CPU)_v1.0.zip";
    std::string videoFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\classroom.mp4";
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle = nullptr;
    std::string licenseKey = "";
    // All attribute models DISABLED for lightweight test
    int enableHeadPose = 0;
    int enableFaceLiveness = 0;
    int enableAgeGender = 0;
    int enableEmotion = 0;
    int enableAntispoofing = 0;
    int precision = 0; // FP32
    std::cout << "\n=== ANSVISTestCPU_Lightweight ===" << std::endl;
    std::cout << "Mode: Detection + Recognition ONLY (no attributes)" << std::endl;
    std::cout << "Database: " << databaseFilePath << std::endl;
    std::cout << "Recognizer: " << recognizerFilePath << std::endl;
    std::cout << "FaceDetector: " << facedetectorFilePath << std::endl;
    std::cout << "Video: " << videoFilePath << std::endl;
    // Step 1: Create handle
    std::cout << "[Lightweight] Step 1: Creating handle..." << std::endl;
    int result = CreateANSRFHandle(&infHandle,
        licenseKey.c_str(),
        configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        facedetectorFilePath.c_str(),
        precision,
        0.25, enableAgeGender, enableEmotion, enableHeadPose, 30, 0.55, enableFaceLiveness, enableAntispoofing);
    std::cout << "[Lightweight] CreateANSRFHandle result: " << result << std::endl;
    if (result < 0) {
        std::cerr << "[Lightweight] FAILED: CreateANSRFHandle returned " << result << std::endl;
        return -1;
    }
    // Step 2: Load engine
    std::cout << "[Lightweight] Step 2: Loading engine..." << std::endl;
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "[Lightweight] LoadANSRFEngine result: " << loadEngineResult << std::endl;
    if (loadEngineResult != 1) {
        std::cerr << "[Lightweight] FAILED: LoadANSRFEngine returned " << loadEngineResult << std::endl;
        ReleaseANSRFHandle(&infHandle);
        return -2;
    }
    // Step 3: Reload database
    std::cout << "[Lightweight] Step 3: Reloading database..." << std::endl;
    Reload(&infHandle);
    // Step 4: Open video
    std::cout << "[Lightweight] Step 4: Opening video..." << std::endl;
    cv::VideoCapture capture(videoFilePath);
    if (!capture.isOpened()) {
        std::cerr << "[Lightweight] FAILED: Could not open video: " << videoFilePath << std::endl;
        ReleaseANSRFHandle(&infHandle);
        return -3;
    }
    int totalFrames = static_cast<int>(capture.get(cv::CAP_PROP_FRAME_COUNT));
    std::cout << "[Lightweight] Video opened: " << totalFrames << " frames" << std::endl;
    // Step 5: Run inference
    std::cout << "[Lightweight] Step 5: Running inference..." << std::endl;
    int frameIndex = 0;
    int totalDetections = 0;
    double totalInferenceMs = 0.0;
    int maxFrames = 200;
    while (frameIndex < maxFrames) {
        cv::Mat frame;
        if (!capture.read(frame)) {
            std::cout << "[Lightweight] End of video at frame " << frameIndex << std::endl;
            break;
        }
        frameIndex++;
        unsigned int bufferLength = 0;
        unsigned char* jpeg_string = ANSCENTER::ANSUtilityHelper::CVMatToBytes(frame, bufferLength);
        int height = frame.rows;
        int width = frame.cols;
        // steady_clock is monotonic — immune to NTP/system-time adjustments
        // (system_clock is not, which skews interval measurements).
        auto start = std::chrono::steady_clock::now();
        string detectionResult = RunANSRFInferenceBinary(&infHandle, jpeg_string, width, height);
        auto end = std::chrono::steady_clock::now();
        auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
        totalInferenceMs += static_cast<double>(elapsed.count());
        delete[] jpeg_string;
        if (!detectionResult.empty()) {
            try {
                pt.clear();
                std::stringstream ss;
                ss << detectionResult;
                boost::property_tree::read_json(ss, pt);
                int detCount = 0;
                BOOST_FOREACH(const boost::property_tree::ptree::value_type& child, pt.get_child("results")) {
                    const boost::property_tree::ptree& r = child.second;
                    const auto x = GetData<float>(r, "x");
                    const auto y = GetData<float>(r, "y");
                    const auto w = GetData<float>(r, "width");
                    const auto h = GetData<float>(r, "height");
                    detCount++;
                    cv::rectangle(frame, cv::Rect(x, y, w, h), cv::Scalar(0, 255, 0), 2);
                }
                totalDetections += detCount;
            }
            // Log parse failures instead of swallowing them silently
            // (consistent with ANSVISTestCPU).
            catch (const std::exception& e) {
                std::cerr << "[Lightweight] JSON parse error at frame " << frameIndex << ": " << e.what() << std::endl;
            }
        }
        if (frameIndex % 10 == 0) {
            double avgSoFar = totalInferenceMs / frameIndex;
            std::cout << "[Lightweight] Frame " << frameIndex << "/" << maxFrames
                << " | Time: " << elapsed.count() << "ms"
                << " | Avg: " << static_cast<int>(avgSoFar) << "ms"
                << " | FPS: " << std::fixed << std::setprecision(1) << (1000.0 / avgSoFar)
                << " | Faces: " << totalDetections << std::endl;
        }
        cv::imshow("ANS CPU Lightweight", frame);
        if (cv::waitKey(1) == 27) break;
    }
    // Summary
    double avgMs = (frameIndex > 0) ? (totalInferenceMs / frameIndex) : 0.0;
    double fps = (avgMs > 0) ? (1000.0 / avgMs) : 0.0;
    std::cout << "\n=== Lightweight Test Summary ===" << std::endl;
    std::cout << "Frames processed: " << frameIndex << std::endl;
    std::cout << "Total detections: " << totalDetections << std::endl;
    std::cout << "Avg inference: " << avgMs << " ms/frame" << std::endl;
    std::cout << "Avg FPS: " << std::fixed << std::setprecision(1) << fps << std::endl;
    std::cout << "Total time: " << totalInferenceMs << " ms" << std::endl;
    std::cout << (frameIndex > 0 ? "[Lightweight] PASSED" : "[Lightweight] FAILED") << std::endl;
    capture.release();
    cv::destroyAllWindows();
    ReleaseANSRFHandle(&infHandle);
    return (frameIndex > 0) ? 0 : -4;
}
2026-03-29 08:45:38 +11:00
// ANSVISTestFilePlayer — Same as ANSVISTest but uses ANSFILEPLAYER (HW decode + NV12 registry)
// instead of cv::VideoCapture. This enables NV12 fast-path testing for the FR pipeline:
// - SCRFD face detection uses fused NV12→RGB center-letterbox kernel
// - Face alignment uses NV12 affine warp kernel (full 4K resolution)
//
// Returns 0 on normal exit (ESC); -1 file player creation failed,
// -2 FR handle creation failed, -3 engine load failed.
int ANSVISTestFilePlayer() {
    boost::property_tree::ptree pt;
    std::string databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANS_FaceRecognizer_v1.1.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFDET\\ANS_GenericFD(GPU)_v1.0.zip";
    // std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\ANS_GenericFD(CPU)_v1.0.zip";
    std::string videoFilePath = "E:\\Programs\\DemoAssets\\Videos\\TestFR\\school1.mp4";
    const char* configFilePath = "";
    // Initialize to nullptr so a failed CreateANSRFHandle never leaves a
    // garbage pointer to be passed to ReleaseANSRFHandle below.
    ANSCENTER::ANSFacialRecognition* infHandle = nullptr;
    std::string licenseKey = "";
    int enableHeadPose = 1;
    int enableFaceLiveness = 1;
    int enableAgeGender = 1;
    int enableEmotion = 1;
    int enableAntispoofing = 1;
    int precision = 0;
    int result = CreateANSRFHandle(&infHandle,
        licenseKey.c_str(), configFilePath,
        databaseFilePath.c_str(), recognizerFilePath.c_str(),
        facedetectorFilePath.c_str(), precision,
        0.25, enableAgeGender, enableEmotion, enableHeadPose,
        30, 0.55, enableFaceLiveness, enableAntispoofing);
    // Check creation/engine results before using the handle (consistent with the CPU tests).
    if (result < 0) {
        std::cout << "Failed to create FR handle. Result: " << result << std::endl;
        return -2;
    }
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "Load Engine Result:" << loadEngineResult << std::endl;
    if (loadEngineResult != 1) {
        ReleaseANSRFHandle(&infHandle);
        return -3;
    }
    Reload(&infHandle);
    // Create ANSFILEPLAYER instead of cv::VideoCapture
    // This uses FFmpeg HW decode → NV12 frames are registered in ANSGpuFrameRegistry
    ANSCENTER::ANSFILEPLAYER* filePlayer = nullptr;
    int fpResult = CreateANSFilePlayerHandle(&filePlayer, licenseKey.c_str(), videoFilePath.c_str());
    if (fpResult != 1 || !filePlayer) {
        std::cout << "Failed to create file player. Result: " << fpResult << std::endl;
        ReleaseANSRFHandle(&infHandle);
        return -1;
    }
    StartFilePlayer(&filePlayer);
    std::cout << "File player started. Waiting for first frame..." << std::endl;
    // Wait for player to start producing frames
    std::this_thread::sleep_for(std::chrono::milliseconds(500));
    int index = 0;
    while (true) {
        int width = 0, height = 0;
        int64_t timeStamp = 0;
        cv::Mat* image = nullptr;
        int getResult = GetFilePlayerCVImage(&filePlayer, width, height, timeStamp, &image);
        if (getResult != 1 || !image || image->empty()) {
            // Check if player is still running (video may have ended)
            if (!IsFilePlayerRunning(&filePlayer)) {
                std::cout << "File player stopped. Reconnecting..." << std::endl;
                ReconnectFilePlayer(&filePlayer);
                std::this_thread::sleep_for(std::chrono::milliseconds(500));
            } else {
                std::this_thread::sleep_for(std::chrono::milliseconds(1));
            }
            continue;
        }
        index++;
        cv::Mat frame = image->clone(); // Clone for display (image pointer may be reused)
        // Run FR inference directly with cv::Mat — the NV12 registry lookup
        // happens inside SCRFD::Detect() using frame.datastart as the key
        // steady_clock is monotonic; system_clock can jump with NTP adjustments.
        auto start = std::chrono::steady_clock::now();
        std::vector<ANSCENTER::FaceResultObject> outputs = infHandle->Inference(frame, "FRCAM");
        auto end = std::chrono::steady_clock::now();
        auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
        std::string detectionResult = infHandle->FaceObjectsToJsonString(outputs);
        std::cout << "Frame:" << index << " Time:" << elapsed.count()
            << "ms Faces:" << outputs.size() << std::endl;
        // Draw results
        for (const auto& face : outputs) {
            cv::rectangle(frame, face.box, cv::Scalar(0, 255, 0), 2);
            cv::putText(frame,
                cv::format("%s-%s-%.3f", face.userName.c_str(), face.userId.c_str(), face.similarity),
                cv::Point(face.box.x, face.box.y - 5), 0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
        }
        cv::imshow("ANS FR FilePlayer (NV12)", frame);
        if (cv::waitKey(30) == 27) break;
    }
    StopFilePlayer(&filePlayer);
    ReleaseANSFilePlayerHandle(&filePlayer);
    cv::destroyAllWindows();
    ReleaseANSRFHandle(&infHandle);
    std::cout << "End of ANSVISTestFilePlayer." << std::endl;
    return 0;
}
// RunInferenceLoopTest — reload-stress test: 1000 iterations of
// LoadANSRFEngine + Reload + one still-image inference on a single handle,
// drawing any recognized faces each pass. Always returns 0.
//
// @param databaseFilePath  path to the ANSFR SQLite database
// @param recognizerFilePath path to the face recognizer model package
// @param facedetectorPath   path to the face detector model package
// @param imagePath          still image fed to every inference pass
int RunInferenceLoopTest(std::string databaseFilePath, std::string recognizerFilePath, std::string facedetectorPath, std::string imagePath) {
    boost::property_tree::ptree pt;
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle = nullptr;
    std::string licenseKey = "";
    std::string detectorFilePath = facedetectorPath;
    int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(),
        configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        detectorFilePath.c_str(), 0.65, 1, 1, 1);
    for (int i = 0; i < 1000; i++) {
        int loadEngineResult = LoadANSRFEngine(&infHandle);
        std::cout << "Load Engine Result:" << loadEngineResult;
        Reload(&infHandle);
        unsigned int bufferLength = 0;
        cv::Mat frame = cv::imread(imagePath, cv::IMREAD_COLOR);
        unsigned char* jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
        int height = frame.rows;
        int width = frame.cols;
        string detectionResult = RunANSRFInferenceBinary(&infHandle, jpeg_string, width, height);
        // CVMatToBytes hands back an array buffer: must pair with delete[]
        // (was `delete`, which is undefined behavior for array allocations).
        delete[] jpeg_string;
        std::cout << "Result:" << detectionResult;
        if (!detectionResult.empty()) {
            pt.clear();
            std::stringstream ss;
            ss << detectionResult;
            boost::property_tree::read_json(ss, pt);
            BOOST_FOREACH(const boost::property_tree::ptree::value_type& child, pt.get_child("results"))
            {
                // Renamed from `result` to avoid shadowing the outer handle-creation result.
                const boost::property_tree::ptree& det = child.second;
                const auto class_id = GetData<int>(det, "user_id");
                const auto class_name = GetData<std::string>(det, "user_name");
                const auto x = GetData<float>(det, "x");
                const auto y = GetData<float>(det, "y");
                const auto w = GetData<float>(det, "width");
                const auto h = GetData<float>(det, "height");
                cv::rectangle(frame, cv::Rect(x, y, w, h), 123, 2);
                // cv::format is printf-style: %s needs a C string — passing
                // std::string through varargs is undefined behavior.
                cv::putText(frame, cv::format("%s:%d", class_name.c_str(), class_id), cv::Point(x, y - 5),
                    0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
            }
        }
    }
    ReleaseANSRFHandle(&infHandle);
    return 0;
}
// FRStressTest — runs the single-image inference test 100 times in a row to
// stress repeated handle creation / inference / teardown. Always returns 0.
int FRStressTest() {
    std::string databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANS_FaceRecognizer_v1.0.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFDET\\ANS_GenericFD(GPU)_v1.0.zip";
    //std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\ANS_GenericFD(CPU)_v1.0.zip";
    std::string imagePath = "C:\\Programs\\DemoAssets\\Images\\TestFaces\\Tien1.jpg";
    for (int i = 0; i < 100; i++) {
        RunInferenceTest(databaseFilePath, recognizerFilePath, facedetectorFilePath, imagePath);
    }
    //RunInferenceLoopTest(databaseFilePath, recognizerFilePath, facedetectorFilePath, imagePath);
    std::cout << "End of program.\n";
    return 0;
}
// ANSVISRecordingTest — exercises the face-detector-only path
// (RunANSRFFaceDetector) over a looping video file, drawing each detected
// face box. Runs until ESC is pressed. Returns 0 on exit, -1 if the video
// cannot be opened.
int ANSVISRecordingTest() {
    boost::property_tree::ptree pt;
    // Get the current working directory
    std::string databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANS_FaceRecognizer_v1.0.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\ANS_GenericFD(GPU)_v1.0.zip";
    std::string videoFilePath = "C:\\Programs\\DemoAssets\\Videos\\Weapon\\BMGWeapon.mp4";// "C:\\Programs\\DemoAssets\\Videos\\classroom.mp4";
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle = nullptr;
    std::string licenseKey = "";
    std::string detectorFilePath = facedetectorFilePath;
    int result = CreateANSRFHandle(&infHandle,
        licenseKey.c_str(),
        configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        detectorFilePath.c_str(),
        1,
        0.25, 1, 1, 1, 20);
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "Load Engine Result:" << loadEngineResult;
    Reload(&infHandle);
    cv::VideoCapture capture(videoFilePath);
    if (!capture.isOpened()) {
        printf("could not read this video file...\n");
        return -1;
    }
    while (true)
    {
        cv::Mat frame;
        if (!capture.read(frame)) // if not success, break loop
        {
            std::cout << "\n Cannot read the video file. Restarting...\n";
            capture.set(cv::CAP_PROP_POS_FRAMES, 0); // Reset to the beginning of the video
            continue;
        }
        unsigned int bufferLength = 0;
        unsigned char* jpeg_string = ANSCENTER::ANSUtilityHelper::CVMatToBytes(frame, bufferLength);
        int height = frame.rows;
        int width = frame.cols;
        auto start = std::chrono::system_clock::now();
        // Detection-only call — no recognition, so results carry box geometry only.
        string detectionResult = RunANSRFFaceDetector(&infHandle, jpeg_string, width, height);
        // CVMatToBytes hands back an array buffer: must pair with delete[]
        // (was `delete`, which is undefined behavior for array allocations).
        delete[] jpeg_string;
        if (!detectionResult.empty()) {
            pt.clear();
            std::stringstream ss;
            ss << detectionResult;
            boost::property_tree::read_json(ss, pt);
            BOOST_FOREACH(const boost::property_tree::ptree::value_type & child, pt.get_child("results"))
            {
                const boost::property_tree::ptree& det = child.second;
                const auto x = GetData<float>(det, "x");
                const auto y = GetData<float>(det, "y");
                const auto w = GetData<float>(det, "width");
                const auto h = GetData<float>(det, "height");
                cv::rectangle(frame, cv::Rect(x, y, w, h), 123, 2);
                cv::putText(frame, "face", cv::Point(x, y - 5),
                    0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
            }
        }
        auto end = std::chrono::system_clock::now();
        auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
        std::cout << "Time:" << static_cast<long long int>(elapsed.count()) << "; Result" << detectionResult << std::endl;
        cv::imshow("ANS Object Tracking", frame);
        if (cv::waitKey(30) == 27) // Wait for 'esc' key press to exit
        {
            break;
        }
    }
    capture.release();
    cv::destroyAllWindows();
    ReleaseANSRFHandle(&infHandle);
    std::cout << "End of program.\n";
    return 0;
}
// FaceDetectorTest — full detection + recognition loop over a looping video
// file, with a mid-run Reload() at frame 200 to exercise database reload
// while inference is active. Runs until ESC. Returns 0 on exit, -1 if the
// video cannot be opened.
int FaceDetectorTest() {
    boost::property_tree::ptree pt;
    // Get the current working directory
    std::string databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANS_FaceRecognizer_v1.0.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\ANS_GenericFD(GPU)_v1.0.zip";
    /* std::filesystem::path currentPath = "C:\\Programs\\DemoAssets";
    std::string databaseFilePath = currentPath.string() + "\\ANSFRDB\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\ANS_FaceRecognizer_v1.0.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\Shared\\ANS_GenericFD(GPU)_v1.0.zip";*/
    std::string videoFilePath = "C:\\Programs\\DemoAssets\\Videos\\classroom.mp4";// "C:\\Programs\\DemoAssets\\Videos\\TestFR\\Face_Issue.mp4";// ;//
    //"C:\\Programs\\DemoAssets\\Videos\\classroom.mp4";//classroom BMIP
    //"C:\\Programs\\DemoAssets\\Videos\\TestFR\\BMGOffice.mp4";//
    // "C:\\Programs\\DemoAssets\\Videos\\BMIP.mp4";//
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle = nullptr;
    std::string licenseKey = "";
    std::string detectorFilePath = facedetectorFilePath;
    int result = CreateANSRFHandle(&infHandle,
        licenseKey.c_str(),
        configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        detectorFilePath.c_str(),
        1,
        0.25, 1, 1, 1, 50);
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "Load Engine Result:" << loadEngineResult;
    Reload(&infHandle);
    cv::VideoCapture capture(videoFilePath);
    if (!capture.isOpened()) {
        printf("could not read this video file...\n");
        return -1;
    }
    int index = 0;
    while (true)
    {
        cv::Mat frame;
        if (!capture.read(frame)) // if not success, break loop
        {
            std::cout << "\n Cannot read the video file. Restarting...\n";
            capture.set(cv::CAP_PROP_POS_FRAMES, 0); // Reset to the beginning of the video
            continue;
        }
        index++;
        if (index == 200)Reload(&infHandle); // mid-run database reload stress
        unsigned int bufferLength = 0;
        unsigned char* jpeg_string = ANSCENTER::ANSUtilityHelper::CVMatToBytes(frame, bufferLength);
        int height = frame.rows;
        int width = frame.cols;
        auto start = std::chrono::system_clock::now();
        string detectionResult = RunANSRFInferenceBinary(&infHandle, jpeg_string, width, height);
        // CVMatToBytes hands back an array buffer: must pair with delete[]
        // (was `delete`, which is undefined behavior for array allocations).
        delete[] jpeg_string;
        if (!detectionResult.empty()) {
            pt.clear();
            std::stringstream ss;
            ss << detectionResult;
            boost::property_tree::read_json(ss, pt);
            BOOST_FOREACH(const boost::property_tree::ptree::value_type & child, pt.get_child("results"))
            {
                const boost::property_tree::ptree& det = child.second;
                const auto class_id = GetData<int>(det, "user_id");
                const auto class_name = GetData<std::string>(det, "user_name");
                const auto x = GetData<float>(det, "x");
                const auto y = GetData<float>(det, "y");
                const auto w = GetData<float>(det, "width");
                const auto h = GetData<float>(det, "height");
                const auto sim = GetData<float>(det, "similarity");
                cv::rectangle(frame, cv::Rect(x, y, w, h), 123, 2);
                // cv::format is printf-style: %s needs a C string — passing
                // std::string through varargs is undefined behavior.
                cv::putText(frame, cv::format("%s:%d-%f", class_name.c_str(), class_id, sim), cv::Point(x, y - 5),
                    0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
            }
        }
        auto end = std::chrono::system_clock::now();
        auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
        std::cout << "Time:" << static_cast<long long int>(elapsed.count()) << "; Result" << detectionResult << std::endl;
        cv::imshow("ANS Object Tracking", frame);
        if (cv::waitKey(30) == 27) // Wait for 'esc' key press to exit
        {
            break;
        }
    }
    capture.release();
    cv::destroyAllWindows();
    ReleaseANSRFHandle(&infHandle);
    std::cout << "End of program.\n";
    return 0;
}
// ANSVISImageTest — one-shot detection + recognition on a single still image,
// drawing and displaying the annotated result. Returns 0.
int ANSVISImageTest() {
    boost::property_tree::ptree pt;
    // Get the current working directory
    std::string databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANS_FaceRecognizer_v1.1.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFDET\\ServerOptimised\\ANS_GenericFD(GPU)_v1.0_NVIDIAGeForceRTX4070LaptopGPU.zip";
    std::string imageFilePath = "E:\\Programs\\DemoAssets\\Images\\Unknown\\4615.jpg";
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle = nullptr;
    std::string licenseKey = "";
    std::string detectorFilePath = facedetectorFilePath;
    int result = CreateANSRFHandle(&infHandle,
        licenseKey.c_str(),
        configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        detectorFilePath.c_str(),
        1,
        0.25, 1, 1, 1, 30);
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "Load Engine Result:" << loadEngineResult;
    Reload(&infHandle);
    cv::Mat frame = cv::imread(imageFilePath, cv::IMREAD_COLOR);
    unsigned int bufferLength = 0;
    unsigned char* jpeg_string = ANSCENTER::ANSUtilityHelper::CVMatToBytes(frame, bufferLength);
    int height = frame.rows;
    int width = frame.cols;
    auto start = std::chrono::system_clock::now();
    string detectionResult = RunANSRFInferenceBinary(&infHandle, jpeg_string, width, height);
    // CVMatToBytes hands back an array buffer: must pair with delete[]
    // (was `delete`, which is undefined behavior for array allocations).
    delete[] jpeg_string;
    if (!detectionResult.empty()) {
        pt.clear();
        std::stringstream ss;
        ss << detectionResult;
        boost::property_tree::read_json(ss, pt);
        BOOST_FOREACH(const boost::property_tree::ptree::value_type & child, pt.get_child("results"))
        {
            const boost::property_tree::ptree& det = child.second;
            const auto class_id = GetData<int>(det, "user_id");
            const auto class_name = GetData<std::string>(det, "user_name");
            const auto x = GetData<float>(det, "x");
            const auto y = GetData<float>(det, "y");
            const auto w = GetData<float>(det, "width");
            const auto h = GetData<float>(det, "height");
            const auto sim = GetData<float>(det, "similarity");
            cv::rectangle(frame, cv::Rect(x, y, w, h), 123, 2);
            // cv::format is printf-style: %s needs a C string — passing
            // std::string through varargs is undefined behavior.
            cv::putText(frame, cv::format("%s:%d-%f", class_name.c_str(), class_id, sim), cv::Point(x, y - 5),
                0, 0.6, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
        }
    }
    auto end = std::chrono::system_clock::now();
    auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
    std::cout << "Time:" << static_cast<long long int>(elapsed.count()) << "; Result" << detectionResult << std::endl;
    cv::imshow("ANS Object Tracking", frame);
    // For a still image, block until any key press, then close the window.
    // (Previously waitKey(30) returned after 30ms and destroyAllWindows only
    // ran if ESC happened to land inside that window.)
    cv::waitKey(0);
    cv::destroyAllWindows();
    ReleaseANSRFHandle(&infHandle);
    std::cout << "End of program.\n";
    return 0;
}
// Enrolment helper: opens a standalone FR handle, registers one user record
// (id + display name) in the face database via the project InsertUser API,
// then tears the handle down. Always returns 0.
int InsertUser(std::string databaseFilePath, std::string recognizerFilePath, std::string detectorFilePath, string userId, string userName) {
    const char* configFilePath = "";
    std::string licenseKey = "";
    ANSCENTER::ANSFacialRecognition* infHandle;
    const int createResult = CreateANSRFHandle(&infHandle, licenseKey.c_str(), configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        detectorFilePath.c_str(), 1, 0.35, 0, 0, 0, 30);
    (void)createResult; // creation status deliberately unchecked in this helper
    std::cout << "Insert users.\n";
    const int insertedId = InsertUser(&infHandle, userId.c_str(), userName.c_str());
    std::cout << "user id:" << insertedId << " has been inserted" << std::endl;
    std::cout << "End of inserting users.\n";
    ReleaseANSRFHandle(&infHandle);
    return 0;
}
// Enrolment helper: opens a standalone FR handle, loads the engine, and
// enrols the face found in `imagePath` for the hard-coded user id 1 via
// InsertFaceBinary. Always returns 0.
int InsertFace(std::string databaseFilePath, std::string recognizerFilePath, std::string detectorFilePath, std::string imagePath) {
    const char* configFilePath = "";
    ANSCENTER::ANSFacialRecognition* infHandle;
    std::string licenseKey = "";
    int result = CreateANSRFHandle(&infHandle, licenseKey.c_str(), configFilePath,
        databaseFilePath.c_str(),
        recognizerFilePath.c_str(),
        detectorFilePath.c_str(), 1, 0.35, 0, 0, 0, 30);
    // Split from the CreateANSRFHandle line — one statement per line.
    int loadEngineResult = LoadANSRFEngine(&infHandle);
    std::cout << "Load Engine Result:" << loadEngineResult;
    UpdateParameters(&infHandle, 0.35, 0, 0, 0, 30, 0.3, 1, 1, 1);
    int userId = 1;
    unsigned int bufferLength = 0;
    cv::Mat frame = cv::imread(imagePath, cv::IMREAD_COLOR);
    unsigned char* jpeg_string = ANSCENTER::ANSFRHelper::CVMatToBytes(frame, bufferLength);
    int height = frame.rows;
    int width = frame.cols;
    InsertFaceBinary(&infHandle, userId, jpeg_string, width, height);
    // CVMatToBytes hands back an array buffer: must pair with delete[]
    // (was `delete`, which is undefined behavior for array allocations).
    delete[] jpeg_string;
    std::cout << "End of inserting faces.\n";
    ReleaseANSRFHandle(&infHandle);
    return 0;
}
// TestCompleteFR1 — enrolment smoke test: inserts one face image into the FR
// database (user-insertion call kept commented for reference). Returns 1.
int TestCompleteFR1() {
    std::string databaseFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANSFR.db";
    std::string recognizerFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFR\\ANS_FaceRecognizer_v1.1.zip";
    std::string facedetectorFilePath = "C:\\ProgramData\\ANSCENTER\\ANSVIS Server\\ANSFDET\\ServerOptimised\\ANS_GenericFD(GPU)_v1.0_NVIDIAGeForceRTX4070LaptopGPU.zip";
    //std::string imageFile = "E:\\Programs\\DemoAssets\\Images\\Face\\Original.jpg";
    std::string imageFile = "E:\\Programs\\DemoAssets\\Images\\Face\\61.png";
    //InsertUser(databaseFilePath, recognizerFilePath, facedetectorFilePath,"0001","Tien");
    InsertFace(databaseFilePath, recognizerFilePath, facedetectorFilePath, imageFile);
    std::cout << "End of program.\n";
    return 1;
}
int main()
{
2026-03-29 08:45:38 +11:00
//FaceDetectorTest();
//TestFaceRecognition();
//TestCompleteFR1();
2026-03-29 08:45:38 +11:00
//ANSVISImageTest();
//ANSVISTest();
2026-03-29 08:45:38 +11:00
//ANSVISTestFilePlayer();
// ANSVISRecordingTest();
//FRStressTest();
2026-03-29 08:45:38 +11:00
//for (int i = 0; i < 20; i++) {
// ANSVISTest();
//}
// StressTest();
//TestFromActualDB();
//TestAlexRecognition();
//FaceRecognitionBenchmark();
// TestCompleteFR();
//TestFaceRecognition();
//FaceRecognitionBenchmark();
ANSVISTestCPU_Lightweight();
ANSVISTestCPU();
2026-03-29 08:45:38 +11:00
std::cin.get();
}