Initial setup for CLion

This commit is contained in:
2026-03-28 16:54:11 +11:00
parent 239cc02591
commit 7b4134133c
1136 changed files with 811916 additions and 0 deletions

227
ANSLIB/ANSLIB.cpp Normal file
View File

@@ -0,0 +1,227 @@
#include "ANSLIB.h"
#include "ANSLibsLoader.h"
#include <memory>
#define LOAD_FUNC(name) \
name##Func = (name##FuncT)GetProcAddress(dllHandle, #name); \
if (!name##Func) { success = false; }
namespace ANSCENTER
{
ANSLIB::ANSLIB() {
// Resolves ANSODEngine.dll at runtime and caches every required export in
// the <name>Func pointer members. On any failure the object stays usable
// but IsLoaded() reports false and all wrappers return their error value.
// Ensure all shared DLLs (OpenCV, OpenVINO, TRT, ORT) are pre-loaded
ANSCENTER::ANSLibsLoader::Initialize();
const char* dllPath = "C:\\ProgramData\\ANSCENTER\\Shared\\ANSODEngine.dll"; // Hardcoded path
dllHandle = LoadLibraryA(dllPath);
if (!dllHandle) {
// DLL not found/loadable: leave 'loaded' false, nothing to resolve.
return;
}
bool success = true;
// LOAD_FUNC (defined at the top of this file) resolves one export via
// GetProcAddress and clears 'success' if the symbol is missing.
LOAD_FUNC(CreateANSODHandle_CS);
LOAD_FUNC(RunInference_CPP);
LOAD_FUNC(RunInferenceComplete_CPP);
LOAD_FUNC(OptimizeModelStr_CS);
LOAD_FUNC(GetEngineType);
LOAD_FUNC(LoadModelFromFolder);
LOAD_FUNC(GetActiveRect);
LOAD_FUNC(DetectMovement);
LOAD_FUNC(Optimize);
LOAD_FUNC(GetODParameters);
LOAD_FUNC(ReleaseANSODHandle);
LOAD_FUNC(UpdateDetectionMinScore);
LOAD_FUNC(SetPrompt);
LOAD_FUNC(SetTracker);
LOAD_FUNC(SetTrackerParameters);
loaded = success;
if (!loaded) {
// A partially resolved DLL is unusable: unload it so the module
// handle is not leaked and the destructor has nothing to free twice.
FreeLibrary(dllHandle);
dllHandle = nullptr;
}
}
ANSLIB* ANSLIB::Create() {
// Heap factory: hands ownership of a new instance to the caller.
// Any constructor exception is swallowed and reported as nullptr.
try {
return new ANSLIB();
}
catch (...) {
return nullptr;
}
}
void ANSLIB::Destroy(ANSLIB* instance) {
// Counterpart to Create(): destroys the instance; nullptr is a no-op.
delete instance;
}
ANSLIB::~ANSLIB() noexcept {
try {
// Release the engine-side detector handle BEFORE unloading the DLL:
// ReleaseANSODHandleFunc points into the DLL's code segment.
if (ANSHandle && ReleaseANSODHandleFunc) {
ReleaseANSODHandleFunc(&ANSHandle);
ANSHandle = nullptr;
}
if (dllHandle) {
FreeLibrary(dllHandle);
dllHandle = nullptr;
}
}
catch (...) {} // a destructor must never propagate exceptions
}
bool ANSLIB::IsLoaded() const {
return loaded;
}
// Public APIs
int ANSLIB::Initialize(const char* licenseKey,
const char* modelFilePath,
const char* modelFileZipPassword,
float modelThreshold,
float modelConfThreshold,
float modelNMSThreshold,
int modelType,
int detectionType, int loadEngineOnCreation,std::string& labels)
{
// Creates the detector handle inside the engine DLL; the export returns the
// model's label map as a C string (empty/null signals failure).
// Returns 1 on success, -1 on failure, 0 when the export was never resolved.
if (!loaded) {
return -1;
}
if (!CreateANSODHandle_CSFunc) {
return 0;
}
// autoDetectEngine is fixed to 1 here: the DLL chooses the backend itself.
const char* labelStr = CreateANSODHandle_CSFunc(&ANSHandle, licenseKey, modelFilePath, modelFileZipPassword,
modelThreshold, modelConfThreshold, modelNMSThreshold,
1, modelType, detectionType, loadEngineOnCreation);
labels = (labelStr != nullptr) ? std::string(labelStr) : std::string();
return labels.empty() ? -1 : 1;
}
int ANSLIB::RunInference(cv::Mat cvImage, const char* cameraId, std::vector<ANSCENTER::Object>& detectionResult) {
// Runs detection on one frame; results are appended to detectionResult.
// Returns the DLL's result code, or -1 when the engine is not loaded,
// the export is missing, or the call throws.
if (!loaded) {
return -1;
}
if (RunInference_CPPFunc) {
try {
// The export only needs a cv::Mat**. Point at the by-value parameter
// directly instead of heap-allocating a std::unique_ptr<cv::Mat> copy
// per call (the previous copy had the same lifetime — this call only —
// so behavior is unchanged while the allocation is removed).
cv::Mat* imagePtr = &cvImage;
return RunInference_CPPFunc(&ANSHandle, &imagePtr, cameraId, detectionResult);
}
catch (...) {
return -1;
}
}
return -1;
}
int ANSLIB::RunInferenceWithOptions(cv::Mat cvImage, const char* cameraId, const char* activeROIMode, std::vector<ANSCENTER::Object>& detectionResult) {
// Like RunInference() but also forwards the active-ROI mode string.
// Returns the DLL's result code, or -1 when the engine is not loaded,
// the export is missing, or the call throws.
if (!loaded) {
return -1;
}
if (RunInferenceComplete_CPPFunc) {
try {
// Stack-local pointer replaces the previous per-call heap-allocated
// cv::Mat copy (std::make_unique); same lifetime, no allocation.
cv::Mat* imagePtr = &cvImage;
return RunInferenceComplete_CPPFunc(&ANSHandle, &imagePtr, cameraId, activeROIMode, detectionResult);
}
catch (...) {
return -1;
}
}
return -1;
}
int ANSLIB::OptimizeModel(const char* modelFilePath, const char* modelFileZipPassword, int modelType, int modelDetectionType,int fp16) {
// Asks the DLL to optimize a model file; 1 on success, -1 otherwise.
if (!loaded || !OptimizeModelStr_CSFunc) {
return -1;
}
const char* status = OptimizeModelStr_CSFunc(modelFilePath, modelFileZipPassword, modelType, modelDetectionType, fp16);
return (status != nullptr) ? 1 : -1;
}
const char* ANSLIB::CreateANSODHandle_CS(void** Handle, const char* licenseKey, const char* modelFilePath,
const char* modelFileZipPassword, float modelThreshold, float modelConfThreshold,
float modelNMSThreshold, int autoDetectEngine, int modelType, int detectionType, int loadEngineOnCreation) {
// Private pass-through to the resolved DLL export.
if (!CreateANSODHandle_CSFunc) {
return "Function not loaded";
}
return CreateANSODHandle_CSFunc(Handle, licenseKey, modelFilePath, modelFileZipPassword,
modelThreshold, modelConfThreshold, modelNMSThreshold,
autoDetectEngine, modelType, detectionType, loadEngineOnCreation);
}
const char* ANSLIB::OptimizeModelStr_CS(const char* modelFilePath, const char* modelFileZipPassword, int modelType, int modelDetectionType,int fp16) {
// Private pass-through to the resolved DLL export.
if (!OptimizeModelStr_CSFunc) {
return "Function not loaded";
}
return OptimizeModelStr_CSFunc(modelFilePath, modelFileZipPassword, modelType, modelDetectionType, fp16);
}
int ANSLIB::RunInference_CPP(void** Handle, cv::Mat** cvImage, const char* cameraId, std::vector<ANSCENTER::Object>& detectionResult) {
// Private raw forwarder; -1 when the export was never resolved.
if (!RunInference_CPPFunc) {
return -1;
}
return RunInference_CPPFunc(Handle, cvImage, cameraId, detectionResult);
}
int ANSLIB::RunInferenceComplete_CPP(void** Handle, cv::Mat** cvImage, const char* cameraId, const char* activeROIMode, std::vector<ANSCENTER::Object>& detectionResult) {
// Same as RunInference_CPP but also forwards the active-ROI mode string.
if (!RunInferenceComplete_CPPFunc) {
return -1;
}
return RunInferenceComplete_CPPFunc(Handle, cvImage, cameraId, activeROIMode, detectionResult);
}
int ANSLIB::GetEngineType() {
// Backend id reported by the engine DLL; -1 when unavailable.
return (loaded && GetEngineTypeFunc) ? GetEngineTypeFunc() : -1;
}
int ANSLIB::LoadModelFromFolder(const char* licenseKey, const char* modelName, const char* className,
float detectionScoreThreshold, float modelConfThreshold, float modelMNSThreshold,
int autoDetectEngine, int modelType, int detectionType, int loadEngineOnCreation, const char* modelFolder,std::string& labelMap)
{
// Loads a model from an on-disk folder; the DLL fills labelMap.
if (!loaded || !LoadModelFromFolderFunc) {
return -1;
}
return LoadModelFromFolderFunc(&ANSHandle, licenseKey, modelName, className,
detectionScoreThreshold, modelConfThreshold, modelMNSThreshold,
autoDetectEngine, modelType, detectionType, loadEngineOnCreation,modelFolder, labelMap);
}
int ANSLIB::GetActiveRect(cv::Mat cvImage, cv::Rect& activeWindow) {
// Asks the engine for the active region of the given frame.
return (loaded && GetActiveRectFunc) ? GetActiveRectFunc(&ANSHandle, cvImage, activeWindow) : -1;
}
int ANSLIB::DetectMovement(cv::Mat image, const char* cameraId, std::vector<Object>& results) {
// Forwards one frame to the engine's motion detector.
return (loaded && DetectMovementFunc) ? DetectMovementFunc(&ANSHandle, image, cameraId, results) : -1;
}
int ANSLIB::ReleaseANSODHandle(void** Handle) {
// Frees an engine handle; -1 when the export was never resolved.
return ReleaseANSODHandleFunc ? ReleaseANSODHandleFunc(Handle) : -1;
}
cv::Rect ANSLIB::GetActiveWindow(cv::Mat cvImage) {
// Convenience wrapper over GetActiveRect(): yields the rectangle on
// success (result code 0) and an empty cv::Rect on any failure.
cv::Rect rect{};
return (GetActiveRect(cvImage, rect) == 0) ? rect : cv::Rect();
}
int ANSLIB::GetODParameters(ANSCENTER::Params& param) {
// Fills 'param' with the ROI/parameter set exposed by the engine.
return (loaded && GetODParametersFunc) ? GetODParametersFunc(&ANSHandle, param) : -1;
}
int ANSLIB::Optimize(bool fp16) {
// Optimizes the currently loaded model in place (fp16 selects half precision).
return (loaded && OptimizeFunc) ? OptimizeFunc(&ANSHandle, fp16) : -1;
}
int ANSLIB::UpdateDetectionMinScore(float scoreThreshold) {
// Updates the minimum detection score at runtime.
return (loaded && UpdateDetectionMinScoreFunc) ? UpdateDetectionMinScoreFunc(&ANSHandle, scoreThreshold) : -1;
}
int ANSLIB::SetPrompt(const char* text) {
// Forwards a text prompt to the engine.
return (loaded && SetPromptFunc) ? SetPromptFunc(&ANSHandle, text) : -1;
}
int ANSLIB::SetTracker(int trackerType, int enableTracker) {
// Selects and enables/disables the tracker (see TrackerType in ANSLIB.h).
return (loaded && SetTrackerFunc) ? SetTrackerFunc(&ANSHandle, trackerType, enableTracker) : -1;
}
int ANSLIB::SetTrackerParameters(const char* trackerParams) {
// Passes a tracker parameter string through to the engine.
return (loaded && SetTrackerParametersFunc) ? SetTrackerParametersFunc(&ANSHandle, trackerParams) : -1;
}
}

439
ANSLIB/ANSLIB.h Normal file
View File

@@ -0,0 +1,439 @@
#ifndef ANSLIB_H
#define ANSLIB_H
#define ANSLIB_API __declspec(dllexport)
#define CUSTOM_API __declspec(dllexport)
#pragma once
#include <windows.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core/cuda.hpp>
// Kind of vision task a model performs; passed as 'detectionType' to the
// engine's creation/loading entry points.
enum DetectionType {
CLASSIFICATION = 0,
DETECTION = 1,
SEGMENTATION = 2,
FACEDETECTOR = 3,
FACERECOGNIZER = 4,
LICENSEPLATE = 5,
TEXTSCENSE = 6,
KEYPOINT = 7,
OBB = 8 // Oriented Bounding Box
};
// Model family / backend identifier; passed as 'modelType' to the engine's
// creation, loading and optimization entry points. Values must stay stable —
// they cross the DLL boundary as plain ints.
enum ModelType {
TENSORFLOW = 0,
YOLOV4 = 1,
YOLOV5 = 2,
YOLOV8 = 3,
TENSORRT = 4,
OPENVINO = 5,
FACEDETECT = 6,
FACERECOGNIZE = 7,
ALPR = 8,
OCR = 9,
ANOMALIB = 10,
POSE = 11,
SAM = 12,
ODHUBMODEL = 13,
YOLOV10RTOD = 14, // TensorRT Object Detection for Yolov10
YOLOV10OVOD = 15, // OpenVINO Object Detection for Yolov10
CUSTOMDETECTOR = 16, // Custom Detector
YOLOV12 = 17, // YoloV12 standard for yolov12
CUSTOMPY = 18, // Custom Python script model
MOTIONDETECTOR = 19, // Motion Detector,
ONNXCL = 20,
ONNXPOSE = 21,
RTPOSE = 22,
ONNXSEG = 23,
RTSEG = 24,
ONNXOBB = 25,
RTOBB = 26,
MOVIENET = 27,
ONNXSAM3 = 28,
RTSAM3 = 29,
ONNXYOLO = 30,
RTYOLO = 31
};
// Multi-object tracker backends selectable via ANSLIB::SetTracker().
enum TrackerType {
BYTETRACK = 0,
UCMC = 1,
OCSORT = 2
};
namespace ANSCENTER {
// Integer 2D point (used for ROI vertices; see the JSON example below).
struct Point {
int x, y;
};
// Describes one ROI kind the model supports and how many instances of it
// the UI may create (see "ROI_Config" in the JSON example below).
struct ROIConfig {
bool Rectangle; // ROI may be drawn as a rectangle
bool Polygon; // ROI may be drawn as a polygon
bool Line; // ROI may be drawn as a line
int MinItems; // minimum number of instances required
int MaxItems; // maximum number of instances allowed
std::string Name;
std::string ROIMatch; // matching rule, e.g. "All Corners" / "Centre Point"
};
// One configurable model parameter (see "Parameters" in the JSON example).
struct Parameter {
std::string Name;
std::string DataType; // e.g. "Boolean", "Integer", "List-Single", "Range"
int NoOfDecimals;
int MaxValue;
int MinValue;
std::string StartValue;
std::vector<std::string> ListItems; // choices for list/range types
std::string DefaultValue;
std::string Value; // current value, serialized as text
};
// A concrete ROI instance drawn by the user (see "ROI_Values" in the example).
struct ROIValue {
std::string ROIMatch;
std::vector<Point> ROIPoints; // vertices; 2 points for a line, 4+ otherwise
std::string Option; // e.g. "Inside ROI", "Above", "Left side"
std::string Name;
int OriginalImageSize; // reference width the points were drawn at
};
// Full parameter/ROI bundle exchanged with the engine (GetODParameters).
struct Params {
std::vector<ROIConfig> ROI_Config;
std::vector<std::string> ROI_Options;
std::vector<Parameter> Parameters;
std::vector<ROIValue> ROI_Values;
};
/* Example
{
"ROI_Config":[
{
"Rectangle":true,
"Polygon":true,
"Line":false,
"MinItems":0,
"MaxItems":3,
"Name":"Traffic Light",
"ROI-Match":"All Corners"
},
{
"Rectangle":true,
"Polygon":false,
"Line":false,
"MinItems":1,
"MaxItems":1,
"Name":"Car Zone",
"ROI-Match":"All Corners"
},
{
"Rectangle":false,
"Polygon":false,
"Line":true,
"MinItems":1,
"MaxItems":2,
"Name":"Cross Line",
"ROI-Match":"All Corners"
}
],
"ROI_Options":[
"Inside ROI",
"Inside ROI",
"Both Directions"
],
"Parameters":[
{
"Name":"Para1",
"DataType":"Boolean",
"NoOfdecimals":0,
"MaxValue":0,
"MinValue":0,
"StartValue":"",
"ListItems":[],
"DefaultValue":"",
"Value":"true"
},
{
"Name":"Para2",
"DataType":"Integer",
"NoOfdecimals":0,
"MaxValue":5,
"MinValue":1,
"StartValue":"2",
"ListItems":[],
"DefaultValue":"",
"Value":"3"
},
{
"Name":"Para3",
"DataType":"List-Single",
"NoOfdecimals":0,
"MaxValue":0,
"MinValue":0,
"StartValue":"",
"ListItems":["A","B","C"],
"DefaultValue":"",
"Value":"A"
},
{
"Name":"Para4",
"DataType":"Range",
"NoOfdecimals":0,
"MaxValue":100,
"MinValue":50,
"StartValue":">,60",
"ListItems":[">","<"],
"DefaultValue":"",
"Value":">,52.000000"
}
],
"ROI_Values":[
{
"ROI-Match":"Centre Point",
"ROIPoints":[
{"x":269,"y":134},
{"x":777,"y":134},
{"x":777,"y":457},
{"x":269,"y":457}
],
"Option":"Inside ROI",
"Name":"Car Zone 1",
"OriginalImageSize":1920
},
{
"ROI-Match":"Centre Point",
"ROIPoints":[{"x":280,"y":613},{"x":1108,"y":280}],
"Option":"Above",
"Name":"Cross Line 1",
"OriginalImageSize":1920
},
{
"ROI-Match":"Centre Point",
"ROIPoints":[{"x":1511,"y":383},{"x":1283,"y":754}],
"Option":"Left side",
"Name":"Cross Line 2",
"OriginalImageSize":1920
},
{
"ROI-Match":"Centre Point",
"ROIPoints":[
{"x":229,"y":161},
{"x":964,"y":161},
{"x":964,"y":628},
{"x":229,"y":628}
],
"Option":"Left side",
"Name":"Traffic Light 1",
"OriginalImageSize":1920
},
{
"ROI-Match":"Centre Point",
"ROIPoints":[
{"x":1115,"y":304},
{"x":1730,"y":304},
{"x":1730,"y":695},
{"x":1115,"y":695}
],
"Option":"Left side",
"Name":"Traffic Light 2",
"OriginalImageSize":1920
},
{
"ROI-Match":"Centre Point",
"ROIPoints":[
{"x":678,"y":683},
{"x":1217,"y":683},
{"x":1217,"y":1026},
{"x":678,"y":1026}
],
"Option":"Left side",
"Name":"Traffic Light 3",
"OriginalImageSize":1920
}
]
}
*/
// One detection/recognition result returned by the engine.
struct Object
{
int classId{ 0 };
int trackId{ 0 }; // assigned by the tracker when tracking is enabled
std::string className{};
float confidence{ 0.0 };
cv::Rect box{};
std::vector<cv::Point2f> polygon; // Polygon that contain x1,y1,x2,y2,x3,y3,x4,y4
cv::Mat mask{}; // Face image in box (cropped) or mask image for segmentation
cv::cuda::GpuMat gpuMask{}; // GPU-resident face crop (set by NV12 affine warp, avoids re-upload)
std::vector<float> kps{}; // Pose estimate: 17 keypoints, or oriented bounding box (xcenter, ycenter, width, height, angle)
std::string extraInfo; // More information such as facial recognition
std::string cameraId; // Use to check if this object belongs to any camera
//std::string attributes; // Attributes such as keypoint string
};
// Facade over ANSODEngine.dll. The constructor loads the DLL and resolves
// its C exports into the function pointers below; every public method is a
// thin guarded forwarder. Int-returning wrappers yield -1 when the DLL or
// the individual export is unavailable.
class ANSLIB_API ANSLIB {
public:
ANSLIB();
~ANSLIB() noexcept;
// Heap factory / destroyer pair for callers across module boundaries.
[[nodiscard]] static ANSLIB* Create();
static void Destroy(ANSLIB* instance);
// Creates the engine-side detector handle; 'labels' receives the label map.
// Returns 1 on success, -1 on failure, 0 when the export is missing.
[[nodiscard]] int Initialize(const char* licenseKey,
const char* modelFilePath,
const char* modelFileZipPassword,
float modelThreshold,
float modelConfThreshold,
float modelNMSThreshold,
int modelType,
int detectionType, int loadEngineOnCreation,std::string &labels);
[[nodiscard]] int RunInference(cv::Mat cvImage, const char* cameraId, std::vector<ANSCENTER::Object>& detectionResult);
[[nodiscard]] int RunInferenceWithOptions(cv::Mat cvImage, const char* cameraId, const char* activeROIMode, std::vector<ANSCENTER::Object>& detectionResult);
[[nodiscard]] int OptimizeModel(const char* modelFilePath, const char* modelFileZipPassword, int modelType, int modelDetectionType,int fp16);
[[nodiscard]] int Optimize(bool fp16); // Perform optimization on the loaded model on current model folder
[[nodiscard]] int GetEngineType();
[[nodiscard]] int LoadModelFromFolder(const char* licenseKey, const char* modelName, const char* className,
float detectionScoreThreshold, float modelConfThreshold, float modelMNSThreshold,
int autoDetectEngine, int modelType, int detectionType, int loadEngineOnCreation, const char* modelFolder, std::string& labelMap);
[[nodiscard]] int DetectMovement(cv::Mat image, const char* cameraId, std::vector<Object>& results);
[[nodiscard]] cv::Rect GetActiveWindow(cv::Mat cvImage); // empty rect on failure
[[nodiscard]] int SetPrompt(const char* text);
[[nodiscard]] int SetTracker(int trackerType, int enableTracker);
[[nodiscard]] int SetTrackerParameters(const char* trackerParams);
private:
HMODULE dllHandle = nullptr; // ANSODEngine.dll module handle (owned)
bool loaded = false; // true when the DLL and every export resolved
void* ANSHandle = nullptr; // opaque detector handle owned by the DLL
// Private forwarders mirroring the DLL's raw export signatures.
const char* CreateANSODHandle_CS(void** Handle,
const char* licenseKey,
const char* modelFilePath,
const char* modelFileZipPassword,
float modelThreshold,
float modelConfThreshold,
float modelNMSThreshold,
int autoDetectEngine,
int modelType,
int detectionType,
int loadEngineOnCreation);
int RunInference_CPP(void** Handle, cv::Mat** cvImage, const char* cameraId, std::vector<ANSCENTER::Object>& detectionResult);
int RunInferenceComplete_CPP(void** Handle, cv::Mat** cvImage, const char* cameraId, const char* activeROIMode,std::vector<ANSCENTER::Object>& detectionResult);
const char* OptimizeModelStr_CS(const char* modelFilePath, const char* modelFileZipPassword, int modelType, int modelDetectionType,int fp16);
int ReleaseANSODHandle(void** Handle);
int GetActiveRect(cv::Mat cvImage, cv::Rect& activeWindow);
int GetODParameters(ANSCENTER::Params& param);
int UpdateDetectionMinScore(float scoreThreshold);
[[nodiscard]] bool IsLoaded() const;
// Export signature typedefs, one per GetProcAddress'd symbol.
typedef const char* (*CreateANSODHandle_CSFuncT)(void**, const char*, const char*, const char*, float, float, float, int, int, int, int);
typedef const char* (*OptimizeModelStr_CSFuncT)(const char*, const char*, int, int, int);
typedef int (*RunInference_CPPFuncT)(void**, cv::Mat**, const char*, std::vector<ANSCENTER::Object>& detectionResult);
typedef int (*RunInferenceComplete_CPPFuncT)(void**, cv::Mat**, const char*, const char*, std::vector<ANSCENTER::Object>& detectionResult);
typedef int (*ReleaseANSODHandleFuncT)(void**);
typedef int(*GetEngineTypeFuncT)();
typedef int(*LoadModelFromFolderFuncT)(void**, const char*, const char*, const char*, float, float, float, int, int, int,int, const char*,std::string&);
typedef int(*GetActiveRectFuncT)(void**, cv::Mat, cv::Rect&);
typedef int(*DetectMovementFuncT)(void**, cv::Mat, const char*, std::vector<ANSCENTER::Object>&);
typedef int(*OptimizeFuncT)(void**, bool);
typedef int(*GetODParametersFuncT)(void**, ANSCENTER::Params&);
typedef int(*UpdateDetectionMinScoreFuncT)(void**, float); // Add 'Func' to match other typedefs
typedef int(*SetPromptFuncT)(void**, const char*);
typedef int(*SetTrackerFuncT)(void**, int, int);
typedef int(*SetTrackerParametersFuncT)(void**, const char*);
// Cached export pointers; nullptr until the constructor resolves them.
CreateANSODHandle_CSFuncT CreateANSODHandle_CSFunc = nullptr;
OptimizeModelStr_CSFuncT OptimizeModelStr_CSFunc = nullptr;
ReleaseANSODHandleFuncT ReleaseANSODHandleFunc = nullptr;
RunInference_CPPFuncT RunInference_CPPFunc = nullptr;
RunInferenceComplete_CPPFuncT RunInferenceComplete_CPPFunc = nullptr;
GetEngineTypeFuncT GetEngineTypeFunc = nullptr;
LoadModelFromFolderFuncT LoadModelFromFolderFunc = nullptr;
GetActiveRectFuncT GetActiveRectFunc = nullptr;
DetectMovementFuncT DetectMovementFunc = nullptr;
OptimizeFuncT OptimizeFunc = nullptr;
GetODParametersFuncT GetODParametersFunc = nullptr;
UpdateDetectionMinScoreFuncT UpdateDetectionMinScoreFunc = nullptr;
SetPromptFuncT SetPromptFunc = nullptr;
SetTrackerFuncT SetTrackerFunc = nullptr;
SetTrackerParametersFuncT SetTrackerParametersFunc = nullptr;
};
}
// Plugin-facing mirror of ANSCENTER::Object, kept outside the namespace so
// custom-detector DLLs can use it without depending on the engine headers.
struct CustomObject
{
int classId{ 0 };
int trackId{ 0 };
std::string className{};
float confidence{ 0.0 };
cv::Rect box{};
std::vector<cv::Point2f> polygon; // Polygon that contain x1,y1,x2,y2,x3,y3,x4,y4
cv::Mat mask{}; // Face image in box (cropped) or mask image for segmentation
cv::cuda::GpuMat gpuMask{}; // GPU-resident face crop (set by NV12 affine warp, avoids re-upload)
std::vector<float> kps{}; // Pose estimate: 17 keypoints, or oriented bounding box (xcenter, ycenter, width, height, angle)
std::string extraInfo; // More information such as facial recognition
std::string cameraId; // Use to check if this object belongs to any camera
//std::string attributes; // Attributes such as keypoint string
};
// Plugin-facing mirrors of the ANSCENTER ROI/parameter structs above.
struct CustomPoint { int x, y; };
struct CustomROIConfig {
bool Rectangle;
bool Polygon;
bool Line;
int MinItems;
int MaxItems;
std::string Name;
std::string ROIMatch;
};
struct CustomParameter {
std::string Name;
std::string DataType;
int NoOfDecimals;
int MaxValue;
int MinValue;
std::string StartValue;
std::vector<std::string> ListItems;
std::string DefaultValue;
std::string Value;
};
struct CustomROIValue {
std::string ROIMatch;
std::vector<CustomPoint> ROIPoints;
std::string Option;
std::string Name;
int OriginalImageSize;
};
struct CustomParams {
std::vector<CustomROIConfig> ROI_Config;
std::vector<std::string> ROI_Options;
std::vector<CustomParameter> Parameters;
std::vector<CustomROIValue> ROI_Values;
};
class CUSTOM_API IANSCustomClass
{
protected:
std::string _modelDirectory; // The directory where the model is located
float _detectionScoreThreshold{ 0.5 };
CustomParams _params; // Parameters for the model
bool _loadEngineOnCreate{ false }; // Load engine on create
public:
virtual bool Initialize(const std::string& modelDirectory, float detectionScoreThreshold, std::string& labelMap) = 0;
virtual bool OptimizeModel(bool fp16) = 0;
virtual std::vector<CustomObject> RunInference(const cv::Mat& input) = 0;
virtual std::vector<CustomObject> RunInference(const cv::Mat& input, const std::string& camera_id) = 0;
virtual bool ConfigureParameters(CustomParams& param) = 0;
virtual bool Destroy() = 0;
bool SetParameters(const CustomParams& param) {
try {
this->_params.ROI_Config.clear();
for (auto& cf : param.ROI_Config) {
this->_params.ROI_Config.push_back(cf);
}
this->_params.ROI_Options.clear();
for (auto& op : param.ROI_Options) {
this->_params.ROI_Options.push_back(op);
}
this->_params.Parameters.clear();
for (auto& par : param.Parameters) {
this->_params.Parameters.push_back(par);
}
this->_params.ROI_Values.clear();
for (auto& roi : param.ROI_Values) {
this->_params.ROI_Values.push_back(roi);
}
return true;
}
catch (...) {
return false;
}
};
void SetLoadEngineOnCreate(bool loadEngineOnCreate) {
this->_loadEngineOnCreate = loadEngineOnCreate;
}
};
#endif

30
ANSLIB/CMakeLists.txt Normal file
View File

@@ -0,0 +1,30 @@
# ANSLIB — Main facade DLL (aggregates all modules)
add_library(ANSLIB SHARED
ANSLIB.cpp
ANSLIB.h
dllmain.cpp
pch.cpp
pch.h
framework.h
)
# PUBLIC so consumers of ANSLIB also see this directory and the shared headers.
target_include_directories(ANSLIB PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}
${SHARED_INCLUDE_DIR}
)
target_link_libraries(ANSLIB
PUBLIC ANSLibsLoader
PUBLIC opencv
# The facade links against all modules
PRIVATE ANSODEngine
PRIVATE ANSCV
PRIVATE ANSMOT
PRIVATE ANSUtilities
PRIVATE ANSOCR
PRIVATE ANSFR
PRIVATE ANSLPR
)
# ANSLIB_EXPORTS/_USRDLL select dllexport in the headers for this DLL build.
target_compile_definitions(ANSLIB PRIVATE UNICODE _UNICODE ANSLIB_EXPORTS _USRDLL)
target_precompile_headers(ANSLIB PRIVATE pch.h)

19
ANSLIB/dllmain.cpp Normal file
View File

@@ -0,0 +1,19 @@
// dllmain.cpp : Defines the entry point for the DLL application.
#include "pch.h"
// Minimal DLL entry point: no work is performed for any attach/detach
// notification (important — DllMain runs under the OS loader lock, so
// heavy initialization belongs elsewhere, e.g. in ANSLibsLoader).
BOOL APIENTRY DllMain( HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
) noexcept
{
switch (ul_reason_for_call)
{
case DLL_PROCESS_ATTACH:
case DLL_THREAD_ATTACH:
case DLL_THREAD_DETACH:
case DLL_PROCESS_DETACH:
break;
}
return TRUE;
}

7
ANSLIB/framework.h Normal file
View File

@@ -0,0 +1,7 @@
#pragma once
#define WIN32_LEAN_AND_MEAN // Exclude rarely-used stuff from Windows headers
#define NOMINMAX // Prevent windows.h from defining min/max macros
// which break std::min / std::max (C2589)
// Windows Header Files
#include <windows.h>

5
ANSLIB/pch.cpp Normal file
View File

@@ -0,0 +1,5 @@
// pch.cpp: source file corresponding to the pre-compiled header
#include "pch.h"
// When you are using pre-compiled headers, this source file is necessary for compilation to succeed.

18
ANSLIB/pch.h Normal file
View File

@@ -0,0 +1,18 @@
// pch.h: This is a precompiled header file.
// Files listed below are compiled only once, improving build performance for future builds.
// This also affects IntelliSense performance, including code completion and many code browsing features.
// However, files listed here are ALL re-compiled if any one of them is updated between builds.
// Do not add files here that you will be updating frequently as this negates the performance advantage.
#ifndef PCH_H
#define PCH_H
// add headers that you want to pre-compile here
#include "framework.h"
#include <memory>
#include <string>
#include <string_view>
#include <vector>
#include <mutex>
#endif //PCH_H