Refactor project structure

This commit is contained in:
2026-03-28 19:56:39 +11:00
parent 1d267378b2
commit 8a2e721058
511 changed files with 59 additions and 48 deletions

View File

@@ -0,0 +1,670 @@
#include "ANSANNHUB.h"
#ifndef NOMINMAX
#define NOMINMAX
#endif
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#include <windows.h>
#include <algorithm>
#include <chrono>
#include <cmath>
#include <condition_variable>
#include <cstdint>
#include <cstdio>
#include <memory>
#include <mutex>
#include <new>
#include <numeric>
#include <sstream>
#include <unordered_map>
// --- Refcounted handle registry infrastructure ---
// Meyers singletons (function-local statics) so the registry, its mutex and
// its condition variable are constructed on first use, avoiding static
// initialization-order problems.
static std::unordered_map<ANSCENTER::ANNHUBAPI*, int>& ANNHUBHandleRegistry() {
    // Maps each live handle to its current reference count.
    static std::unordered_map<ANSCENTER::ANNHUBAPI*, int> s;
    return s;
}
static std::mutex& ANNHUBHandleRegistryMutex() {
    // Guards every access to ANNHUBHandleRegistry().
    static std::mutex m;
    return m;
}
static std::condition_variable& ANNHUBHandleRegistryCV() {
    // Signalled whenever a refcount drops (see ReleaseANNHUBHandleRef);
    // UnregisterANNHUBHandle waits on it.
    static std::condition_variable cv;
    return cv;
}
// Adds a freshly created handle to the registry with an initial refcount of 1.
static void RegisterANNHUBHandle(ANSCENTER::ANNHUBAPI* h) {
    std::lock_guard<std::mutex> lock(ANNHUBHandleRegistryMutex());
    auto& registry = ANNHUBHandleRegistry();
    registry[h] = 1;
}
// Bumps the refcount iff the handle is still registered; returns nullptr for
// stale/unknown pointers so callers can bail out safely.
static ANSCENTER::ANNHUBAPI* AcquireANNHUBHandle(ANSCENTER::ANNHUBAPI* h) {
    std::lock_guard<std::mutex> lock(ANNHUBHandleRegistryMutex());
    auto& registry = ANNHUBHandleRegistry();
    const auto entry = registry.find(h);
    if (entry == registry.end()) {
        return nullptr;
    }
    ++entry->second;
    return h;
}
// Drops one reference; erases the registry entry when the count reaches zero.
// Always notifies waiters (UnregisterANNHUBHandle blocks on the CV).
// Returns true iff this call removed the last reference.
static bool ReleaseANNHUBHandleRef(ANSCENTER::ANNHUBAPI* h) {
    std::lock_guard<std::mutex> lock(ANNHUBHandleRegistryMutex());
    auto& registry = ANNHUBHandleRegistry();
    const auto entry = registry.find(h);
    if (entry == registry.end()) {
        return false;
    }
    const bool lastRef = (--entry->second <= 0);
    if (lastRef) {
        registry.erase(entry);
    }
    ANNHUBHandleRegistryCV().notify_all();
    return lastRef;
}
// Removes the caller's own reference and then blocks (up to 30 s) until all
// outstanding AcquireANNHUBHandle references have been released, so the
// caller can delete the object afterwards.
// Returns false if the handle is not (or no longer) registered.
static bool UnregisterANNHUBHandle(ANSCENTER::ANNHUBAPI* h) {
    std::unique_lock<std::mutex> lk(ANNHUBHandleRegistryMutex());
    auto it = ANNHUBHandleRegistry().find(h);
    if (it == ANNHUBHandleRegistry().end()) return false;
    // Drop the owner's reference before waiting for in-flight users.
    it->second--;
    // Wait until every other reference is gone: the entry was either erased
    // by ReleaseANNHUBHandleRef or its count has reached zero.
    if (!ANNHUBHandleRegistryCV().wait_for(lk, std::chrono::seconds(30), [&]() {
        auto it2 = ANNHUBHandleRegistry().find(h);
        return it2 == ANNHUBHandleRegistry().end() || it2->second <= 0;
    })) {
        // NOTE(review): on timeout the handle is unregistered anyway, so a
        // still-running inference could touch freed memory once the caller
        // deletes the object — confirm this trade-off is intended.
        OutputDebugStringA("WARNING: UnregisterANNHUBHandle timed out after 30s waiting for refcount to reach zero.\n");
    }
    ANNHUBHandleRegistry().erase(h);
    return true;
}
class ANNHUBHandleGuard {
ANSCENTER::ANNHUBAPI* engine;
public:
explicit ANNHUBHandleGuard(ANSCENTER::ANNHUBAPI* e) : engine(e) {}
~ANNHUBHandleGuard() { if (engine) ReleaseANNHUBHandleRef(engine); }
ANNHUBHandleGuard(const ANNHUBHandleGuard&) = delete;
ANNHUBHandleGuard& operator=(const ANNHUBHandleGuard&) = delete;
explicit operator bool() const { return engine != nullptr; }
ANSCENTER::ANNHUBAPI* get() const { return engine; }
};
// --- End handle registry infrastructure ---
// Result of the one-time global licence verification; written only by
// VerifyGlobalANSANNHUBLicense under the once_flag below.
static bool ansannhubLicenceValid = false;
// Global once_flag to protect license checking
static std::once_flag ansannhubLicenseOnceFlag;
namespace ANSCENTER {
// Rectified linear unit applied element-wise: oVal[i] = max(iVal[i], 0).
// oVal is resized to match iVal.
void ReLu(std::vector<double>& iVal, std::vector<double>& oVal)
{
    oVal.resize(iVal.size());
    std::size_t k = 0;
    for (const double v : iVal) {
        oVal[k++] = (v >= 0.0) ? v : 0.0;
    }
}
// Logistic sigmoid applied element-wise: oVal[i] = 1 / (1 + e^(-iVal[i])).
// oVal is resized to match iVal.
void LogSig(std::vector<double>& iVal, std::vector<double>& oVal)
{
    oVal.resize(iVal.size());
    std::size_t k = 0;
    for (const double v : iVal) {
        oVal[k++] = 1.0 / (1.0 + std::exp(-v));
    }
}
// Hyperbolic-tangent sigmoid applied element-wise using the identity
// tanh(x) = 2 / (1 + e^(-2x)) - 1. oVal is resized to match iVal.
void TanSig(std::vector<double>& iVal, std::vector<double>& oVal)
{
    oVal.resize(iVal.size());
    std::size_t k = 0;
    for (const double v : iVal) {
        oVal[k++] = 2.0 / (1.0 + std::exp(-2.0 * v)) - 1.0;
    }
}
// Linear (identity) activation: copies the input vector into the output.
void PureLin(std::vector<double>& iVal, std::vector<double>& oVal)
{
    oVal.assign(iVal.begin(), iVal.end());
}
// Softmax activation: oVal[i] = e^(iVal[i]) / sum_j e^(iVal[j]).
// A single-element input is treated as a logistic sigmoid (kept from the
// original implementation). The maximum is subtracted before exponentiating
// so exp() cannot overflow for large activations; mathematically the result
// is unchanged (the original produced NaN for inputs around >= 710).
void SoftMax(std::vector<double>& iVal, std::vector<double>& oVal)
{
    const std::size_t dim = iVal.size();
    oVal.resize(dim);
    if (dim == 0) return;
    if (dim == 1) {
        // Degenerate one-class case: logistic sigmoid.
        oVal[0] = 1.0 / (1.0 + std::exp(-iVal[0]));
        return;
    }
    // Numerical stabilisation: shift by the maximum activation.
    double maxVal = iVal[0];
    for (std::size_t i = 1; i < dim; ++i) {
        if (iVal[i] > maxVal) maxVal = iVal[i];
    }
    double denom = 0.0;
    for (std::size_t i = 0; i < dim; ++i) {
        oVal[i] = std::exp(iVal[i] - maxVal);
        denom += oVal[i];
    }
    for (std::size_t i = 0; i < dim; ++i) {
        oVal[i] /= denom;
    }
}
// Dispatches to the activation selected by `mode`:
// 0 = PureLin, 1 = ReLu, 2 = LogSig, 3 = TanSig, 4 = SoftMax.
// Any unrecognised mode falls back to TanSig (same as the original default).
void ActivationFunction(std::vector<double>& iVal, std::vector<double>& oVal, int mode) {
    switch (mode) {
    case 0:
        PureLin(iVal, oVal);
        return;
    case 1:
        ReLu(iVal, oVal);
        return;
    case 2:
        LogSig(iVal, oVal);
        return;
    case 4:
        SoftMax(iVal, oVal);
        return;
    case 3:
    default:
        TanSig(iVal, oVal);
        return;
    }
}
// Runs the licence verification exactly once per process (invoked under
// ansannhubLicenseOnceFlag) and stores the result in the global flag.
// Default productId = 1000, product code "ANNHUB-LV".
static void VerifyGlobalANSANNHUBLicense(const std::string& licenseKey) {
    try {
        ansannhubLicenceValid = ANSCENTER::ANSLicenseHelper::LicenseVerification(licenseKey, 1000, "ANNHUB-LV");
    }
    catch (const std::exception&) {
        // Best-effort: any failure leaves the global flag false.
        //this->_logger.LogFatal("ANSOPENCV::CheckLicense. Error:", e.what(), __FILE__, __LINE__);
    }
}
// Validates the licence for this instance. The (expensive) verification runs
// only once per process via std::call_once; every instance then copies the
// shared global result into its own _licenseValid flag.
void ANNHUBAPI::CheckLicense() {
    try {
        // Check once globally.
        std::call_once(ansannhubLicenseOnceFlag, [this]() {
            VerifyGlobalANSANNHUBLicense(_licenseKey);
        });
        // Update this instance's local license flag.
        _licenseValid = ansannhubLicenceValid;
    }
    catch (const std::exception&) {
        // Swallow: a failed check simply leaves _licenseValid == false.
        //this->_logger.LogFatal("ANNHUBAPI::CheckLicense. Error:", e.what(), __FILE__, __LINE__);
    }
}
// Default-constructs an un-created network with a 1-10-1 topology and
// LogSig activations; all normalisation state is zero-initialised.
ANNHUBAPI::ANNHUBAPI()
{
    // Control creation
    isCreated = 0;
    // Structural parameters
    nInputNodes = 1;
    nHiddenNodes = 10;
    nOutputNodes = 1;
    hiddenActivation = 2; // default = 2 (LogSig, see ActivationFunction)
    outputActivation = 2; // default = 2 (LogSig)
    dataNormalisationModeInput = 0;
    dataNormalisationModeOutput = 0;
    // Normalisation ranges: zero-initialise so Pre/PostProcessing can never
    // read indeterminate values (the original left these uninitialized).
    yminInput = 0.0;
    ymaxInput = 0.0;
    yminOutput = 0.0;
    ymaxOutput = 0.0;
}
// Destructor: releases the network via Destroy(). Every exception is
// swallowed because a destructor must never throw.
ANNHUBAPI::~ANNHUBAPI() noexcept
{
    try {
        Destroy();
    }
    catch (...) {}
}
// Releases the network's storage, but only when one was actually built.
// Safe to call multiple times (FreeNeuralNetwork resets isCreated).
void ANNHUBAPI::Destroy()
{
    if (isCreated) {
        FreeNeuralNetwork();
    }
}
// Clears all weight, bias, buffer and normalisation storage and marks the
// network as not created.
void ANNHUBAPI::FreeNeuralNetwork()
{
    try {
        // Weights and biases.
        IW.clear();
        LW.clear();
        Ib.clear();
        Lb.clear();
        // I/O buffers.
        nInput.clear();
        nOutput.clear();
        // Normalisation bounds.
        xminInput.clear();
        xmaxInput.clear();
        xminOutput.clear();
        xmaxOutput.clear();
        // Network must be re-Created before the next use.
        isCreated = 0;
    }
    catch (const std::exception&) {
        // vector::clear does not throw; kept for parity with the file's style.
        //this->_logger.LogFatal("ANNHUBAPI::FreeNeuralNetwork. Error:", e.what(), __FILE__, __LINE__);
    }
}
/*
   mapminmax normalisation (applied per element i):

            (x[i] - xmin[i]) * (ymax - ymin)
   x[i] = ----------------------------------- + ymin
                 (xmax[i] - xmin[i])
*/
// Normalises the raw input in place according to dataNormalisationModeInput.
// Mode 0 is linear (identity); every other mode applies the mapminmax formula
// above — in the original, case 1 and the default branch were byte-identical
// duplicates, so they are merged here.
// The loop is also clamped to Input.size() so an undersized vector cannot
// cause an out-of-bounds read (behaviour is unchanged for well-sized input).
void ANNHUBAPI::PreProcessing(std::vector<double>& Input)
{
    if (dataNormalisationModeInput == 0) return; // linear: no change
    const int limit = (static_cast<int>(Input.size()) < nInputNodes)
        ? static_cast<int>(Input.size())
        : nInputNodes;
    for (int i = 0; i < limit; i++) {
        // Skip constant columns to avoid division by zero.
        if (xmaxInput[i] != xminInput[i]) {
            Input[i] = (Input[i] - xminInput[i]) * (ymaxInput - yminInput) / (xmaxInput[i] - xminInput[i]) + yminInput;
        }
    }
}
// Reverse-maps the network output back to the original data range according
// to dataNormalisationModeOutput. Mode 0 is linear (identity); every other
// mode applies the inverse mapminmax formula — in the original, case 1 and
// the default branch were byte-identical duplicates, merged here.
// The loop is clamped to Output.size() so an undersized vector cannot cause
// an out-of-bounds access (unchanged for well-sized output).
void ANNHUBAPI::PostProcessing(std::vector<double>& Output)
{
    if (dataNormalisationModeOutput == 0) return; // linear: no change
    if (ymaxOutput == yminOutput) return;         // degenerate range: avoid /0
    const int limit = (static_cast<int>(Output.size()) < nOutputNodes)
        ? static_cast<int>(Output.size())
        : nOutputNodes;
    for (int i = 0; i < limit; i++) {
        Output[i] = (Output[i] - yminOutput) * (xmaxOutput[i] - xminOutput[i]) / (ymaxOutput - yminOutput) + xminOutput[i];
    }
}
// Loads an ANNHUB-exported model file (text format) and builds the network.
// Returns 0 on success, -2 if the file cannot be opened, -1 on a wrong file
// type or an exception.
// Every value in the file is obfuscated by adding a random offset stored near
// the top of the file, hence the "value - randNum" extraction throughout.
int ANNHUBAPI::ImportANNFromFile(std::string filename)
{
    try {
        FILE* rawFile = nullptr;
        int err = fopen_s(&rawFile, filename.c_str(), "r"); // r: Opens for reading.
        if (err != 0) return -2;
        // RAII wrapper guarantees fclose() on every return path — the
        // original leaked the FILE* on the early "wrong type" return below.
        std::unique_ptr<FILE, int (*)(FILE*)> fp(rawFile, &fclose);
        float value = 0.0f, randNum = 0.0f;
        // 0. Check that the file was exported for the C language (type id 1).
        fscanf_s(fp.get(), "%f", &value);
        if (static_cast<int>(value) != 1) return -1;
        // 1. Obfuscation offset used for all subsequent values.
        fscanf_s(fp.get(), "%f", &randNum);
        // 2. Structure IDs.
        int trainingEngine, hlAct, olAct, costFunct, prePro, postPro, evalModel;
        fscanf_s(fp.get(), "%f", &value); trainingEngine = static_cast<int>(value);
        fscanf_s(fp.get(), "%f", &value); hlAct = static_cast<int>(value);
        fscanf_s(fp.get(), "%f", &value); olAct = static_cast<int>(value);
        fscanf_s(fp.get(), "%f", &value); costFunct = static_cast<int>(value);
        fscanf_s(fp.get(), "%f", &value); prePro = static_cast<int>(value);
        fscanf_s(fp.get(), "%f", &value); postPro = static_cast<int>(value);
        fscanf_s(fp.get(), "%f", &value); evalModel = static_cast<int>(value);
        // 2.1 Activation ids are stored with a +10 offset.
        hiddenActivation = hlAct - 10;
        outputActivation = olAct - 10;
        // 2.2 Normalisation-mode ids are stored with a +1000 offset.
        dataNormalisationModeInput = prePro - 1000;
        dataNormalisationModeOutput = postPro - 1000;
        // 3. Topology; Create() allocates all weight/bias/bounds storage.
        int ipNodes, hdNodes, opNodes;
        fscanf_s(fp.get(), "%f", &value); ipNodes = static_cast<int>(value);
        fscanf_s(fp.get(), "%f", &value); hdNodes = static_cast<int>(value);
        fscanf_s(fp.get(), "%f", &value); opNodes = static_cast<int>(value);
        Create(ipNodes, hdNodes, opNodes);
        // 4. Input->hidden weights (file groups them by input node).
        for (int j = 0; j < nInputNodes; j++)
        {
            for (int i = 0; i < nHiddenNodes; i++)
            {
                fscanf_s(fp.get(), "%f", &value);
                IW[i][j] = value - randNum;
            }
        }
        // 4.1 Hidden-layer biases.
        for (int i = 0; i < nHiddenNodes; i++)
        {
            fscanf_s(fp.get(), "%f", &value);
            Ib[i] = value - randNum;
        }
        // 4.2 Hidden->output weights (grouped by hidden node).
        for (int j = 0; j < nHiddenNodes; j++)
        {
            for (int i = 0; i < nOutputNodes; i++)
            {
                fscanf_s(fp.get(), "%f", &value);
                LW[i][j] = value - randNum;
            }
        }
        // 4.3 Output-layer biases.
        for (int i = 0; i < nOutputNodes; i++)
        {
            fscanf_s(fp.get(), "%f", &value);
            Lb[i] = value - randNum;
        }
        // 5. Pre-processing range plus per-input min/max bounds.
        if (dataNormalisationModeInput >= 0) {
            fscanf_s(fp.get(), "%f", &value);
            yminInput = value - randNum;
            fscanf_s(fp.get(), "%f", &value);
            ymaxInput = value - randNum;
            for (int i = 0; i < nInputNodes; i++) {
                fscanf_s(fp.get(), "%f", &value);
                xminInput[i] = value - randNum;
            }
            for (int i = 0; i < nInputNodes; i++) {
                fscanf_s(fp.get(), "%f", &value);
                xmaxInput[i] = value - randNum;
            }
        }
        // 6. Post-processing range plus per-output min/max bounds.
        if (dataNormalisationModeOutput >= 0) {
            fscanf_s(fp.get(), "%f", &value);
            yminOutput = value - randNum;
            fscanf_s(fp.get(), "%f", &value);
            ymaxOutput = value - randNum;
            for (int i = 0; i < nOutputNodes; i++) {
                fscanf_s(fp.get(), "%f", &value);
                xminOutput[i] = value - randNum;
            }
            for (int i = 0; i < nOutputNodes; i++) {
                fscanf_s(fp.get(), "%f", &value);
                xmaxOutput[i] = value - randNum;
            }
        }
        return 0; // fp closed by the unique_ptr deleter
    }
    catch (const std::exception&) {
        //this->_logger.LogFatal("ANNHUBAPI::ImportANNFromFile. Error:", e.what(), __FILE__, __LINE__);
        return -1;
    }
}
// (Re)builds the network storage for the given topology. assign() is used
// instead of resize() so every element is freshly zero-initialised and the
// dimensions are exact even if a previous network was somehow left in place
// (resize() keeps stale elements and stale inner-vector sizes).
void ANNHUBAPI::Create(int inputNodes, int hiddenNodes, int outputNodes)
{
    try {
        if (isCreated != 0) {
            FreeNeuralNetwork();
        }
        nInputNodes = inputNodes;
        nHiddenNodes = hiddenNodes;
        nOutputNodes = outputNodes;
        nInput.assign(inputNodes, 0.0);
        nOutput.assign(outputNodes, 0.0);
        IW.assign(hiddenNodes, std::vector<double>(inputNodes, 0.0));
        LW.assign(outputNodes, std::vector<double>(hiddenNodes, 0.0));
        Ib.assign(hiddenNodes, 0.0);
        Lb.assign(outputNodes, 0.0);
        xminInput.assign(inputNodes, 0.0);
        xmaxInput.assign(inputNodes, 0.0);
        xminOutput.assign(outputNodes, 0.0);
        xmaxOutput.assign(outputNodes, 0.0);
        isCreated = 1;
    }
    catch (const std::exception&) {
        // Allocation failure leaves isCreated unset; Inference() then bails out.
        //this->_logger.LogFatal("ANNHUBAPI::Create. Error:", e.what(), __FILE__, __LINE__);
    }
}
// Stores the credentials, validates the licence, then loads the model file.
// Returns true only when both the licence check and the import succeed.
bool ANNHUBAPI::Init(std::string licenseKey, std::string modelFilePath) {
    try {
        _licenseKey = licenseKey;
        _modelFilePath = modelFilePath;
        CheckLicense();
        if (!_licenseValid) {
            return false;
        }
        // ImportANNFromFile reports success as 0.
        return ImportANNFromFile(_modelFilePath) == 0;
    }
    catch (const std::exception&) {
        //this->_logger.LogFatal("ANNHUBAPI::Init. Error:", e.what(), __FILE__, __LINE__);
        return false;
    }
}
// Runs one forward pass: normalise input, hidden layer, output layer,
// de-normalise. Returns an empty vector on invalid licence, uncreated
// network, wrong input size, or exception.
std::vector<double> ANNHUBAPI::Inference(std::vector<double> ip) {
    try {
        if (!_licenseValid) return {}; // Invalid license
        if (isCreated == 0) return {}; // Network not built
        // Reject inputs that do not match the network topology — the original
        // read ip[j] for j < nInputNodes regardless of ip.size(), which is an
        // out-of-bounds read for short inputs (its own TODO asked for this).
        if (static_cast<int>(ip.size()) != nInputNodes) return {};
        std::vector<double> a1(nHiddenNodes), n1(nHiddenNodes), n2(nOutputNodes);
        PreProcessing(ip);
        // 1. Hidden-layer pre-activations: n1 = IW * ip + Ib.
        for (int i = 0; i < nHiddenNodes; i++) {
            n1[i] = 0;
            for (int j = 0; j < nInputNodes; j++)
            {
                // Skip NaN weights/inputs so one bad value does not poison
                // the whole sum (behaviour preserved from the original).
                if (std::isnan(IW[i][j]) || std::isnan(ip[j])) {
                    continue;
                }
                n1[i] = n1[i] + IW[i][j] * ip[j];
            }
            n1[i] = n1[i] + Ib[i];
        }
        ActivationFunction(n1, a1, hiddenActivation);
        // 2. Output-layer pre-activations: n2 = LW * a1 + Lb.
        for (int i = 0; i < nOutputNodes; i++) {
            n2[i] = 0;
            for (int j = 0; j < nHiddenNodes; j++) {
                n2[i] = n2[i] + LW[i][j] * a1[j];
            }
            n2[i] = n2[i] + Lb[i];
        }
        ActivationFunction(n2, nOutput, outputActivation);
        PostProcessing(nOutput);
        return nOutput;
    }
    catch (const std::exception&) {
        //this->_logger.LogFatal("ANNHUBAPI::Inference. Error:", e.what(), __FILE__, __LINE__);
        return {};
    }
}
}
// Creates and initialises an ANNHUBAPI instance behind an opaque handle.
// Returns 1 on success (*Handle set and registered), 0 on init failure or
// exception (*Handle nulled), -1 on null arguments.
extern "C" __declspec(dllexport) int CreateANNHUBHandle(ANSCENTER::ANNHUBAPI * *Handle, const char* licenseKey, const char* modelFilePath) {
    if (!Handle || !licenseKey || !modelFilePath) return -1;
    try {
        // If the caller's slot still holds a registered handle, tear it down
        // first so repeated Create calls on the same slot do not leak.
        // NOTE(review): *Handle must be nullptr or a previously created
        // handle; an uninitialized pointer here would be caller error.
        if (*Handle) {
            if (UnregisterANNHUBHandle(*Handle)) {
                (*Handle)->Destroy();
                delete *Handle;
            }
            *Handle = nullptr;
        }
        auto ptr = std::make_unique<ANSCENTER::ANNHUBAPI>();
        bool result = ptr->Init(licenseKey, modelFilePath);
        if (result) {
            // Hand ownership out only after a successful Init.
            *Handle = ptr.release();
            RegisterANNHUBHandle(*Handle);
            return 1;
        }
        *Handle = nullptr;
        return 0;
    }
    catch (...) { return 0; }
}
// Destroys and frees the instance behind *Handle, then nulls the slot.
// The object is deleted only when UnregisterANNHUBHandle confirms it was
// still registered — the original deleted unconditionally, which double-frees
// a handle that was already released (Unregister returns false for unknown
// pointers). This also matches CreateANNHUBHandle's reuse path.
// Returns 0 on success, -1 for a null handle/slot.
static int ReleaseANNHUBHandle_Impl(ANSCENTER::ANNHUBAPI** Handle) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    if (UnregisterANNHUBHandle(*Handle)) {
        (*Handle)->Destroy();
        delete *Handle;
    }
    *Handle = nullptr;
    return 0;
}
// Exported release entry point. Uses SEH (__try/__except) so even a hardware
// fault (e.g. access violation from a stale pointer) cannot propagate across
// the DLL boundary; the C++ logic lives in ReleaseANNHUBHandle_Impl because
// MSVC forbids mixing __try with C++ objects needing unwinding in one function.
extern "C" __declspec(dllexport) int ReleaseANNHUBHandle(ANSCENTER::ANNHUBAPI * *Handle) {
    __try {
        return ReleaseANNHUBHandle_Impl(Handle);
    }
    __except (EXCEPTION_EXECUTE_HANDLER) {
        // Best effort: null the caller's slot so it is not reused.
        if (Handle) *Handle = nullptr;
        return 0;
    }
}
// C ABI wrapper around ANNHUBAPI::Inference.
// Returns 0 on success, -1 on invalid args/stale handle, -2 on allocation
// failure, -5 on exception. On success *outputArray points to a buffer
// allocated with new[]; the caller owns it (release with delete[]).
// NOTE(review): the caller has no size output — presumably it sizes reads
// from GetOutputNode(); a failed Inference yields a zero-length buffer.
extern "C" __declspec(dllexport) int ANNHUB_Inference(ANSCENTER::ANNHUBAPI * *pHandle, double* inputArray, int inputSize, double** outputArray) {
    // Check for null pointers and a usable input size.
    if (pHandle == nullptr || *pHandle == nullptr || inputArray == nullptr || outputArray == nullptr || inputSize <= 0) {
        return -1; // Invalid arguments
    }
    // Pin the handle so a concurrent ReleaseANNHUBHandle cannot free it mid-call.
    ANNHUBHandleGuard guard(AcquireANNHUBHandle(*pHandle));
    if (!guard) return -1;
    try {
        // Convert inputArray to std::vector and run the forward pass.
        std::vector<double> inputVector(inputArray, inputArray + inputSize);
        const std::vector<double> inferenceResult = guard.get()->Inference(inputVector);
        const std::size_t resultSize = inferenceResult.size();
        // nothrow form so the null check is meaningful — plain new[] throws
        // instead of returning nullptr, making the original check dead code.
        *outputArray = new (std::nothrow) double[resultSize];
        if (*outputArray == nullptr) return -2; // Memory allocation failed
        std::copy(inferenceResult.begin(), inferenceResult.end(), *outputArray);
        return 0; // Success
    }
    catch (const std::exception&) {
        return -5; // Error code for exception in Inference
    }
}
// LabVIEW-friendly inference wrapper: results are serialised into the given
// LabVIEW string handle (LStrHandle) as a ';'-separated list of values.
// Returns 1 on success, 0 on any failure (LabVIEW-style boolean).
extern "C" __declspec(dllexport) int ANNHUB_InferenceLV(ANSCENTER::ANNHUBAPI * *pHandle, double* inputArray, int inputSize, LStrHandle outputHandle) {
    // Check for null pointers
    if (pHandle == nullptr || *pHandle == nullptr || inputArray == nullptr || outputHandle == nullptr || inputSize <= 0) {
        return 0; // Invalid arguments
    }
    // Pin the handle so a concurrent ReleaseANNHUBHandle cannot free it mid-call.
    ANNHUBHandleGuard guard(AcquireANNHUBHandle(*pHandle));
    if (!guard) return 0;
    try {
        // Convert inputArray to std::vector
        std::vector<double> inputVector(inputArray, inputArray + inputSize);
        // Call Inference
        std::vector<double> inferenceResult = guard.get()->Inference(inputVector);
        int resultSize = inferenceResult.size();
        if (resultSize <= 0) return 0; // Empty result: licence/creation failure
        // Serialise as "v0;v1;...;vn" (no trailing separator).
        std::stringstream ss;
        for (size_t i = 0; i < resultSize; ++i)
        {
            if (i == resultSize - 1) ss << inferenceResult[i];
            else ss << inferenceResult[i]<<";";
        }
        std::string st= ss.str();
        int size = st.length();
        if (size > 0) {
            // Resize the LabVIEW handle via the LabVIEW memory manager, then
            // copy the (non NUL-terminated) bytes and set the length prefix.
            MgErr error;
            error = DSSetHandleSize(outputHandle, sizeof(int32) + size * sizeof(uChar));
            if (error == noErr)
            {
                (*outputHandle)->cnt = size;
                memcpy((*outputHandle)->str, st.c_str(), size);
                return 1;
            }
            else return 0;
        }
        else return 0;
    }
    catch (const std::exception& e) {
        // Log the exception message if you have a logging system
        return 0; // Error code for exception in Inference
    }
}
// --- V2 entry points: accept handle by value (uint64_t) to avoid LabVIEW buffer reuse bug ---
// Same contract as ANNHUB_Inference, but the handle travels by value as a
// uint64_t to avoid LabVIEW's buffer-reuse bug with pointer-to-pointer args.
// Returns 0 success, -1 bad args/stale handle, -2 alloc failure, -5 exception.
// On success *outputArray is a new[]-allocated buffer owned by the caller.
extern "C" __declspec(dllexport) int ANNHUB_Inference_V2(uint64_t handleVal, double* inputArray, int inputSize, double** outputArray) {
    auto* engine = reinterpret_cast<ANSCENTER::ANNHUBAPI*>(handleVal);
    if (!engine) return -1;
    if (inputArray == nullptr || outputArray == nullptr || inputSize <= 0) return -1;
    // Pin the handle so a concurrent release cannot destroy it mid-call.
    ANNHUBHandleGuard guard(AcquireANNHUBHandle(engine));
    if (!guard) return -1;
    try {
        std::vector<double> inputVector(inputArray, inputArray + inputSize);
        const std::vector<double> inferenceResult = guard.get()->Inference(inputVector);
        const std::size_t resultSize = inferenceResult.size();
        // nothrow form so the null check is meaningful — plain new[] throws
        // instead of returning nullptr, making the original check dead code.
        *outputArray = new (std::nothrow) double[resultSize];
        if (*outputArray == nullptr) return -2;
        std::copy(inferenceResult.begin(), inferenceResult.end(), *outputArray);
        return 0;
    }
    catch (const std::exception&) {
        return -5;
    }
}
// LabVIEW wrapper taking the handle by value (uint64_t) — see the V2 note
// above. Serialises the inference result into outputHandle as a
// ';'-separated string. Returns 1 on success, 0 on any failure.
extern "C" __declspec(dllexport) int ANNHUB_InferenceLV_V2(uint64_t handleVal, double* inputArray, int inputSize, LStrHandle outputHandle) {
    auto* _v2h = reinterpret_cast<ANSCENTER::ANNHUBAPI*>(handleVal);
    if (!_v2h) return 0;
    if (inputArray == nullptr || outputHandle == nullptr || inputSize <= 0) return 0;
    // Pin the handle so a concurrent release cannot destroy it mid-call.
    ANNHUBHandleGuard guard(AcquireANNHUBHandle(_v2h));
    if (!guard) return 0;
    try {
        std::vector<double> inputVector(inputArray, inputArray + inputSize);
        std::vector<double> inferenceResult = guard.get()->Inference(inputVector);
        int resultSize = inferenceResult.size();
        if (resultSize <= 0) return 0; // Empty result: licence/creation failure
        // Serialise as "v0;v1;...;vn" (no trailing separator).
        std::stringstream ss;
        for (size_t i = 0; i < resultSize; ++i) {
            if (i == resultSize - 1) ss << inferenceResult[i];
            else ss << inferenceResult[i] << ";";
        }
        std::string st = ss.str();
        int size = st.length();
        if (size > 0) {
            // Resize the LabVIEW handle via the LabVIEW memory manager, then
            // copy the (non NUL-terminated) bytes and set the length prefix.
            MgErr error;
            error = DSSetHandleSize(outputHandle, sizeof(int32) + size * sizeof(uChar));
            if (error == noErr) {
                (*outputHandle)->cnt = size;
                memcpy((*outputHandle)->str, st.c_str(), size);
                return 1;
            }
            else return 0;
        }
        else return 0;
    }
    catch (const std::exception& e) {
        return 0;
    }
}

View File

@@ -0,0 +1,69 @@
#ifndef ANSANNHUB_H
#define ANSANNHUB_H
// NOTE(review): exports are unconditional dllexport; consumers of this header
// outside the DLL would normally expect a dllimport switch — confirm.
#define ANSANNHUB_API __declspec(dllexport)
#include <iostream>
#include <cstdint>
#include "ANSLicense.h"
#include <vector>
//#include "LabVIEWHeader/extcode.h"
// NOTE(review): LStrHandle (used by the ANNHUB_InferenceLV declarations
// below) comes from LabVIEW's extcode.h, which is commented out here —
// presumably provided by another include (e.g. ANSLicense.h); confirm.
namespace ANSCENTER
{
    // Feed-forward neural network (single hidden layer) inference engine.
    // Weights are loaded from an ANNHUB-exported model file; a licence check
    // gates Init/Inference. Inference writes the shared nOutput buffer, so
    // one instance should not be used from multiple threads concurrently.
    class ANSANNHUB_API ANNHUBAPI
    {
    private:
        std::vector<double> nInput; //ANN inputs
        std::vector<double> nOutput; //ANN outputs
        std::vector<std::vector<double>> IW; // input->hidden weights, indexed [hidden][input]
        std::vector<std::vector<double>> LW; // hidden->output weights, indexed [output][hidden]
        std::vector<double> Ib; // hidden-layer biases
        std::vector<double> Lb; // output-layer biases
        // Structural parameters
        int nInputNodes, nHiddenNodes, nOutputNodes;
        int hiddenActivation; // default =2
        int outputActivation; // default =2
        int dataNormalisationModeInput; // default =1;
        int dataNormalisationModeOutput; // default =1;
        // Preprocessing and postprocessing settings
        std::vector<double> xmaxInput, xminInput; // Maximum and minimum of inputs
        double ymaxInput, yminInput; // Maximum and minimum of inputs
        std::vector<double> xmaxOutput, xminOutput; // Maximum and minimum of outputs
        double ymaxOutput, yminOutput; // Maximum and minimum of outputs
        // Control creation
        unsigned char isCreated; // non-zero once Create() has allocated storage
        std::string _licenseKey;
        bool _licenseValid{ false };
        bool _isInitialized{ false };
        std::string _modelFilePath;
    private:
        void PreProcessing(std::vector<double>& Input); // mode =0--> linear, mode =1 mapminmax, mode =2 standarddev
        void PostProcessing(std::vector<double>& Output); // mode =0--> linear, mode =1 mapminmax, mode =2 standarddev
        void Create(int inputNodes, int HiddenNodes, int outputNodes); // allocate storage for the topology
        void FreeNeuralNetwork(); // clear all storage, reset isCreated
        void CheckLicense(); // one-time global licence verification
        int ImportANNFromFile(std::string filename); // 0 ok, -1 bad type, -2 cannot open
    public:
        ANNHUBAPI();
        ~ANNHUBAPI() noexcept;
        // Validates the licence and loads the model; true only if both succeed.
        [[nodiscard]] bool Init(std::string licenseKey, std::string modelFilePath);
        // Forward pass; returns {} on invalid licence / uncreated network / error.
        [[nodiscard]] std::vector<double> Inference(std::vector<double> ip);
        void Destroy();
        [[nodiscard]] int GetOutputNode() { return nOutputNodes; };
    };
}
// C ABI exports (see ANSANNHUB.cpp for return-code conventions).
// CreateANNHUBHandle: 1 success, 0 init failure, -1 null args.
extern "C" __declspec(dllexport) int CreateANNHUBHandle(ANSCENTER::ANNHUBAPI **Handle, const char* licenseKey, const char* modelFilePath);
extern "C" __declspec(dllexport) int ReleaseANNHUBHandle(ANSCENTER::ANNHUBAPI **Handle);
// On success *outputArray is a new[]-allocated buffer the caller must free.
extern "C" __declspec(dllexport) int ANNHUB_Inference(ANSCENTER::ANNHUBAPI **pHandle, double* inputArray, int inputSize, double** outputArray);
extern "C" __declspec(dllexport) int ANNHUB_InferenceLV(ANSCENTER::ANNHUBAPI **pHandle, double* inputArray, int inputSize, LStrHandle outputHandle);
// --- V2 entry points: accept handle by value (uint64_t) to avoid LabVIEW buffer reuse bug ---
extern "C" __declspec(dllexport) int ANNHUB_Inference_V2(uint64_t handleVal, double* inputArray, int inputSize, double** outputArray);
extern "C" __declspec(dllexport) int ANNHUB_InferenceLV_V2(uint64_t handleVal, double* inputArray, int inputSize, LStrHandle outputHandle);
#endif

View File

@@ -0,0 +1,23 @@
# ANNHUB — Artificial Neural Network (ANN) inference hub DLL
# (the sources implement ANSCENTER::ANNHUBAPI plus its C/LabVIEW ABI wrappers;
# the previous "Notification Hub" description was inaccurate)
add_library(ANNHUB SHARED
ANSANNHUB.cpp
ANSANNHUB.h
dllmain.cpp
pch.cpp
pch.h
framework.h
)
target_include_directories(ANNHUB PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}
${SHARED_INCLUDE_DIR}
)
# Licensing backend plus the LabVIEW import library used by the LV wrappers.
target_link_libraries(ANNHUB
PRIVATE ANSLicensingSystem
PRIVATE anslicensing
PRIVATE labview
)
target_compile_definitions(ANNHUB PRIVATE UNICODE _UNICODE ANNHUB_EXPORTS _USRDLL)
target_precompile_headers(ANNHUB PRIVATE pch.h)

View File

@@ -0,0 +1,19 @@
// dllmain.cpp : Defines the entry point for the DLL application.
#include "pch.h"
// Standard no-op DllMain: no per-process or per-thread setup is required,
// so every notification simply falls through and reports success.
BOOL APIENTRY DllMain( HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
) noexcept
{
    switch (ul_reason_for_call)
    {
    case DLL_PROCESS_ATTACH:
    case DLL_THREAD_ATTACH:
    case DLL_THREAD_DETACH:
    case DLL_PROCESS_DETACH:
        break;
    }
    return TRUE;
}

View File

@@ -0,0 +1,7 @@
#pragma once
#define WIN32_LEAN_AND_MEAN // Exclude rarely-used stuff from Windows headers
#define NOMINMAX // Prevent windows.h from defining min/max macros
// which break std::min / std::max (C2589)
// Windows Header Files
#include <windows.h>

View File

@@ -0,0 +1,5 @@
// pch.cpp: source file corresponding to the pre-compiled header
#include "pch.h"
// When you are using pre-compiled headers, this source file is necessary for compilation to succeed.

18
integrations/ANNHUB/pch.h Normal file
View File

@@ -0,0 +1,18 @@
// pch.h: This is a precompiled header file.
// Files listed below are compiled only once, improving build performance for future builds.
// This also affects IntelliSense performance, including code completion and many code browsing features.
// However, files listed here are ALL re-compiled if any one of them is updated between builds.
// Do not add files here that you will be updating frequently as this negates the performance advantage.
#ifndef PCH_H
#define PCH_H
// add headers that you want to pre-compile here
#include "framework.h"
#include <memory>
#include <string>
#include <string_view>
#include <vector>
#include <mutex>
#endif //PCH_H