Add unit tests

This commit is contained in:
2026-04-05 14:30:43 +10:00
parent fed40b0c90
commit f57ed78763
12 changed files with 1013 additions and 216 deletions

31
tests/CMakeLists.txt Normal file
View File

@@ -0,0 +1,31 @@
# Aggregator for the detector test suites: fetches GoogleTest once and
# configures the three per-detector test executables as sub-projects.
project(ANSCustomModels_Tests LANGUAGES CXX)

# ---------- Google Test (fetched once, shared by all sub-projects) ----------
include(FetchContent)
FetchContent_Declare(
    googletest
    GIT_REPOSITORY https://github.com/google/googletest.git
    GIT_TAG v1.14.0
)
# Documented GoogleTest idiom for Windows: link gtest against the shared CRT
# so it matches the DLL-based detector projects it is linked into.
set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
FetchContent_MakeAvailable(googletest)

enable_testing()

# ---------- Common paths (propagated to sub-projects via variables) ----------
# NOTE(review): machine-specific defaults; override on the command line
# (e.g. -DANSLIB_INCLUDE_DIR=...) when building on another host.
set(ANSLIB_INCLUDE_DIR "C:/Projects/ANLS/ANSLIB/ANSLIB" CACHE PATH "ANSLIB public headers")
set(OPENCV_INCLUDE_DIR "C:/ANSLibs/opencv/include" CACHE PATH "OpenCV headers")
set(ANSLIB_LIB_DIR "C:/ProgramData/ANSCENTER/Shared" CACHE PATH "ANSLIB import libs / DLLs")
set(OPENCV_LIB_DIR "C:/ANSLibs/opencv/x64/vc17/lib" CACHE PATH "OpenCV import libs")
set(OPENCV_BIN_DIR "C:/ProgramData/ANSCENTER/Shared" CACHE PATH "OpenCV runtime DLLs")
set(TEST_COMMON_DIR "${CMAKE_CURRENT_SOURCE_DIR}" CACHE PATH "Directory containing TestCommon.h")

# ---------- Place all test .exe files alongside the DLLs they need ----------
# This ensures custom model DLLs (built by sibling projects) land in the same
# directory as the test executables so Windows can find them at runtime.
# Plain (non-cache) variable: sub-directories inherit it and we no longer
# FORCE-overwrite a value the user may have put in the cache.
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin")

# ---------- Sub-project test executables ----------
add_subdirectory(FireNSmokeDetection)
add_subdirectory(HelmetDetection)
add_subdirectory(WeaponDetection)

View File

@@ -0,0 +1,54 @@
# GoogleTest executable for the Fire & Smoke detector, plus a post-build step
# that stages the runtime DLLs next to the binary.
project(FireNSmokeDetection_Tests LANGUAGES CXX)

add_executable(${PROJECT_NAME}
    FireNSmokeDetectionTest.cpp
)

target_compile_features(${PROJECT_NAME} PRIVATE cxx_std_17)

target_compile_definitions(${PROJECT_NAME} PRIVATE
    WIN32_LEAN_AND_MEAN
    NOMINMAX
    $<$<CONFIG:Debug>:_DEBUG>
    $<$<CONFIG:Release>:NDEBUG>
)

# NOTE(review): ${CMAKE_SOURCE_DIR} assumes the detector sources sit at the
# root of the whole build tree; this breaks if tests/ is configured
# standalone — confirm, or consider ${PROJECT_SOURCE_DIR}-relative paths.
target_include_directories(${PROJECT_NAME} PRIVATE
    ${TEST_COMMON_DIR}
    ${ANSLIB_INCLUDE_DIR}
    ${OPENCV_INCLUDE_DIR}
    ${CMAKE_SOURCE_DIR}/ANSCustomFireNSmokeDetection
)

target_link_directories(${PROJECT_NAME} PRIVATE
    ${ANSLIB_LIB_DIR}
    ${OPENCV_LIB_DIR}
)

target_link_libraries(${PROJECT_NAME} PRIVATE
    gtest
    gtest_main
    ANSLIB
    opencv_world4130
    ANSCustomFireNSmokeDetection
)

if(MSVC)
    target_compile_options(${PROJECT_NAME} PRIVATE /W3 /sdl /permissive-)
endif()

# Copy required DLLs next to the test executable so Windows can find them.
# VERBATIM makes the argument escaping platform-independent.
add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD
    # ANSLIB.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different
        "${ANSLIB_LIB_DIR}/ANSLIB.dll"
        "$<TARGET_FILE_DIR:${PROJECT_NAME}>"
    # OpenCV DLL
    COMMAND ${CMAKE_COMMAND} -E copy_if_different
        "${OPENCV_BIN_DIR}/opencv_world4130.dll"
        "$<TARGET_FILE_DIR:${PROJECT_NAME}>"
    COMMENT "Copying runtime DLLs for ${PROJECT_NAME}"
    VERBATIM
)

include(GoogleTest)
# Enumerate tests at ctest time (PRE_TEST) so DLLs only need to be loadable
# when tests actually run, not at build time.
gtest_discover_tests(${PROJECT_NAME} DISCOVERY_MODE PRE_TEST)

View File

@@ -0,0 +1,192 @@
#include "TestCommon.h"
#include "ANSCustomFireNSmoke.h"
// ===========================================================================
// Unit Tests — no model files required
// ===========================================================================
// Fixture for model-free unit tests: `detector` is default-constructed and
// never Initialize()d, so every call below exercises the uninitialized path.
class FireNSmokeUnitTest : public ::testing::Test {
protected:
ANSCustomFS detector;
};
// An empty cv::Mat must be rejected with an empty result set.
TEST_F(FireNSmokeUnitTest, EmptyFrameReturnsNoDetections) {
cv::Mat empty;
auto results = detector.RunInference(empty);
EXPECT_TRUE(results.empty());
}
// A 5x5 frame (far below any usable input size) must also yield nothing.
TEST_F(FireNSmokeUnitTest, TinyFrameReturnsNoDetections) {
cv::Mat tiny = TestUtils::CreateTestFrame(5, 5);
auto results = detector.RunInference(tiny);
EXPECT_TRUE(results.empty());
}
// A normal-sized frame still yields nothing because Initialize() was never called.
TEST_F(FireNSmokeUnitTest, UninitializedDetectorReturnsNoDetections) {
cv::Mat frame = TestUtils::CreateTestFrame(640, 480);
auto results = detector.RunInference(frame);
EXPECT_TRUE(results.empty());
}
// The camera-id overload must behave the same on an uninitialized detector.
TEST_F(FireNSmokeUnitTest, RunInferenceWithCameraId) {
cv::Mat frame = TestUtils::CreateTestFrame(640, 480);
auto results = detector.RunInference(frame, "test_cam_01");
EXPECT_TRUE(results.empty());
}
// ConfigureParameters must succeed without a model and publish the expected
// ROI definition plus the SmokeScore/Sensitivity tuning parameters.
TEST_F(FireNSmokeUnitTest, ConfigureParametersReturnsValidConfig) {
CustomParams params;
bool result = detector.ConfigureParameters(params);
EXPECT_TRUE(result);
// Should have ExclusiveROIs ROI config
ASSERT_FALSE(params.ROI_Config.empty());
EXPECT_EQ(params.ROI_Config[0].Name, "ExclusiveROIs");
EXPECT_TRUE(params.ROI_Config[0].Rectangle);
EXPECT_FALSE(params.ROI_Config[0].Polygon);
EXPECT_FALSE(params.ROI_Config[0].Line);
EXPECT_EQ(params.ROI_Config[0].MinItems, 0);
EXPECT_EQ(params.ROI_Config[0].MaxItems, 20);
// Should have SmokeScore and Sensitivity parameters
ASSERT_GE(params.Parameters.size(), 2u);
bool hasSmokeScore = false;
bool hasSensitivity = false;
for (const auto& p : params.Parameters) {
if (p.Name == "SmokeScore") {
hasSmokeScore = true;
EXPECT_EQ(p.DataType, "float");
EXPECT_EQ(p.MaxValue, 1);
EXPECT_EQ(p.MinValue, 0);
}
if (p.Name == "Sensitivity") {
hasSensitivity = true;
EXPECT_EQ(p.DataType, "float");
}
}
EXPECT_TRUE(hasSmokeScore) << "Missing SmokeScore parameter";
EXPECT_TRUE(hasSensitivity) << "Missing Sensitivity parameter";
}
// Destroy() on a never-initialized detector must still report success.
TEST_F(FireNSmokeUnitTest, DestroySucceeds) {
EXPECT_TRUE(detector.Destroy());
}
// Destroy() must be idempotent.
TEST_F(FireNSmokeUnitTest, DestroyCanBeCalledMultipleTimes) {
EXPECT_TRUE(detector.Destroy());
EXPECT_TRUE(detector.Destroy());
}
// Initialization from a non-existent model directory must fail cleanly.
TEST_F(FireNSmokeUnitTest, InitializeWithInvalidDirectoryFails) {
std::string labelMap;
bool result = detector.Initialize("C:\\NonExistent\\Path\\Model", 0.5f, labelMap);
EXPECT_FALSE(result);
}
// OptimizeModel() requires a prior successful Initialize().
TEST_F(FireNSmokeUnitTest, OptimizeBeforeInitializeReturnsFalse) {
EXPECT_FALSE(detector.OptimizeModel(true));
}
// ===========================================================================
// Integration Tests — require model files on disk
// ===========================================================================
// Fixture for integration tests: loads the real Fire/Smoke model from disk.
// The whole suite is skipped (not failed) when the model bundle is absent.
class FireNSmokeIntegrationTest : public ::testing::Test {
protected:
ANSCustomFS detector;
std::string labelMap;
std::vector<std::string> classes;
void SetUp() override {
if (!TestConfig::ModelExists(TestConfig::FIRE_SMOKE_MODEL_DIR)) {
GTEST_SKIP() << "Fire/Smoke model not found at: " << TestConfig::FIRE_SMOKE_MODEL_DIR;
}
bool ok = detector.Initialize(TestConfig::FIRE_SMOKE_MODEL_DIR, 0.5f, labelMap);
ASSERT_TRUE(ok) << "Failed to initialize Fire/Smoke detector";
classes = TestUtils::ParseLabelMap(labelMap);
}
void TearDown() override {
detector.Destroy();
}
};
// Successful Initialize() must report a non-empty comma-separated label map.
TEST_F(FireNSmokeIntegrationTest, InitializeProducesLabelMap) {
EXPECT_FALSE(labelMap.empty());
EXPECT_FALSE(classes.empty());
}
// A featureless gray frame should produce no detections (false-positive check).
TEST_F(FireNSmokeIntegrationTest, InferenceOnSolidFrameReturnsNoDetections) {
cv::Mat frame = TestUtils::CreateTestFrame(1920, 1080);
auto results = detector.RunInference(frame, "test_cam");
EXPECT_TRUE(results.empty()) << "Solid gray frame should not trigger fire/smoke";
}
// Smoke check only: a small frame must not crash; results are not asserted.
TEST_F(FireNSmokeIntegrationTest, InferenceOnSmallFrame) {
cv::Mat frame = TestUtils::CreateTestFrame(320, 240);
auto results = detector.RunInference(frame, "test_cam");
SUCCEED();
}
// Smoke check only: a 4K frame must not crash; results are not asserted.
TEST_F(FireNSmokeIntegrationTest, InferenceOnLargeFrame) {
cv::Mat frame = TestUtils::CreateTestFrame(3840, 2160);
auto results = detector.RunInference(frame, "test_cam");
SUCCEED();
}
// Scan up to 300 frames of the sample video and validate the fields of the
// first frame that yields detections. Skipped when the video is missing.
TEST_F(FireNSmokeIntegrationTest, DetectionResultFieldsAreValid) {
if (!TestConfig::VideoExists(TestConfig::FIRE_SMOKE_VIDEO)) {
GTEST_SKIP() << "Fire/Smoke test video not found";
}
cv::VideoCapture cap(TestConfig::FIRE_SMOKE_VIDEO);
ASSERT_TRUE(cap.isOpened());
bool detectionFound = false;
for (int i = 0; i < 300 && !detectionFound; i++) {
cv::Mat frame;
if (!cap.read(frame)) break;
auto results = detector.RunInference(frame, "test_cam");
for (const auto& obj : results) {
detectionFound = true;
EXPECT_GE(obj.confidence, 0.0f);
EXPECT_LE(obj.confidence, 1.0f);
EXPECT_GE(obj.box.width, 0);
EXPECT_GE(obj.box.height, 0);
// NOTE(review): assumes label indices fire=0, smoke=2 — confirm
// against the model's label map ordering.
EXPECT_TRUE(obj.classId == 0 || obj.classId == 2)
<< "Expected fire (0) or smoke (2), got classId=" << obj.classId;
}
}
cap.release();
}
// Throughput budget: average inference over 100 frames must stay under
// 200 ms/frame. RunVideoFrames returns -1 when the video cannot be opened.
TEST_F(FireNSmokeIntegrationTest, PerformanceBenchmark) {
if (!TestConfig::VideoExists(TestConfig::FIRE_SMOKE_VIDEO)) {
GTEST_SKIP() << "Fire/Smoke test video not found";
}
auto [totalDetections, avgMs] = TestUtils::RunVideoFrames(detector, TestConfig::FIRE_SMOKE_VIDEO, 100);
ASSERT_GE(totalDetections, 0) << "Video could not be opened";
std::cout << "[FireNSmoke] 100 frames: avg=" << avgMs << "ms/frame, "
<< "detections=" << totalDetections << std::endl;
EXPECT_LT(avgMs, 200.0) << "Average inference time exceeds 200ms";
}
// NOTE(review): only checks that two concurrent RunInference calls do not
// crash — it assumes RunInference is intended to be thread-safe; confirm.
TEST_F(FireNSmokeIntegrationTest, ThreadSafetyConcurrentInference) {
cv::Mat frame1 = TestUtils::CreateTestFrame(640, 480, cv::Scalar(100, 100, 100));
cv::Mat frame2 = TestUtils::CreateTestFrame(640, 480, cv::Scalar(200, 200, 200));
std::vector<CustomObject> results1, results2;
std::thread t1([&]() { results1 = detector.RunInference(frame1, "cam_1"); });
std::thread t2([&]() { results2 = detector.RunInference(frame2, "cam_2"); });
t1.join();
t2.join();
SUCCEED();
}

View File

@@ -0,0 +1,54 @@
# GoogleTest executable for the Helmet detector, plus a post-build step that
# stages the runtime DLLs next to the binary.
project(HelmetDetection_Tests LANGUAGES CXX)

add_executable(${PROJECT_NAME}
    HelmetDetectionTest.cpp
)

target_compile_features(${PROJECT_NAME} PRIVATE cxx_std_17)

target_compile_definitions(${PROJECT_NAME} PRIVATE
    WIN32_LEAN_AND_MEAN
    NOMINMAX
    $<$<CONFIG:Debug>:_DEBUG>
    $<$<CONFIG:Release>:NDEBUG>
)

# NOTE(review): ${CMAKE_SOURCE_DIR} assumes the detector sources sit at the
# root of the whole build tree; this breaks if tests/ is configured standalone.
target_include_directories(${PROJECT_NAME} PRIVATE
    ${TEST_COMMON_DIR}
    ${ANSLIB_INCLUDE_DIR}
    ${OPENCV_INCLUDE_DIR}
    ${CMAKE_SOURCE_DIR}/ANSCustomHelmetDetection
)

target_link_directories(${PROJECT_NAME} PRIVATE
    ${ANSLIB_LIB_DIR}
    ${OPENCV_LIB_DIR}
)

target_link_libraries(${PROJECT_NAME} PRIVATE
    gtest
    gtest_main
    ANSLIB
    opencv_world4130
    ANSCustomHelmetDetection
)

if(MSVC)
    target_compile_options(${PROJECT_NAME} PRIVATE /W3 /sdl /permissive-)
endif()

# Copy required DLLs next to the test executable so Windows can find them.
# VERBATIM makes the argument escaping platform-independent.
add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD
    # ANSLIB.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different
        "${ANSLIB_LIB_DIR}/ANSLIB.dll"
        "$<TARGET_FILE_DIR:${PROJECT_NAME}>"
    # OpenCV DLL
    COMMAND ${CMAKE_COMMAND} -E copy_if_different
        "${OPENCV_BIN_DIR}/opencv_world4130.dll"
        "$<TARGET_FILE_DIR:${PROJECT_NAME}>"
    COMMENT "Copying runtime DLLs for ${PROJECT_NAME}"
    VERBATIM
)

include(GoogleTest)
# Enumerate tests at ctest time (PRE_TEST) so DLLs only need to be loadable
# when tests actually run, not at build time.
gtest_discover_tests(${PROJECT_NAME} DISCOVERY_MODE PRE_TEST)

View File

@@ -0,0 +1,162 @@
#include "TestCommon.h"
#include "ANSCustomCodeHelmetDetection.h"
// ===========================================================================
// Unit Tests — no model files required
// ===========================================================================
// Fixture for model-free unit tests: `detector` is default-constructed and
// never Initialize()d, so every call below exercises the uninitialized path.
class HelmetUnitTest : public ::testing::Test {
protected:
ANSCustomHMD detector;
};
// An empty cv::Mat must be rejected with an empty result set.
TEST_F(HelmetUnitTest, EmptyFrameReturnsNoDetections) {
cv::Mat empty;
auto results = detector.RunInference(empty);
EXPECT_TRUE(results.empty());
}
// A 5x5 frame (far below any usable input size) must also yield nothing.
TEST_F(HelmetUnitTest, TinyFrameReturnsNoDetections) {
cv::Mat tiny = TestUtils::CreateTestFrame(5, 5);
auto results = detector.RunInference(tiny);
EXPECT_TRUE(results.empty());
}
// A normal-sized frame still yields nothing because Initialize() was never called.
TEST_F(HelmetUnitTest, UninitializedDetectorReturnsNoDetections) {
cv::Mat frame = TestUtils::CreateTestFrame(640, 480);
auto results = detector.RunInference(frame);
EXPECT_TRUE(results.empty());
}
// The camera-id overload must behave the same on an uninitialized detector.
TEST_F(HelmetUnitTest, RunInferenceWithCameraId) {
cv::Mat frame = TestUtils::CreateTestFrame(640, 480);
auto results = detector.RunInference(frame, "test_cam_01");
EXPECT_TRUE(results.empty());
}
// ConfigureParameters must succeed without a model loaded. (Unlike the
// FireNSmoke suite, the returned config contents are not asserted here.)
TEST_F(HelmetUnitTest, ConfigureParametersReturnsValidConfig) {
CustomParams params;
bool result = detector.ConfigureParameters(params);
EXPECT_TRUE(result);
}
// Destroy() on a never-initialized detector must still report success.
TEST_F(HelmetUnitTest, DestroySucceeds) {
EXPECT_TRUE(detector.Destroy());
}
// Destroy() must be idempotent.
TEST_F(HelmetUnitTest, DestroyCanBeCalledMultipleTimes) {
EXPECT_TRUE(detector.Destroy());
EXPECT_TRUE(detector.Destroy());
}
// Initialization from a non-existent model directory must fail cleanly.
TEST_F(HelmetUnitTest, InitializeWithInvalidDirectoryFails) {
std::string labelMap;
bool result = detector.Initialize("C:\\NonExistent\\Path\\Model", 0.5f, labelMap);
EXPECT_FALSE(result);
}
// OptimizeModel() requires a prior successful Initialize().
TEST_F(HelmetUnitTest, OptimizeBeforeInitializeReturnsFalse) {
EXPECT_FALSE(detector.OptimizeModel(true));
}
// ===========================================================================
// Integration Tests — require model files on disk
// ===========================================================================
// Fixture for integration tests: loads the real Helmet model from disk.
// The whole suite is skipped (not failed) when the model bundle is absent.
class HelmetIntegrationTest : public ::testing::Test {
protected:
ANSCustomHMD detector;
std::string labelMap;
std::vector<std::string> classes;
void SetUp() override {
if (!TestConfig::ModelExists(TestConfig::HELMET_MODEL_DIR)) {
GTEST_SKIP() << "Helmet model not found at: " << TestConfig::HELMET_MODEL_DIR;
}
bool ok = detector.Initialize(TestConfig::HELMET_MODEL_DIR, 0.6f, labelMap);
ASSERT_TRUE(ok) << "Failed to initialize Helmet detector";
classes = TestUtils::ParseLabelMap(labelMap);
}
void TearDown() override {
detector.Destroy();
}
};
// Successful Initialize() must report a non-empty comma-separated label map.
TEST_F(HelmetIntegrationTest, InitializeProducesLabelMap) {
EXPECT_FALSE(labelMap.empty());
EXPECT_FALSE(classes.empty());
}
// A featureless gray frame should produce no detections (false-positive check).
TEST_F(HelmetIntegrationTest, InferenceOnSolidFrameReturnsNoDetections) {
cv::Mat frame = TestUtils::CreateTestFrame(1920, 1080);
auto results = detector.RunInference(frame, "test_cam");
EXPECT_TRUE(results.empty()) << "Solid gray frame should not trigger helmet detection";
}
// Smoke check only: a small frame must not crash; results are not asserted.
TEST_F(HelmetIntegrationTest, InferenceOnSmallFrame) {
cv::Mat frame = TestUtils::CreateTestFrame(320, 240);
auto results = detector.RunInference(frame, "test_cam");
SUCCEED();
}
// Smoke check only: a 4K frame must not crash; results are not asserted.
TEST_F(HelmetIntegrationTest, InferenceOnLargeFrame) {
cv::Mat frame = TestUtils::CreateTestFrame(3840, 2160);
auto results = detector.RunInference(frame, "test_cam");
SUCCEED();
}
// Scan up to 300 frames of the sample video and validate the fields of the
// first frame that yields detections. Skipped when the video is missing.
TEST_F(HelmetIntegrationTest, DetectionResultFieldsAreValid) {
if (!TestConfig::VideoExists(TestConfig::HELMET_VIDEO)) {
GTEST_SKIP() << "Helmet test video not found";
}
cv::VideoCapture cap(TestConfig::HELMET_VIDEO);
ASSERT_TRUE(cap.isOpened());
bool detectionFound = false;
for (int i = 0; i < 300 && !detectionFound; i++) {
cv::Mat frame;
if (!cap.read(frame)) break;
auto results = detector.RunInference(frame, "test_cam");
for (const auto& obj : results) {
detectionFound = true;
EXPECT_GE(obj.confidence, 0.0f);
EXPECT_LE(obj.confidence, 1.0f);
EXPECT_GE(obj.box.width, 0);
EXPECT_GE(obj.box.height, 0);
EXPECT_GE(obj.classId, 0);
}
}
cap.release();
}
// Throughput budget: average inference over 100 frames must stay under
// 200 ms/frame. RunVideoFrames returns -1 when the video cannot be opened.
TEST_F(HelmetIntegrationTest, PerformanceBenchmark) {
if (!TestConfig::VideoExists(TestConfig::HELMET_VIDEO)) {
GTEST_SKIP() << "Helmet test video not found";
}
auto [totalDetections, avgMs] = TestUtils::RunVideoFrames(detector, TestConfig::HELMET_VIDEO, 100);
ASSERT_GE(totalDetections, 0) << "Video could not be opened";
std::cout << "[Helmet] 100 frames: avg=" << avgMs << "ms/frame, "
<< "detections=" << totalDetections << std::endl;
EXPECT_LT(avgMs, 200.0) << "Average inference time exceeds 200ms";
}
// NOTE(review): only checks that two concurrent RunInference calls do not
// crash — it assumes RunInference is intended to be thread-safe; confirm.
TEST_F(HelmetIntegrationTest, ThreadSafetyConcurrentInference) {
cv::Mat frame1 = TestUtils::CreateTestFrame(640, 480, cv::Scalar(100, 100, 100));
cv::Mat frame2 = TestUtils::CreateTestFrame(640, 480, cv::Scalar(200, 200, 200));
std::vector<CustomObject> results1, results2;
std::thread t1([&]() { results1 = detector.RunInference(frame1, "cam_1"); });
std::thread t2([&]() { results2 = detector.RunInference(frame2, "cam_2"); });
t1.join();
t2.join();
SUCCEED();
}

116
tests/TestCommon.h Normal file
View File

@@ -0,0 +1,116 @@
#pragma once
#include <gtest/gtest.h>
#include <opencv2/opencv.hpp>
#include <string>
#include <vector>
#include <sstream>
#include <chrono>
#include <filesystem>
#include "ANSLIB.h"
// ---------------------------------------------------------------------------
// Model directory paths — update these to match your local environment
// ---------------------------------------------------------------------------
// Test asset locations and existence checks. Paths are hard-coded and
// machine-specific; tests GTEST_SKIP when assets are missing, so an absent
// model/video degrades to skipped (not failed) tests.
namespace TestConfig {
// Model bundle directories (one per detector).
inline const std::string FIRE_SMOKE_MODEL_DIR =
"C:\\Programs\\DemoAssets\\ModelsForANSVIS\\ANS_FireSmoke_v2.0";
inline const std::string HELMET_MODEL_DIR =
"C:\\Programs\\DemoAssets\\ModelsForANSVIS\\ANS_Helmet(GPU)_v1.0";
inline const std::string WEAPON_MODEL_DIR =
"C:\\Programs\\DemoAssets\\ModelsForANSVIS\\ANS_WeaponDetection(GPU)_1.0";
// Sample videos used by the integration and performance tests.
inline const std::string FIRE_SMOKE_VIDEO =
"C:\\Programs\\DemoAssets\\Videos\\FireNSmoke\\ANSFireFull.mp4";
inline const std::string HELMET_VIDEO =
"C:\\Programs\\DemoAssets\\Videos\\Helmet\\HM2.mp4";
inline const std::string WEAPON_VIDEO =
"C:\\Programs\\DemoAssets\\Videos\\Weapon\\AK47 Glock.mp4";
// Check if model directory exists
// (true only when `path` exists AND is a directory).
inline bool ModelExists(const std::string& path) {
return std::filesystem::exists(path) && std::filesystem::is_directory(path);
}
// Check if video file exists
inline bool VideoExists(const std::string& path) {
return std::filesystem::exists(path);
}
} // namespace TestConfig
// ---------------------------------------------------------------------------
// Helper utilities
// ---------------------------------------------------------------------------
namespace TestUtils {
// Split a comma-separated label-map string into individual class names.
// Empty segments between commas are kept; a trailing comma produces no
// extra element (std::getline semantics), and "" yields an empty vector.
inline std::vector<std::string> ParseLabelMap(const std::string& labelMap) {
    std::vector<std::string> names;
    std::stringstream stream(labelMap);
    for (std::string token; std::getline(stream, token, ',');) {
        names.push_back(token);
    }
    return names;
}
// Build a solid-color BGR test frame (default mid-gray); no model required.
inline cv::Mat CreateTestFrame(int width, int height, cv::Scalar color = cv::Scalar(128, 128, 128)) {
    cv::Mat frame(height, width, CV_8UC3, color);
    return frame;
}
// Dark frame with a centered rectangle filled with bright orange-red,
// mimicking fire-like colors for negative/positive inference probes.
inline cv::Mat CreateFireLikeFrame(int width, int height) {
    cv::Mat frame(height, width, CV_8UC3, cv::Scalar(50, 50, 50));
    const cv::Rect hotspot(width / 4, height / 4, width / 2, height / 2);
    frame(hotspot) = cv::Scalar(0, 80, 255); // BGR: orange-red
    return frame;
}
// Dark frame with a centered light-gray rectangle, mimicking a smoke haze.
inline cv::Mat CreateSmokeLikeFrame(int width, int height) {
    cv::Mat frame(height, width, CV_8UC3, cv::Scalar(30, 30, 30));
    const cv::Rect haze(width / 4, height / 4, width / 2, height / 2);
    frame(haze) = cv::Scalar(180, 180, 190); // BGR: light gray
    return frame;
}
// Time a single invocation of `func`, returning elapsed wall-clock time
// in milliseconds as a double.
template <typename Func>
double MeasureMs(Func&& func) {
    using clock = std::chrono::high_resolution_clock;
    const auto t0 = clock::now();
    func();
    const auto t1 = clock::now();
    return std::chrono::duration<double, std::milli>(t1 - t0).count();
}
// Feed up to `maxFrames` frames of `videoPath` through `detector` and return
// {total detections, average per-frame time in ms (inference only)}.
// Returns {-1, 0.0} when the video cannot be opened; avg is 0.0 when no
// frame was read.
template <typename Detector>
std::pair<int, double> RunVideoFrames(Detector& detector, const std::string& videoPath, int maxFrames) {
    cv::VideoCapture cap(videoPath);
    if (!cap.isOpened()) return { -1, 0.0 };
    int detections = 0;
    double elapsedMs = 0.0;
    int processed = 0;
    for (cv::Mat frame; processed < maxFrames && cap.read(frame); ++processed) {
        elapsedMs += MeasureMs([&]() {
            auto found = detector.RunInference(frame);
            detections += static_cast<int>(found.size());
        });
    }
    cap.release();
    const double avgMs = (processed > 0) ? elapsedMs / processed : 0.0;
    return { detections, avgMs };
}
} // namespace TestUtils

View File

@@ -0,0 +1,54 @@
# GoogleTest executable for the Weapon detector, plus a post-build step that
# stages the runtime DLLs next to the binary.
project(WeaponDetection_Tests LANGUAGES CXX)

add_executable(${PROJECT_NAME}
    WeaponDetectionTest.cpp
)

target_compile_features(${PROJECT_NAME} PRIVATE cxx_std_17)

target_compile_definitions(${PROJECT_NAME} PRIVATE
    WIN32_LEAN_AND_MEAN
    NOMINMAX
    $<$<CONFIG:Debug>:_DEBUG>
    $<$<CONFIG:Release>:NDEBUG>
)

# NOTE(review): ${CMAKE_SOURCE_DIR} assumes the detector sources sit at the
# root of the whole build tree; this breaks if tests/ is configured standalone.
target_include_directories(${PROJECT_NAME} PRIVATE
    ${TEST_COMMON_DIR}
    ${ANSLIB_INCLUDE_DIR}
    ${OPENCV_INCLUDE_DIR}
    ${CMAKE_SOURCE_DIR}/ANSCustomWeaponDetection
)

target_link_directories(${PROJECT_NAME} PRIVATE
    ${ANSLIB_LIB_DIR}
    ${OPENCV_LIB_DIR}
)

target_link_libraries(${PROJECT_NAME} PRIVATE
    gtest
    gtest_main
    ANSLIB
    opencv_world4130
    ANSCustomWeaponDetection
)

if(MSVC)
    target_compile_options(${PROJECT_NAME} PRIVATE /W3 /sdl /permissive-)
endif()

# Copy required DLLs next to the test executable so Windows can find them.
# VERBATIM makes the argument escaping platform-independent.
add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD
    # ANSLIB.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different
        "${ANSLIB_LIB_DIR}/ANSLIB.dll"
        "$<TARGET_FILE_DIR:${PROJECT_NAME}>"
    # OpenCV DLL
    COMMAND ${CMAKE_COMMAND} -E copy_if_different
        "${OPENCV_BIN_DIR}/opencv_world4130.dll"
        "$<TARGET_FILE_DIR:${PROJECT_NAME}>"
    COMMENT "Copying runtime DLLs for ${PROJECT_NAME}"
    VERBATIM
)

include(GoogleTest)
# Enumerate tests at ctest time (PRE_TEST) so DLLs only need to be loadable
# when tests actually run, not at build time.
gtest_discover_tests(${PROJECT_NAME} DISCOVERY_MODE PRE_TEST)

View File

@@ -0,0 +1,162 @@
#include "TestCommon.h"
#include "ANSCustomCodeWeaponDetection.h"
// ===========================================================================
// Unit Tests — no model files required
// ===========================================================================
// Fixture for model-free unit tests: `detector` is default-constructed and
// never Initialize()d, so every call below exercises the uninitialized path.
class WeaponUnitTest : public ::testing::Test {
protected:
ANSCustomWD detector;
};
// An empty cv::Mat must be rejected with an empty result set.
TEST_F(WeaponUnitTest, EmptyFrameReturnsNoDetections) {
cv::Mat empty;
auto results = detector.RunInference(empty);
EXPECT_TRUE(results.empty());
}
// A 5x5 frame (far below any usable input size) must also yield nothing.
TEST_F(WeaponUnitTest, TinyFrameReturnsNoDetections) {
cv::Mat tiny = TestUtils::CreateTestFrame(5, 5);
auto results = detector.RunInference(tiny);
EXPECT_TRUE(results.empty());
}
// A normal-sized frame still yields nothing because Initialize() was never called.
TEST_F(WeaponUnitTest, UninitializedDetectorReturnsNoDetections) {
cv::Mat frame = TestUtils::CreateTestFrame(640, 480);
auto results = detector.RunInference(frame);
EXPECT_TRUE(results.empty());
}
// The camera-id overload must behave the same on an uninitialized detector.
TEST_F(WeaponUnitTest, RunInferenceWithCameraId) {
cv::Mat frame = TestUtils::CreateTestFrame(640, 480);
auto results = detector.RunInference(frame, "test_cam_01");
EXPECT_TRUE(results.empty());
}
// ConfigureParameters must succeed without a model loaded. (Unlike the
// FireNSmoke suite, the returned config contents are not asserted here.)
TEST_F(WeaponUnitTest, ConfigureParametersReturnsValidConfig) {
CustomParams params;
bool result = detector.ConfigureParameters(params);
EXPECT_TRUE(result);
}
// Destroy() on a never-initialized detector must still report success.
TEST_F(WeaponUnitTest, DestroySucceeds) {
EXPECT_TRUE(detector.Destroy());
}
// Destroy() must be idempotent.
TEST_F(WeaponUnitTest, DestroyCanBeCalledMultipleTimes) {
EXPECT_TRUE(detector.Destroy());
EXPECT_TRUE(detector.Destroy());
}
// Initialization from a non-existent model directory must fail cleanly.
TEST_F(WeaponUnitTest, InitializeWithInvalidDirectoryFails) {
std::string labelMap;
bool result = detector.Initialize("C:\\NonExistent\\Path\\Model", 0.5f, labelMap);
EXPECT_FALSE(result);
}
// OptimizeModel() requires a prior successful Initialize().
TEST_F(WeaponUnitTest, OptimizeBeforeInitializeReturnsFalse) {
EXPECT_FALSE(detector.OptimizeModel(true));
}
// ===========================================================================
// Integration Tests — require model files on disk
// ===========================================================================
// Fixture for integration tests: loads the real Weapon model from disk.
// The whole suite is skipped (not failed) when the model bundle is absent.
class WeaponIntegrationTest : public ::testing::Test {
protected:
ANSCustomWD detector;
std::string labelMap;
std::vector<std::string> classes;
void SetUp() override {
if (!TestConfig::ModelExists(TestConfig::WEAPON_MODEL_DIR)) {
GTEST_SKIP() << "Weapon model not found at: " << TestConfig::WEAPON_MODEL_DIR;
}
bool ok = detector.Initialize(TestConfig::WEAPON_MODEL_DIR, 0.6f, labelMap);
ASSERT_TRUE(ok) << "Failed to initialize Weapon detector";
classes = TestUtils::ParseLabelMap(labelMap);
}
void TearDown() override {
detector.Destroy();
}
};
// Successful Initialize() must report a non-empty comma-separated label map.
TEST_F(WeaponIntegrationTest, InitializeProducesLabelMap) {
EXPECT_FALSE(labelMap.empty());
EXPECT_FALSE(classes.empty());
}
// A featureless gray frame should produce no detections (false-positive check).
TEST_F(WeaponIntegrationTest, InferenceOnSolidFrameReturnsNoDetections) {
cv::Mat frame = TestUtils::CreateTestFrame(1920, 1080);
auto results = detector.RunInference(frame, "test_cam");
EXPECT_TRUE(results.empty()) << "Solid gray frame should not trigger weapon detection";
}
// Smoke check only: a small frame must not crash; results are not asserted.
TEST_F(WeaponIntegrationTest, InferenceOnSmallFrame) {
cv::Mat frame = TestUtils::CreateTestFrame(320, 240);
auto results = detector.RunInference(frame, "test_cam");
SUCCEED();
}
// Smoke check only: a 4K frame must not crash; results are not asserted.
TEST_F(WeaponIntegrationTest, InferenceOnLargeFrame) {
cv::Mat frame = TestUtils::CreateTestFrame(3840, 2160);
auto results = detector.RunInference(frame, "test_cam");
SUCCEED();
}
// Scan up to 300 frames of the sample video and validate the fields of the
// first frame that yields detections. Skipped when the video is missing.
TEST_F(WeaponIntegrationTest, DetectionResultFieldsAreValid) {
if (!TestConfig::VideoExists(TestConfig::WEAPON_VIDEO)) {
GTEST_SKIP() << "Weapon test video not found";
}
cv::VideoCapture cap(TestConfig::WEAPON_VIDEO);
ASSERT_TRUE(cap.isOpened());
bool detectionFound = false;
for (int i = 0; i < 300 && !detectionFound; i++) {
cv::Mat frame;
if (!cap.read(frame)) break;
auto results = detector.RunInference(frame, "test_cam");
for (const auto& obj : results) {
detectionFound = true;
EXPECT_GE(obj.confidence, 0.0f);
EXPECT_LE(obj.confidence, 1.0f);
EXPECT_GE(obj.box.width, 0);
EXPECT_GE(obj.box.height, 0);
EXPECT_GE(obj.classId, 0);
}
}
cap.release();
}
// Throughput budget: average inference over 100 frames must stay under
// 200 ms/frame. RunVideoFrames returns -1 when the video cannot be opened.
TEST_F(WeaponIntegrationTest, PerformanceBenchmark) {
if (!TestConfig::VideoExists(TestConfig::WEAPON_VIDEO)) {
GTEST_SKIP() << "Weapon test video not found";
}
auto [totalDetections, avgMs] = TestUtils::RunVideoFrames(detector, TestConfig::WEAPON_VIDEO, 100);
ASSERT_GE(totalDetections, 0) << "Video could not be opened";
std::cout << "[Weapon] 100 frames: avg=" << avgMs << "ms/frame, "
<< "detections=" << totalDetections << std::endl;
EXPECT_LT(avgMs, 200.0) << "Average inference time exceeds 200ms";
}
// NOTE(review): only checks that two concurrent RunInference calls do not
// crash — it assumes RunInference is intended to be thread-safe; confirm.
TEST_F(WeaponIntegrationTest, ThreadSafetyConcurrentInference) {
cv::Mat frame1 = TestUtils::CreateTestFrame(640, 480, cv::Scalar(100, 100, 100));
cv::Mat frame2 = TestUtils::CreateTestFrame(640, 480, cv::Scalar(200, 200, 200));
std::vector<CustomObject> results1, results2;
std::thread t1([&]() { results1 = detector.RunInference(frame1, "cam_1"); });
std::thread t2([&]() { results2 = detector.RunInference(frame2, "cam_2"); });
t1.join();
t2.join();
SUCCEED();
}