#include "ANSWEBCAM.h" #include "ANSMatRegistry.h" #include "ANSGpuFrameRegistry.h" #include "ANSCVVendorGate.h" // anscv_vendor_gate::IsNvidiaGpuAvailable() #include #include extern "C" { #include } // Custom pixel format for BGR full-res Mat entries in ANSGpuFrameRegistry. static constexpr int ANSCV_PIX_FMT_BGR24 = 1000; #include #include #include namespace ANSCENTER { #define CHECK_CUDAWEB(call) do { \ cudaError_t err = call; \ if (err != cudaSuccess) { \ std::cout<<"CUDA error: " + std::string(cudaGetErrorString(err)); \ } \ } while (0) #define CHECK_NVJPEGWEB(call) do { \ nvjpegStatus_t status = call; \ if (status != NVJPEG_STATUS_SUCCESS) { \ std::cout<<"nvJPEG error: " + std::to_string(status); \ } \ } while (0) void DeleteMediaType(AM_MEDIA_TYPE* pmt) { // allow NULL pointers for coding simplicity if (pmt == NULL) { return; } if (pmt->cbFormat != 0) { CoTaskMemFree((PVOID)pmt->pbFormat); // Strictly unnecessary but tidier pmt->cbFormat = 0; pmt->pbFormat = NULL; } if (pmt->pUnk != NULL) { pmt->pUnk->Release(); pmt->pUnk = NULL; } } ANSWEBCAMPlayer::ANSWEBCAMPlayer() { _deviceId = 0; _resWidth = 0; _resHeight = 0; m_bPaused = false; _isPlaying = false; _lastJpegImage = ""; _cameraNames.clear(); _bbox = cv::Rect(0, 0, 0, 0); _crop = false; _previousPTS = 0; _jpegCompressor = nullptr; } ANSWEBCAMPlayer::~ANSWEBCAMPlayer() noexcept { try { Destroy(); } catch (...) {} if (_jpegCompressor) { tjDestroy(_jpegCompressor); _jpegCompressor = nullptr; } } void ANSWEBCAMPlayer::Destroy() { std::lock_guard lock(_mutex); try { if (cap.isOpened()) { cap.release(); } _resWidth = 0; _resHeight = 0; _deviceId = 0; _lastJpegImage = ""; _previousImage.release(); _isPlaying = false; if (!_cameraNames.empty()) _cameraNames.clear(); } catch (const std::exception& e) { _logger.LogError("ANSWEBCAMPlayer::Destroy. Exception:", e.what(), __FILE__, __LINE__); } catch (...) 
{ _logger.LogError("ANSWEBCAMPlayer::Destroy.", "Unknown exception", __FILE__, __LINE__); } } std::vector ANSWEBCAMPlayer::GetAvailableResolutions(int deviceId) { std::vector resolutions; try { cv::VideoCapture testCap(deviceId, cv::CAP_DSHOW); if (!testCap.isOpened()) { testCap.open(deviceId, cv::CAP_ANY); if (!testCap.isOpened()) { _logger.LogError("ANSWEBCAMPlayer::GetAvailableResolutions", "Unable to open camera", __FILE__, __LINE__); return resolutions; } } // Common resolutions to test std::vector> commonResolutions = { {3264, 2448}, {3264, 1836}, {2592, 1944}, {2048, 1536}, {1920, 1080}, {1600, 1200}, {1280, 1024}, {1280, 720}, {1024, 768}, {1920, 1080}, {800, 600}, {640, 480} }; for (const auto& res : commonResolutions) { testCap.set(cv::CAP_PROP_FRAME_WIDTH, res.first); testCap.set(cv::CAP_PROP_FRAME_HEIGHT, res.second); int actualWidth = static_cast(testCap.get(cv::CAP_PROP_FRAME_WIDTH)); int actualHeight = static_cast(testCap.get(cv::CAP_PROP_FRAME_HEIGHT)); // Check if the camera actually supports this resolution if (actualWidth == res.first && actualHeight == res.second) { resolutions.emplace_back(actualWidth, actualHeight); _logger.LogDebug("ANSWEBCAMPlayer::GetAvailableResolutions", "Supported resolution: " + std::to_string(actualWidth) + "x" + std::to_string(actualHeight), __FILE__, __LINE__); } } testCap.release(); // Sort resolutions by area (largest first) std::sort(resolutions.rbegin(), resolutions.rend()); } catch (const std::exception& e) { _logger.LogError("ANSWEBCAMPlayer::GetAvailableResolutions", e.what(), __FILE__, __LINE__); } return resolutions; } void ANSWEBCAMPlayer::SetEnableMaxResolution(bool enable) { std::lock_guard lock(_mutex); _enableMaxResolution = enable; } bool ANSWEBCAMPlayer::SetPreferredResolution(int deviceId) { // Try Full HD first, then fall back to lower resolutions until one is supported try { std::vector> preferredResolutions = { {1920, 1080}, // Full HD {1280, 720}, // HD {1024, 768}, // XGA {800, 600}, // SVGA 
{640, 480} // VGA }; for (const auto& res : preferredResolutions) { cap.set(cv::CAP_PROP_FRAME_WIDTH, res.first); cap.set(cv::CAP_PROP_FRAME_HEIGHT, res.second); int actualWidth = static_cast(cap.get(cv::CAP_PROP_FRAME_WIDTH)); int actualHeight = static_cast(cap.get(cv::CAP_PROP_FRAME_HEIGHT)); if (actualWidth == res.first && actualHeight == res.second) { _resWidth = actualWidth; _resHeight = actualHeight; _savedResWidth = _resWidth; _savedResHeight = _resHeight; _logger.LogDebug("ANSWEBCAMPlayer::SetPreferredResolution", "Resolution set to: " + std::to_string(actualWidth) + "x" + std::to_string(actualHeight), __FILE__, __LINE__); return true; } } // None of the preferred resolutions worked — use whatever the camera defaults to _resWidth = static_cast(cap.get(cv::CAP_PROP_FRAME_WIDTH)); _resHeight = static_cast(cap.get(cv::CAP_PROP_FRAME_HEIGHT)); _savedResWidth = _resWidth; _savedResHeight = _resHeight; _logger.LogWarn("ANSWEBCAMPlayer::SetPreferredResolution", "No preferred resolution supported. 
Using camera default: " + std::to_string(_resWidth) + "x" + std::to_string(_resHeight), __FILE__, __LINE__); return false; } catch (const std::exception& e) { _logger.LogError("ANSWEBCAMPlayer::SetPreferredResolution", e.what(), __FILE__, __LINE__); return false; } } bool ANSWEBCAMPlayer::SetBestResolution(int deviceId) { try { std::vector availableResolutions = GetAvailableResolutions(deviceId); if (availableResolutions.empty()) { return false; } // Use the largest available resolution (first in sorted list) Resolution bestRes = availableResolutions[0]; _logger.LogDebug("ANSWEBCAMPlayer::SetBestResolution", "Setting resolution to: " + std::to_string(bestRes.width) + "x" + std::to_string(bestRes.height), __FILE__, __LINE__); cap.set(cv::CAP_PROP_FRAME_WIDTH, bestRes.width); cap.set(cv::CAP_PROP_FRAME_HEIGHT, bestRes.height); // Verify the resolution was set int actualWidth = static_cast(cap.get(cv::CAP_PROP_FRAME_WIDTH)); int actualHeight = static_cast(cap.get(cv::CAP_PROP_FRAME_HEIGHT)); _resWidth = actualWidth; _resHeight = actualHeight; _savedResWidth = _resWidth; _savedResHeight = _resHeight; _logger.LogDebug("ANSWEBCAMPlayer::SetBestResolution", "Actual resolution set: " + std::to_string(actualWidth) + "x" + std::to_string(actualHeight), __FILE__, __LINE__); return (actualWidth == bestRes.width && actualHeight == bestRes.height); } catch (const std::exception& e) { _logger.LogError("ANSWEBCAMPlayer::SetBestResolution", e.what(), __FILE__, __LINE__); return false; } } bool ANSWEBCAMPlayer::SetResolution(int width, int height) { std::lock_guard lock(_mutex); try { if (cap.isOpened()) { cap.set(cv::CAP_PROP_FRAME_WIDTH, width); cap.set(cv::CAP_PROP_FRAME_HEIGHT, height); // Verify the resolution was set int actualWidth = static_cast(cap.get(cv::CAP_PROP_FRAME_WIDTH)); int actualHeight = static_cast(cap.get(cv::CAP_PROP_FRAME_HEIGHT)); _resWidth = actualWidth; _resHeight = actualHeight; _savedResWidth = _resWidth; _savedResHeight = _resHeight; if (actualWidth == 
width && actualHeight == height) { _logger.LogDebug("ANSWEBCAMPlayer::SetResolution", "Resolution set successfully: " + std::to_string(width) + "x" + std::to_string(height), __FILE__, __LINE__); return true; } else { _logger.LogWarn("ANSWEBCAMPlayer::SetResolution", "Requested resolution " + std::to_string(width) + "x" + std::to_string(height) + " not supported. Using: " + std::to_string(actualWidth) + "x" + std::to_string(actualHeight), __FILE__, __LINE__); return false; } } // If camera is not opened, just store the values for later use _resWidth = width; _resHeight = height; _savedResWidth = width; _savedResHeight = height; return true; } catch (const std::exception& e) { _logger.LogError("ANSWEBCAMPlayer::SetResolution", e.what(), __FILE__, __LINE__); return false; } } std::string ANSWEBCAMPlayer::GetAvailableResolutionsString(int deviceId) { std::vector resolutions = GetAvailableResolutions(deviceId); std::stringstream ss; for (size_t i = 0; i < resolutions.size(); ++i) { if (i != 0) ss << ";"; ss << resolutions[i].width << "x" << resolutions[i].height; } return ss.str(); } std::vector ANSWEBCAMPlayer::ScanWebcamDevices() { std::vector cameraNames; std::lock_guard guard(mtx); // Locks the mutex for the scope of the function try { ICreateDevEnum* pDevEnum = nullptr; IEnumMoniker* pEnum = nullptr; CoInitialize(nullptr); HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, nullptr, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pDevEnum)); if (SUCCEEDED(hr)) { hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0); if (hr == S_OK) { IMoniker* pMoniker = nullptr; while (pEnum->Next(1, &pMoniker, nullptr) == S_OK) { IPropertyBag* pPropBag; hr = pMoniker->BindToStorage(0, 0, IID_PPV_ARGS(&pPropBag)); if (SUCCEEDED(hr)) { VARIANT var; VariantInit(&var); // Get the description or friendly name. 
hr = pPropBag->Read(L"Description", &var, 0); if (FAILED(hr)) { hr = pPropBag->Read(L"FriendlyName", &var, 0); } if (SUCCEEDED(hr)) { std::wstring wcameraName(var.bstrVal); auto s = std::filesystem::path(wcameraName).string(); std::string cameraName = s; cameraNames.push_back(cameraName); VariantClear(&var); } pPropBag->Release(); } pMoniker->Release(); } pEnum->Release(); } pDevEnum->Release(); } CoUninitialize(); } catch (std::exception& e) { std::cout << "ANSWEBCAMPlayer::ScanWebcamDevices - Exception: " << e.what() << std::endl; } return cameraNames; } // It does not work std::string ANSWEBCAMPlayer::GetSupportedResolutions(int deviceId) { try { std::vector resolutions; SPDLogger& _logger = SPDLogger::GetInstance("ANSWebcamPlayer", false); // Initialize COM HRESULT hr = CoInitialize(NULL); if (FAILED(hr)) { // Log error and return _logger.LogError("ANSWEBCAMPlayer::GetSupportedResolutions:", "Failed to initialize COM.", __FILE__, __LINE__); return ""; } // Create the System Device Enumerator. ICreateDevEnum* pDevEnum; hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pDevEnum)); if (FAILED(hr)) { // Log error and return _logger.LogError("ANSWEBCAMPlayer::GetSupportedResolutions:", "Failed to create System Device Enumerator.", __FILE__, __LINE__); return ""; } // Access the specific video capture device // Create an enumerator for the video capture devices. 
IEnumMoniker* pEnum; pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0); // Access the specific video capture device IMoniker* pMoniker; pEnum->Next(deviceId + 1, &pMoniker, NULL); IBaseFilter* pCap; hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pCap); if (FAILED(hr) || pCap == NULL) { // Log error and return _logger.LogError("ANSWEBCAMPlayer::GetSupportedResolutions:", "Failed to bind to object.", __FILE__, __LINE__); return ""; } IAMStreamConfig* pConfig; hr = pCap->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig); if (FAILED(hr)) { // Log error and return _logger.LogError("ANSWEBCAMPlayer::GetSupportedResolutions:", "Failed to get IAMStreamConfig interface.", __FILE__, __LINE__); return ""; } if (FAILED(hr)) { // Log error and return _logger.LogError("ANSWEBCAMPlayer::GetSupportedResolutions:", "Failed to get IAMStreamConfig interface.", __FILE__, __LINE__); return ""; } int iCount = 0, iSize = 0; pConfig->GetNumberOfCapabilities(&iCount, &iSize); // Check the size to make sure we pass in the correct structure. if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS)) { // Use the video capabilities structure. for (int iFormat = 0; iFormat < iCount; iFormat++) { VIDEO_STREAM_CONFIG_CAPS scc; AM_MEDIA_TYPE* pmtConfig; HRESULT hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc); if (SUCCEEDED(hr)) { // Check the media type is valid if (pmtConfig->formattype == FORMAT_VideoInfo) { // Get the width and height VIDEOINFOHEADER* videoInfoHeader = reinterpret_cast(pmtConfig->pbFormat); std::string resolution = std::to_string(videoInfoHeader->bmiHeader.biWidth) + "x" + std::to_string(videoInfoHeader->bmiHeader.biHeight); resolutions.push_back(resolution); } // Delete the media type when done. 
DeleteMediaType(pmtConfig); } } } // Clean up DirectShow objects pConfig->Release(); pCap->Release(); pMoniker->Release(); pEnum->Release(); pDevEnum->Release(); // Uninitialize COM when done CoUninitialize(); // Concatenate the resolutions with a semicolon separator std::stringstream ss; for (size_t i = 0; i < resolutions.size(); ++i) { if (i != 0) ss << ";"; ss << resolutions[i]; } return ss.str(); } catch (std::exception& e) { std::cout << "ANSWEBCAMPlayer::GetSupportedResolutions - Exception: " << e.what() << std::endl; return ""; } } void ANSWEBCAMPlayer::CheckLicense() { _licenseValid = true; // No need license for webcam //try { // _licenseValid = ANSCENTER::ANSLicenseHelper::LicenseVerification(_licenseKey, 1007, "ANSCV");//Default productId=1005 //} //catch (std::exception& e) { // this->_logger.LogFatal("ANSWEBCAMPlayer::CheckLicense. Error:", e.what(), __FILE__, __LINE__); //} } bool ANSWEBCAMPlayer::Init(std::string licenseKey, std::string url) { std::lock_guard lock(_mutex); _licenseKey = licenseKey; _resWidth = 0; _resHeight = 0; engineType = ANSCENTER::CPU;// ANSLicenseHelper::CheckHardwareInformation(); _jpegCompressor = tjInitCompress(); CheckLicense(); if (!_licenseValid) { _logger.LogError("ANSWEBCAMPlayer::Init.", "Invalid license", __FILE__, __LINE__); return false; } _deviceId = 0; // Default _cameraNames = ANSCENTER::ANSWEBCAMPlayer::ScanWebcamDevices(); auto it = std::find(_cameraNames.begin(), _cameraNames.end(), url); if (it != _cameraNames.end()) { _deviceId = std::distance(_cameraNames.begin(), it); // Verify if the device can be opened cv::VideoCapture cap(_deviceId); if (!cap.isOpened()) { _logger.LogError("ANSWEBCAMPlayer::Init.", "Unable to open camera with ID: " + std::to_string(_deviceId), __FILE__, __LINE__); return false; } cap.release(); // Close the camera after testing return Setup(); } return false; } bool ANSWEBCAMPlayer::Setup() { _previousPTS = 0; std::lock_guard lock(_mutex); try { // Open the camera if not already opened 
if (!cap.isOpened()) { cap.open(_deviceId, cv::CAP_DSHOW); if (!cap.isOpened()) { cap.open(_deviceId, cv::CAP_ANY); if (!cap.isOpened()) { _logger.LogError("ANSWEBCAMPlayer::Setup", "Unable to open camera", __FILE__, __LINE__); return false; } } } // Restore saved resolution if available (stop/start cycle) if (_savedResWidth > 0 && _savedResHeight > 0) { cap.set(cv::CAP_PROP_FRAME_WIDTH, _savedResWidth); cap.set(cv::CAP_PROP_FRAME_HEIGHT, _savedResHeight); int actualWidth = static_cast(cap.get(cv::CAP_PROP_FRAME_WIDTH)); int actualHeight = static_cast(cap.get(cv::CAP_PROP_FRAME_HEIGHT)); _resWidth = actualWidth; _resHeight = actualHeight; if (actualWidth != _savedResWidth || actualHeight != _savedResHeight) { _logger.LogWarn("ANSWEBCAMPlayer::Setup", "Could not restore saved resolution " + std::to_string(_savedResWidth) + "x" + std::to_string(_savedResHeight) + ". Using: " + std::to_string(actualWidth) + "x" + std::to_string(actualHeight), __FILE__, __LINE__); } else { _logger.LogDebug("ANSWEBCAMPlayer::Setup", "Restored saved resolution: " + std::to_string(actualWidth) + "x" + std::to_string(actualHeight), __FILE__, __LINE__); } } // If resolution is not set or is default, find and set resolution based on mode else if ((_resWidth == 0) && (_resHeight == 0)) { if (_enableMaxResolution) { // Max resolution mode: pick the highest available resolution if (!SetBestResolution(_deviceId)) { _resWidth = static_cast(cap.get(cv::CAP_PROP_FRAME_WIDTH)); _resHeight = static_cast(cap.get(cv::CAP_PROP_FRAME_HEIGHT)); _savedResWidth = _resWidth; _savedResHeight = _resHeight; _logger.LogWarn("ANSWEBCAMPlayer::Setup", "Using default camera resolution: " + std::to_string(_resWidth) + "x" + std::to_string(_resHeight), __FILE__, __LINE__); } } else { // Default mode: prefer Full HD, fall back to lower resolutions if (!SetPreferredResolution(_deviceId)) { _resWidth = static_cast(cap.get(cv::CAP_PROP_FRAME_WIDTH)); _resHeight = static_cast(cap.get(cv::CAP_PROP_FRAME_HEIGHT)); 
_savedResWidth = _resWidth; _savedResHeight = _resHeight; _logger.LogWarn("ANSWEBCAMPlayer::Setup", "Using default camera resolution: " + std::to_string(_resWidth) + "x" + std::to_string(_resHeight), __FILE__, __LINE__); } } } else { // User has specified a resolution, try to set it cap.set(cv::CAP_PROP_FRAME_WIDTH, _resWidth); cap.set(cv::CAP_PROP_FRAME_HEIGHT, _resHeight); int actualWidth = static_cast(cap.get(cv::CAP_PROP_FRAME_WIDTH)); int actualHeight = static_cast(cap.get(cv::CAP_PROP_FRAME_HEIGHT)); if (actualWidth != _resWidth || actualHeight != _resHeight) { _logger.LogWarn("ANSWEBCAMPlayer::Setup", "Requested resolution " + std::to_string(_resWidth) + "x" + std::to_string(_resHeight) + " not supported. Using: " + std::to_string(actualWidth) + "x" + std::to_string(actualHeight), __FILE__, __LINE__); _resWidth = actualWidth; _resHeight = actualHeight; } _savedResWidth = _resWidth; _savedResHeight = _resHeight; } // Set additional camera properties for better quality cap.set(cv::CAP_PROP_FPS, 30); // Try to set 30 FPS cap.set(cv::CAP_PROP_BUFFERSIZE, 1); // Reduce buffer size for lower latency _logger.LogDebug("ANSWEBCAMPlayer::Setup", "Camera setup complete. Resolution: " + std::to_string(_resWidth) + "x" + std::to_string(_resHeight), __FILE__, __LINE__); return true; } catch (std::exception& e) { this->_logger.LogError("ANSWEBCAMPlayer::Setup:", e.what(), __FILE__, __LINE__); return false; } } bool ANSWEBCAMPlayer::Reconnect() { std::lock_guard lock(_mutex); try { if (cap.isOpened()) { cap.release(); } Setup(); _isPlaying = cap.isOpened() && cap.grab(); return _isPlaying; } catch (const std::exception& e) { this->_logger.LogError("ANSWEBCAMPlayer::Reconnect. 
Exception:", e.what(), __FILE__, __LINE__); return false; } } bool ANSWEBCAMPlayer::Start() { std::lock_guard lock(_mutex); try { if (!cap.isOpened()) { // Reopen the camera (after Stop released it) cap.open(_deviceId, cv::CAP_DSHOW); if (!cap.isOpened()) { cap.open(_deviceId, cv::CAP_ANY); if (!cap.isOpened()) { _logger.LogError("ANSWEBCAMPlayer::Start", "Unable to open camera", __FILE__, __LINE__); return false; } } } // Restore saved resolution if (_savedResWidth > 0 && _savedResHeight > 0) { cap.set(cv::CAP_PROP_FRAME_WIDTH, _savedResWidth); cap.set(cv::CAP_PROP_FRAME_HEIGHT, _savedResHeight); int actualWidth = static_cast(cap.get(cv::CAP_PROP_FRAME_WIDTH)); int actualHeight = static_cast(cap.get(cv::CAP_PROP_FRAME_HEIGHT)); _resWidth = actualWidth; _resHeight = actualHeight; _logger.LogDebug("ANSWEBCAMPlayer::Start", "Restored resolution: " + std::to_string(actualWidth) + "x" + std::to_string(actualHeight), __FILE__, __LINE__); } // Set camera properties cap.set(cv::CAP_PROP_FPS, 30); cap.set(cv::CAP_PROP_BUFFERSIZE, 1); _isPlaying = cap.isOpened() && cap.grab(); return _isPlaying; } catch (const std::exception& e) { this->_logger.LogError("ANSWEBCAMPlayer::Start. Exception:", e.what(), __FILE__, __LINE__); return false; } } bool ANSWEBCAMPlayer::Stop() { std::lock_guard lock(_mutex); try { _isPlaying = false; // Save current resolution before releasing camera if (_resWidth > 0 && _resHeight > 0) { _savedResWidth = _resWidth; _savedResHeight = _resHeight; } // Actually release the camera so the device is freed if (cap.isOpened()) { cap.release(); } _logger.LogDebug("ANSWEBCAMPlayer::Stop", "Camera stopped. Saved resolution: " + std::to_string(_savedResWidth) + "x" + std::to_string(_savedResHeight), __FILE__, __LINE__); return true; } catch (const std::exception& e) { this->_logger.LogError("ANSWEBCAMPlayer::Stop. 
Exception:", e.what(), __FILE__, __LINE__); return false; } } void ANSWEBCAMPlayer::SetBBox(cv::Rect bbox) { std::lock_guard lock(_mutex); _bbox = bbox; } void ANSWEBCAMPlayer::SetCrop(bool crop) { std::lock_guard lock(_mutex); _crop = crop; } bool ANSWEBCAMPlayer::IsPaused() { std::lock_guard lock(_mutex); return !cap.isOpened(); } bool ANSWEBCAMPlayer::IsPlaying() { std::lock_guard lock(_mutex); return cap.isOpened(); } bool ANSWEBCAMPlayer::IsRecording() { std::lock_guard lock(_mutex); return false;// do not support recording for webcam } //cv::Mat ANSWEBCAMPlayer::GetImage(int& width, int& height, int64_t& pts) { // std::lock_guard lock(_mutex); // if (!_isPlaying) { // if (!_previousImage.empty()) { // width = _previousImage.cols; // height = _previousImage.rows; // pts = _previousPTS; // return _previousImage; // Avoid unnecessary cloning // } // return cv::Mat(); // } // cv::Mat frame, result; // try { // if (!cap.isOpened()) { // if (!_previousImage.empty()) { // width = _previousImage.cols; // height = _previousImage.rows; // pts = _previousPTS; // return _previousImage; // Avoid unnecessary cloning // } // return cv::Mat(); // } // if (!cap.read(frame) || frame.empty()) { // return cv::Mat(); // } // if (_crop) { // // Ensure valid crop region // _bbox.x = max(0, _bbox.x); // _bbox.y = max(0, _bbox.y); // _bbox.width = min(_bbox.width, frame.cols - _bbox.x); // _bbox.height = min(_bbox.height, frame.rows - _bbox.y); // cv::Rect roi = _bbox; // Define roi after updating _bbox // if (roi.width > 0 && roi.height > 0) { // result = frame(roi); // } // else { // result = frame; // } // } // else { // result = frame; // } // if (_imageRotateDeg > 0) { // cv::Point2f center(result.cols / 2.0f, result.rows / 2.0f); // cv::Mat rotationMatrix = cv::getRotationMatrix2D(center, _imageRotateDeg, 1.0); // cv::warpAffine(result, result, rotationMatrix, result.size(), cv::INTER_CUBIC, cv::BORDER_CONSTANT, cv::Scalar()); // } // if (_previousPTS < INT64_MAX) { // 
_previousPTS++; // } // else { // _previousPTS = 0; // Reset to zero when max is reached // } // width = result.cols; // height = result.rows; // pts = _previousPTS; // _previousImage = result; // Store reference instead of clone // return _previousImage; // } // catch (const std::exception& e) { // this->_logger.LogError("ANSWEBCAMPlayer::GetImage. Exception occurred:", e.what(), __FILE__, __LINE__); // } // return cv::Mat(); //} cv::Mat ANSWEBCAMPlayer::GetImage(int& width, int& height, int64_t& pts) { std::lock_guard lock(_mutex); if (!_isPlaying) { if (!_previousImage.empty()) { width = _previousImage.cols; height = _previousImage.rows; pts = _previousPTS; return _previousImage; } return cv::Mat(); } try { if (!cap.isOpened()) { if (!_previousImage.empty()) { width = _previousImage.cols; height = _previousImage.rows; pts = _previousPTS; return _previousImage; } return cv::Mat(); } cv::Mat frame; if (!cap.read(frame) || frame.empty()) { // Return last good frame if available if (!_previousImage.empty()) { width = _previousImage.cols; height = _previousImage.rows; pts = _previousPTS; return _previousImage; } return cv::Mat(); } cv::Mat result; // Apply cropping if enabled if (_crop) { // Validate and clamp crop region _bbox.x = max(0, _bbox.x); _bbox.y = max(0, _bbox.y); _bbox.width = min(_bbox.width, frame.cols - _bbox.x); _bbox.height = min(_bbox.height, frame.rows - _bbox.y); if (_bbox.width > 0 && _bbox.height > 0) { // CRITICAL: Clone to avoid dangling reference result = frame(_bbox).clone(); } else { result = frame.clone(); } } else { result = frame.clone(); } // Apply rotation if specified if (_imageRotateDeg > 0) { // Fast path for 90-degree rotations if (std::abs(_imageRotateDeg - 90.0) < 0.01) { cv::rotate(result, result, cv::ROTATE_90_CLOCKWISE); } else if (std::abs(_imageRotateDeg - 180.0) < 0.01) { cv::rotate(result, result, cv::ROTATE_180); } else if (std::abs(_imageRotateDeg - 270.0) < 0.01) { cv::rotate(result, result, 
cv::ROTATE_90_COUNTERCLOCKWISE); } else { // Arbitrary angle rotation const cv::Point2f center(result.cols / 2.0f, result.rows / 2.0f); cv::Mat rotationMatrix = cv::getRotationMatrix2D(center, _imageRotateDeg, 1.0); cv::Mat rotated; // Use INTER_LINEAR instead of INTER_CUBIC (2-3x faster) cv::warpAffine(result, rotated, rotationMatrix, result.size(), cv::INTER_LINEAR, cv::BORDER_CONSTANT, cv::Scalar()); result = rotated; } } // Update PTS if (_previousPTS < INT64_MAX) { _previousPTS++; } else { _previousPTS = 0; } // Store full-res frame for inference (before display resize) _inferenceImage = result; // Resize for display if display resolution is set if (_displayWidth > 0 && _displayHeight > 0 && (result.cols != _displayWidth || result.rows != _displayHeight)) { cv::Mat displayResult; cv::resize(result, displayResult, cv::Size(_displayWidth, _displayHeight), 0, 0, cv::INTER_LINEAR); result = displayResult; } // Update cached frame _previousImage = result; // Set output parameters width = result.cols; height = result.rows; pts = _previousPTS; return result; } catch (const cv::Exception& e) { this->_logger.LogError("ANSWEBCAMPlayer::GetImage. OpenCV exception:", e.what(), __FILE__, __LINE__); } catch (const std::exception& e) { this->_logger.LogError("ANSWEBCAMPlayer::GetImage. Exception:", e.what(), __FILE__, __LINE__); } catch (...) { this->_logger.LogError("ANSWEBCAMPlayer::GetImage. 
Unknown exception", "", __FILE__, __LINE__); } return cv::Mat(); } cv::Mat ANSWEBCAMPlayer::GetInferenceImage() { std::lock_guard lock(_mutex); return _inferenceImage; } void ANSWEBCAMPlayer::SetDisplayResolution(int width, int height) { std::lock_guard lock(_mutex); _displayWidth = width; _displayHeight = height; } void ANSWEBCAMPlayer::EnableAudio(bool status) { // please support audio enable for webcam } void ANSWEBCAMPlayer::SetAudioVolume(int volume) { // support audio volumne } std::string ANSWEBCAMPlayer::VectorToCommaSeparatedString(const std::vector& inputVector) { std::string result; for (size_t i = 0; i < inputVector.size(); ++i) { result += inputVector[i]; if (i < inputVector.size() - 1) { result += ";"; } } return result; } void ANSWEBCAMPlayer::uploadPlanarBGRToGPU(const cv::Mat& inputMat, unsigned char** data) { // Refuse on non-NVIDIA — cudaMalloc/cudaMemcpy are NVIDIA-only. // The public entry point encodeMatToJpegWithNvJPEG() also guards, // but defense-in-depth in case a future caller wires this up directly. if (!anscv_vendor_gate::IsNvidiaGpuAvailable()) { this->_logger.LogWarn("ANSWEBCAMPlayer::uploadPlanarBGRToGPU", "skipped — non-NVIDIA hardware, nvJPEG path unavailable", __FILE__, __LINE__); if (data) *data = nullptr; return; } std::lock_guard lock(_mutex); try { int width = inputMat.cols; int height = inputMat.rows; size_t channelSize = 3 * width * height; // Allocate GPU memory for each channel CHECK_CUDAWEB(cudaMalloc(data, channelSize)); // Copy each channel to GPU CHECK_CUDAWEB(cudaMemcpy(*data, inputMat.data, channelSize, cudaMemcpyHostToDevice)); } catch (const std::exception& e) { this->_logger.LogError("ANSWEBCAMPlayer::uploadPlanarBGRToGPU. Exception occurred:", e.what(), __FILE__, __LINE__); } } std::string ANSWEBCAMPlayer::encodeMatToJpegWithNvJPEG(const cv::Mat& inputMat, int quality) { // nvJPEG encoder is NVIDIA-only (part of CUDA toolkit). Refuse on // AMD/Intel/CPU and let the caller fall back to the turbojpeg path. 
if (!anscv_vendor_gate::IsNvidiaGpuAvailable()) { this->_logger.LogWarn("ANSWEBCAMPlayer::encodeMatToJpegWithNvJPEG", "nvJPEG requires NVIDIA GPU; falling back to last cached JPEG", __FILE__, __LINE__); return _lastJpegImage; } std::lock_guard lock(_mutex); try { // Image dimensions int width = inputMat.cols; int height = inputMat.rows; // Device memory for planar BGR unsigned char* data = nullptr; uploadPlanarBGRToGPU(inputMat, &data); CHECK_NVJPEGWEB(nvjpegEncoderParamsSetSamplingFactors(nv_enc_params, NVJPEG_CSS_444, stream)); // Set JPEG encoding parameters CHECK_NVJPEGWEB(nvjpegEncoderParamsSetQuality(nv_enc_params, quality, stream)); CHECK_NVJPEGWEB(nvjpegEncoderParamsSetOptimizedHuffman(nv_enc_params, 1, stream)); // Configure nvjpegImage_t for planar BGR nvjpegImage_t nv_image = {}; nv_image.channel[0] = data; nv_image.pitch[0] = 3 * width; // Red pitch // Compress the image using nvJPEG CHECK_NVJPEGWEB(nvjpegEncodeImage(nv_handle, nv_enc_state, nv_enc_params, &nv_image, NVJPEG_INPUT_BGRI, width, height, stream)); // Get compressed JPEG size size_t length; CHECK_NVJPEGWEB(nvjpegEncodeRetrieveBitstream(nv_handle, nv_enc_state, nullptr, &length, stream)); // Retrieve the JPEG bitstream std::vector jpegBuffer(length); CHECK_NVJPEGWEB(nvjpegEncodeRetrieveBitstream(nv_handle, nv_enc_state, jpegBuffer.data(), &length, stream)); // Synchronize CUDA stream CHECK_CUDAWEB(cudaStreamSynchronize(stream)); // convert to string std::string jpegString(reinterpret_cast(jpegBuffer.data()), length); // Cleanup CHECK_CUDAWEB(cudaFree(data)); return jpegString; } catch (const std::exception& e) { this->_logger.LogError("ANSWEBCAMPlayer::encodeMatToJpegWithNvJPEG. 
Exception occurred:", e.what(), __FILE__, __LINE__); return _lastJpegImage; } } std::string ANSWEBCAMPlayer::encodeJpegString(const cv::Mat& img, int quality) { std::lock_guard lock(_mutex); if (!_isPlaying) return _lastJpegImage; try { unsigned char* jpegBuf = nullptr; if (!_jpegCompressor) { this->_logger.LogError("ANSWEBCAMPlayer::encodeJpegString. Failed to initialize TurboJPEG compressor.", tjGetErrorStr(), __FILE__, __LINE__); return _lastJpegImage; } int maxBufferSize = img.cols * img.rows * 3; jpegBuf = new unsigned char[maxBufferSize]; // Pre-allocated buffer long unsigned int jpegSize = maxBufferSize; // Size of the JPEG image (output) int subsamp = TJSAMP_444; // Chroma subsampling: TJSAMP_444, TJSAMP_422, TJSAMP_420, etc. int pixelFormat = img.channels() == 3 ? TJPF_BGR : TJPF_GRAY; // Pixel format based on channels try { // Compress the image into the pre-allocated buffer int result = tjCompress2(_jpegCompressor, img.data, img.cols, 0, img.rows, pixelFormat, &jpegBuf, &jpegSize, subsamp, quality, TJFLAG_FASTDCT); // Handle compression errors if (result != 0) { this->_logger.LogError("ANSWEBCAMPlayer::encodeJpegString. Compression error:", tjGetErrorStr(), __FILE__, __LINE__); if (jpegBuf) { tjFree(jpegBuf); // Free the buffer if allocated } return _lastJpegImage; } // Create a string from the JPEG buffer std::string jpegString(reinterpret_cast(jpegBuf), jpegSize); _lastJpegImage = jpegString; // Clean up resources tjFree(jpegBuf); return jpegString; } catch (const std::exception& e) { this->_logger.LogError("ANSWEBCAMPlayer::encodeJpegString. 
Exception occurred:", e.what(), __FILE__, __LINE__); // Clean up resources if (jpegBuf) { tjFree(jpegBuf); // Free the buffer if allocated } return _lastJpegImage; } } catch (std::exception& e) { this->_logger.LogError("ANSWEBCAMPlayer::encodeJpegString:", e.what(), __FILE__, __LINE__); // Clean up resources in case of an exception } // Return an empty string in case of failure return ""; } std::string ANSWEBCAMPlayer::MatToBinaryData(const cv::Mat& image) { std::lock_guard lock(_mutex); if (!_isPlaying) return _lastJpegImage; // Check if the image is empty or has invalid data if (image.empty() || !image.data || !image.u) { return _lastJpegImage; } try { // Encode the image to a memory buffer return encodeJpegString(image, 85); } catch (const std::exception& e) { this->_logger.LogFatal("ANSWEBCAMPlayer::MatToBinaryData. Exception occurred:", e.what(), __FILE__, __LINE__); } catch (...) { this->_logger.LogFatal("ANSWEBCAMPlayer::MatToBinaryData.", "Unknown exception occurred.", __FILE__, __LINE__); } // Return an empty string in case of failure return _lastJpegImage; } std::string ANSWEBCAMPlayer::GetJpegStringImage(int& width, int& height, int64_t& pts) { std::lock_guard lock(_mutex); if (!_isPlaying) return _lastJpegImage; try { cv::Mat image = GetImage(width, height, pts); std::string jpegString = MatToBinaryData(image); image.release(); return jpegString; } catch (const std::exception& e) { this->_logger.LogError("ANSWEBCAMPlayer::GetJpegStringImage. 
Exception occurred:", e.what(), __FILE__, __LINE__); return _lastJpegImage; } } } extern "C" __declspec(dllexport) int CreateANSWebcamPlayerHandle(ANSCENTER::ANSWEBCAMPlayer * *Handle, const char* licenseKey, const char* url) { if (!Handle || !licenseKey || !url) return -1; try { auto ptr = std::make_unique(); bool result = ptr->Init(licenseKey, url); if (result) { *Handle = ptr.release(); extern void anscv_unregister_handle(void*); extern void anscv_register_handle(void*, void(*)(void*)); anscv_register_handle(*Handle, [](void* p) { auto* h = static_cast(p); try { h->Stop(); } catch (...) {} try { h->Destroy(); } catch (...) {} try { delete h; } catch (...) {} }); return 1; } *Handle = nullptr; return 0; } catch (...) { return 0; } } extern "C" __declspec(dllexport) int CreateANSWebcamPlayerWithMaxResoHandle(ANSCENTER::ANSWEBCAMPlayer * *Handle, const char* licenseKey, const char* url) { if (!Handle || !licenseKey || !url) return -1; try { auto ptr = std::make_unique(); ptr->SetEnableMaxResolution(true); bool result = ptr->Init(licenseKey, url); if (result) { *Handle = ptr.release(); extern void anscv_unregister_handle(void*); extern void anscv_register_handle(void*, void(*)(void*)); anscv_register_handle(*Handle, [](void* p) { auto* h = static_cast(p); try { h->Stop(); } catch (...) {} try { h->Destroy(); } catch (...) {} try { delete h; } catch (...) {} }); return 1; } *Handle = nullptr; return 0; } catch (const std::exception& e) { std::cerr << "CreateANSWebcamPlayerWithMaxResoHandle - Exception: " << e.what() << std::endl; if (Handle) *Handle = nullptr; return 0; } catch (...) 
{
    if (Handle) *Handle = nullptr;
    return 0;
}
}

// Unregisters and deletes a player created by the Create* exports.
// The destructor calls Destroy(), so it is not invoked explicitly here
// (avoids a double-destroy). Returns 0 on completion, -1 on a null handle.
extern "C" __declspec(dllexport) int ReleaseANSWebcamPlayerHandle(ANSCENTER::ANSWEBCAMPlayer** Handle) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        extern void anscv_unregister_handle(void*);
        anscv_unregister_handle(*Handle);
        // Destructor calls Destroy() — no need to call it explicitly (avoids double-destroy)
        std::unique_ptr<ANSCENTER::ANSWEBCAMPlayer> ptr(*Handle);
        *Handle = nullptr;
        return 0;
    }
    catch (...) {
        if (Handle) *Handle = nullptr;
        return 0;
    }
}

// Fills cameraNameList with the attached webcam device names.
// Returns 1 when at least one device was found, 0 otherwise.
extern "C" __declspec(dllexport) int ScanStrANSWebcamPlayer(std::vector<std::string>& cameraNameList) {
    try {
        cameraNameList = ANSCENTER::ANSWEBCAMPlayer::ScanWebcamDevices();
        // BUG FIX: the original checked `empty()` and then `size() > 0` inside
        // that branch — dead code that made this always return 0. Report 1
        // whenever devices were actually found.
        if (!cameraNameList.empty()) return 1;
        return 0;
    }
    catch (std::exception& e) {
        std::cout << "ScanStrANSWebcamPlayer - Exception: " << e.what() << std::endl;
        return 0;
    }
}

// LabVIEW variant: writes the device names, comma-separated, into the LStrHandle.
// Returns 1 on success, 0 when no devices were found or the handle resize failed.
extern "C" __declspec(dllexport) int ScanANSWebcamPlayer(LStrHandle cameraNames) {
    try {
        std::vector<std::string> cameraNameList = ANSCENTER::ANSWEBCAMPlayer::ScanWebcamDevices();
        std::string st = ANSCENTER::ANSWEBCAMPlayer::VectorToCommaSeparatedString(cameraNameList);
        int size = st.length();
        if (size > 0) {
            MgErr error;
            // Grow the LabVIEW handle to hold the length prefix plus payload.
            error = DSSetHandleSize(cameraNames, sizeof(int32) + size * sizeof(uChar));
            if (error != mFullErr && error != mZoneErr) {
                (*cameraNames)->cnt = size;
                memcpy((*cameraNames)->str, st.c_str(), size);
                return 1;
            }
            else return 0;
        }
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "ScanANSWebcamPlayer - Exception: " << e.what() << std::endl;
        return 0;
    }
}

// Returns the current frame as a JPEG byte string via jpegImage.
// 1 = frame delivered, 0 = empty frame, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int GetWebcamStrImage(ANSCENTER::ANSWEBCAMPlayer** Handle, int& width, int& height, int64_t& timeStamp, std::string& jpegImage) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        jpegImage = (*Handle)->GetJpegStringImage(width, height, timeStamp);
        if (!jpegImage.empty()) return 1;
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "GetWebcamStrImage - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// LabVIEW variant of GetWebcamStrImage: copies the JPEG bytes into an LStrHandle.
// 1 = frame delivered, 0 = empty frame / resize failure, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int GetWebcamImage(ANSCENTER::ANSWEBCAMPlayer** Handle, int& width, int& height, int64_t& timeStamp, LStrHandle jpegImage) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        std::string jpegString = (*Handle)->GetJpegStringImage(width, height, timeStamp);
        int size = jpegString.length();
        if (size > 0) {
            MgErr error;
            error = DSSetHandleSize(jpegImage, sizeof(int32) + size * sizeof(uChar));
            if (error != mFullErr && error != mZoneErr) {
                (*jpegImage)->cnt = size;
                memcpy((*jpegImage)->str, jpegString.c_str(), size);
                return 1;
            }
            else return 0;
        }
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "GetWebcamImage - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// Legacy implementation of GetWebcamCVImage, kept for reference only.
//extern "C" __declspec(dllexport) int GetWebcamCVImage(ANSCENTER::ANSWEBCAMPlayer** Handle, int& width, int& height, int64_t& timeStamp, cv::Mat** image) {
//    if (!Handle || !(*Handle) || !image) {
//        std::cerr << "Error: Invalid input parameters in GetRTSPCVImage." << std::endl;
//        return -1; // Error code for invalid parameters
//    }
//    try {
//        cv::Mat img = (*Handle)->GetImage(width, height, timeStamp);
//        std::lock_guard lock(webcamMutex); // Ensures thread safety
//        if (img.empty()) {
//            return 0; // No valid image retrieved
//        }
//        // If an image is already allocated, release it
//        if (*image) {
//            delete* image;
//            *image = nullptr;
//        }
//
//        // Allocate new image
//        *image = new cv::Mat(std::move(img)); // Move constructor avoids deep copy
//
//        return 1; // Success
//    }
//    catch (const std::exception& e) {
//        std::cerr << "Exception in GetRTSPCVImage: " << e.what() << std::endl;
//        return -2; // Error code for exceptions
//    }
//    catch (...) {
//        std::cerr << "Exception in GetRTSPCVImage: Unknown exception."
// << std::endl;
// return -2; // Generic error code for exceptions
//}

// Returns the current frame as a cv::Mat via the registered Mat pointer, and
// attaches the full-resolution BGR frame to ANSGpuFrameRegistry so detectors
// can run inference on the original image instead of the display-res copy.
// 1 = frame delivered, 0 = empty frame, -1 = bad args, -2 = exception.
extern "C" __declspec(dllexport) int GetWebcamCVImage(
    ANSCENTER::ANSWEBCAMPlayer** Handle, int& width, int& height, int64_t& timeStamp, cv::Mat** image) {
    // Validate input parameters
    if (!Handle || !*Handle || !image) {
        std::cerr << "Error: Invalid input parameters in GetWebcamCVImage" << std::endl;
        return -1;
    }
    try {
        // Get image (display-res if SetDisplayResolution was called, otherwise original)
        cv::Mat img = (*Handle)->GetImage(width, height, timeStamp);
        if (img.empty()) {
            return 0; // No valid image retrieved
        }
        // anscv_mat_replace has its own internal registry_mutex — no global mutex needed
        anscv_mat_replace(image, std::move(img));

        // Attach full-res BGR frame to ANSGpuFrameRegistry for inference fast-path.
        // ANSRTYOLO::DetectObjects will find this via gpu_frame_lookup and use the
        // full-res image for inference instead of the display-res input.
        cv::Mat infImg = (*Handle)->GetInferenceImage();
        if (!infImg.empty() && infImg.data != (*image)->data) {
            // Keep the previous clone alive one cycle so in-flight consumers
            // holding the old pointer do not read freed memory.
            (*Handle)->_inferenceClonePrev = (*Handle)->_inferenceCloneCurr;
            (*Handle)->_inferenceCloneCurr = infImg.clone();
            GpuFrameData data{};
            data.avframe = nullptr;
            data.yPlane = (*Handle)->_inferenceCloneCurr.data;
            data.uvPlane = nullptr;
            data.yLinesize = static_cast<int>((*Handle)->_inferenceCloneCurr.step[0]);
            data.uvLinesize = 0;
            data.width = (*Handle)->_inferenceCloneCurr.cols;
            data.height = (*Handle)->_inferenceCloneCurr.rows;
            data.pixelFormat = ANSCV_PIX_FMT_BGR24;
            data.gpuIndex = -1; // CPU-resident frame
            data.pts = timeStamp;
            data.isCudaDevicePtr = false;
            data.cpuAvframe = nullptr;
            data.cpuYPlane = nullptr;
            data.cpuUvPlane = nullptr;
            data.cpuYLinesize = 0;
            data.cpuUvLinesize = 0;
            ANSGpuFrameRegistry::instance().attach(*image, std::move(data));
            auto pending = ANSGpuFrameRegistry::instance().drain_pending();
            for (void* p : pending) {
                // pending only contains AVFrame* from RTSP/RTMP/SRT/FLV entries (our entries have avframe=nullptr)
                auto* stale = static_cast<AVFrame*>(p);
                av_frame_free(&stale);
            }
        }
        return 1; // Success
    }
    catch (const cv::Exception& e) {
        std::cerr << "OpenCV exception in GetWebcamCVImage: " << e.what() << std::endl;
        return -2;
    }
    catch (const std::exception& e) {
        std::cerr << "Exception in GetWebcamCVImage: " << e.what() << std::endl;
        return -2;
    }
    catch (...) {
        std::cerr << "Unknown exception in GetWebcamCVImage" << std::endl;
        return -2;
    }
}

// Starts capture. 1 = started, 0 = Start() refused, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int StartWebcamPlayer(ANSCENTER::ANSWEBCAMPlayer** Handle) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        bool result = (*Handle)->Start();
        if (result) return 1;
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "StartWebcamPlayer - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// Reopens the capture device. Same return convention as StartWebcamPlayer.
extern "C" __declspec(dllexport) int ReconnectWebcamPlayer(ANSCENTER::ANSWEBCAMPlayer** Handle) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        bool result = (*Handle)->Reconnect();
        if (result) return 1;
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "ReconnectWebcamPlayer - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// Stops capture. 1 = stopped, 0 = Stop() refused, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int StopWebcamPlayer(ANSCENTER::ANSWEBCAMPlayer** Handle) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        bool result = (*Handle)->Stop();
        if (result) return 1;
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "StopWebcamPlayer - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// Query: 1 = paused, 0 = not paused, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int IsWebcamPlayerPaused(ANSCENTER::ANSWEBCAMPlayer** Handle) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        bool result = (*Handle)->IsPaused();
        if (result) return 1;
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "IsWebcamPlayerPaused - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...)
{
    return -1;
}
}

// Query: 1 = playing, 0 = not playing, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int IsWebcamPlayerRunning(ANSCENTER::ANSWEBCAMPlayer** Handle) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        bool result = (*Handle)->IsPlaying();
        if (result) return 1;
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "IsWebcamPlayerRunning - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// Query: 1 = recording, 0 = not recording, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int IsWebcamPlayerRecording(ANSCENTER::ANSWEBCAMPlayer** Handle) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        bool result = (*Handle)->IsRecording();
        if (result) return 1;
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "IsWebcamPlayerRecording - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// Sets playback audio volume; silently ignores errors and bad handles.
extern "C" __declspec(dllexport) void SetWebcamPlayerAudioVolume(ANSCENTER::ANSWEBCAMPlayer** Handle, int volume) {
    if (Handle == nullptr || *Handle == nullptr) return;
    try {
        (*Handle)->SetAudioVolume(volume);
    }
    catch (...) {
    }
}

// Enables (status == 1) or disables audio; other status values disable.
extern "C" __declspec(dllexport) void EnableWebcamPlayerAudioVolume(ANSCENTER::ANSWEBCAMPlayer** Handle, int status) {
    if (Handle == nullptr || *Handle == nullptr) return;
    try {
        bool audioStatus = false;
        if (status == 1) audioStatus = true;
        (*Handle)->EnableAudio(audioStatus);
    }
    catch (std::exception& e) {
        std::cout << "EnableWebcamPlayerAudioVolume - Exception: " << e.what() << std::endl;
    }
    catch (...) {
    }
}

// Sets the image rotation angle in degrees; silently ignores errors.
extern "C" __declspec(dllexport) void SetWebcamImageRotation(ANSCENTER::ANSWEBCAMPlayer** Handle, double rotationAngle) {
    if (Handle == nullptr || *Handle == nullptr) return;
    try {
        (*Handle)->SetImageRotate(rotationAngle);
    }
    catch (...) {
    }
}

// Looks up cameraName in cameraNameList and, when found, returns its
// supported resolution string (device index = position in the list).
// 1 = resolutions found, 0 = not found / empty / exception.
extern "C" __declspec(dllexport) int ScanSupportedResolutions(std::string cameraName, std::vector<std::string> cameraNameList, std::string& supportedResolution) {
    try {
        for (size_t i = 0; i < cameraNameList.size(); ++i) {
            if (cameraNameList.at(i) == cameraName) {
                supportedResolution = ANSCENTER::ANSWEBCAMPlayer::GetSupportedResolutions(i);
                if (!supportedResolution.empty()) return 1;
                else return 0;
            }
        }
        return 0;
    }
    catch (std::exception& e) {
        std::cout << "ScanSupportedResolutions - Exception: " << e.what() << std::endl;
        return 0;
    }
    catch (...) {
        return 0;
    }
}

// Sets the crop bounding box. 1 = accepted, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int SetBBoxANSWebcamPlayer(ANSCENTER::ANSWEBCAMPlayer** Handle, int x, int y, int width, int height) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        cv::Rect bbox(x, y, width, height);
        (*Handle)->SetBBox(bbox);
        return 1;
    }
    catch (std::exception& e) {
        std::cout << "SetBBoxANSWebcamPlayer - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// Enables (cropFlag == 1) or disables cropping to the bounding box.
extern "C" __declspec(dllexport) int SetCropFlagANSWebcamPlayer(ANSCENTER::ANSWEBCAMPlayer** Handle, int cropFlag) {
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        bool crop = false;
        if (cropFlag == 1) crop = true;
        (*Handle)->SetCrop(crop);
        return 1;
    }
    catch (std::exception& e) {
        std::cout << "SetCropFlagANSWebcamPlayer - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// Sets the display (output) resolution used by GetImage; silently ignores errors.
extern "C" __declspec(dllexport) void SetWebcamDisplayResolution(ANSCENTER::ANSWEBCAMPlayer** Handle, int width, int height) {
    if (Handle == nullptr || *Handle == nullptr) return;
    try {
        (*Handle)->SetDisplayResolution(width, height);
    }
    catch (...)
{ } } // ============================================================================ // V2 entry points — accept uint64_t handle by value (LabVIEW safe) // ============================================================================ extern "C" __declspec(dllexport) int GetWebcamImage_V2(uint64_t handleVal, int& width, int& height, int64_t& timeStamp, LStrHandle jpegImage) { auto* h = reinterpret_cast(handleVal); if (!h) return -1; try { std::string jpegString = h->GetJpegStringImage(width, height, timeStamp); int size = jpegString.length(); if (size > 0) { MgErr error; error = DSSetHandleSize(jpegImage, sizeof(int32) + size * sizeof(uChar)); if (error != mFullErr && error != mZoneErr) { (*jpegImage)->cnt = size; memcpy((*jpegImage)->str, jpegString.c_str(), size); return 1; } else return 0; } else return 0; } catch (std::exception& e) { std::cout << "GetWebcamImage_V2 - Exception: " << e.what() << std::endl; return -1; } catch (...) { return -1; } } extern "C" __declspec(dllexport) int GetWebcamCVImage_V2( uint64_t handleVal, int& width, int& height, int64_t& timeStamp, cv::Mat** image) { auto* h = reinterpret_cast(handleVal); if (!h || !image) return -1; try { cv::Mat img = h->GetImage(width, height, timeStamp); if (img.empty()) { return 0; } anscv_mat_replace(image, std::move(img)); // Attach full-res BGR frame to ANSGpuFrameRegistry for inference fast-path cv::Mat infImg = h->GetInferenceImage(); if (!infImg.empty() && infImg.data != (*image)->data) { h->_inferenceClonePrev = h->_inferenceCloneCurr; h->_inferenceCloneCurr = infImg.clone(); GpuFrameData data{}; data.avframe = nullptr; data.yPlane = h->_inferenceCloneCurr.data; data.uvPlane = nullptr; data.yLinesize = static_cast(h->_inferenceCloneCurr.step[0]); data.uvLinesize = 0; data.width = h->_inferenceCloneCurr.cols; data.height = h->_inferenceCloneCurr.rows; data.pixelFormat = ANSCV_PIX_FMT_BGR24; data.gpuIndex = -1; data.pts = timeStamp; data.isCudaDevicePtr = false; data.cpuAvframe = nullptr; 
data.cpuYPlane = nullptr; data.cpuUvPlane = nullptr; data.cpuYLinesize = 0; data.cpuUvLinesize = 0; ANSGpuFrameRegistry::instance().attach(*image, std::move(data)); auto pending = ANSGpuFrameRegistry::instance().drain_pending(); for (void* p : pending) { auto* stale = static_cast(p); av_frame_free(&stale); } } return 1; } catch (const cv::Exception& e) { std::cerr << "OpenCV exception in GetWebcamCVImage_V2: " << e.what() << std::endl; return -2; } catch (const std::exception& e) { std::cerr << "Exception in GetWebcamCVImage_V2: " << e.what() << std::endl; return -2; } catch (...) { std::cerr << "Unknown exception in GetWebcamCVImage_V2" << std::endl; return -2; } } extern "C" __declspec(dllexport) int StartWebcamPlayer_V2(uint64_t handleVal) { auto* h = reinterpret_cast(handleVal); if (!h) return -1; try { bool result = h->Start(); if (result) return 1; else return 0; } catch (std::exception& e) { std::cout << "StartWebcamPlayer_V2 - Exception: " << e.what() << std::endl; return -1; } catch (...) { return -1; } } extern "C" __declspec(dllexport) int ReconnectWebcamPlayer_V2(uint64_t handleVal) { auto* h = reinterpret_cast(handleVal); if (!h) return -1; try { bool result = h->Reconnect(); if (result) return 1; else return 0; } catch (std::exception& e) { std::cout << "ReconnectWebcamPlayer_V2 - Exception: " << e.what() << std::endl; return -1; } catch (...) { return -1; } } extern "C" __declspec(dllexport) int StopWebcamPlayer_V2(uint64_t handleVal) { auto* h = reinterpret_cast(handleVal); if (!h) return -1; try { bool result = h->Stop(); if (result) return 1; else return 0; } catch (std::exception& e) { std::cout << "StopWebcamPlayer_V2 - Exception: " << e.what() << std::endl; return -1; } catch (...) 
{
    return -1;
}
}

// V2 query: 1 = paused, 0 = not paused, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int IsWebcamPlayerPaused_V2(uint64_t handleVal) {
    auto* h = reinterpret_cast<ANSCENTER::ANSWEBCAMPlayer*>(handleVal);
    if (!h) return -1;
    try {
        bool result = h->IsPaused();
        if (result) return 1;
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "IsWebcamPlayerPaused_V2 - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// V2 query: 1 = playing, 0 = not playing, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int IsWebcamPlayerRunning_V2(uint64_t handleVal) {
    auto* h = reinterpret_cast<ANSCENTER::ANSWEBCAMPlayer*>(handleVal);
    if (!h) return -1;
    try {
        bool result = h->IsPlaying();
        if (result) return 1;
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "IsWebcamPlayerRunning_V2 - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// V2 query: 1 = recording, 0 = not recording, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int IsWebcamPlayerRecording_V2(uint64_t handleVal) {
    auto* h = reinterpret_cast<ANSCENTER::ANSWEBCAMPlayer*>(handleVal);
    if (!h) return -1;
    try {
        bool result = h->IsRecording();
        if (result) return 1;
        else return 0;
    }
    catch (std::exception& e) {
        std::cout << "IsWebcamPlayerRecording_V2 - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// V2 of SetWebcamPlayerAudioVolume; silently ignores errors and bad handles.
extern "C" __declspec(dllexport) void SetWebcamPlayerAudioVolume_V2(uint64_t handleVal, int volume) {
    auto* h = reinterpret_cast<ANSCENTER::ANSWEBCAMPlayer*>(handleVal);
    if (!h) return;
    try {
        h->SetAudioVolume(volume);
    }
    catch (...) {
    }
}

// V2 of EnableWebcamPlayerAudioVolume: enables audio when status == 1.
extern "C" __declspec(dllexport) void EnableWebcamPlayerAudioVolume_V2(uint64_t handleVal, int status) {
    auto* h = reinterpret_cast<ANSCENTER::ANSWEBCAMPlayer*>(handleVal);
    if (!h) return;
    try {
        bool audioStatus = false;
        if (status == 1) audioStatus = true;
        h->EnableAudio(audioStatus);
    }
    catch (std::exception& e) {
        std::cout << "EnableWebcamPlayerAudioVolume_V2 - Exception: " << e.what() << std::endl;
    }
    catch (...) {
    }
}

// V2 of SetWebcamImageRotation; silently ignores errors and bad handles.
extern "C" __declspec(dllexport) void SetWebcamImageRotation_V2(uint64_t handleVal, double rotationAngle) {
    auto* h = reinterpret_cast<ANSCENTER::ANSWEBCAMPlayer*>(handleVal);
    if (!h) return;
    try {
        h->SetImageRotate(rotationAngle);
    }
    catch (...) {
    }
}

// V2 of SetBBoxANSWebcamPlayer. 1 = accepted, -1 = bad handle or exception.
extern "C" __declspec(dllexport) int SetBBoxANSWebcamPlayer_V2(uint64_t handleVal, int x, int y, int width, int height) {
    auto* h = reinterpret_cast<ANSCENTER::ANSWEBCAMPlayer*>(handleVal);
    if (!h) return -1;
    try {
        cv::Rect bbox(x, y, width, height);
        h->SetBBox(bbox);
        return 1;
    }
    catch (std::exception& e) {
        std::cout << "SetBBoxANSWebcamPlayer_V2 - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}

// V2 of SetCropFlagANSWebcamPlayer: enables cropping when cropFlag == 1.
extern "C" __declspec(dllexport) int SetCropFlagANSWebcamPlayer_V2(uint64_t handleVal, int cropFlag) {
    auto* h = reinterpret_cast<ANSCENTER::ANSWEBCAMPlayer*>(handleVal);
    if (!h) return -1;
    try {
        bool crop = false;
        if (cropFlag == 1) crop = true;
        h->SetCrop(crop);
        return 1;
    }
    catch (std::exception& e) {
        std::cout << "SetCropFlagANSWebcamPlayer_V2 - Exception: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}