// ANSCORE/modules/ANSCV/ANSVideoPlayer.cpp
// (source snapshot: 1184 lines, 38 KiB, C++, exported 2026-03-28 16:54:11 +11:00)
#ifndef NOMINMAX
#define NOMINMAX // Prevent Windows min/max macros from conflicting with std::min/std::max
#endif
#include "ANSVideoPlayer.h"
#include "ANSMatRegistry.h"
#include "ANSGpuFrameRegistry.h"
#include "ANSGpuFrameOps.h" // gpu_frame_attach, gpu_frame_attach_cuda
#include <memory>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
#include <libavutil/frame.h>
#include "media_codec.h"
// Custom pixel format for BGR full-res Mat entries in ANSGpuFrameRegistry.
// ANSRTYOLO checks this to use the full-res BGR image instead of the display-res input.
static constexpr int ANSCV_PIX_FMT_BGR24 = 1000;
static bool ansvideoplayerLicenceValid = false;
// Global once_flag to protect license checking
static std::once_flag ansvideoplayerLicenseOnceFlag;
namespace ANSCENTER {
// other class
ANSVIDEOPLAYER::ANSVIDEOPLAYER()
{
    // Establish a clean, stopped state. Members not assigned here rely on
    // their in-class initializers declared in the header.
    _isPlaying = false;
    m_bPaused = false;
    _crop = false;
    _resWidth = 0;
    _resHeight = 0;
    _bbox = cv::Rect(0, 0, 0, 0);
    _lastJpegImage = "";
    _jpegCompressor = nullptr;
}
ANSVIDEOPLAYER::~ANSVIDEOPLAYER() noexcept {
// Destructor is noexcept: Destroy() is wrapped so any cleanup failure is
// swallowed rather than escaping and terminating the process.
try {
Destroy();
}
catch (...) {}
}
void ANSVIDEOPLAYER::Destroy() {
// Move HW player out of lock scope — close() does CUDA cleanup
// (cuArrayDestroy/cuMemFree) which must not run under _mutex
// to avoid deadlocking with nvcuda64 SRW lock held by inference.
decltype(_hwPlayer) hwPlayerToClose;
{
std::lock_guard<std::recursive_mutex> lock(_mutex);
try {
if (_hwPlayer) {
try { _hwPlayer->stop(); } catch (...) {}
}
hwPlayerToClose = std::move(_hwPlayer);
_hwDecodeActive = false;
_hwGpuIndex = -1;
_hwCudaAccel = false;
_hwEOF = false;
_hwFrameCount = 0;
2026-03-28 16:54:11 +11:00
// --- cv::VideoCapture cleanup ---
_previousImage.release();
_inferenceImage.release();
_inferenceCloneCurr.release();
_inferenceClonePrev.release();
_lastJpegImage = "";
_isPlaying = false;
_resWidth = 0;
_resHeight = 0;
_currentFrame = 0;
_previousPTS = 0;
2026-03-28 16:54:11 +11:00
if (cap.isOpened()) {
cap.release();
}
}
catch (const std::exception& e) {
_logger.LogError("ANSVIDEOPLAYER::Destroy. Exception:", e.what(), __FILE__, __LINE__);
}
catch (...) {
_logger.LogError("ANSVIDEOPLAYER::Destroy.", "Unknown exception", __FILE__, __LINE__);
}
} // end lock scope
// CUDA cleanup happens here, outside the mutex
if (hwPlayerToClose) {
try { hwPlayerToClose->close(); } catch (...) {}
hwPlayerToClose.reset();
}
2026-03-28 16:54:11 +11:00
}
static void VerifyGlobalANSVPLicense(const std::string& licenseKey) {
    // Validates the license key against each supported product in priority
    // order: ANSCV (productId 1007), then ANSVIS (1003), then ANSTS (1008).
    // The first successful verification wins; any exception means "invalid".
    try {
        ansvideoplayerLicenceValid = ANSCENTER::ANSLicenseHelper::LicenseVerification(licenseKey, 1007, "ANSCV");
        if (!ansvideoplayerLicenceValid) {
            ansvideoplayerLicenceValid = ANSCENTER::ANSLicenseHelper::LicenseVerification(licenseKey, 1003, "ANSVIS");
        }
        if (!ansvideoplayerLicenceValid) {
            ansvideoplayerLicenceValid = ANSCENTER::ANSLicenseHelper::LicenseVerification(licenseKey, 1008, "ANSTS");
        }
    }
    catch (const std::exception&) { // catch by const ref; exception detail unused
        ansvideoplayerLicenceValid = false;
    }
}
void ANSVIDEOPLAYER::CheckLicense() {
    // Runs the global license verification exactly once per process, then
    // mirrors the process-wide result into this instance's _licenseValid flag.
    std::lock_guard<std::recursive_mutex> guard(_mutex);
    try {
        std::call_once(ansvideoplayerLicenseOnceFlag,
                       [this]() { VerifyGlobalANSVPLicense(_licenseKey); });
        _licenseValid = ansvideoplayerLicenceValid;
    }
    catch (const std::exception& e) {
        this->_logger.LogFatal("ANSVIDEOPLAYER::CheckLicense. Error:", e.what(), __FILE__, __LINE__);
    }
}
bool ANSVIDEOPLAYER::Init(std::string licenseKey, std::string url) {
    // Stores the credentials, verifies the license, then opens the source.
    // Returns false if the license is invalid or the source cannot be opened.
    std::lock_guard<std::recursive_mutex> guard(_mutex);
    _licenseKey = std::move(licenseKey);
    CheckLicense();
    if (!_licenseValid) {
        _logger.LogError("ANSVIDEOPLAYER::Init.", "Invalid license", __FILE__, __LINE__);
        return false;
    }
    _url = std::move(url);
    return Setup();
}
bool ANSVIDEOPLAYER::Setup() {
// Opens the video source. Preferred path: GPU hardware decode via CFilePlayer
// (HW_DECODING_AUTO). On failure, falls back to cv::VideoCapture with the
// FFMPEG backend first, then CAP_ANY. Returns true once a decoder is ready.
std::lock_guard<std::recursive_mutex> lock(_mutex);
_currentFrame = 0;
// --- Try GPU hardware decode via CFilePlayer ---
if (!_hwDecodeActive && !_hwPlayer) {
try {
auto hwp = std::make_unique<CFilePlayer>();
hwp->setHWDecoding(HW_DECODING_AUTO); // CUDA → D3D11VA → DXVA2 → software
if (hwp->open(_url)) {
_hwPlayer = std::move(hwp);
_hwDecodeActive = true;
_hwEOF = false;
_hwFrameCount = 0;
// GPU index and resolution resolved lazily in GetImage/GetVideoPlayerCVImage
// because CFilePlayer's decode threads haven't started yet (play() not called)
_hwGpuIndex = -1;
_hwCudaAccel = false;
_logger.LogInfo("ANSVIDEOPLAYER::Setup",
"HW decode opened, ready for play()", __FILE__, __LINE__);
return true;
}
}
catch (const std::exception& e) {
_logger.LogInfo("ANSVIDEOPLAYER::Setup",
"HW decode init failed (" + std::string(e.what()) + "), using cv::VideoCapture",
__FILE__, __LINE__);
}
catch (...) {
_logger.LogInfo("ANSVIDEOPLAYER::Setup",
"HW decode init failed, using cv::VideoCapture", __FILE__, __LINE__);
}
// Reset on failure so the cv::VideoCapture fallback below takes over.
if (_hwPlayer) {
try { _hwPlayer->stop(); _hwPlayer->close(); } catch (...) {}
_hwPlayer.reset();
}
_hwDecodeActive = false;
_hwGpuIndex = -1;
_hwCudaAccel = false;
}
if (_hwDecodeActive) return true;
// --- Fallback: cv::VideoCapture (CPU software decode) ---
if (cap.isOpened()) return true;
cap.open(_url, cv::CAP_FFMPEG); // Use FFMPEG for better codec support
if (!cap.isOpened()) {
cap.open(_url, cv::CAP_ANY);
if (!cap.isOpened())return false;
}
try {
// Cache stream geometry/metadata and rewind to the start position.
_resWidth = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_WIDTH));
_resHeight = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_HEIGHT));
_totalFrames = static_cast<int64_t>(cap.get(cv::CAP_PROP_FRAME_COUNT));
_fps = cap.get(cv::CAP_PROP_FPS);
cap.set(cv::CAP_PROP_POS_FRAMES, _currentFrame);
return true;
}
catch (std::exception& e) {
this->_logger.LogError("ANSVIDEOPLAYER::Setup:", e.what(), __FILE__, __LINE__);
return false;
}
}
bool ANSVIDEOPLAYER::Reconnect() {
    // Tears down the current decoder and re-runs Setup()/Start().
    // The HW decoder's close() does CUDA cleanup — it runs OUTSIDE _mutex
    // to avoid deadlocking with the nvcuda64 SRW lock held by inference.
    decltype(_hwPlayer) hwPlayerToClose;
    {
        std::lock_guard<std::recursive_mutex> lock(_mutex);
        _isPlaying = false; // GetImage() returns cached frame while we reconnect
        if (_hwPlayer) {
            try { _hwPlayer->stop(); } catch (...) {}
            hwPlayerToClose = std::move(_hwPlayer);
        }
    }
    // CUDA cleanup outside the mutex.
    if (hwPlayerToClose) {
        try { hwPlayerToClose->close(); } catch (...) {}
        hwPlayerToClose.reset();
    }
    std::lock_guard<std::recursive_mutex> lock(_mutex);
    try {
        // Reset all decode state before re-opening.
        _currentFrame = 0;
        _hwDecodeActive = false;
        _hwGpuIndex = -1;
        _hwCudaAccel = false;
        _hwEOF = false;
        _hwFrameCount = 0;
        if (cap.isOpened()) {
            cap.release();
        }
        if (!Setup()) return false;
        if (_hwDecodeActive) {
            // HW path: Start() calls play() on the new player.
            return Start();
        }
        // cv::VideoCapture path: playing only if the first frame can be grabbed.
        _isPlaying = cap.isOpened() && cap.grab();
        return _isPlaying;
    }
    catch (const std::exception& e) {
        this->_logger.LogError("ANSVIDEOPLAYER::Reconnect. Exception occurred:", e.what(), __FILE__, __LINE__);
        _currentFrame = 0;
        return false;
    }
}
bool ANSVIDEOPLAYER::Start() {
// Begins playback. HW path: CFilePlayer::play() spawns the read/video/audio
// threads; resolution and GPU index are resolved lazily in GetImage().
// Fallback path: (re)opens cv::VideoCapture, seeks to frame 0 and grabs the
// first frame. Returns true when playback started.
std::lock_guard<std::recursive_mutex> lock(_mutex);
try {
// --- HW decode path ---
if (_hwDecodeActive && _hwPlayer) {
_hwPlayer->play(); // starts read/video/audio threads
_hwEOF = false;
_hwFrameCount = 0;
_isPlaying = true;
// Wait for first frame outside the mutex to let decode threads run
// Resolution and GPU index will be resolved lazily in GetImage()
_logger.LogInfo("ANSVIDEOPLAYER::Start",
"HW decode play() called, threads started", __FILE__, __LINE__);
return true;
}
// --- cv::VideoCapture fallback ---
if (!cap.isOpened()) {
cap.open(_url, cv::CAP_FFMPEG);
if (!cap.isOpened()) {
cap.open(_url, cv::CAP_ANY);
}
if (!cap.isOpened()) {
this->_logger.LogError("ANSVIDEOPLAYER::Start. Exception occurred:", "Failed to open video source: ", __FILE__, __LINE__);
return false;
}
try {
// Always seek to the beginning of the video
if (!cap.set(cv::CAP_PROP_POS_FRAMES, 0)) {
this->_logger.LogError("ANSVIDEOPLAYER::Start. Exception occurred:", "Warning: Unable to seek to frame", __FILE__, __LINE__);
}
}
catch (const std::exception& e) {
this->_logger.LogError("ANSVIDEOPLAYER::Start. Exception occurred:", e.what(), __FILE__, __LINE__);
}
}
// Playing only if the first frame can actually be grabbed.
_isPlaying = cap.isOpened() && cap.grab();
return _isPlaying;
}
catch (const std::exception& e) {
this->_logger.LogError("ANSVIDEOPLAYER::Start. Exception occurred:", e.what(), __FILE__, __LINE__);
return false;
}
}
bool ANSVIDEOPLAYER::Stop() {
    // Stops playback. HW path: CFilePlayer::stop() joins decode threads and is
    // called OUTSIDE _mutex so a thread blocked in GetImage() can drain first.
    // Fallback path: records the current frame position, then releases cap.
    // NOTE(review): the raw hwPlayer pointer is used after the lock is
    // released — assumes no concurrent Destroy()/Reconnect(); confirm callers
    // serialize lifecycle calls.
    decltype(_hwPlayer.get()) hwPlayer = nullptr;
    {
        std::lock_guard<std::recursive_mutex> lock(_mutex);
        try {
            // --- HW decode path ---
            if (_hwDecodeActive && _hwPlayer) {
                _isPlaying = false;
                hwPlayer = _hwPlayer.get();
                // stop() called outside the lock below; skip cap path
            }
            else {
                // --- cv::VideoCapture fallback ---
                if (cap.isOpened()) {
                    try {
                        // Remember where we stopped so a later Start() could resume.
                        double frame_pos = cap.get(cv::CAP_PROP_POS_FRAMES);
                        if (frame_pos >= 0) {
                            _currentFrame = static_cast<int64_t>(frame_pos);
                        }
                        else {
                            _currentFrame = 0;
                            this->_logger.LogError("ANSVIDEOPLAYER::Stop. Exception occurred:", "Unable to retrieve current frame position", __FILE__, __LINE__);
                        }
                    }
                    catch (const std::exception& e) {
                        this->_logger.LogError("ANSVIDEOPLAYER::Stop. Exception occurred:", e.what(), __FILE__, __LINE__);
                        _currentFrame = 0;
                    }
                    cap.release();
                }
                _isPlaying = false;
                return true;
            }
        }
        catch (const std::exception& e) {
            this->_logger.LogError("ANSVIDEOPLAYER::Stop. Exception occurred:", e.what(), __FILE__, __LINE__);
            return false;
        }
    }
    // HW thread join happens here, outside the mutex.
    if (hwPlayer) {
        hwPlayer->stop();
    }
    return true;
}
void ANSVIDEOPLAYER::SetBBox(cv::Rect bbox) {
    // Sets the crop rectangle applied by GetImage() when cropping is enabled.
    std::lock_guard<std::recursive_mutex> guard(_mutex);
    _bbox = bbox;
}
void ANSVIDEOPLAYER::SetCrop(bool crop) {
    // Enables/disables cropping of decoded frames to _bbox.
    std::lock_guard<std::recursive_mutex> guard(_mutex);
    _crop = crop;
}
bool ANSVIDEOPLAYER::IsPaused() {
// Reports "paused" as simply "capture not open".
// NOTE(review): this ignores m_bPaused and the HW-decode path (where cap is
// never opened, so this always returns true) — confirm callers only rely on
// it in cv::VideoCapture fallback mode.
std::lock_guard<std::recursive_mutex> lock(_mutex);
return !cap.isOpened();
}
bool ANSVIDEOPLAYER::IsPlaying() {
// Reports "playing" as "capture open" rather than returning _isPlaying.
// NOTE(review): on the HW-decode path cap is never opened, so this returns
// false even while the HW player is running — verify against callers.
std::lock_guard<std::recursive_mutex> lock(_mutex);
return cap.isOpened();
}
bool ANSVIDEOPLAYER::IsRecording() {
    // Recording is not supported by this player; always reports false.
    std::lock_guard<std::recursive_mutex> guard(_mutex);
    return false; // do not support recording for webcam
}
//cv::Mat ANSVIDEOPLAYER::GetImage(int& width, int& height, int64_t& pts) {
// std::lock_guard<std::recursive_mutex> lock(_mutex);
// if (!_isPlaying) {
// if (!_previousImage.empty()) {
// width = _previousImage.cols;
// height = _previousImage.rows;
// pts = _previousPTS;
// return _previousImage; // Avoid unnecessary cloning
// }
// return cv::Mat();
// }
// // Check if the final frame is reached
// _currentFrame = static_cast<int64_t>(cap.get(cv::CAP_PROP_POS_FRAMES));
// if (_currentFrame >= _totalFrames - 1) {
// if (_resHeight <= 0 || _resWidth <= 0) {
// _resHeight = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_HEIGHT));
// _resWidth = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_WIDTH));
// }
// _previousImage = cv::Mat(_resHeight, _resWidth, CV_8UC3, cv::Scalar(0, 0, 0));
// width = _previousImage.cols;
// height = _previousImage.rows;
// if (_previousPTS < std::numeric_limits<int64_t>::max()) {
// _previousPTS++;
// }
// else {
// _previousPTS = 0;
// }
// pts = _previousPTS;
// return _previousImage;
// }
// cv::Mat frame, result;
// try {
// if (!cap.isOpened()) {
// if (!_previousImage.empty()) {
// width = _previousImage.cols;
// height = _previousImage.rows;
// pts = _previousPTS;
// return _previousImage;
// }
// return cv::Mat();
// }
// if (!cap.read(frame) || frame.empty()) {
// return cv::Mat();
// }
// if (_crop) {
// // Ensure valid crop region
// _bbox.x = std::max(0, _bbox.x);
// _bbox.y = std::max(0, _bbox.y);
// _bbox.width = std::min(_bbox.width, frame.cols - _bbox.x);
// _bbox.height = std::min(_bbox.height, frame.rows - _bbox.y);
// cv::Rect roi = _bbox; // Define roi after updating _bbox
// if (roi.width > 0 && roi.height > 0) {
// result = frame(roi);
// }
// else {
// result = frame;
// }
// }
// else {
// result = frame;
// }
// if (_imageRotateDeg > 0) {
// cv::Point2f center(result.cols / 2.0f, result.rows / 2.0f);
// cv::Mat rotationMatrix = cv::getRotationMatrix2D(center, _imageRotateDeg, 1.0);
// cv::warpAffine(result, result, rotationMatrix, result.size(), cv::INTER_CUBIC, cv::BORDER_CONSTANT, cv::Scalar());
// }
// if (_previousPTS < INT64_MAX) {
// _previousPTS++;
// }
// else {
// _previousPTS = 0; // Reset to zero when max is reached
// }
// width = result.cols;
// height = result.rows;
// pts = _previousPTS;
// _previousImage = result; // Store reference instead of clone
// return _previousImage; // Return reference instead of new allocation
// }
// catch (const std::exception& e) {
// this->_logger.LogError("ANSVIDEOPLAYER::GetImage. Exception occurred:", e.what(), __FILE__, __LINE__);
// }
// return cv::Mat();
//}
cv::Mat ANSVIDEOPLAYER::GetImage(int& width, int& height, int64_t& pts) {
// Returns the next decoded frame (cropped/rotated/display-resized as
// configured), filling width/height/pts. Two paths: HW decode via CFilePlayer,
// or cv::VideoCapture fallback. While stopped or on transient failure the
// last cached frame (_previousImage) is returned. pts is a monotonically
// incremented counter, not a container timestamp.
std::lock_guard<std::recursive_mutex> lock(_mutex);
if (!_isPlaying) {
if (!_previousImage.empty()) {
width = _previousImage.cols;
height = _previousImage.rows;
pts = _previousPTS;
return _previousImage;
}
return cv::Mat();
}
// =====================================================================
// HW decode path (CFilePlayer)
// =====================================================================
if (_hwDecodeActive && _hwPlayer) {
// EOF: return black frame (same behavior as cv::VideoCapture path)
if (_hwEOF) {
// Fall back to 1920x1080 if the resolution was never resolved.
if (_resHeight <= 0 || _resWidth <= 0) { _resHeight = 1080; _resWidth = 1920; }
_previousImage = cv::Mat(_resHeight, _resWidth, CV_8UC3, cv::Scalar(0, 0, 0));
width = _previousImage.cols;
height = _previousImage.rows;
if (_previousPTS < std::numeric_limits<int64_t>::max()) _previousPTS++;
else _previousPTS = 0;
pts = _previousPTS;
return _previousImage;
}
try {
int imgW = 0, imgH = 0;
int64_t imgPts = 0;
cv::Mat frame = _hwPlayer->getImage(imgW, imgH, imgPts);
if (frame.empty()) {
// CFilePlayer may have stopped or no frame ready
if (!_previousImage.empty()) {
width = _previousImage.cols;
height = _previousImage.rows;
pts = _previousPTS;
return _previousImage;
}
return cv::Mat();
}
_hwFrameCount++;
// Lazy init: resolve resolution and GPU index on first valid frame
if (_hwFrameCount == 1) {
_resWidth = imgW;
_resHeight = imgH;
_hwGpuIndex = _hwPlayer->getHWDecodingGpuIndex();
_hwCudaAccel = _hwPlayer->isCudaHWAccel();
// NOTE(review): total frames estimated assuming 30 fps — confirm the
// real stream rate is not needed by callers of _totalFrames/_fps.
_totalFrames = static_cast<int64_t>(_hwPlayer->getDuration() / 1000.0 * 30.0);
_fps = 30.0;
std::string hwType = _hwCudaAccel ? "CUDA" : "D3D11VA/DXVA2";
_logger.LogInfo("ANSVIDEOPLAYER::GetImage",
"HW decode ACTIVE (" + hwType + ") GPU[" + std::to_string(_hwGpuIndex) +
"] " + std::to_string(_resWidth) + "x" + std::to_string(_resHeight),
__FILE__, __LINE__);
}
// EOF detection: CFilePlayer auto-loops, but ANSVideoPlayer should stop.
// Detect when PTS wraps backwards (CFilePlayer seeked to start for looping).
if (_hwFrameCount > 10 && _hwLastPts > 0 && imgPts < _hwLastPts - 1000) {
// Position wrapped back to start → video reached end
_hwEOF = true;
if (_resHeight <= 0 || _resWidth <= 0) { _resHeight = imgH; _resWidth = imgW; }
_previousImage = cv::Mat(_resHeight, _resWidth, CV_8UC3, cv::Scalar(0, 0, 0));
width = _previousImage.cols;
height = _previousImage.rows;
if (_previousPTS < std::numeric_limits<int64_t>::max()) _previousPTS++;
else _previousPTS = 0;
pts = _previousPTS;
return _previousImage;
}
_hwLastPts = imgPts; // track for EOF detection (PTS wrap)
cv::Mat result = frame; // CFilePlayer returns owned Mat (already cloned internally)
// Apply cropping if enabled (clamp _bbox to frame bounds first)
if (_crop) {
_bbox.x = std::max(0, _bbox.x);
_bbox.y = std::max(0, _bbox.y);
_bbox.width = std::min(_bbox.width, result.cols - _bbox.x);
_bbox.height = std::min(_bbox.height, result.rows - _bbox.y);
if (_bbox.width > 0 && _bbox.height > 0) {
result = result(_bbox).clone();
}
}
// Apply rotation if specified (fast paths for the three right angles)
if (_imageRotateDeg > 0) {
if (std::abs(_imageRotateDeg - 90.0) < 0.01) {
cv::rotate(result, result, cv::ROTATE_90_CLOCKWISE);
} else if (std::abs(_imageRotateDeg - 180.0) < 0.01) {
cv::rotate(result, result, cv::ROTATE_180);
} else if (std::abs(_imageRotateDeg - 270.0) < 0.01) {
cv::rotate(result, result, cv::ROTATE_90_COUNTERCLOCKWISE);
} else {
const cv::Point2f center(result.cols / 2.0f, result.rows / 2.0f);
cv::Mat rotationMatrix = cv::getRotationMatrix2D(center, _imageRotateDeg, 1.0);
cv::Mat rotated;
cv::warpAffine(result, rotated, rotationMatrix, result.size(),
cv::INTER_LINEAR, cv::BORDER_CONSTANT, cv::Scalar());
result = rotated;
}
}
// Update PTS (simple wrap-safe counter)
if (_previousPTS < INT64_MAX) _previousPTS++;
else _previousPTS = 0;
// Store full-res frame for inference (before display resize)
_inferenceImage = result;
// Resize for display if display resolution is set
if (_displayWidth > 0 && _displayHeight > 0 &&
(result.cols != _displayWidth || result.rows != _displayHeight)) {
cv::Mat displayResult;
cv::resize(result, displayResult, cv::Size(_displayWidth, _displayHeight),
0, 0, cv::INTER_LINEAR);
result = displayResult;
}
_previousImage = result;
width = result.cols;
height = result.rows;
pts = _previousPTS;
return result;
}
catch (const std::exception& e) {
this->_logger.LogError("ANSVIDEOPLAYER::GetImage (HW). Exception:", e.what(), __FILE__, __LINE__);
}
catch (...) {
this->_logger.LogError("ANSVIDEOPLAYER::GetImage (HW). Unknown exception", "", __FILE__, __LINE__);
}
return cv::Mat();
}
// =====================================================================
// cv::VideoCapture fallback path (original code, unchanged)
// =====================================================================
// Check if the final frame is reached
_currentFrame = static_cast<int64_t>(cap.get(cv::CAP_PROP_POS_FRAMES));
if (_currentFrame >= _totalFrames - 1) {
// End of file: emit a black frame at the source resolution.
if (_resHeight <= 0 || _resWidth <= 0) {
_resHeight = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_HEIGHT));
_resWidth = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_WIDTH));
}
_previousImage = cv::Mat(_resHeight, _resWidth, CV_8UC3, cv::Scalar(0, 0, 0));
width = _previousImage.cols;
height = _previousImage.rows;
if (_previousPTS < std::numeric_limits<int64_t>::max()) {
_previousPTS++;
}
else {
_previousPTS = 0;
}
pts = _previousPTS;
return _previousImage;
}
try {
if (!cap.isOpened()) {
if (!_previousImage.empty()) {
width = _previousImage.cols;
height = _previousImage.rows;
pts = _previousPTS;
return _previousImage;
}
return cv::Mat();
}
cv::Mat frame;
if (!cap.read(frame) || frame.empty()) {
// Return last good frame if available
if (!_previousImage.empty()) {
width = _previousImage.cols;
height = _previousImage.rows;
pts = _previousPTS;
return _previousImage;
}
return cv::Mat();
}
cv::Mat result;
// Apply cropping if enabled
if (_crop) {
// Validate and clamp crop region
_bbox.x = std::max(0, _bbox.x);
_bbox.y = std::max(0, _bbox.y);
_bbox.width = std::min(_bbox.width, frame.cols - _bbox.x);
_bbox.height = std::min(_bbox.height, frame.rows - _bbox.y);
if (_bbox.width > 0 && _bbox.height > 0) {
// CRITICAL: Clone to avoid dangling reference
result = frame(_bbox).clone();
}
else {
result = frame.clone();
}
}
else {
result = frame.clone();
}
// Apply rotation if specified
if (_imageRotateDeg > 0) {
// Fast path for 90-degree rotations
if (std::abs(_imageRotateDeg - 90.0) < 0.01) {
cv::rotate(result, result, cv::ROTATE_90_CLOCKWISE);
}
else if (std::abs(_imageRotateDeg - 180.0) < 0.01) {
cv::rotate(result, result, cv::ROTATE_180);
}
else if (std::abs(_imageRotateDeg - 270.0) < 0.01) {
cv::rotate(result, result, cv::ROTATE_90_COUNTERCLOCKWISE);
}
else {
// Arbitrary angle rotation
const cv::Point2f center(result.cols / 2.0f, result.rows / 2.0f);
cv::Mat rotationMatrix = cv::getRotationMatrix2D(center, _imageRotateDeg, 1.0);
cv::Mat rotated;
// Use INTER_LINEAR instead of INTER_CUBIC (2-3x faster)
cv::warpAffine(result, rotated, rotationMatrix, result.size(),
cv::INTER_LINEAR, cv::BORDER_CONSTANT, cv::Scalar());
result = rotated;
}
}
// Update PTS
if (_previousPTS < INT64_MAX) {
_previousPTS++;
}
else {
_previousPTS = 0;
}
// Store full-res frame for inference (before display resize)
_inferenceImage = result;
// Resize for display if display resolution is set
if (_displayWidth > 0 && _displayHeight > 0 &&
(result.cols != _displayWidth || result.rows != _displayHeight)) {
cv::Mat displayResult;
cv::resize(result, displayResult, cv::Size(_displayWidth, _displayHeight),
0, 0, cv::INTER_LINEAR);
result = displayResult;
}
// Update cached frame
_previousImage = result;
// Set output parameters
width = result.cols;
height = result.rows;
pts = _previousPTS;
return result;
}
catch (const cv::Exception& e) {
this->_logger.LogError("ANSVIDEOPLAYER::GetImage. OpenCV exception:", e.what(), __FILE__, __LINE__);
}
catch (const std::exception& e) {
this->_logger.LogError("ANSVIDEOPLAYER::GetImage. Exception:", e.what(), __FILE__, __LINE__);
}
catch (...) {
this->_logger.LogError("ANSVIDEOPLAYER::GetImage. Unknown exception", "", __FILE__, __LINE__);
}
return cv::Mat();
}
cv::Mat ANSVIDEOPLAYER::GetInferenceImage() {
    // Returns the most recent full-resolution frame (no display resize applied),
    // as stored by GetImage() for downstream inference.
    std::lock_guard<std::recursive_mutex> guard(_mutex);
    return _inferenceImage;
}
void ANSVIDEOPLAYER::SetDisplayResolution(int width, int height) {
    // Sets the target display size; GetImage() resizes returned frames to it
    // when both dimensions are positive.
    std::lock_guard<std::recursive_mutex> guard(_mutex);
    _displayWidth = width;
    _displayHeight = height;
}
void ANSVIDEOPLAYER::EnableAudio(bool status) {
// Not implemented: audio enable/disable is currently a no-op for this player.
// TODO: support audio enable for webcam
}
void ANSVIDEOPLAYER::SetAudioVolume(int volume) {
// Not implemented: volume control is currently a no-op for this player.
// TODO: support audio volume
}
std::string ANSVIDEOPLAYER::encodeJpegString(const cv::Mat& img, int quality) {
    // Compresses a BGR (3-channel) or grayscale Mat to a JPEG byte string via
    // TurboJPEG. On any failure the last successfully encoded JPEG is returned
    // so callers always receive a displayable frame.
    // NOTE(review): pitch is passed as 0 (tightly packed) — assumes img is
    // continuous; confirm callers never pass a ROI view.
    std::lock_guard<std::recursive_mutex> lock(_mutex);
    if (!_isPlaying) return _lastJpegImage;
    if (img.empty() || !img.data) return _lastJpegImage; // nothing to encode
    unsigned char* jpegBuf = nullptr;
    tjhandle jpegCompressor = nullptr;
    try {
        // Initialize TurboJPEG compressor
        jpegCompressor = tjInitCompress();
        if (!jpegCompressor) {
            this->_logger.LogError("Failed to initialize TurboJPEG compressor.", tjGetErrorStr(), __FILE__, __LINE__);
            return _lastJpegImage;
        }
        // Upper bound for the output; tjCompress2 may grow jpegBuf itself
        // since TJFLAG_NOREALLOC is not set.
        int maxBufferSize = img.cols * img.rows * 3;
        jpegBuf = tjAlloc(maxBufferSize);
        if (!jpegBuf) {
            this->_logger.LogError("Failed to allocate memory for JPEG buffer.", tjGetErrorStr(), __FILE__, __LINE__);
            tjDestroy(jpegCompressor);
            jpegCompressor = nullptr; // prevent double-destroy in shared cleanup
            return _lastJpegImage;
        }
        long unsigned int jpegSize = maxBufferSize;
        int subsamp = TJSAMP_444;
        int pixelFormat = img.channels() == 3 ? TJPF_BGR : TJPF_GRAY;
        // Compress the image
        if (tjCompress2(jpegCompressor, img.data, img.cols, 0, img.rows, pixelFormat,
            &jpegBuf, &jpegSize, subsamp, quality, TJFLAG_FASTDCT) != 0) {
            this->_logger.LogError("Compression error:", tjGetErrorStr(), __FILE__, __LINE__);
            // Fall through to shared cleanup; the cached JPEG stays unchanged.
        }
        else {
            // Success: cache the encoded bytes for reuse on later failures.
            _lastJpegImage.assign(reinterpret_cast<char*>(jpegBuf), jpegSize);
        }
    }
    catch (const std::exception& e) {
        this->_logger.LogError("Exception occurred:", e.what(), __FILE__, __LINE__);
    }
    // Shared cleanup for all paths (success, compression failure, exception).
    // Pointers are nulled defensively so a later path cannot double-free.
    if (jpegBuf) { tjFree(jpegBuf); jpegBuf = nullptr; }
    if (jpegCompressor) { tjDestroy(jpegCompressor); jpegCompressor = nullptr; }
    return _lastJpegImage;
}
std::string ANSVIDEOPLAYER::MatToBinaryData(const cv::Mat& image) {
    // Encodes a Mat as JPEG bytes (quality 85). Returns the cached JPEG when
    // stopped, on invalid input, or on any error.
    std::lock_guard<std::recursive_mutex> guard(_mutex);
    if (!_isPlaying) return _lastJpegImage;
    const bool invalidInput = image.empty() || !image.data || !image.u;
    if (invalidInput) return _lastJpegImage;
    try {
        return encodeJpegString(image, 85);
    }
    catch (const std::exception& e) {
        this->_logger.LogFatal("ANSVIDEOPLAYER::MatToBinaryData. Exception occurred:", e.what(), __FILE__, __LINE__);
    }
    catch (...) {
        this->_logger.LogFatal("ANSVIDEOPLAYER::MatToBinaryData.", "Unknown exception occurred.", __FILE__, __LINE__);
    }
    // Fall back to the last good JPEG on failure.
    return _lastJpegImage;
}
std::string ANSVIDEOPLAYER::GetJpegStringImage(int& width, int& height, int64_t& pts) {
std::lock_guard<std::recursive_mutex> lock(_mutex);
if (!_isPlaying) return _lastJpegImage;
try {
cv::Mat image = GetImage(width, height, pts);
std::string jpegString = MatToBinaryData(image);
image.release();
return jpegString;
}
catch (const std::exception& e) {
this->_logger.LogError("ANSVIDEOPLAYER::GetJpegStringImage. Exception occurred:", e.what(), __FILE__, __LINE__);
return _lastJpegImage;
}
}
}
extern "C" __declspec(dllexport) int CreateANSVideoPlayerHandle(ANSCENTER::ANSVIDEOPLAYER** Handle, const char* licenseKey, const char* url) {
    // Creates a player, validates the license and opens the URL.
    // Returns 1 on success (*Handle owns the player), 0 on failure, -1 on bad args.
    if (!Handle || !licenseKey || !url) return -1;
    try {
        auto ptr = std::make_unique<ANSCENTER::ANSVIDEOPLAYER>();
        bool result = ptr->Init(licenseKey, url);
        if (result) {
            *Handle = ptr.release();
            // Register a cleanup callback so the hosting module can destroy
            // leaked handles at shutdown. (Removed an unused local declaration
            // of anscv_unregister_handle — it is only needed in Release.)
            extern void anscv_register_handle(void*, void(*)(void*));
            anscv_register_handle(*Handle, [](void* p) {
                auto* h = static_cast<ANSCENTER::ANSVIDEOPLAYER*>(p);
                try { h->Stop(); } catch (...) {}
                try { h->Destroy(); } catch (...) {}
                try { delete h; } catch (...) {}
            });
            return 1;
        }
        *Handle = nullptr;
        return 0;
    }
    catch (...) { return 0; }
}
extern "C" __declspec(dllexport) int ReleaseANSVideoPlayerHandle(ANSCENTER::ANSVIDEOPLAYER** Handle) {
    // Unregisters and deletes the player. Returns 0 on completion, -1 on null
    // handle. *Handle is always nulled so callers cannot double-release.
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        extern void anscv_unregister_handle(void*);
        anscv_unregister_handle(*Handle);
        // The destructor runs Destroy() itself — taking ownership in a
        // unique_ptr is sufficient (avoids an explicit double-destroy).
        std::unique_ptr<ANSCENTER::ANSVIDEOPLAYER> owner(*Handle);
        *Handle = nullptr;
        return 0;
    }
    catch (...) {
        if (Handle) *Handle = nullptr;
        return 0;
    }
}
extern "C" __declspec(dllexport) int GetVideoPlayerStrImage(ANSCENTER::ANSVIDEOPLAYER** Handle, int& width, int& height, int64_t& timeStamp, std::string& jpegImage) {
    // Fetches the next frame as a JPEG string. 1 = data, 0 = empty, -1 = error.
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        jpegImage = (*Handle)->GetJpegStringImage(width, height, timeStamp);
        return jpegImage.empty() ? 0 : 1;
    }
    catch (const std::exception& e) {
        std::cerr << "Error getting image data from VideoPlayer client: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}
extern "C" __declspec(dllexport) int GetVideoPlayerImage(ANSCENTER::ANSVIDEOPLAYER** Handle, int& width, int& height, int64_t& timeStamp, LStrHandle jpegImage) {
// Fetches the next frame as JPEG bytes and copies them into a LabVIEW string
// handle (LStrHandle). Returns 1 on success, 0 on empty image / resize
// failure, -1 on bad handle or exception.
if (Handle == nullptr || *Handle == nullptr) return -1;
try {
std::string jpegString = (*Handle)->GetJpegStringImage(width, height, timeStamp);
int size = jpegString.length();
if (size > 0) {
MgErr error;
// Resize the jpegImage handle to hold the image data
// (LabVIEW LStr layout: int32 count followed by the byte payload).
error = DSSetHandleSize(jpegImage, sizeof(int32) + size * sizeof(uChar));
// Check if resizing the handle was successful
if (error == noErr) {
// Set the size of the image in the handle
(*jpegImage)->cnt = size;
// Use memcpy to copy the data from the std::string to the LStrHandle's str buffer
memcpy((*jpegImage)->str, jpegString.c_str(), size);
// Return success
return 1;
}
else {
// Return failure if there was an error in resizing the handle
std::cerr << "Error resizing jpegImage handle: " << error << std::endl;
return 0;
}
}
else {
// If the JPEG image string is empty, return failure
std::cerr << "No image data retrieved from VideoPlayer client." << std::endl;
return 0;
}
}
catch (const std::exception& e) {
std::cerr << "Error getting image data from VideoPlayer client: " << e.what() << std::endl;
return -1;
}
catch (...) {
return -1;
}
}
//extern "C" __declspec(dllexport) int GetVideoPlayerCVImage(ANSCENTER::ANSVIDEOPLAYER** Handle, int& width, int& height, int64_t& timeStamp, cv::Mat** image) {
// if (!Handle || !(*Handle) || !image) {
// std::cerr << "Error: Invalid input parameters in GetRTSPCVImage." << std::endl;
// return -1; // Error code for invalid parameters
// }
// try {
// cv::Mat img = (*Handle)->GetImage(width, height, timeStamp);
// std::lock_guard<std::mutex> lock(videoPlayerMutex); // Ensures thread safety
// if (img.empty()) {
// return 0; // No valid image retrieved
// }
// // If an image is already allocated, release it
// if (*image) {
// delete* image;
// *image = nullptr;
// }
//
// // Allocate new image
// *image = new cv::Mat(std::move(img)); // Move constructor avoids deep copy
//
// return 1; // Success
// }
// catch (const std::exception& e) {
// std::cerr << "Exception in GetRTSPCVImage: " << e.what() << std::endl;
// return -2; // Error code for exceptions
// }
// catch (...) {
// std::cerr << "Exception in GetRTSPCVImage: Unknown exception." << std::endl;
// return -2; // Generic error code for exceptions
// }
//}
extern "C" __declspec(dllexport) int GetVideoPlayerCVImage(
ANSCENTER::ANSVIDEOPLAYER** Handle,
int& width,
int& height,
int64_t& timeStamp,
cv::Mat** image)
{
// Fetches the next frame into *image (replacing any prior Mat via
// anscv_mat_replace) and attaches decode-side frame data to
// ANSGpuFrameRegistry so inference can take a fast path.
// Returns 1 on success, 0 when no frame is available, -1 on bad args,
// -2 on exception.
// Validate input parameters
if (!Handle || !*Handle || !image) {
std::cerr << "Error: Invalid input parameters in GetVideoPlayerCVImage" << std::endl;
return -1;
}
try {
// Get image (display-res if SetDisplayResolution was called, otherwise original)
cv::Mat img = (*Handle)->GetImage(width, height, timeStamp);
// Check if valid image was retrieved
if (img.empty()) {
return 0; // No valid image available
}
// anscv_mat_replace has its own internal registry_mutex — no global mutex needed
anscv_mat_replace(image, std::move(img));
// Attach frame data to ANSGpuFrameRegistry for inference fast-path.
// When HW decode is active, attach NV12/CUDA frames (same as ANSRTSP).
// When using cv::VideoCapture fallback, attach BGR clone (pixFmt=1000).
// NOTE(review): _hwDecodeActive/_hwPlayer and the _inferenceClone* members
// are read/written here without holding the player's _mutex — assumes a
// single consumer thread calls this export; confirm with the callers.
if ((*Handle)->_hwDecodeActive && (*Handle)->_hwPlayer) {
// --- HW decode: NV12/CUDA attachment (same pattern as ANSRTSP.cpp:847-862) ---
int gpuIdx = (*Handle)->_hwPlayer->getHWDecodingGpuIndex();
AVFrame* cudaHW = (*Handle)->_hwPlayer->getCudaHWFrame();
if (cudaHW) {
// CUDA zero-copy: frame data[0]/data[1] are CUDA device pointers.
// Also attach CPU NV12 as fallback for cross-GPU inference.
AVFrame* cpuNV12 = (*Handle)->_hwPlayer->getNV12Frame();
gpu_frame_attach_cuda(*image, cudaHW, gpuIdx, timeStamp, cpuNV12);
} else {
AVFrame* nv12 = (*Handle)->_hwPlayer->getNV12Frame();
if (nv12) {
gpu_frame_attach(*image, nv12, gpuIdx, timeStamp);
}
}
} else {
// --- cv::VideoCapture fallback: BGR clone attachment ---
cv::Mat infImg = (*Handle)->GetInferenceImage();
if (!infImg.empty() && infImg.data != (*image)->data) {
// Rotate clones: keep previous alive so ANSRTYOLO can finish reading it
(*Handle)->_inferenceClonePrev = (*Handle)->_inferenceCloneCurr;
(*Handle)->_inferenceCloneCurr = infImg.clone(); // deep copy, independent lifetime
GpuFrameData data{};
data.avframe = nullptr; // no AVFrame — lifecycle managed by _inferenceCloneCurr
data.yPlane = (*Handle)->_inferenceCloneCurr.data;
data.uvPlane = nullptr;
data.yLinesize = static_cast<int>((*Handle)->_inferenceCloneCurr.step[0]);
data.uvLinesize = 0;
data.width = (*Handle)->_inferenceCloneCurr.cols;
data.height = (*Handle)->_inferenceCloneCurr.rows;
data.pixelFormat = ANSCV_PIX_FMT_BGR24; // 1000 = BGR full-res (not NV12)
data.gpuIndex = -1; // CPU data, no GPU affinity
data.pts = timeStamp;
data.isCudaDevicePtr = false;
data.cpuAvframe = nullptr;
data.cpuYPlane = nullptr;
data.cpuUvPlane = nullptr;
data.cpuYLinesize = 0;
data.cpuUvLinesize = 0;
// attach returns old avframe (nullptr for our entries) — safe to ignore
ANSGpuFrameRegistry::instance().attach(*image, std::move(data));
// Drain pending — may contain AVFrame* from RTSP entries evicted by time
auto pending = ANSGpuFrameRegistry::instance().drain_pending();
for (void* p : pending) {
AVFrame* stale = static_cast<AVFrame*>(p);
av_frame_free(&stale); // safe for nullptr (no-op)
}
}
}
return 1; // Success
}
catch (const cv::Exception& e) {
std::cerr << "OpenCV exception in GetVideoPlayerCVImage: " << e.what() << std::endl;
return -2;
}
catch (const std::exception& e) {
std::cerr << "Exception in GetVideoPlayerCVImage: " << e.what() << std::endl;
return -2;
}
catch (...) {
std::cerr << "Unknown exception in GetVideoPlayerCVImage" << std::endl;
return -2;
}
}
extern "C" __declspec(dllexport) int StartVideoPlayer(ANSCENTER::ANSVIDEOPLAYER** Handle) {
    // Starts playback. 1 = started, 0 = failed, -1 = bad handle/error.
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        return (*Handle)->Start() ? 1 : 0;
    }
    catch (const std::exception& e) {
        std::cerr << "Error starting file player: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}
extern "C" __declspec(dllexport) int ReconnectVideoPlayer(ANSCENTER::ANSVIDEOPLAYER** Handle) {
    // Reopens the source from scratch. 1 = reconnected, 0 = failed, -1 = error.
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        return (*Handle)->Reconnect() ? 1 : 0;
    }
    catch (const std::exception& e) {
        std::cerr << "Error reconnecting file player: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}
extern "C" __declspec(dllexport) int StopVideoPlayer(ANSCENTER::ANSVIDEOPLAYER** Handle) {
    // Stops playback. 1 = stopped, 0 = failed, -1 = bad handle/error.
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        return (*Handle)->Stop() ? 1 : 0;
    }
    catch (const std::exception& e) {
        std::cerr << "Error stopping file player: " << e.what() << std::endl;
        return -1;
    }
    catch (...) {
        return -1;
    }
}
extern "C" __declspec(dllexport) int IsVideoPlayerPaused(ANSCENTER::ANSVIDEOPLAYER** Handle) {
    // 1 = paused, 0 = not paused (also returned on exception), -1 = bad handle.
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        return (*Handle)->IsPaused() ? 1 : 0;
    }
    catch (const std::exception& e) {
        std::cerr << "Error checking if VideoPlayer is paused: " << e.what() << std::endl;
        return 0;
    }
    catch (...) { return 0; }
}
extern "C" __declspec(dllexport) int IsVideoPlayerRunning(ANSCENTER::ANSVIDEOPLAYER** Handle) {
    // 1 = playing, 0 = not playing (also returned on exception), -1 = bad handle.
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        return (*Handle)->IsPlaying() ? 1 : 0;
    }
    catch (const std::exception& e) {
        std::cerr << "Error checking if VideoPlayer is running: " << e.what() << std::endl;
        return 0;
    }
    catch (...) { return 0; }
}
extern "C" __declspec(dllexport) int IsVideoPlayerRecording(ANSCENTER::ANSVIDEOPLAYER** Handle) {
    // 1 = recording, 0 = not recording (also returned on exception), -1 = bad handle.
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        return (*Handle)->IsRecording() ? 1 : 0;
    }
    catch (const std::exception& e) {
        std::cerr << "Error checking if VideoPlayer is recording: " << e.what() << std::endl;
        return 0;
    }
    catch (...) { return 0; }
}
extern "C" __declspec(dllexport) void SetVideoPlayerAudioVolume(ANSCENTER::ANSVIDEOPLAYER** Handle, int volume)
{
    // Forwards the volume to the player; silently ignores bad handles/errors.
    if (Handle == nullptr || *Handle == nullptr) return;
    try { (*Handle)->SetAudioVolume(volume); }
    catch (...) {}
}
extern "C" __declspec(dllexport) void EnableVideoPlayerAudioVolume(ANSCENTER::ANSVIDEOPLAYER** Handle, int status)
{
    // Enables audio when status == 1, disables otherwise; errors are ignored.
    if (Handle == nullptr || *Handle == nullptr) return;
    try {
        const bool audioStatus = (status == 1);
        (*Handle)->EnableAudio(audioStatus);
    }
    catch (...) {}
}
extern "C" __declspec(dllexport) void SetVideoPlayerImageRotation(ANSCENTER::ANSVIDEOPLAYER** Handle, double rotationAngle) {
    // Sets the rotation (degrees) applied to decoded frames; errors are ignored.
    if (Handle == nullptr || *Handle == nullptr) return;
    try { (*Handle)->SetImageRotate(rotationAngle); }
    catch (...) {}
}
extern "C" __declspec(dllexport) int SetBBoxVideoPlayer(ANSCENTER::ANSVIDEOPLAYER** Handle, int x, int y, int width, int height) {
    // Sets the crop rectangle. 1 = ok, 0 = error, -1 = bad handle.
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        (*Handle)->SetBBox(cv::Rect(x, y, width, height));
        return 1;
    }
    catch (const std::exception& e) {
        std::cerr << "Error setting bounding box for VideoPlayer: " << e.what() << std::endl;
        return 0;
    }
    catch (...) { return 0; }
}
extern "C" __declspec(dllexport) int SetCropFlagVideoPlayer(ANSCENTER::ANSVIDEOPLAYER** Handle, int cropFlag) {
    // Enables cropping when cropFlag == 1. 1 = ok, 0 = error, -1 = bad handle.
    if (Handle == nullptr || *Handle == nullptr) return -1;
    try {
        const bool crop = (cropFlag == 1);
        (*Handle)->SetCrop(crop);
        return 1;
    }
    catch (const std::exception& e) {
        std::cerr << "Error setting crop flag for VideoPlayer: " << e.what() << std::endl;
        return 0;
    }
    catch (...) { return 0; }
}
extern "C" __declspec(dllexport) void SetVideoPlayerDisplayResolution(ANSCENTER::ANSVIDEOPLAYER** Handle, int width, int height) {
    // Sets the display-resize target used by GetImage(); errors are ignored.
    if (Handle == nullptr || *Handle == nullptr) return;
    try { (*Handle)->SetDisplayResolution(width, height); }
    catch (...) {}
}