// VideoWidget.cpp — OpenGL-rendered video player widget (Qt / C++)
/***************************************************************************************
*
* IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
*
* By downloading, copying, installing or using the software you agree to this license.
* If you do not agree to this license, do not download, install,
* copy or use the software.
*
* Copyright (C) 2014-2024, Happytimesoft Corporation, all rights reserved.
*
* Redistribution and use in binary forms, with or without modification, are permitted.
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*
****************************************************************************************/
#include "sys_inc.h"
#include "VideoWidget.h"
#include "utils.h"
#include "InstMsgDialog.h"
#include "rtsp_player.h"
#include "rtmp_player.h"
#include "http_flv_player.h"
#include "http_mjpeg_player.h"
#include "srt_player.h"
#include "file_player.h"
#include "http_test.h"
#include <QPainter>
#include <QMessageBox>
#include <QScreen>
#include <QPaintEvent>
#include <QDir>
#include <QFile>
#include <QDateTime>
#include <QApplication>
#include <QOpenGLShaderProgram>
#include <QOpenGLTexture>
#if defined(ANDROID)
#include <QJniObject>
#endif
/*********************************************************************************************/
#define VERTEXIN 0
#define TEXTUREIN 1
/*********************************************************************************************/
VideoWidget::VideoWidget(QWidget * parent, Qt::WindowFlags f)
: QOpenGLWidget(parent, f)
, m_pPlayer(NULL)
, m_bMute(FALSE)
, m_bRecording(FALSE)
, m_pRenderFrame(NULL)
#ifdef BACKCHANNEL
, m_nBackChannelFlag(0)
#endif
{
    // We repaint the whole surface every frame, so skip background erase.
    setAttribute(Qt::WA_OpaquePaintEvent);

    // One-shot reconnect timer, re-armed by slotPlayerNotify() on failures.
    m_timerReconn.setSingleShot(true);
    connect(&m_timerReconn, SIGNAL(timeout()), this, SLOT(slotReconn()));

    // Frames are announced from the player thread; queue the repaint so
    // update() always runs on the GUI thread.
    connect(this, SIGNAL(imageReady()), this, SLOT(update()), Qt::QueuedConnection);
}
VideoWidget::~VideoWidget()
{
    // Stop playback and drop any pending frame first.
    closeVideo();

    // Release GL resources with this widget's context made current.
    // NOTE: textureY/U/V are heap-allocated in initializeGL() with no
    // parent; the original code only called destroy(), leaking the
    // QOpenGLTexture objects themselves. delete also invokes destroy().
    // (Same precondition as before: assumes initializeGL() has run.)
    makeCurrent();
    vbo.destroy();
    delete textureY;
    delete textureU;
    delete textureV;
    doneCurrent();
}
void VideoWidget::play(QString url, QString acct, QString pass)
{
    // Re-requesting the same stream with the same credentials is a no-op.
    BOOL sameStream = (m_url == url && m_acct == acct && m_pass == pass);
    if (sameStream)
    {
        return;
    }

    closeVideo();

    m_url = url;
    m_acct = acct;
    m_pass = pass;
    m_base = getBaseName(url);

    // An empty URL just clears the widget.
    if (m_url.isEmpty())
    {
        return;
    }
    makeCall();
}
void VideoWidget::pause()
{
if (m_pPlayer)
{
m_pPlayer->pause();
}
}
void VideoWidget::stop()
{
    // Tear down the player and clear the stream identity.
    closeVideo();
}
void VideoWidget::closePlayer()
{
    // Cancel any pending reconnect attempt before dropping the player.
    m_timerReconn.stop();

    if (NULL == m_pPlayer)
    {
        return;
    }
    delete m_pPlayer;
    m_pPlayer = NULL;
}
void VideoWidget::closeVideo()
{
    closePlayer();

    // Forget the stream identity, credentials and recording state.
    m_url = "";
    m_acct = "";
    m_pass = "";
    m_bRecording = FALSE;

    // Drop the pending frame under the lock shared with paintGL().
    {
        QMutexLocker locker(&m_mutex);
        if (m_pRenderFrame)
        {
            av_frame_free(&m_pRenderFrame);
        }
    }

    // Repaint so the widget goes black.
    update();
}
BOOL VideoWidget::isRecording()
{
    // Recording state lives in the player; no player means not recording.
    return m_pPlayer ? m_pPlayer->isRecording() : FALSE;
}
void VideoWidget::makeCall()
{
    // Create a player matching the URL scheme of m_url, wire its signals
    // up, open the stream and start playback. Leaves m_pPlayer NULL when
    // the scheme cannot be resolved to a player.
    BOOL isFile = FALSE;  // was the int literal 0 — use BOOL constants consistently
    BOOL isRtsp = FALSE;

    if (isRtspUrl(m_url))
    {
        isRtsp = TRUE;
        m_pPlayer = new CRtspPlayer(this);
    }
    else if (isRtmpUrl(m_url))
    {
        m_pPlayer = new CRtmpPlayer(this);
    }
    else if (isHttpUrl(m_url))
    {
        // Probe the endpoint (2s timeout) to find out what it serves.
        HTTPCTT ctt;
        if (http_test(m_url.toStdString().c_str(), m_acct.toStdString().c_str(), m_pass.toStdString().c_str(), &ctt, 2*1000))
        {
            if (CTT_RTSP_TUNNELLED == ctt)
            {
                isRtsp = TRUE;
                m_pPlayer = new CRtspPlayer(this);
            }
            else if (CTT_FLV == ctt)
            {
                m_pPlayer = new CHttpFlvPlayer(this);
            }
            else if (CTT_MULTIPART == ctt)
            {
                m_pPlayer = new CHttpMjpegPlayer(this);
            }
            // Any other content type: no player, give up silently below.
        }
        else
        {
            // Probe failed; fall back to HTTP-FLV as the most common case.
            m_pPlayer = new CHttpFlvPlayer(this);
        }
    }
    else if (isSrtUrl(m_url))
    {
        m_pPlayer = new CSrtPlayer(this);
    }
    else
    {
        // Not a recognized URL scheme: treat it as a local media file.
        isFile = TRUE;
        m_pPlayer = new CFilePlayer(this);
    }

    if (NULL == m_pPlayer)
    {
        return;
    }

    // Player -> widget notifications, queued so they run on the GUI thread.
    connect(m_pPlayer, SIGNAL(notify(int)), this, SLOT(slotPlayerNotify(int)), Qt::QueuedConnection);
    connect(m_pPlayer, SIGNAL(snapshoted(AVFrame*)), this, SLOT(slotSnapshoted(AVFrame*)), Qt::QueuedConnection);
    connect(m_pPlayer, SIGNAL(imageReady(AVFrame*)), this, SLOT(slotImageReady(AVFrame*)), Qt::QueuedConnection);
    connect(m_pPlayer, SIGNAL(updateStatistics(int)), this, SIGNAL(updateStatistics(int)));

    if (m_pPlayer->open(m_url, 0))
    {
        m_pPlayer->setAuthInfo(m_acct, m_pass);
        m_pPlayer->setHWDecoding(getHWDecoding());

        if (isRtsp)
        {
            // RTSP-specific transport options.
            m_pPlayer->setRtpOverUdp(getRtpOverUdp());
            m_pPlayer->setRtpMulticast(getRtpMulticast());
#ifdef OVER_HTTP
            m_pPlayer->setRtspOverHttp(getRtspOverHttp(), getRtspOverHttpPort());
#endif
#ifdef OVER_WEBSOCKET
            m_pPlayer->setRtspOverWs(getRtspOverWs(), getRtspOverWsPort());
#endif
#ifdef BACKCHANNEL
            m_pPlayer->setBCFlag(m_nBackChannelFlag);
#endif
        }
        else if (isFile)
        {
            // Local files produce audio immediately; apply the cached mute now.
            setMute(m_bMute);
        }

        m_pPlayer->play();
    }
    else
    {
        // Open failed: discard the player (also stops the reconnect timer).
        closePlayer();
    }
}
void VideoWidget::mousePressEvent(QMouseEvent * event)
{
    // Selection only depends on which widget was clicked; silence the
    // unused-parameter warning the original code produced.
    Q_UNUSED(event);

    // Tell the owning view this video cell was selected.
    emit widgetSelecting(this);
}
BOOL VideoWidget::micphone()
{
// Toggle the audio backchannel (talk-back to the device) by restarting
// the session with the flag flipped; makeCall() applies it via setBCFlag().
// Returns TRUE when the backchannel is active after the toggle.
// Compiled out (always FALSE) without BACKCHANNEL.
#ifdef BACKCHANNEL
if (NULL == m_pPlayer)
{
return FALSE;
}
#if defined(ANDROID)
// Ask for microphone permission through the project's Java helper.
QJniObject str = QJniObject::fromString("android.permission.RECORD_AUDIO");
QJniObject::callStaticMethod<jint>("org/happytimesoft/util/HtUtil",
"requestPermission",
"(Landroid/content/Context;Ljava/lang/String;)I",
QNativeInterface::QAndroidApplication::context(),
str.object<jstring>());
#endif
// Tear down the current session, flip the flag, dial again.
closePlayer();
if (m_nBackChannelFlag)
{
m_nBackChannelFlag = 0;
}
else
{
m_nBackChannelFlag = 1;
}
makeCall();
if (m_pPlayer)
{
m_pPlayer->setBCDataFlag(m_nBackChannelFlag);
return m_pPlayer->getBCFlag();
}
#endif
return FALSE;
}
void VideoWidget::setMute(BOOL flag)
{
    // Cache the state even without a player; it is re-applied when audio
    // becomes ready (see slotPlayerNotify).
    m_bMute = flag;

    if (NULL == m_pPlayer)
    {
        return;
    }
    m_pPlayer->setVolume(flag ? HTVOLUME_MIN : HTVOLUME_MAX);
}
BOOL VideoWidget::isPlaying()
{
    // A paused stream still counts as "playing" for the UI.
    if (NULL == m_pPlayer)
    {
        return FALSE;
    }
    return m_pPlayer->isPlaying() || m_pPlayer->isPaused();
}
void VideoWidget::snapshot()
{
if (m_pPlayer)
{
m_pPlayer->snapshot(VIDEO_FMT_RGB24);
}
}
void VideoWidget::slotSnapshoted(AVFrame * frame)
{
    // Queued slot: receives the RGB24 snapshot requested by snapshot().
    // Wrap the frame data without copying and write it out as JPEG.
    QImage image = QImage(frame->data[0], frame->width, frame->height, frame->linesize[0], QImage::Format_RGB888);

    QString file = getSnapshotPath() + "/" + getTempFile(m_base, ".jpg");

    // save() already returns the success flag — no need for the original
    // if/else around two emit statements.
    emit snapshotResult(image.save(file, "JPG"));

    // NOTE(review): unlike slotImageReady(), the frame is never freed here.
    // Verify who owns snapshot frames — if the receiver does, this leaks
    // one AVFrame per snapshot.
}
BOOL VideoWidget::record()
{
    // Toggle recording; returns the resulting state.
    if (NULL == m_pPlayer)
    {
        return FALSE;
    }

    // Flip the current state: stop when recording, start otherwise.
    BOOL wasRecording = m_pPlayer->isRecording();
    if (wasRecording)
    {
        stopRecord();
    }
    else
    {
        startRecord();
    }

    // Cache the new state so it survives a reconnect (see slotPlayerNotify).
    m_bRecording = m_pPlayer->isRecording();
    return m_bRecording;
}
void VideoWidget::startRecord()
{
    // Ignore when there is no player or a recording is already running.
    if (NULL == m_pPlayer || m_pPlayer->isRecording())
    {
        return;
    }

    // Record into the configured directory under a timestamped AVI name.
    QString file = getRecordPath() + "/" + getTempFile(m_base, ".avi");
    m_pPlayer->record(file);
}
void VideoWidget::stopRecord()
{
    // Only meaningful while a player exists and is actually recording.
    if (m_pPlayer && m_pPlayer->isRecording())
    {
        m_pPlayer->stopRecord();
        emit recordResult(true);
    }
}
QRect VideoWidget::getVideoRenderRect(int videoW, int videoH)
{
    // Compute the viewport rectangle for a videoW x videoH frame, in
    // device pixels (glViewport() expects physical coordinates).
    qreal ratio = QGuiApplication::primaryScreen()->devicePixelRatio();
    int w = rect().width() * ratio;
    int h = rect().height() * ratio;

    // Guard against zero dimensions: the aspect math below divides by
    // videoH and h (the original code would divide by zero).
    if (getVideoRenderMode() == RENDER_MODE_KEEP && videoW > 0 && videoH > 0 && h > 0)
    {
        // Keep the original aspect ratio: letterbox or pillarbox.
        int nw, nh;
        double vratio = videoW / (double)videoH;
        double wratio = w / (double)h;

        if (vratio > wratio)
        {
            // Video is wider than the window: full width, bars top/bottom.
            nw = w;
            nh = w * videoH / videoW;
        }
        else
        {
            // Video is taller: full height, bars left/right.
            nw = h * videoW / videoH;
            nh = h;
        }

        // Build from position + size. The original setRight()/setBottom()
        // calls produced a rect one pixel too wide/tall, because
        // QRect::right() == left() + width() - 1 (likewise bottom()).
        return QRect((w - nw) / 2, (h - nh) / 2, nw, nh);
    }

    // Fill the whole window, ignoring the video aspect ratio.
    return QRect(0, 0, w, h);
}
void VideoWidget::slotImageReady(AVFrame * frame)
{
    // Queued from the player thread; this slot takes ownership of frame.
    QMutexLocker locker(&m_mutex);

    // Whatever paintGL() has not consumed yet is now stale.
    if (m_pRenderFrame)
    {
        av_frame_free(&m_pRenderFrame);
    }

    if (NULL == m_pPlayer)
    {
        // Player was torn down before this queued call arrived; discard.
        av_frame_free(&frame);
        return;
    }

    // Hand the frame to paintGL() and schedule a repaint on the GUI thread.
    m_pRenderFrame = frame;
    emit imageReady();
}
void VideoWidget::slotPlayerNotify(int event)
{
// Central player-event dispatcher (queued from the player thread).
// The event constants come from independent per-protocol enums
// (RTSP_/RTMP_/HTTP_FLV_/MJPEG_/SRT_); the branch order is kept exactly
// as written in case numeric values overlap across those enums.
if (event == RTSP_EVE_CONNFAIL ||
event == RTMP_EVE_CONNFAIL ||
event == HTTP_FLV_EVE_CONNFAIL ||
event == MJPEG_EVE_CONNFAIL ||
event == SRT_EVE_CONNFAIL)
{
// Connect failed: retry in 5 seconds.
m_timerReconn.start(5 * 1000);
}
else if (event == RTSP_EVE_CONNSUCC ||
event == MJPEG_EVE_CONNSUCC)
{
// Connected: re-apply the cached mute state.
setMute(m_bMute);
// Re-record after reconnect
if (m_bRecording)
{
startRecord();
}
}
else if (event == RTMP_EVE_VIDEOREADY ||
event == HTTP_FLV_EVE_VIDEOREADY ||
event == SRT_EVE_VIDEOREADY)
{
// Re-record after reconnect
if (m_bRecording)
{
startRecord();
}
}
else if (event == RTMP_EVE_AUDIOREADY ||
event == HTTP_FLV_EVE_AUDIOREADY ||
event == SRT_EVE_AUDIOREADY)
{
// Audio stream available: re-apply the cached mute state.
setMute(m_bMute);
}
else if (event == RTSP_EVE_NOSIGNAL ||
event == RTMP_EVE_NOSIGNAL ||
event == HTTP_FLV_EVE_NOSIGNAL ||
event == MJPEG_EVE_NOSIGNAL ||
event == SRT_EVE_NOSIGNAL)
{
// Signal lost: schedule a reconnect.
m_timerReconn.start(5 * 1000);
}
else if (event == RTSP_EVE_NODATA ||
event == RTMP_EVE_NODATA ||
event == HTTP_FLV_EVE_NODATA ||
event == MJPEG_EVE_NODATA ||
event == SRT_EVE_NODATA)
{
// Data stalled: schedule a reconnect.
m_timerReconn.start(5 * 1000);
}
else if (event == RTSP_EVE_RESUME ||
event == RTMP_EVE_RESUME ||
event == HTTP_FLV_EVE_RESUME ||
event == MJPEG_EVE_RESUME ||
event == SRT_EVE_RESUME)
{
// Stream recovered: cancel any pending reconnect.
m_timerReconn.stop();
}
else if (event == RTSP_EVE_STOPPED ||
event == RTMP_EVE_STOPPED ||
event == HTTP_FLV_EVE_STOPPED ||
event == MJPEG_EVE_STOPPED ||
event == SRT_EVE_STOPPED)
{
// Stream ended/stopped remotely: attempt to reconnect.
m_timerReconn.start(5 * 1000);
}
// Forward every event to the owning view for UI state updates.
emit callState(this, event);
}
void VideoWidget::slotReconn()
{
    // Reconnect timer fired: drop the old player and dial again.
    closePlayer();
    makeCall();
}
QString VideoWidget::getBaseName(QString &url)
{
    // Base name used for snapshot/record file names: host part for URLs,
    // file name without suffix for local files.
    if (!isUrl(url))
    {
        QFileInfo fileInfo(url);
        return fileInfo.baseName();
    }

    char host[100] = {'\0'};
    url_split(url.toStdString().c_str(), NULL, 0, NULL, 0, NULL, 0, host, sizeof(host), NULL, NULL, 0);
    return QString(host);
}
void VideoWidget::initializeGL()
{
// One-time GL setup: vertex/texcoord buffer, the YUV->RGB shader
// program, and three single-channel textures (Y, U, V planes) that
// paintGL() refills every frame.
initializeOpenGLFunctions();
glEnable(GL_DEPTH_TEST);
glEnable(GL_TEXTURE_2D);
// First 4 pairs: full-screen quad positions (triangle-fan order);
// last 4 pairs: the matching texture coordinates (vertically flipped
// relative to GL's bottom-left origin so the image appears upright).
static const GLfloat vertices[]
{
-1.0f, -1.0f,
-1.0f, +1.0f,
+1.0f, +1.0f,
+1.0f, -1.0f,
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
};
if (!vbo.create())
{
log_print(HT_LOG_ERR, "%s, vbo.create failed\r\n", __FUNCTION__);
}
if (!vbo.bind())
{
log_print(HT_LOG_ERR, "%s, vbo.bind failed\r\n", __FUNCTION__);
}
vbo.allocate(vertices, sizeof(vertices));
// Pass-through vertex shader: forwards position and texture coordinate.
QOpenGLShader * vshader = new QOpenGLShader(QOpenGLShader::Vertex, this);
const char * vsrc =
"attribute vec4 vertexIn; \n"
"attribute vec2 textureIn; \n"
"varying highp vec2 textureOut; \n"
"void main(void) \n"
"{ \n"
" gl_Position = vertexIn; \n"
" textureOut = textureIn; \n"
"}";
if (!vshader->compileSourceCode(vsrc))
{
log_print(HT_LOG_ERR, "%s, compile vertex source failed\r\n", __FUNCTION__);
}
// Fragment shader: sample the three planes and convert YUV -> RGB with
// BT.601-style coefficients (mat3 is written column-major in GLSL).
QOpenGLShader * fshader = new QOpenGLShader(QOpenGLShader::Fragment, this);
const char * fsrc =
"varying highp vec2 textureOut; \n"
"uniform sampler2D tex_y; \n"
"uniform sampler2D tex_u; \n"
"uniform sampler2D tex_v; \n"
"void main(void) \n"
"{ \n"
" lowp vec3 yuv; \n"
" lowp vec3 rgb; \n"
" yuv.x = texture2D(tex_y, textureOut).r; \n"
" yuv.y = texture2D(tex_u, textureOut).r - 0.5; \n"
" yuv.z = texture2D(tex_v, textureOut).r - 0.5; \n"
" rgb = mat3( 1, 1, 1, \n"
" 0, -0.39465, 2.03211, \n"
" 1.13983, -0.58060, 0) * yuv; \n"
" gl_FragColor = vec4(rgb, 1); \n"
"}";
if (!fshader->compileSourceCode(fsrc))
{
log_print(HT_LOG_ERR, "%s, compile fragment source failed\r\n", __FUNCTION__);
}
program = new QOpenGLShaderProgram(this);
if (!program->addShader(vshader))
{
log_print(HT_LOG_ERR, "%s, add vertex shader failed\r\n", __FUNCTION__);
}
if (!program->addShader(fshader))
{
log_print(HT_LOG_ERR, "%s, add fragment shader failed\r\n", __FUNCTION__);
}
// Pin the attribute locations to the VERTEXIN/TEXTUREIN indices used below.
program->bindAttributeLocation("vertexIn", VERTEXIN);
program->bindAttributeLocation("textureIn", TEXTUREIN);
if (!program->link())
{
log_print(HT_LOG_ERR, "%s, link failed. %s\r\n", __FUNCTION__, program->log().toStdString().c_str());
}
if (!program->bind())
{
log_print(HT_LOG_ERR, "%s, program bind failed\r\n", __FUNCTION__);
}
// Positions start at offset 0, texture coordinates after the 8 position
// floats; both are 2 floats per vertex.
program->enableAttributeArray(VERTEXIN);
program->enableAttributeArray(TEXTUREIN);
program->setAttributeBuffer(VERTEXIN, GL_FLOAT, 0, 2, 2*sizeof(GLfloat));
program->setAttributeBuffer(TEXTUREIN, GL_FLOAT, 8*sizeof(GLfloat), 2, 2*sizeof(GLfloat));
textureUniformY = program->uniformLocation("tex_y");
textureUniformU = program->uniformLocation("tex_u");
textureUniformV = program->uniformLocation("tex_v");
// NOTE(review): these textures are heap-allocated with no QObject parent;
// ensure they are deleted (not just destroy()ed) when the widget dies.
textureY = new QOpenGLTexture(QOpenGLTexture::Target2D);
textureU = new QOpenGLTexture(QOpenGLTexture::Target2D);
textureV = new QOpenGLTexture(QOpenGLTexture::Target2D);
if (!textureY->create())
{
log_print(HT_LOG_ERR, "%s, textureY create failed\r\n", __FUNCTION__);
}
if (!textureU->create())
{
log_print(HT_LOG_ERR, "%s, textureU create failed\r\n", __FUNCTION__);
}
if (!textureV->create())
{
log_print(HT_LOG_ERR, "%s, textureV create failed\r\n", __FUNCTION__);
}
// Raw GL texture ids, used with glBindTexture() in paintGL().
idY = textureY->textureId();
idU = textureU->textureId();
idV = textureV->textureId();
glClearColor(0.0, 0.0, 0.0, 1.0f);
}
void VideoWidget::paintGL()
{
// Render the pending planar YUV frame (half-size U/V planes — presumably
// YUV420P; confirm against the decoder's output format) through the
// shader program built in initializeGL().
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// m_pRenderFrame is shared with slotImageReady()/closeVideo().
QMutexLocker locker(&m_mutex);
if (NULL == m_pRenderFrame)
{
return;
}
int videoW = m_pRenderFrame->width;
int videoH = m_pRenderFrame->height;
// Restrict drawing to the aspect-corrected target rectangle.
QRect rect = getVideoRenderRect(videoW, videoH);
glViewport(rect.left(), rect.top(), rect.width(), rect.height());
// Upload the Y plane to texture unit 0.
// NOTE(review): the uploads pass data[] assuming tightly packed planes
// (linesize == width); frames with padded strides would render skewed —
// verify the decoder always delivers packed planes.
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, idY);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, videoW, videoH, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pRenderFrame->data[0]);
// Upload the U plane (half width/height) to texture unit 1.
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, idU);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, videoW >> 1, videoH >> 1, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pRenderFrame->data[1]);
// Upload the V plane (half width/height) to texture unit 2.
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, idV);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, videoW >> 1, videoH >> 1, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pRenderFrame->data[2]);
// Bind the samplers to the texture units used above and draw the quad.
glUniform1i(textureUniformY, 0);
glUniform1i(textureUniformU, 1);
glUniform1i(textureUniformV, 2);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
}