mirror of https://github.com/f4exb/sdrangel.git

DATV demod: player cleanup (4)

commit 8ecdb3fdc1
parent d22cd45b14
@@ -219,7 +219,7 @@ DATVDemodGUI::DATVDemodGUI(PluginAPI* objPluginAPI, DeviceUISet *deviceUISet, Ba
     m_objDATVDemod->setCNRMeter(ui->cnrMeter);
     m_objDATVDemod->SetVideoRender(ui->screenTV_2);

-    connect(m_objDATVDemod->getVideoStream(), &DATVideostream::onDataPackets, this, &DATVDemodGUI::on_StreamDataAvailable);
+    connect(m_objDATVDemod->getVideoStream(), &DATVideostream::fifoData, this, &DATVDemodGUI::on_StreamDataAvailable);
     connect(ui->screenTV_2, &DATVideoRender::onMetaDataChanged, this, &DATVDemodGUI::on_StreamMetaDataChanged);

     m_intPreviousDecodedData=0;
@@ -704,7 +704,7 @@ void DATVDemodGUI::on_chkAllowDrift_clicked()

 void DATVDemodGUI::on_fullScreen_clicked()
 {
-    ui->screenTV_2->SetFullScreen(true);
+    ui->screenTV_2->setFullScreen(true);
 }

 void DATVDemodGUI::on_mouseEvent(QMouseEvent* obj)
@@ -75,7 +75,7 @@ bool DATVideoRender::eventFilter(QObject *obj, QEvent *event)
 {
     if (event->type() == QEvent::MouseButtonRelease)
     {
-        SetFullScreen(false);
+        setFullScreen(false);
         return true;
     }
     else
@@ -85,7 +85,7 @@ bool DATVideoRender::eventFilter(QObject *obj, QEvent *event)
     }
 }

-void DATVideoRender::SetFullScreen(bool fullScreen)
+void DATVideoRender::setFullScreen(bool fullScreen)
 {
     if (m_isFullScreen == fullScreen)
     {
@@ -94,7 +94,7 @@ void DATVideoRender::SetFullScreen(bool fullScreen)

     if (fullScreen == true)
     {
-        qDebug("DATVideoRender::SetFullScreen: go to fullscreen");
+        qDebug("DATVideoRender::setFullScreen: go to fullscreen");
         // m_originalWindowFlags = this->windowFlags();
         // m_originalSize = this->size();
         // m_parentWidget->layout()->removeWidget(this);
@@ -108,7 +108,7 @@ void DATVideoRender::SetFullScreen(bool fullScreen)
     }
     else
     {
-        qDebug("DATVideoRender::SetFullScreen: come back from fullscreen");
+        qDebug("DATVideoRender::setFullScreen: come back from fullscreen");
         // //this->setParent(m_parentWidget);
         // this->resize(m_originalSize);
         // this->overrideWindowFlags(m_originalWindowFlags);
@@ -122,16 +122,16 @@ void DATVideoRender::SetFullScreen(bool fullScreen)
     }
 }

-static int ReadFunction(void *opaque, uint8_t *buf, int buf_size)
+int DATVideoRender::ReadFunction(void *opaque, uint8_t *buf, int buf_size)
 {
-    QIODevice *stream = reinterpret_cast<QIODevice *>(opaque);
+    DATVideostream *stream = reinterpret_cast<DATVideostream *>(opaque);
     int nbBytes = stream->read((char *)buf, buf_size);
     return nbBytes;
 }

-static int64_t SeekFunction(void *opaque, int64_t offset, int whence)
+int64_t DATVideoRender::SeekFunction(void *opaque, int64_t offset, int whence)
 {
-    QIODevice *stream = reinterpret_cast<QIODevice *>(opaque);
+    DATVideostream *stream = reinterpret_cast<DATVideostream *>(opaque);

     if (whence == AVSEEK_SIZE)
     {
@@ -151,13 +151,13 @@ static int64_t SeekFunction(void *opaque, int64_t offset, int whence)
     return stream->pos();
 }

-void DATVideoRender::ResetMetaData()
+void DATVideoRender::resetMetaData()
 {
     m_metaData.reset();
     emit onMetaDataChanged(new DataTSMetaData2(m_metaData));
 }

-bool DATVideoRender::PreprocessStream()
+bool DATVideoRender::preprocessStream()
 {
     AVDictionary *opts = nullptr;
     AVCodec *videoCodec = nullptr;
@@ -166,15 +166,14 @@ bool DATVideoRender::PreprocessStream()
     int intRet = -1;
     char *buffer = nullptr;

-    ResetMetaData();
+    resetMetaData();

     //Identify stream

     if (avformat_find_stream_info(m_formatCtx, nullptr) < 0)
     {
         avformat_close_input(&m_formatCtx);
-        m_formatCtx = nullptr;
-        qDebug() << "DATVideoRender::PreprocessStream cannot find stream info";
+        qDebug() << "DATVideoRender::preprocessStream cannot find stream info";
         return false;
     }

@@ -184,8 +183,7 @@ bool DATVideoRender::PreprocessStream()
     if (intRet < 0)
     {
         avformat_close_input(&m_formatCtx);
-        m_formatCtx = nullptr;
-        qDebug() << "DATVideoRender::PreprocessStream cannot find video stream";
+        qDebug() << "DATVideoRender::preprocessStream cannot find video stream";
         return false;
     }

@@ -195,7 +193,7 @@ bool DATVideoRender::PreprocessStream()
     intRet = av_find_best_stream(m_formatCtx, AVMEDIA_TYPE_AUDIO, -1, -1, nullptr, 0);

     if (intRet < 0) {
-        qDebug() << "DATVideoRender::PreprocessStream cannot find audio stream";
+        qDebug() << "DATVideoRender::preprocessStream cannot find audio stream";
     }

     m_audioStreamIndex = intRet;
@@ -211,8 +209,6 @@ bool DATVideoRender::PreprocessStream()
     m_videoDecoderCtx = avcodec_alloc_context3(nullptr);
     avcodec_parameters_to_context(m_videoDecoderCtx, parms);

-    // m_videoDecoderCtx = m_formatCtx->streams[m_videoStreamIndex]->codec; // old style
-
     //Meta Data

     m_metaData.PID = m_formatCtx->streams[m_videoStreamIndex]->id;
@@ -249,13 +245,12 @@ bool DATVideoRender::PreprocessStream()
     if (videoCodec == nullptr)
     {
         avformat_close_input(&m_formatCtx);
-        m_formatCtx = nullptr;
-        qDebug() << "DATVideoRender::PreprocessStream cannot find associated video CODEC";
+        qDebug() << "DATVideoRender::preprocessStream cannot find associated video CODEC";
         return false;
     }
     else
     {
-        qDebug() << "DATVideoRender::PreprocessStream: video CODEC found: " << videoCodec->name;
+        qDebug() << "DATVideoRender::preprocessStream: video CODEC found: " << videoCodec->name;
     }

     av_dict_set(&opts, "refcounted_frames", "1", 0);
@@ -263,8 +258,7 @@ bool DATVideoRender::PreprocessStream()
     if (avcodec_open2(m_videoDecoderCtx, videoCodec, &opts) < 0)
     {
         avformat_close_input(&m_formatCtx);
-        m_formatCtx = nullptr;
-        qDebug() << "DATVideoRender::PreprocessStream cannot open associated video CODEC";
+        qDebug() << "DATVideoRender::preprocessStream cannot open associated video CODEC";
         return false;
     }

@@ -274,8 +268,7 @@ bool DATVideoRender::PreprocessStream()
     if (!m_frame)
     {
         avformat_close_input(&m_formatCtx);
-        m_formatCtx = nullptr;
-        qDebug() << "DATVideoRender::PreprocessStream cannot allocate frame";
+        qDebug() << "DATVideoRender::preprocessStream cannot allocate frame";
         return false;
     }

@@ -289,7 +282,7 @@ bool DATVideoRender::PreprocessStream()

     QString metaStr;
     m_metaData.formatString(metaStr);
-    qDebug() << "DATVideoRender::PreprocessStream: video: " << metaStr;
+    qDebug() << "DATVideoRender::preprocessStream: video: " << metaStr;

     emit onMetaDataChanged(new DataTSMetaData2(m_metaData));

@@ -308,7 +301,7 @@ bool DATVideoRender::PreprocessStream()

     //m_audioDecoderCtx = m_formatCtx->streams[m_audioStreamIndex]->codec; // old style

-    qDebug() << "DATVideoRender::PreprocessStream: audio: "
+    qDebug() << "DATVideoRender::preprocessStream: audio: "
         << " channels: " << m_audioDecoderCtx->channels
         << " channel_layout: " << m_audioDecoderCtx->channel_layout
         << " sample_rate: " << m_audioDecoderCtx->sample_rate
@@ -319,16 +312,16 @@ bool DATVideoRender::PreprocessStream()

     if (audioCodec == nullptr)
     {
-        qDebug() << "DATVideoRender::PreprocessStream cannot find associated audio CODEC";
+        qDebug() << "DATVideoRender::preprocessStream cannot find associated audio CODEC";
         m_audioStreamIndex = -1; // invalidate audio
     }
     else
     {
-        qDebug() << "DATVideoRender::PreprocessStream: audio CODEC found: " << audioCodec->name;
+        qDebug() << "DATVideoRender::preprocessStream: audio CODEC found: " << audioCodec->name;

         if (avcodec_open2(m_audioDecoderCtx, audioCodec, nullptr) < 0)
         {
-            qDebug() << "DATVideoRender::PreprocessStream cannot open associated audio CODEC";
+            qDebug() << "DATVideoRender::preprocessStream cannot open associated audio CODEC";
             m_audioStreamIndex = -1; // invalidate audio
         }
         else
@@ -341,7 +334,7 @@ bool DATVideoRender::PreprocessStream()
     return true;
 }

-bool DATVideoRender::OpenStream(DATVideostream *device)
+bool DATVideoRender::openStream(DATVideostream *device)
 {
     int ioBufferSize = DATVideostream::m_defaultMemoryLimit;
     unsigned char *ptrIOBuffer = nullptr;
@@ -349,13 +342,13 @@ bool DATVideoRender::OpenStream(DATVideostream *device)

     if (device == nullptr)
     {
-        qDebug() << "DATVideoRender::OpenStream QIODevice is nullptr";
+        qDebug() << "DATVideoRender::openStream QIODevice is nullptr";
         return false;
     }

     if (m_isOpen)
     {
-        qDebug() << "DATVideoRender::OpenStream already open";
+        qDebug() << "DATVideoRender::openStream already open";
         return false;
     }

@@ -363,7 +356,7 @@ bool DATVideoRender::OpenStream(DATVideostream *device)

     if (device->bytesAvailable() <= 0)
     {
-        qDebug() << "DATVideoRender::OpenStream no data available";
+        qDebug() << "DATVideoRender::openStream no data available";
         m_metaData.OK_Data = false;
         emit onMetaDataChanged(new DataTSMetaData2(m_metaData));
         return false;
@@ -374,7 +367,7 @@ bool DATVideoRender::OpenStream(DATVideostream *device)

     if (!device->open(QIODevice::ReadOnly))
     {
-        qDebug() << "DATVideoRender::OpenStream cannot open QIODevice";
+        qDebug() << "DATVideoRender::openStream cannot open QIODevice";
         return false;
     }

@@ -384,7 +377,7 @@ bool DATVideoRender::OpenStream(DATVideostream *device)

     if (m_formatCtx == nullptr)
     {
-        qDebug() << "DATVideoRender::OpenStream cannot alloc format FFMPEG context";
+        qDebug() << "DATVideoRender::openStream cannot alloc format FFMPEG context";
         return false;
     }

@@ -395,9 +388,9 @@ bool DATVideoRender::OpenStream(DATVideostream *device)
         ioBufferSize,
         0,
         reinterpret_cast<void *>(device),
-        &ReadFunction,
+        &DATVideoRender::ReadFunction,
         nullptr,
-        &SeekFunction
+        &DATVideoRender::SeekFunction
     );

     m_formatCtx->pb = ioCtx;
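For context on the hunk above: avio_alloc_context() takes an av_malloc()-allocated buffer, an opaque pointer and plain function pointers for the read/seek callbacks, which is why ReadFunction/SeekFunction are static members (they need no this pointer) and recover the stream object from opaque. A minimal, self-contained sketch of this custom-I/O wiring, assuming a QIODevice-backed source; everything except the FFmpeg calls is illustrative, not the project's exact code:

    extern "C" {
    #include <libavformat/avformat.h>
    #include <libavutil/mem.h>
    }
    #include <QIODevice>

    // Read callback: FFmpeg hands back the opaque pointer given to avio_alloc_context().
    static int readPacket(void *opaque, uint8_t *buf, int bufSize)
    {
        QIODevice *dev = reinterpret_cast<QIODevice *>(opaque);
        return static_cast<int>(dev->read(reinterpret_cast<char *>(buf), bufSize));
    }

    // Builds an AVFormatContext that reads from 'dev' instead of a file or URL.
    static AVFormatContext *openCustomInput(QIODevice *dev, int ioBufferSize)
    {
        unsigned char *ioBuffer = reinterpret_cast<unsigned char *>(av_malloc(ioBufferSize));

        AVIOContext *ioCtx = avio_alloc_context(
            ioBuffer, ioBufferSize,
            0,                             // write_flag = 0: read-only
            reinterpret_cast<void *>(dev), // opaque, passed back to the callbacks
            &readPacket,                   // read callback
            nullptr,                       // no write callback
            nullptr);                      // no seek callback in this sketch

        AVFormatContext *fmtCtx = avformat_alloc_context();
        fmtCtx->pb = ioCtx;                // use the custom I/O context
        fmtCtx->flags |= AVFMT_FLAG_CUSTOM_IO;

        if (avformat_open_input(&fmtCtx, nullptr, nullptr, nullptr) < 0) {
            return nullptr;                // fmtCtx is freed by avformat_open_input on failure
        }
        return fmtCtx;
    }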
@@ -405,22 +398,22 @@ bool DATVideoRender::OpenStream(DATVideostream *device)

     if (avformat_open_input(&m_formatCtx, nullptr, nullptr, nullptr) < 0)
     {
-        qDebug() << "DATVideoRender::OpenStream cannot open stream";
+        qDebug() << "DATVideoRender::openStream cannot open stream";
         return false;
     }

-    if (!PreprocessStream())
+    if (!preprocessStream())
     {
         return false;
     }

-    qDebug("DATVideoRender::OpenStream: successful");
+    qDebug("DATVideoRender::openStream: successful");
     m_isOpen = true;

     return true;
 }

-bool DATVideoRender::RenderStream()
+bool DATVideoRender::renderStream()
 {
     AVPacket packet;
     int gotFrame;
@@ -428,17 +421,24 @@ bool DATVideoRender::RenderStream()

     if (!m_isOpen)
     {
-        qDebug() << "DATVideoRender::RenderStream Stream not open";
+        qDebug() << "DATVideoRender::renderStream Stream not open";
         return false;
     }

     //********** Rendering **********
     if (av_read_frame(m_formatCtx, &packet) < 0)
     {
-        qDebug() << "DATVideoRender::RenderStream reading packet error";
+        qDebug() << "DATVideoRender::renderStream reading packet error";
         return false;
     }

+    if (packet.size == 0)
+    {
+        qDebug() << "DATVideoRender::renderStream packet empty";
+        av_packet_unref(&packet);
+        return true;
+    }
+
     //Video channel
     if ((packet.stream_index == m_videoStreamIndex) && (!m_videoMute))
     {
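The new packet.size == 0 early return (and the av_packet_unref() calls added to the error paths in the hunks that follow) apply the usual av_read_frame() rule: every packet it fills holds a reference that has to be released on every exit path, or the packet buffer leaks. A minimal sketch of that discipline, independent of this class:

    extern "C" {
    #include <libavformat/avformat.h>
    }

    // Sketch of the read/unref pattern the added lines enforce (illustrative only).
    bool processOnePacket(AVFormatContext *fmtCtx, int videoStreamIndex)
    {
        AVPacket packet;

        if (av_read_frame(fmtCtx, &packet) < 0) {
            return false;                 // nothing was read, nothing to release
        }

        if (packet.size == 0) {
            av_packet_unref(&packet);     // release before the early return
            return true;                  // empty packet: skip it, not an error
        }

        if (packet.stream_index != videoStreamIndex) {
            av_packet_unref(&packet);     // packets we do not handle are released too
            return true;
        }

        // ... decode the packet here ...

        av_packet_unref(&packet);         // normal path releases it as well
        return true;
    }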
@@ -447,7 +447,7 @@ bool DATVideoRender::RenderStream()

         gotFrame = 0;

-        if (new_decode(m_videoDecoderCtx, m_frame, &gotFrame, &packet) >= 0)
+        if (newDecode(m_videoDecoderCtx, m_frame, &gotFrame, &packet) >= 0)
         {
             m_videoDecodeOK = true;

@@ -485,22 +485,24 @@ bool DATVideoRender::RenderStream()

             if (sws_init_context(m_swsCtx, nullptr, nullptr) < 0)
             {
-                qDebug() << "DATVideoRender::RenderStream cannont init video data converter";
+                qDebug() << "DATVideoRender::renderStream cannont init video data converter";
                 m_swsCtx = nullptr;
+                av_packet_unref(&packet);
                 return false;
             }

             if ((m_currentRenderHeight > 0) && (m_currentRenderWidth > 0))
             {
-                //av_freep(&m_pbytDecodedData[0]);
-                //av_freep(&m_pintDecodedLineSize[0]);
+                //av_freep(&m_decodedData[0]);
+                //av_freep(&m_decodedLineSize[0]);
             }

-            if (av_image_alloc(m_pbytDecodedData, m_pintDecodedLineSize, m_frame->width, m_frame->height, AV_PIX_FMT_RGB24, 1) < 0)
+            if (av_image_alloc(m_decodedData, m_decodedLineSize, m_frame->width, m_frame->height, AV_PIX_FMT_RGB24, 1) < 0)
             {
-                qDebug() << "DATVideoRender::RenderStream cannont init video image buffer";
+                qDebug() << "DATVideoRender::renderStream cannont init video image buffer";
                 sws_freeContext(m_swsCtx);
                 m_swsCtx = nullptr;
+                av_packet_unref(&packet);
                 return false;
             }

@@ -521,13 +523,14 @@ bool DATVideoRender::RenderStream()

             //Frame rendering

-            if (sws_scale(m_swsCtx, m_frame->data, m_frame->linesize, 0, m_frame->height, m_pbytDecodedData, m_pintDecodedLineSize) < 0)
+            if (sws_scale(m_swsCtx, m_frame->data, m_frame->linesize, 0, m_frame->height, m_decodedData, m_decodedLineSize) < 0)
             {
-                qDebug() << "DATVideoRender::RenderStream error converting video frame to RGB";
+                qDebug() << "DATVideoRender::renderStream error converting video frame to RGB";
+                av_packet_unref(&packet);
                 return false;
             }

-            renderImage(m_pbytDecodedData[0]);
+            renderImage(m_decodedData[0]);
             av_frame_unref(m_frame);
             m_frameCount++;
         }
@@ -535,7 +538,7 @@ bool DATVideoRender::RenderStream()
         else
         {
             m_videoDecodeOK = false;
-            // qDebug() << "DATVideoRender::RenderStream video decode error";
+            // qDebug() << "DATVideoRender::renderStream video decode error";
         }
     }
     // Audio channel
@@ -545,7 +548,7 @@ bool DATVideoRender::RenderStream()
         av_frame_unref(m_frame);
         gotFrame = 0;

-        if (new_decode(m_audioDecoderCtx, m_frame, &gotFrame, &packet) >= 0)
+        if (newDecode(m_audioDecoderCtx, m_frame, &gotFrame, &packet) >= 0)
         {
             m_audioDecodeOK = true;

@@ -555,7 +558,7 @@ bool DATVideoRender::RenderStream()
             av_samples_alloc((uint8_t**) &audioBuffer, nullptr, 2, m_frame->nb_samples, AV_SAMPLE_FMT_S16, 0);
             int samples_per_channel = swr_convert(m_audioSWR, (uint8_t**) &audioBuffer, m_frame->nb_samples, (const uint8_t**) m_frame->data, m_frame->nb_samples);
             if (samples_per_channel < m_frame->nb_samples) {
-                qDebug("DATVideoRender::RenderStream: converted samples missing %d/%d returned", samples_per_channel, m_frame->nb_samples);
+                qDebug("DATVideoRender::renderStream: converted samples missing %d/%d returned", samples_per_channel, m_frame->nb_samples);
             }

             // direct writing:
@@ -564,7 +567,7 @@ bool DATVideoRender::RenderStream()
             });
             int ret = m_audioFifo->write((const quint8*) &audioBuffer[0], samples_per_channel);
             if (ret < samples_per_channel) {
-                // qDebug("DATVideoRender::RenderStream: audio samples missing %d/%d written", ret, samples_per_channel);
+                // qDebug("DATVideoRender::renderStream: audio samples missing %d/%d written", ret, samples_per_channel);
             }

             // buffered writing:
@@ -582,7 +585,7 @@ bool DATVideoRender::RenderStream()
             // });
             // int ret = m_audioFifo->write((const quint8*) &m_audioFifoBuffer[0], m_audioFifoBufferSize);
             // if (ret < m_audioFifoBufferSize) {
-            //     qDebug("DATVideoRender::RenderStream: audio samples missing %d/%d written", ret, m_audioFifoBufferSize);
+            //     qDebug("DATVideoRender::renderStream: audio samples missing %d/%d written", ret, m_audioFifoBufferSize);
             // }
             // std::copy(&audioBuffer[2*remainder], &audioBuffer[2*samples_per_channel], &m_audioFifoBuffer[0]);
             // m_audioFifoBufferIndex = samples_per_channel - remainder;
@@ -594,7 +597,7 @@ bool DATVideoRender::RenderStream()
         else
         {
             m_audioDecodeOK = false;
-            // qDebug("DATVideoRender::RenderStream: audio decode error");
+            // qDebug("DATVideoRender::renderStream: audio decode error");
         }
     }

@@ -639,48 +642,40 @@ void DATVideoRender::setResampler()
         << " out_sample_fmt: " << AV_SAMPLE_FMT_S16;
 }

-bool DATVideoRender::CloseStream(QIODevice *device)
+bool DATVideoRender::closeStream(QIODevice *device)
 {
-    qDebug("DATVideoRender::CloseStream");
+    qDebug("DATVideoRender::closeStream");

     if (!device)
     {
-        qDebug() << "DATVideoRender::CloseStream QIODevice is nullptr";
+        qDebug() << "DATVideoRender::closeStream QIODevice is nullptr";
         return false;
     }

     if (!m_isOpen)
     {
-        qDebug() << "DATVideoRender::CloseStream Stream not open";
+        qDebug() << "DATVideoRender::closeStream Stream not open";
         return false;
     }

     if (!m_formatCtx)
     {
-        qDebug() << "DATVideoRender::CloseStream FFMEG Context is not initialized";
+        qDebug() << "DATVideoRender::closeStream FFMEG Context is not initialized";
         return false;
     }

     avformat_close_input(&m_formatCtx);
-    m_formatCtx=nullptr;

-    if (m_videoDecoderCtx)
-    {
-        avcodec_close(m_videoDecoderCtx);
-        m_videoDecoderCtx = nullptr;
+    if (m_videoDecoderCtx) {
+        avcodec_free_context(&m_videoDecoderCtx);
     }

-    if (m_audioDecoderCtx)
-    {
+    if (m_audioDecoderCtx) {
         avcodec_free_context(&m_audioDecoderCtx);
-        avcodec_close(m_audioDecoderCtx);
-        m_audioDecoderCtx = nullptr;
     }

-    if (m_audioSWR)
-    {
+    if (m_audioSWR) {
         swr_free(&m_audioSWR);
-        m_audioSWR = nullptr;
     }

     if (m_frame)
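The rewritten closeStream() leans on the fact that avcodec_free_context() closes the codec, frees the context and writes nullptr back through the pointer it is given, just as avformat_close_input() does for the format context, so the separate avcodec_close() calls and manual nullptr assignments become redundant. A hedged, illustrative sketch of the resulting teardown order:

    extern "C" {
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    }

    // Illustrative teardown matching the closeStream() changes above.
    void teardownCodecs(AVFormatContext *&fmtCtx, AVCodecContext *&videoCtx, AVCodecContext *&audioCtx)
    {
        if (fmtCtx) {
            avformat_close_input(&fmtCtx);   // closes the input and sets fmtCtx to nullptr
        }
        if (videoCtx) {
            avcodec_free_context(&videoCtx); // closes the codec, frees the context, nulls the pointer
        }
        if (audioCtx) {
            avcodec_free_context(&audioCtx);
        }
    }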
@@ -700,7 +695,7 @@ bool DATVideoRender::CloseStream(QIODevice *device)
     m_currentRenderWidth = -1;
     m_currentRenderHeight = -1;

-    ResetMetaData();
+    resetMetaData();

     return true;
 }
@@ -709,7 +704,7 @@ bool DATVideoRender::CloseStream(QIODevice *device)
 * Replacement of deprecated avcodec_decode_video2 with the same signature
 * https://blogs.gentoo.org/lu_zero/2016/03/29/new-avcodec-api/
 */
-int DATVideoRender::new_decode(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *pkt)
+int DATVideoRender::newDecode(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *pkt)
 {
     int ret;

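newDecode() (formerly new_decode()) keeps the avcodec_decode_video2()-style signature but, per the linked article, is built on the send/receive API. A minimal sketch of such a wrapper; it assumes at most one frame is pulled per call, which is how the caller above uses it, and is illustrative rather than the project's exact implementation:

    extern "C" {
    #include <libavcodec/avcodec.h>
    }

    // Sketch of a decode_video2-compatible wrapper over the send/receive API.
    int decodeCompat(AVCodecContext *avctx, AVFrame *frame, int *got_frame, const AVPacket *pkt)
    {
        *got_frame = 0;

        if (pkt) {
            int ret = avcodec_send_packet(avctx, pkt);
            // EAGAIN: the decoder wants frames drained first; AVERROR_EOF: decoder already flushed.
            if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
                return ret;
            }
        }

        int ret = avcodec_receive_frame(avctx, frame);
        if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
            return ret;
        }
        if (ret >= 0) {
            *got_frame = 1; // a decoded frame is now available in 'frame'
        }

        return 0;
    }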
@@ -110,12 +110,12 @@ class DATVideoRender : public TVScreen
     explicit DATVideoRender(QWidget *parent);
     ~DATVideoRender();

-    void SetFullScreen(bool blnFullScreen);
+    void setFullScreen(bool blnFullScreen);

     void setAudioFIFO(AudioFifo *fifo) { m_audioFifo = fifo; }
-    bool OpenStream(DATVideostream *objDevice);
-    bool RenderStream();
-    bool CloseStream(QIODevice *objDevice);
+    bool openStream(DATVideostream *objDevice);
+    bool renderStream();
+    bool closeStream(QIODevice *objDevice);

     int getVideoStreamIndex() const { return m_videoStreamIndex; }
     int getAudioStreamIndex() const { return m_audioStreamIndex; }
@@ -128,16 +128,12 @@ class DATVideoRender : public TVScreen
     bool getVideoDecodeOK() const { return m_videoDecodeOK; }

 private:
-
     struct DataTSMetaData2 m_metaData;
     QWidget *m_parentWidget;
     Qt::WindowFlags m_originalWindowFlags;
     QSize m_originalSize;
-
     bool m_isFullScreen;
-
     bool m_isOpen;
-
     SwsContext *m_swsCtx;
     AVFormatContext *m_formatCtx;
     AVCodecContext *m_videoDecoderCtx;
@@ -153,8 +149,8 @@ class DATVideoRender : public TVScreen
     bool m_videoMute;
     float m_audioVolume;

-    uint8_t *m_pbytDecodedData[4];
-    int m_pintDecodedLineSize[4];
+    uint8_t *m_decodedData[4];
+    int m_decodedLineSize[4];

     int m_frameCount;
     int m_videoStreamIndex;
@@ -166,10 +162,13 @@ class DATVideoRender : public TVScreen
     bool m_audioDecodeOK;
     bool m_videoDecodeOK;

-    bool PreprocessStream();
-    void ResetMetaData();
+    static int ReadFunction(void *opaque, uint8_t *buf, int buf_size);
+    static int64_t SeekFunction(void *opaque, int64_t offset, int whence);

-    int new_decode(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *pkt);
+    bool preprocessStream();
+    void resetMetaData();
+
+    int newDecode(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *pkt);
     void setResampler();

 protected:
@@ -214,7 +213,7 @@ class DATVideoRenderThread : public QThread
             return;
         }

-        m_renderingVideo = m_renderer->OpenStream(m_stream);
+        m_renderingVideo = m_renderer->openStream(m_stream);

         if (!m_renderingVideo) {
             return;
@@ -222,12 +221,12 @@ class DATVideoRenderThread : public QThread

         while ((m_renderingVideo == true) && (m_renderer))
         {
-            if (!m_renderer->RenderStream()) {
+            if (!m_renderer->renderStream()) {
                 break;
             }
         }

-        m_renderer->CloseStream(m_stream);
+        m_renderer->closeStream(m_stream);
         m_renderingVideo = false;
     }

@@ -20,51 +20,51 @@
 #include <stdio.h>

 DATVideostream::DATVideostream():
-    m_objMutex(QMutex::NonRecursive)
+    m_mutex(QMutex::NonRecursive)
 {
     cleanUp();
-    m_intTotalReceived = 0;
-    m_intPacketReceived = 0;
-    m_intMemoryLimit = m_defaultMemoryLimit;
+    m_totalReceived = 0;
+    m_packetReceived = 0;
+    m_memoryLimit = m_defaultMemoryLimit;
     m_multiThreaded = false;
     m_threadTimeout = -1;

-    m_objeventLoop.connect(this,SIGNAL(onDataAvailable()), &m_objeventLoop, SLOT(quit()),Qt::QueuedConnection);
+    m_eventLoop.connect(this, SIGNAL(dataAvailable()), &m_eventLoop, SLOT(quit()), Qt::QueuedConnection);
 }

 DATVideostream::~DATVideostream()
 {
-    m_objeventLoop.disconnect(this,SIGNAL(onDataAvailable()), &m_objeventLoop, SLOT(quit()));
+    m_eventLoop.disconnect(this, SIGNAL(dataAvailable()), &m_eventLoop, SLOT(quit()));
     cleanUp();
 }

 void DATVideostream::cleanUp()
 {
-    if (m_objFIFO.size() > 0) {
-        m_objFIFO.clear();
+    if (m_fifo.size() > 0) {
+        m_fifo.clear();
     }

-    if (m_objeventLoop.isRunning()) {
-        m_objeventLoop.exit();
+    if (m_eventLoop.isRunning()) {
+        m_eventLoop.exit();
     }

-    m_intBytesAvailable = 0;
-    m_intBytesWaiting = 0;
-    m_intPercentBuffer = 0;
+    m_bytesAvailable = 0;
+    m_bytesWaiting = 0;
+    m_percentBuffer = 0;
 }

 void DATVideostream::resetTotalReceived()
 {
-    m_intTotalReceived = 0;
-    emit onDataPackets(&m_intBytesWaiting, &m_intPercentBuffer, &m_intTotalReceived);
+    m_totalReceived = 0;
+    emit fifoData(&m_bytesWaiting, &m_percentBuffer, &m_totalReceived);
 }

 void DATVideostream::setMultiThreaded(bool multiThreaded)
 {
     if (multiThreaded)
     {
-        if (m_objeventLoop.isRunning()) {
-            m_objeventLoop.exit();
+        if (m_eventLoop.isRunning()) {
+            m_eventLoop.exit();
         }
     }

@@ -77,34 +77,30 @@ int DATVideostream::pushData(const char * chrData, int intSize)
         return 0;
     }

-    m_objMutex.lock();
+    m_mutex.lock();

-    m_intPacketReceived++;
-    m_intBytesWaiting += intSize;
+    m_packetReceived++;
+    m_bytesWaiting += intSize;

-    if (m_intBytesWaiting > m_intMemoryLimit) {
-        m_intBytesWaiting -= m_objFIFO.dequeue().size();
+    if (m_bytesWaiting > m_memoryLimit) {
+        m_bytesWaiting -= m_fifo.dequeue().size();
     }

-    m_objFIFO.enqueue(QByteArray(chrData,intSize));
-    m_intBytesAvailable = m_objFIFO.head().size();
-    m_intTotalReceived += intSize;
+    m_fifo.enqueue(QByteArray(chrData,intSize));
+    m_bytesAvailable = m_fifo.head().size();
+    m_totalReceived += intSize;

-    m_objMutex.unlock();
+    m_mutex.unlock();

-    if (m_objeventLoop.isRunning()) {
-        emit onDataAvailable();
+    if (m_eventLoop.isRunning()) {
+        emit dataAvailable();
     }

-    if (m_intPacketReceived % m_minStackSize == 1)
-    {
-        m_intPercentBuffer = (100*m_intBytesWaiting)/m_intMemoryLimit;
+    m_percentBuffer = (100*m_bytesWaiting) / m_memoryLimit;
+    m_percentBuffer = m_percentBuffer > 100 ? 100 : m_percentBuffer;

-        if (m_intPercentBuffer > 100) {
-            m_intPercentBuffer = 100;
-        }
-
-        emit onDataPackets(&m_intBytesWaiting, &m_intPercentBuffer, &m_intTotalReceived);
+    if (m_packetReceived % 10 == 1) {
+        emit fifoData(&m_bytesWaiting, &m_percentBuffer, &m_totalReceived);
     }

     return intSize;
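In the new pushData(), the buffer statistics are computed on every call: the fill percentage is the waiting byte count over the fixed memory limit, clamped to 100, and the fifoData signal is throttled to every 10th packet. The bounded-queue behaviour itself is unchanged and boils down to the following standalone sketch (the struct and its names are illustrative, not part of the code base):

    #include <QByteArray>
    #include <QQueue>

    // Standalone sketch of the bounded byte FIFO behind DATVideostream::pushData().
    struct BoundedByteFifo
    {
        QQueue<QByteArray> fifo;
        int bytesWaiting = 0;
        int memoryLimit = 2820000;   // same default as m_defaultMemoryLimit

        // Returns the clamped fill percentage after the push.
        int push(const char *data, int size)
        {
            bytesWaiting += size;

            // Over the limit: drop the oldest packet instead of applying back-pressure.
            if (bytesWaiting > memoryLimit && !fifo.isEmpty()) {
                bytesWaiting -= fifo.dequeue().size();
            }

            fifo.enqueue(QByteArray(data, size));

            int percent = (100 * bytesWaiting) / memoryLimit;
            return percent > 100 ? 100 : percent;
        }
    };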
@@ -117,7 +113,7 @@ bool DATVideostream::isSequential() const

 qint64 DATVideostream::bytesAvailable() const
 {
-    return m_intBytesAvailable;
+    return m_bytesAvailable;
 }

 void DATVideostream::close()
@@ -136,40 +132,38 @@ bool DATVideostream::open(OpenMode mode)

 qint64 DATVideostream::readData(char *data, qint64 len)
 {
-    QByteArray objCurrentArray;
-    int intEffectiveLen = 0;
-    int intExpectedLen = 0;
-    int intThreadLoop = 0;
-
-    intExpectedLen = (int) len;
+    QByteArray currentArray;
+    int effectiveLen = 0;
+    int expectedLen = (int) len;
+    int threadLoop = 0;

-    if (intExpectedLen <= 0) {
+    if (expectedLen <= 0) {
         return 0;
     }

-    if (m_objeventLoop.isRunning()) {
+    if (m_eventLoop.isRunning()) {
         return 0;
     }

-    m_objMutex.lock();
+    m_mutex.lock();

     //DATA in FIFO ? -> Waiting for DATA
-    if ((m_objFIFO.isEmpty()) || (m_objFIFO.count() < m_minStackSize))
+    if ((m_fifo.isEmpty()) || (m_fifo.count() < m_minStackSize))
     {
-        m_objMutex.unlock();
+        m_mutex.unlock();

         if (m_multiThreaded == true)
         {
-            intThreadLoop=0;
+            threadLoop = 0;

-            while ((m_objFIFO.isEmpty()) || (m_objFIFO.count() < m_minStackSize))
+            while ((m_fifo.isEmpty()) || (m_fifo.count() < m_minStackSize))
             {
                 QThread::msleep(5);
-                intThreadLoop++;
+                threadLoop++;

                 if (m_threadTimeout >= 0)
                 {
-                    if (intThreadLoop*5 > m_threadTimeout) {
+                    if (threadLoop*5 > m_threadTimeout) {
                         return -1;
                     }
                 }
@@ -177,42 +171,53 @@ qint64 DATVideostream::readData(char *data, qint64 len)
         }
         else
         {
-            m_objeventLoop.exec();
+            m_eventLoop.exec();
         }

-        m_objMutex.lock();
+        m_mutex.lock();
     }

     //Read DATA
-    intEffectiveLen = m_objFIFO.head().size();
+    effectiveLen = m_fifo.head().size();

-    if (intExpectedLen < intEffectiveLen)
+    if (expectedLen < effectiveLen)
     {
         //Partial Read
-        objCurrentArray = m_objFIFO.head();
-        memcpy((void *)data,objCurrentArray.constData(),intExpectedLen);
-        m_objFIFO.head().remove(0,intExpectedLen);
-        intEffectiveLen = intExpectedLen;
-        m_intBytesWaiting -= intExpectedLen;
+        currentArray = m_fifo.head();
+        std::copy(
+            currentArray.constData(),
+            currentArray.constData() + expectedLen,
+            data
+        );
+        m_fifo.head().remove(0, expectedLen);
+        effectiveLen = expectedLen;
+        m_bytesWaiting -= expectedLen;
     }
     else
     {
         //Complete Read
-        objCurrentArray = m_objFIFO.dequeue();
-        memcpy((void *)data,objCurrentArray.constData(),intEffectiveLen);
-        m_intBytesWaiting -= intEffectiveLen;
+        currentArray = m_fifo.dequeue();
+        std::copy(
+            currentArray.constData(),
+            currentArray.constData() + effectiveLen,
+            data
+        );
+        m_bytesWaiting -= effectiveLen;
     }

-    m_intPercentBuffer = (100*m_intBytesWaiting) / m_intMemoryLimit;
+    m_percentBuffer = (100*m_bytesWaiting) / m_memoryLimit;
+    m_percentBuffer = m_percentBuffer > 100 ? 100 : m_percentBuffer;

-    emit onDataPackets(&m_intBytesWaiting, &m_intPercentBuffer, &m_intTotalReceived);
+    if (m_packetReceived % 10 == 0) {
+        emit fifoData(&m_bytesWaiting, &m_percentBuffer, &m_totalReceived);
+    }

     //Next available DATA
-    m_intBytesAvailable = m_objFIFO.head().size();
+    m_bytesAvailable = m_fifo.head().size();

-    m_objMutex.unlock();
+    m_mutex.unlock();

-    return (qint64)intEffectiveLen;
+    return (qint64) effectiveLen;
 }

 qint64 DATVideostream::writeData(const char *data, qint64 len)
@@ -32,10 +32,7 @@ class DATVideostream : public QIODevice

 public:
     DATVideostream();
-    ~DATVideostream();
-
-    static const int m_defaultMemoryLimit = 2820000;
-    static const int m_minStackSize = 4;
+    virtual ~DATVideostream();

     int pushData(const char * chrData, int intSize);
     void resetTotalReceived();
@@ -48,31 +45,31 @@ public:
     virtual void close();
     virtual bool open(OpenMode mode);

-    QQueue<QByteArray> m_objFIFO;
+    static const int m_defaultMemoryLimit = 2820000;
+    static const int m_minStackSize = 4;

 signals:
-    void onDataAvailable();
-    void onDataPackets(int *intDataBytes, int *intPercentBuffer,qint64 *intTotalReceived);
+    void dataAvailable();
+    void fifoData(int *intDataBytes, int *intPercentBuffer, qint64 *intTotalReceived);

 protected:

     virtual qint64 readData(char *data, qint64 len);
     virtual qint64 writeData(const char *data, qint64 len);
     virtual qint64 readLineData(char *data, qint64 maxSize);

 private:
+    QQueue<QByteArray> m_fifo;
     bool m_multiThreaded;
     int m_threadTimeout;

-    QEventLoop m_objeventLoop;
-    QMutex m_objMutex;
-    int m_intMemoryLimit;
-    int m_intBytesAvailable;
-    int m_intBytesWaiting;
-    int m_intPercentBuffer;
-    qint64 m_intTotalReceived;
-    qint64 m_intPacketReceived;
+    QEventLoop m_eventLoop;
+    QMutex m_mutex;
+    int m_memoryLimit;
+    int m_bytesAvailable;
+    int m_bytesWaiting;
+    int m_percentBuffer;
+    qint64 m_totalReceived;
+    qint64 m_packetReceived;

 };