Mirror of https://github.com/f4exb/sdrangel.git, synced 2024-11-25 01:18:38 -05:00
DATV demod: process video and audio mute buttons coloring the same way. Make audio volume log
This commit is contained in:
parent 2d6730678d
commit af181914e1

debian/changelog (vendored): 3 changed lines
@@ -1,11 +1,12 @@
 sdrangel (4.5.1-1) unstable; urgency=medium
 
+  * DATV demod: implemented audio processing
   * Perseus: fixed cold start flawed init sequence
   * PlutoSDR: recognize networked devices
   * FreeDV: internal FreeDV library to facilitate packaging
   * Fixed some constness in qrtplib
 
- -- Edouard Griffiths, F4EXB <f4exb06@gmail.com>  Sun, 17 Mar 2019 20:14:18 +0100
+ -- Edouard Griffiths, F4EXB <f4exb06@gmail.com>  Fri, 22 Mar 2019 20:14:18 +0100
 
 sdrangel (4.5.0-1) unstable; urgency=medium
 
@@ -124,6 +124,33 @@ bool DATVDemod::audioActive()
     }
 }
 
+bool DATVDemod::videoActive()
+{
+    if (m_objRegisteredVideoRender) {
+        return m_objRegisteredVideoRender->getVideoStreamIndex() >= 0;
+    } else {
+        return false;
+    }
+}
+
+bool DATVDemod::audioDecodeOK()
+{
+    if (m_objRegisteredVideoRender) {
+        return m_objRegisteredVideoRender->getAudioDecodeOK();
+    } else {
+        return false;
+    }
+}
+
+bool DATVDemod::videoDecodeOK()
+{
+    if (m_objRegisteredVideoRender) {
+        return m_objRegisteredVideoRender->getVideoDecodeOK();
+    } else {
+        return false;
+    }
+}
+
 bool DATVDemod::PlayVideo(bool blnStartStop)
 {
 
@@ -147,6 +147,9 @@ public:
     bool SetTVScreen(TVScreen *objScreen);
     DATVideostream * SetVideoRender(DATVideoRender *objScreen);
     bool audioActive();
+    bool audioDecodeOK();
+    bool videoActive();
+    bool videoDecodeOK();
 
     bool PlayVideo(bool blnStartStop);
 
@@ -404,12 +404,32 @@ void DATVDemodGUI::tick()
         ui->lblRate->setText(QString("Speed: %1b/s").arg(formatBytes(m_intLastSpeed)));
     }
 
-    if (m_objDATVDemod->audioActive()) {
-        ui->audioMute->setStyleSheet("QToolButton { background-color : green; }");
-    } else {
+    if (m_objDATVDemod->audioActive())
+    {
+        if (m_objDATVDemod->audioDecodeOK()) {
+            ui->audioMute->setStyleSheet("QToolButton { background-color : green; }");
+        } else {
+            ui->audioMute->setStyleSheet("QToolButton { background-color : red; }");
+        }
+    }
+    else
+    {
         ui->audioMute->setStyleSheet("QToolButton { background:rgb(79,79,79); }");
     }
 
+    if (m_objDATVDemod->videoActive())
+    {
+        if (m_objDATVDemod->videoDecodeOK()) {
+            ui->videoMute->setStyleSheet("QToolButton { background-color : green; }");
+        } else {
+            ui->videoMute->setStyleSheet("QToolButton { background-color : red; }");
+        }
+    }
+    else
+    {
+        ui->videoMute->setStyleSheet("QToolButton { background:rgb(79,79,79); }");
+    }
+
     m_intPreviousDecodedData = m_intLastDecodedData;
 
     //Try to start video rendering
@@ -675,12 +695,6 @@ void DATVDemodGUI::on_StreamMetaDataChanged(DataTSMetaData2 *objMetaData)
     ui->chkVS->setChecked(objMetaData->OK_VideoStream);
     ui->chkDecoding->setChecked(objMetaData->OK_Decoding);
 
-    if (objMetaData->OK_Decoding) {
-        ui->videoMute->setStyleSheet("QToolButton { background-color : green; }");
-    } else {
-        ui->videoMute->setStyleSheet("QToolButton { background:rgb(79,79,79); }");
-    }
-
     if (objMetaData->Height > 0) {
        ui->screenTV_2->setFixedWidth((int)objMetaData->Width*(270.0f/(float)objMetaData->Height));
     }
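Both mute buttons now follow the same three-state colour convention in tick(): grey (rgb(79,79,79)) when no elementary stream is present, red when the stream is present but decoding fails, and green when decoding succeeds; the one-shot videoMute coloring in on_StreamMetaDataChanged() is dropped accordingly. A minimal sketch of that shared rule as a helper (setMuteButtonColor is hypothetical, not part of this commit):

    #include <QToolButton>

    // Hypothetical helper illustrating the common coloring rule applied to both
    // ui->audioMute and ui->videoMute in DATVDemodGUI::tick() (not in the commit).
    static void setMuteButtonColor(QToolButton *button, bool streamActive, bool decodeOK)
    {
        if (!streamActive) {
            button->setStyleSheet("QToolButton { background:rgb(79,79,79); }"); // no stream: grey
        } else if (decodeOK) {
            button->setStyleSheet("QToolButton { background-color : green; }"); // decoding OK: green
        } else {
            button->setStyleSheet("QToolButton { background-color : red; }");   // stream present but decode fails: red
        }
    }

    // Usage equivalent to the two branches added in tick():
    //   setMuteButtonColor(ui->audioMute, m_objDATVDemod->audioActive(), m_objDATVDemod->audioDecodeOK());
    //   setMuteButtonColor(ui->videoMute, m_objDATVDemod->videoActive(), m_objDATVDemod->videoDecodeOK());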
@@ -54,6 +54,9 @@ DATVideoRender::DATVideoRender(QWidget *parent) : TVScreen(true, parent)
     m_frame = nullptr;
     m_frameCount = -1;
 
+    m_audioDecodeOK = false;
+    m_videoDecodeOK = false;
+
     // for (int i = 0; i < m_audioFifoBufferSize; i++)
     // {
     //     m_audioFifoBuffer[2*i] = 8192.0f * sin((M_PI * i)/(m_audioFifoBufferSize/1000.0f));
@@ -487,95 +490,98 @@ bool DATVideoRender::RenderStream()
 
         gotFrame = 0;
 
-        if (new_decode(m_videoDecoderCtx, m_frame, &gotFrame, &packet) < 0)
+        if (new_decode(m_videoDecoderCtx, m_frame, &gotFrame, &packet) >= 0)
         {
-            qDebug() << "DATVideoProcess::RenderStream decoding video packet error";
-            m_running = false;
-            return false;
+            m_videoDecodeOK = true;
+
+            if (gotFrame)
+            {
+                //Rendering and RGB Converter setup
+                needRenderingSetup = (m_frameCount == 0);
+                needRenderingSetup |= (m_swsCtx == nullptr);
+
+                if ((m_currentRenderWidth != m_frame->width) || (m_currentRenderHeight != m_frame->height))
+                {
+                    needRenderingSetup = true;
+                }
+
+                if (needRenderingSetup)
+                {
+                    if (m_swsCtx != nullptr)
+                    {
+                        sws_freeContext(m_swsCtx);
+                        m_swsCtx = nullptr;
+                    }
+
+                    //Convertisseur YUV -> RGB
+                    m_swsCtx = sws_alloc_context();
+
+                    av_opt_set_int(m_swsCtx, "srcw", m_frame->width, 0);
+                    av_opt_set_int(m_swsCtx, "srch", m_frame->height, 0);
+                    av_opt_set_int(m_swsCtx, "src_format", m_frame->format, 0);
+
+                    av_opt_set_int(m_swsCtx, "dstw", m_frame->width, 0);
+                    av_opt_set_int(m_swsCtx, "dsth", m_frame->height, 0);
+                    av_opt_set_int(m_swsCtx, "dst_format", AV_PIX_FMT_RGB24, 0);
+
+                    av_opt_set_int(m_swsCtx, "sws_flag", SWS_FAST_BILINEAR /* SWS_BICUBIC*/, 0);
+
+                    if (sws_init_context(m_swsCtx, nullptr, nullptr) < 0)
+                    {
+                        qDebug() << "DATVideoProcess::RenderStream cannont init video data converter";
+                        m_swsCtx = nullptr;
+                        m_running = false;
+                        return false;
+                    }
+
+                    if ((m_currentRenderHeight > 0) && (m_currentRenderWidth > 0))
+                    {
+                        //av_freep(&m_pbytDecodedData[0]);
+                        //av_freep(&m_pintDecodedLineSize[0]);
+                    }
+
+                    if (av_image_alloc(m_pbytDecodedData, m_pintDecodedLineSize, m_frame->width, m_frame->height, AV_PIX_FMT_RGB24, 1) < 0)
+                    {
+                        qDebug() << "DATVideoProcess::RenderStream cannont init video image buffer";
+                        sws_freeContext(m_swsCtx);
+                        m_swsCtx = nullptr;
+                        m_running = false;
+                        return false;
+                    }
+
+                    //Rendering device setup
+
+                    resizeTVScreen(m_frame->width, m_frame->height);
+                    update();
+                    resetImage();
+
+                    m_currentRenderWidth = m_frame->width;
+                    m_currentRenderHeight = m_frame->height;
+
+                    MetaData.Width = m_frame->width;
+                    MetaData.Height = m_frame->height;
+                    MetaData.OK_Decoding = true;
+                    emit onMetaDataChanged(&MetaData);
+                }
+
+                //Frame rendering
+
+                if (sws_scale(m_swsCtx, m_frame->data, m_frame->linesize, 0, m_frame->height, m_pbytDecodedData, m_pintDecodedLineSize) < 0)
+                {
+                    qDebug() << "DATVideoProcess::RenderStream error converting video frame to RGB";
+                    m_running = false;
+                    return false;
+                }
+
+                renderImage(m_pbytDecodedData[0]);
+                av_frame_unref(m_frame);
+                m_frameCount++;
+            }
         }
-
-        if (gotFrame)
+        else
         {
-            //Rendering and RGB Converter setup
-            needRenderingSetup = (m_frameCount == 0);
-            needRenderingSetup |= (m_swsCtx == nullptr);
-
-            if ((m_currentRenderWidth != m_frame->width) || (m_currentRenderHeight != m_frame->height))
-            {
-                needRenderingSetup = true;
-            }
-
-            if (needRenderingSetup)
-            {
-                if (m_swsCtx != nullptr)
-                {
-                    sws_freeContext(m_swsCtx);
-                    m_swsCtx = nullptr;
-                }
-
-                //Convertisseur YUV -> RGB
-                m_swsCtx = sws_alloc_context();
-
-                av_opt_set_int(m_swsCtx, "srcw", m_frame->width, 0);
-                av_opt_set_int(m_swsCtx, "srch", m_frame->height, 0);
-                av_opt_set_int(m_swsCtx, "src_format", m_frame->format, 0);
-
-                av_opt_set_int(m_swsCtx, "dstw", m_frame->width, 0);
-                av_opt_set_int(m_swsCtx, "dsth", m_frame->height, 0);
-                av_opt_set_int(m_swsCtx, "dst_format", AV_PIX_FMT_RGB24, 0);
-
-                av_opt_set_int(m_swsCtx, "sws_flag", SWS_FAST_BILINEAR /* SWS_BICUBIC*/, 0);
-
-                if (sws_init_context(m_swsCtx, nullptr, nullptr) < 0)
-                {
-                    qDebug() << "DATVideoProcess::RenderStream cannont init video data converter";
-                    m_swsCtx = nullptr;
-                    m_running = false;
-                    return false;
-                }
-
-                if ((m_currentRenderHeight > 0) && (m_currentRenderWidth > 0))
-                {
-                    //av_freep(&m_pbytDecodedData[0]);
-                    //av_freep(&m_pintDecodedLineSize[0]);
-                }
-
-                if (av_image_alloc(m_pbytDecodedData, m_pintDecodedLineSize, m_frame->width, m_frame->height, AV_PIX_FMT_RGB24, 1) < 0)
-                {
-                    qDebug() << "DATVideoProcess::RenderStream cannont init video image buffer";
-                    sws_freeContext(m_swsCtx);
-                    m_swsCtx = nullptr;
-                    m_running = false;
-                    return false;
-                }
-
-                //Rendering device setup
-
-                resizeTVScreen(m_frame->width, m_frame->height);
-                update();
-                resetImage();
-
-                m_currentRenderWidth = m_frame->width;
-                m_currentRenderHeight = m_frame->height;
-
-                MetaData.Width = m_frame->width;
-                MetaData.Height = m_frame->height;
-                MetaData.OK_Decoding = true;
-                emit onMetaDataChanged(&MetaData);
-            }
-
-            //Frame rendering
-
-            if (sws_scale(m_swsCtx, m_frame->data, m_frame->linesize, 0, m_frame->height, m_pbytDecodedData, m_pintDecodedLineSize) < 0)
-            {
-                qDebug() << "DATVideoProcess::RenderStream error converting video frame to RGB";
-                m_running = false;
-                return false;
-            }
-
-            renderImage(m_pbytDecodedData[0]);
-            av_frame_unref(m_frame);
-            m_frameCount++;
+            m_videoDecodeOK = false;
+            // qDebug() << "DATVideoProcess::RenderStream video decode error";
         }
     }
 
     // Audio channel
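Instead of bailing out of RenderStream() when a video packet fails to decode, the return value of new_decode() now only drives the m_videoDecodeOK flag (and m_audioDecodeOK for the audio path below), which the GUI polls through DATVDemod::videoDecodeOK()/audioDecodeOK(). new_decode() itself is not part of this diff; the commented-out avcodec_decode_audio4() call suggests it is a local wrapper around FFmpeg's newer send/receive decoding API. A hedged sketch of that pattern under that assumption (decode_packet is illustrative, not the actual wrapper):

    extern "C" {
    #include <libavcodec/avcodec.h>
    }

    // Illustrative sketch of the send/receive decode pattern that a wrapper such as
    // new_decode() presumably follows (assumption; the real wrapper is not in this diff).
    static int decode_packet(AVCodecContext *ctx, AVFrame *frame, int *gotFrame, const AVPacket *pkt)
    {
        *gotFrame = 0;

        int ret = avcodec_send_packet(ctx, pkt);          // feed the compressed packet
        if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
            return ret;                                   // hard decode error -> caller clears its decode-OK flag
        }

        ret = avcodec_receive_frame(ctx, frame);          // try to pull a decoded frame
        if (ret == 0) {
            *gotFrame = 1;                                // a frame is ready for rendering / resampling
        } else if (ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
            return ret;                                   // hard decode error
        }

        return 0;                                         // success, with or without a frame
    }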
@@ -592,8 +598,9 @@ bool DATVideoRender::RenderStream()
         gotFrame = 0;
 
         if (new_decode(m_audioDecoderCtx, m_frame, &gotFrame, &packet) >= 0)
         //if (avcodec_decode_audio4(m_audioDecoderCtx, m_frame, &gotFrame, &packet) >= 0) // old style
         {
+            m_audioDecodeOK = true;
 
             if (gotFrame)
             {
                 int16_t *audioBuffer;
@@ -639,7 +646,8 @@ bool DATVideoRender::RenderStream()
             }
             else
             {
-                qDebug("DATVideoRender::RenderStream: audio decode error");
+                m_audioDecodeOK = false;
+                // qDebug("DATVideoRender::RenderStream: audio decode error");
             }
         }
 
@@ -648,14 +656,13 @@ bool DATVideoRender::RenderStream()
     //********** Rendering **********
 
     m_running = false;
 
     return true;
 }
 
 void DATVideoRender::setAudioVolume(int audioVolume)
 {
     int audioVolumeConstrained = audioVolume < 0 ? 0 : audioVolume > 100 ? 100 : audioVolume;
-    m_audioVolume = audioVolumeConstrained / 100.0f;
+    m_audioVolume = pow(10.0, audioVolumeConstrained*0.02 - 2.0); // .01 -> 1 log
 }
 
 void DATVideoRender::setResampler()
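The audio volume is no longer a linear 0..1 factor but a logarithmic one: with gain = 10^(v*0.02 - 2) for a slider value v in 0..100, v = 0 gives 0.01, v = 50 gives 0.1 and v = 100 gives 1.0, so the slider spans a 40 dB range in equal ratio steps (the ".01 -> 1 log" comment). A small self-contained check of the mapping:

    #include <cmath>
    #include <cstdio>

    int main()
    {
        // Same mapping as the new DATVideoRender::setAudioVolume()
        for (int v = 0; v <= 100; v += 25) {
            int c = v < 0 ? 0 : (v > 100 ? 100 : v);      // clamp as in the original code
            double gain = std::pow(10.0, c*0.02 - 2.0);   // 0 -> 0.01, 50 -> 0.1, 100 -> 1.0
            std::printf("slider %3d -> gain %.4f (%.1f dB)\n", v, gain, 20.0*std::log10(gain));
        }
        return 0;
    }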
@@ -103,6 +103,9 @@ class DATVideoRender : public TVScreen
     void setVideoMute(bool videoMute) { m_videoMute = videoMute; }
     void setAudioVolume(int audioVolume);
 
+    bool getAudioDecodeOK() const { return m_audioDecodeOK; }
+    bool getVideoDecodeOK() const { return m_videoDecodeOK; }
+
     struct DataTSMetaData2 MetaData;
 
 private:
@@ -138,6 +141,9 @@ class DATVideoRender : public TVScreen
     int m_currentRenderWidth;
     int m_currentRenderHeight;
 
+    bool m_audioDecodeOK;
+    bool m_videoDecodeOK;
+
     bool InitializeFFMPEG();
     bool PreprocessStream();
     void ResetMetaData();