ATV Demodulator: improvements of HSync and VSync algorithms. Fixes #459.

This commit is contained in:
Vort 2020-07-19 14:07:04 +03:00
parent 24ca0f0d56
commit 4afd5a7a2d
8 changed files with 222 additions and 184 deletions

View File

@@ -243,6 +243,7 @@ ATVDemodGUI::ATVDemodGUI(PluginAPI* objPluginAPI, DeviceUISet *deviceUISet, Base
{
ui->setupUi(this);
ui->screenTV->setColor(false);
ui->screenTV->setExtraColumns(true);
setAttribute(Qt::WA_DeleteOnClose, true);
connect(this, SIGNAL(widgetRolled(QWidget*,bool)), this, SLOT(onWidgetRolled(QWidget*,bool)));

View File

@@ -47,7 +47,7 @@ void ATVDemodSettings::resetToDefaults()
m_vSync = false;
m_invertVideo = false;
m_halfFrames = false; // m_fltRatioOfRowsToDisplay = 1.0
m_levelSynchroTop = 0.1f;
m_levelSynchroTop = 0.15f;
m_levelBlack = 0.3f;
m_lineTimeFactor = 0;
m_topTimeFactor = 25;

View File

@@ -36,11 +36,8 @@ ATVDemodSink::ATVDemodSink() :
m_scopeSink(nullptr),
m_registeredTVScreen(nullptr),
m_numberSamplesPerHTop(0),
m_imageIndex(0),
m_fieldIndex(0),
m_synchroSamples(0),
m_verticalSynchroDetected(false),
m_ampLineSum(0.0f),
m_ampLineAvg(0.0f),
m_effMin(20.0f),
m_effMax(-20.0f),
m_ampMin(-1.0f),
@@ -48,10 +45,13 @@ ATVDemodSink::ATVDemodSink() :
m_ampDelta(2.0f),
m_colIndex(0),
m_sampleIndex(0),
m_sampleIndexDetected(0),
m_hSyncShiftSum(0.0f),
m_hSyncShiftCount(0),
m_hSyncErrorCount(0),
m_amSampleIndex(0),
m_rowIndex(0),
m_lineIndex(0),
m_objAvgColIndex(3),
m_ampAverage(4800),
m_bfoPLL(200/1000000, 100/1000000, 0.01),
m_bfoFilter(200.0, 1000000.0, 0.9),
@@ -66,8 +66,6 @@ ATVDemodSink::ATVDemodSink() :
//m_intNumberSamplePerLine=0;
m_synchroSamples=0;
m_interleaved = true;
m_firstRowIndexEven = 0;
m_firstRowIndexOdd = 0;
m_DSBFilter = new fftfilt(m_settings.m_fftBandwidth / (float) m_tvSampleRate, 2*m_ssbFftLen); // arbitrary cutoff
m_DSBFilterBuffer = new Complex[m_ssbFftLen];
@@ -348,70 +346,80 @@ void ATVDemodSink::applyStandard(int sampleRate, const ATVDemodSettings& setting
{
case ATVDemodSettings::ATVStdHSkip:
// what is left in a line for the image
m_numberOfSyncLines = 0;
m_interleaved = false; // irrelevant
m_numberOfBlackLines = 0;
m_numberOfEqLines = 0; // not applicable
m_numberSamplesHSyncCrop = (int) (0.09f * lineDuration * sampleRate); // 9% of full line empirically
m_interleaved = false; // irrelevant
m_firstRowIndexEven = 0; // irrelevant
m_firstRowIndexOdd = 0; // irrelevant
break;
case ATVDemodSettings::ATVStdShort:
// what is left in a line for the image
m_numberOfSyncLines = 4;
m_numberOfBlackLines = 5;
m_numberOfEqLines = 0;
m_interleaved = false;
m_numberOfVSyncLines = 2;
m_numberOfBlackLines = 4;
m_firstVisibleLine = 3;
m_numberSamplesHSyncCrop = (int) (0.085f * lineDuration * sampleRate); // 8.5% of full line empirically
m_interleaved = false;
m_firstRowIndexEven = 0; // irrelevant
m_firstRowIndexOdd = 0; // irrelevant
break;
case ATVDemodSettings::ATVStdShortInterleaved:
// what is left in a line for the image
m_numberOfSyncLines = 4;
m_numberOfBlackLines = 7;
m_numberOfEqLines = 0;
m_interleaved = true;
m_numberOfVSyncLines = 2;
m_numberOfBlackLines = 5;
m_firstVisibleLine = 3;
m_numberSamplesHSyncCrop = (int) (0.085f * lineDuration * sampleRate); // 8.5% of full line empirically
m_interleaved = true;
m_firstRowIndexEven = 0;
m_firstRowIndexOdd = 1;
break;
case ATVDemodSettings::ATVStd405: // Follows loosely the 405 lines standard
// what is left in a line for the image
m_numberOfSyncLines = 24; // (15+7)*2 - 20
m_numberOfBlackLines = 30; // above + 6
m_numberOfEqLines = 3;
// what is left in a line for the image
m_interleaved = true;
m_numberOfVSyncLines = 3;
m_numberOfBlackLines = 30;
m_firstVisibleLine = 13;
m_numberSamplesHSyncCrop = (int) (0.085f * lineDuration * sampleRate); // 8.5% of full line empirically
m_interleaved = true;
m_firstRowIndexEven = 0;
m_firstRowIndexOdd = 3;
break;
case ATVDemodSettings::ATVStdPAL525: // Follows PAL-M standard
// what is left in a 64/1.008 us line for the image
m_numberOfSyncLines = 40; // (15+15)*2 - 20
m_numberOfBlackLines = 46; // above + 6
m_numberOfEqLines = 3;
m_interleaved = true;
m_numberOfVSyncLines = 4;
m_numberOfBlackLines = 45;
m_firstVisibleLine = 20;
m_numberSamplesHSyncCrop = (int) (0.085f * lineDuration * sampleRate); // 8.5% of full line empirically
m_interleaved = true;
m_firstRowIndexEven = 0;
m_firstRowIndexOdd = 3;
break;
case ATVDemodSettings::ATVStdPAL625: // Follows PAL-B/G/H standard
default:
// what is left in a 64 us line for the image
m_numberOfSyncLines = 44; // (15+17)*2 - 20
m_numberOfBlackLines = 50; // above + 6
m_numberOfEqLines = 3;
m_interleaved = true;
m_numberOfVSyncLines = 3;
m_numberOfBlackLines = 49;
m_firstVisibleLine = 23;
m_numberSamplesHSyncCrop = (int) (0.085f * lineDuration * sampleRate); // 8.5% of full line empirically
m_interleaved = true;
m_firstRowIndexEven = 0;
m_firstRowIndexOdd = 3;
}
// for now all standards apply this
m_numberSamplesPerLineSignals = (int) ((12.0f/64.0f) * lineDuration * sampleRate); // 12.0 = 2.6 + 4.7 + 4.7 : front porch + horizontal sync pulse + back porch
m_numberSamplesPerHSync = (int) ((9.6f/64.0f) * lineDuration * sampleRate); // 9.4 = 4.7 + 4.7 : horizontal sync pulse + back porch
m_numberSamplesPerHTopNom = (int) ((4.7f/64.0f) * lineDuration * sampleRate); // 4.7 : horizontal sync pulse (ultra black) nominal value
// Rec. ITU-R BT.1700
// Table 2. Details of line synchronizing signals
m_numberSamplesPerLineSignals = (int)(lineDuration * sampleRate * 12.0 / 64.0); // "a", Line-blanking interval
m_numberSamplesPerHSync = (int)(lineDuration * sampleRate * 10.5 / 64.0); // "b", Interval between time datum and back edge of line-blanking pulse
m_numberSamplesPerHTopNom = (int)(lineDuration * sampleRate * 4.7 / 64.0); // "d", Duration of synchronizing pulse
// Table 3. Details of field synchronizing signals
float hl = 32.0f; // half of the line
float p = 2.35f; // "p", Duration of equalizing pulse
float q = 27.3f; // "q", Duration of field-synchronizing pulse
// In the first half of the first line field index is detected
m_fieldDetectStartPos = (int)(lineDuration * sampleRate * p / 64.0);
m_fieldDetectEndPos = (int)(lineDuration * sampleRate * q / 64.0);
// In the second half of the first line vertical synchronization is detected
m_vSyncDetectStartPos = (int)(lineDuration * sampleRate * (p + hl) / 64.0);
m_vSyncDetectEndPos = (int)(lineDuration * sampleRate * (q + hl) / 64.0);
float fieldDetectPercent = 0.75f; // It is better not to detect field index than detect it wrong
float detectTotalLen = lineDuration * sampleRate * (q - p) / 64.0; // same for field index and vSync detection
m_fieldDetectThreshold1 = (int)(detectTotalLen * fieldDetectPercent);
m_fieldDetectThreshold2 = (int)(detectTotalLen * (1.0f - fieldDetectPercent));
float vSyncDetectPercent = 0.5f;
m_vSyncDetectThreshold = (int)(detectTotalLen * vSyncDetectPercent);
m_numberSamplesPerHTop = m_numberSamplesPerHTopNom * (settings.m_topTimeFactor / 100.0f); // adjust the value used in the system
}
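For orientation, here is a small standalone sketch of the BT.1700 timing arithmetic introduced above. The 625 lines x 25 fps and 1 MS/s figures are assumptions for illustration only, and the variable names merely mirror the members; this is not the plugin code.

#include <cstdio>

int main()
{
    const float sampleRate = 1000000.0f;            // assumed TV sample rate
    const float lineDuration = 1.0f / (625 * 25);   // 64 us line (PAL-625 at 25 fps)
    const float lineSamples = lineDuration * sampleRate;

    // Table 2: line-blanking "a", time datum to back edge "b", sync pulse "d"
    int samplesPerLineSignals = (int)(lineSamples * 12.0f / 64.0f);
    int samplesPerHSync       = (int)(lineSamples * 10.5f / 64.0f);
    int samplesPerHTopNom     = (int)(lineSamples *  4.7f / 64.0f);

    // Table 3: equalizing pulse "p" and field sync pulse "q" bound the windows
    // where the field index (first half line) and VSync (second half) are counted
    const float hl = 32.0f, p = 2.35f, q = 27.3f;
    int fieldDetectStartPos = (int)(lineSamples * p / 64.0f);
    int fieldDetectEndPos   = (int)(lineSamples * q / 64.0f);
    int vSyncDetectStartPos = (int)(lineSamples * (p + hl) / 64.0f);
    int vSyncDetectEndPos   = (int)(lineSamples * (q + hl) / 64.0f);

    // Thresholds: 75% / 25% of the window for the field decision, 50% for VSync
    float detectTotalLen = lineSamples * (q - p) / 64.0f;
    int fieldDetectThreshold1 = (int)(detectTotalLen * 0.75f);
    int fieldDetectThreshold2 = (int)(detectTotalLen * 0.25f);
    int vSyncDetectThreshold  = (int)(detectTotalLen * 0.5f);

    printf("signals=%d hSync=%d hTop=%d fieldWin=[%d..%d] vSyncWin=[%d..%d] thr=%d/%d/%d\n",
        samplesPerLineSignals, samplesPerHSync, samplesPerHTopNom,
        fieldDetectStartPos, fieldDetectEndPos,
        vSyncDetectStartPos, vSyncDetectEndPos,
        fieldDetectThreshold1, fieldDetectThreshold2, vSyncDetectThreshold);
    return 0;
}

The asymmetric 75%/25% thresholds implement the comment above: it is better to leave the field index undecided than to pick the wrong one.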
@@ -444,8 +452,9 @@ void ATVDemodSink::applyChannelSettings(int channelSampleRate, int channelFreque
if ((channelSampleRate != m_channelSampleRate) || force)
{
ATVDemodSettings::getBaseValues(channelSampleRate, m_settings.m_nbLines * m_settings.m_fps, m_tvSampleRate, m_samplesPerLineNom);
m_samplesPerLine = m_samplesPerLineNom + m_settings.m_lineTimeFactor;
unsigned int samplesPerLineNom;
ATVDemodSettings::getBaseValues(channelSampleRate, m_settings.m_nbLines * m_settings.m_fps, m_tvSampleRate, samplesPerLineNom);
m_samplesPerLine = samplesPerLineNom + m_settings.m_lineTimeFactor;
qDebug() << "ATVDemodSink::applyChannelSettings:"
<< " m_tvSampleRate: " << m_tvSampleRate
<< " m_fftBandwidth: " << m_settings.m_fftBandwidth
@@ -490,7 +499,7 @@ void ATVDemodSink::applyChannelSettings(int channelSampleRate, int channelFreque
);
}
m_imageIndex = 0;
m_fieldIndex = 0;
m_colIndex = 0;
m_rowIndex = 0;
@@ -535,9 +544,10 @@ void ATVDemodSink::applySettings(const ATVDemodSettings& settings, bool force)
|| (settings.m_atvStd != m_settings.m_atvStd)
|| (settings.m_lineTimeFactor != m_settings.m_lineTimeFactor) || force)
{
ATVDemodSettings::getBaseValues(m_channelSampleRate, settings.m_nbLines * settings.m_fps, m_tvSampleRate, m_samplesPerLineNom);
m_samplesPerLine = m_samplesPerLineNom + settings.m_lineTimeFactor;
m_ampAverage.resize(m_samplesPerLine * m_settings.m_nbLines * settings.m_fps * 2); // AGC average in two full images
unsigned int samplesPerLineNom;
ATVDemodSettings::getBaseValues(m_channelSampleRate, settings.m_nbLines * settings.m_fps, m_tvSampleRate, samplesPerLineNom);
m_samplesPerLine = samplesPerLineNom + settings.m_lineTimeFactor;
m_ampAverage.resize(m_samplesPerLine * m_settings.m_nbLines * 2); // AGC average in two full images
qDebug() << "ATVDemodSink::applySettings:"
<< " m_tvSampleRate: " << m_tvSampleRate
@@ -582,7 +592,7 @@ void ATVDemodSink::applySettings(const ATVDemodSettings& settings, bool force)
);
}
m_imageIndex = 0;
m_fieldIndex = 0;
m_colIndex = 0;
m_rowIndex = 0;
}
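To make the sizing above concrete, a minimal sketch with hypothetical numbers (in the plugin, the nominal samples-per-line value comes from ATVDemodSettings::getBaseValues): the adjusted line length is the nominal value plus the user line-time trim, and the AGC moving average is sized to exactly two full images (the previous sizing also multiplied by the frame rate).

#include <cstdio>

int main()
{
    // Hypothetical nominal value; in the plugin it is produced by getBaseValues()
    unsigned int samplesPerLineNom = 208;
    int lineTimeFactor = -2;  // user "line time" trim from the settings
    int nbLines = 625;        // PAL-625

    int samplesPerLine = (int)samplesPerLineNom + lineTimeFactor;
    int agcLength = samplesPerLine * nbLines * 2; // AGC average over two full images

    printf("samplesPerLine=%d agcLength=%d\n", samplesPerLine, agcLength);
    return 0;
}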

View File

@@ -97,8 +97,7 @@ private:
int m_channelSampleRate;
int m_channelFrequencyOffset;
int m_tvSampleRate;
unsigned int m_samplesPerLineNom; //!< number of samples per complete line (includes sync signals) - nominal value
unsigned int m_samplesPerLine; //!< number of samples per complete line (includes sync signals) - adjusted value
int m_samplesPerLine; //!< number of samples per complete line (includes sync signals) - adjusted value
ATVDemodSettings m_settings;
int m_videoTabIndex;
@@ -113,25 +112,31 @@ private:
//int m_intNumberSamplePerLine;
int m_numberSamplesPerHTopNom; //!< number of samples per horizontal synchronization pulse (pulse in ultra-black) - nominal value
int m_numberSamplesPerHTop; //!< number of samples per horizontal synchronization pulse (pulse in ultra-black) - adjusted value
int m_numberOfSyncLines; //!< this is the number of non displayable lines at the start of a frame. First displayable row comes next.
int m_numberOfBlackLines; //!< this is the total number of lines not part of the image and is used for vertical screen size
int m_numberOfEqLines; //!< number of equalizing lines both whole and partial
int m_firstVisibleLine;
int m_fieldDetectStartPos;
int m_fieldDetectEndPos;
int m_vSyncDetectStartPos;
int m_vSyncDetectEndPos;
int m_vSyncDetectThreshold;
int m_fieldDetectThreshold1;
int m_fieldDetectThreshold2;
int m_numberOfVSyncLines;
int m_numberSamplesPerLineSignals; //!< number of samples in the non image part of the line (signals = front porch + pulse + back porch)
int m_numberSamplesPerHSync; //!< number of samples per horizontal synchronization pattern (pulse + back porch)
int m_numberSamplesHSyncCrop; //!< number of samples to crop from start of horizontal synchronization
bool m_interleaved; //!< interleaved image
int m_firstRowIndexEven; //!< index of the first row of an even image
int m_firstRowIndexOdd; //!< index of the first row of an odd image
//*************** PROCESSING ***************
int m_imageIndex;
int m_fieldIndex;
int m_synchroSamples;
bool m_verticalSynchroDetected;
float m_ampLineSum;
float m_ampLineAvg;
int m_fieldDetectSampleCount;
int m_vSyncDetectSampleCount;
float m_effMin;
float m_effMax;
@@ -144,12 +149,18 @@ private:
float m_fltBufferQ[6];
int m_colIndex;
int m_sampleIndex;
int m_sampleIndex; // assumed (averaged) sample offset from the start of horizontal sync pulse
int m_sampleIndexDetected; // detected sample offset from the start of horizontal sync pulse
int m_amSampleIndex;
int m_rowIndex;
int m_lineIndex;
AvgExpInt m_objAvgColIndex;
float m_hSyncShiftSum;
int m_hSyncShiftCount;
int m_hSyncErrorCount;
float prevSample;
int m_avgColIndex;
SampleVector m_sampleBuffer;
@@ -212,7 +223,6 @@ private:
m_avgColIndex = m_colIndex;
m_registeredTVScreen->renderImage(0);
m_imageIndex++;
m_lineIndex = 0;
m_rowIndex = 0;
m_registeredTVScreen->selectRow(m_rowIndex);
@@ -247,136 +257,120 @@ private:
}
}
// Vertical sync is detected when the average signal level over a line falls below a threshold, i.e. the signal is held at ultra black for at least 3/4 of the line
// We use the sum of the line sample values directly
inline void processClassic(float& sample, int& sampleVideo)
{
// Filling pixel on the current line - reference index 0 at start of sync pulse
// remove only sync pulse empirically, +4 is to compensate shift due to hsync amortizing factor of 1/4
m_registeredTVScreen->setDataColor(m_colIndex - m_numberSamplesPerHSync + m_numberSamplesPerHTop, sampleVideo, sampleVideo, sampleVideo);
m_registeredTVScreen->setDataColor(m_sampleIndex - m_numberSamplesPerHSync, sampleVideo, sampleVideo, sampleVideo);
int synchroTimeSamples = (3 * m_samplesPerLine) / 4; // count 3/4 line globally
float synchroTrameLevel = 0.5f * ((float) synchroTimeSamples) * m_settings.m_levelBlack; // threshold is half the black value over 3/4th of line samples
// Horizontal Synchro detection
// Floor Detection 0
if (sample < m_settings.m_levelSynchroTop)
if (m_settings.m_hSync)
{
m_synchroSamples++;
}
// Black detection 0.3
else if (sample > m_settings.m_levelBlack) {
m_synchroSamples = 0;
}
// Horizontal Synchro detection
if ((prevSample >= m_settings.m_levelSynchroTop &&
sample < m_settings.m_levelSynchroTop) // horizontal synchro detected
&& (m_sampleIndexDetected > m_samplesPerLine - m_numberSamplesPerHTopNom))
{
double sampleIndexDetectedFrac =
(sample - m_settings.m_levelSynchroTop) / (prevSample - sample);
double hSyncShift = -m_sampleIndex - sampleIndexDetectedFrac;
if (hSyncShift > m_samplesPerLine / 2)
hSyncShift -= m_samplesPerLine;
else if (hSyncShift < -m_samplesPerLine / 2)
hSyncShift += m_samplesPerLine;
//Horizontal Synchro processing
if ((m_synchroSamples == m_numberSamplesPerHTop) // horizontal synchro detected
&& (m_sampleIndex > (m_samplesPerLine/2) + m_numberSamplesPerLineSignals))
{
m_avgColIndex = m_sampleIndex - m_colIndex;
//qDebug("HSync: %d %d %d", m_sampleIndex, m_colIndex, m_avgColIndex);
m_sampleIndex = 0;
if (fabs(hSyncShift) > m_numberSamplesPerHTopNom)
{
m_hSyncErrorCount++;
if (m_hSyncErrorCount >= 8)
{
// Fast sync: shift is too large, needs to be fixed ASAP
m_sampleIndex = 0;
m_hSyncShiftSum = 0.0;
m_hSyncShiftCount = 0;
m_hSyncErrorCount = 0;
}
}
else
{
m_hSyncShiftSum += hSyncShift;
m_hSyncShiftCount++;
m_hSyncErrorCount = 0;
}
m_sampleIndexDetected = 0;
}
else
m_sampleIndexDetected++;
}
else
{
m_sampleIndex++;
m_hSyncShiftSum = 0.0f;
m_hSyncShiftCount = 0;
}
m_sampleIndex++;
if (m_colIndex < m_samplesPerLine + m_numberSamplesPerHTop - 1) // increment until full line + next horizontal pulse
if (m_settings.m_vSync)
{
m_colIndex++;
if (m_colIndex < (m_samplesPerLine/2)) { // count on first half of line for better separation between black and ultra black
m_ampLineSum += sample;
}
if (m_sampleIndex > m_fieldDetectStartPos && m_sampleIndex < m_fieldDetectEndPos)
m_fieldDetectSampleCount += sample < m_settings.m_levelSynchroTop;
if (m_sampleIndex > m_vSyncDetectStartPos && m_sampleIndex < m_vSyncDetectEndPos)
m_vSyncDetectSampleCount += sample < m_settings.m_levelSynchroTop;
}
else // full line + next horizontal pulse => start of screen reference line
// end of line
if (m_sampleIndex >= m_samplesPerLine)
{
m_ampLineAvg = m_ampLineSum / ((m_samplesPerLine/2) - m_numberSamplesPerHTop); // avg length is half line less horizontal top
m_ampLineSum = 0.0f;
m_sampleIndex = 0;
m_lineIndex++;
// set column index to start a new line
if (m_settings.m_hSync && (m_lineIndex == 0)) {
m_colIndex = m_numberSamplesPerHTop + m_avgColIndex/4; // amortizing 1/4
} else {
m_colIndex = m_numberSamplesPerHTop;
}
// process line
m_lineIndex++; // new line
m_rowIndex += m_interleaved ? 2 : 1; // new row considering interleaving
if (m_rowIndex < m_settings.m_nbLines) {
m_registeredTVScreen->selectRow(m_rowIndex - m_numberOfSyncLines);
}
}
// Vertical sync and image rendering
if (m_lineIndex > m_numberOfBlackLines) {
m_verticalSynchroDetected = false; // reset trigger when detection zone is left
}
if ((m_settings.m_vSync) && (m_lineIndex <= m_settings.m_nbLines)) // VSync activated and lines in range
{
if (m_colIndex >= synchroTimeSamples)
if (m_lineIndex == m_numberOfVSyncLines + 3 && m_fieldIndex == 0)
{
if (m_ampLineAvg < 0.15f) // ultra black detection
float shiftSamples = 0.0f;
// Slow sync: slight adjustment is needed
if (m_hSyncShiftCount != 0 && m_settings.m_hSync)
{
if (!m_verticalSynchroDetected) // not yet
{
m_verticalSynchroDetected = true; // prevent repetition
// Odd frame or not interleaved
if ((m_imageIndex % 2 == 1) || !m_interleaved) {
m_registeredTVScreen->renderImage(0);
}
if (m_lineIndex > m_settings.m_nbLines/2) { // long frame done (even)
m_imageIndex = m_firstRowIndexOdd; // next is odd
} else {
m_imageIndex = m_firstRowIndexEven; // next is even
}
if (m_interleaved) {
m_rowIndex = m_imageIndex;
} else {
m_rowIndex = 0; // just the first line
}
// qDebug("ATVDemodSink::processClassic: m_lineIndex: %d m_imageIndex: %d m_rowIndex: %d",
// m_lineIndex, m_imageIndex, m_rowIndex);
m_registeredTVScreen->selectRow(m_rowIndex - m_numberOfSyncLines);
m_lineIndex = 0;
m_imageIndex++;
}
shiftSamples = m_hSyncShiftSum / m_hSyncShiftCount;
m_sampleIndex = shiftSamples;
m_hSyncShiftSum = 0.0f;
m_hSyncShiftCount = 0;
m_hSyncErrorCount = 0;
}
m_registeredTVScreen->renderImage(0,
shiftSamples < -1.0f ? -1.0f : (shiftSamples > 1.0f ? 1.0f : shiftSamples));
}
}
else // no VSync or lines out of range => set new image arbitrarily
{
if (m_lineIndex >= m_settings.m_nbLines/2)
if (m_vSyncDetectSampleCount > m_vSyncDetectThreshold &&
(m_lineIndex < 3 || m_lineIndex > m_numberOfVSyncLines + 1) && m_settings.m_vSync)
{
if (m_lineIndex > m_settings.m_nbLines/2) { // long frame done (even)
m_imageIndex = m_firstRowIndexOdd; // next is odd
} else {
m_imageIndex = m_firstRowIndexEven; // next is even
if (m_interleaved)
{
if (m_fieldDetectSampleCount > m_fieldDetectThreshold1)
m_fieldIndex = 0;
else if (m_fieldDetectSampleCount < m_fieldDetectThreshold2)
m_fieldIndex = 1;
}
if (m_interleaved) {
m_rowIndex = m_imageIndex;
} else {
m_rowIndex = 0; // just the first line
}
m_registeredTVScreen->selectRow(m_rowIndex - m_numberOfSyncLines);
m_lineIndex = 0;
m_imageIndex++;
m_lineIndex = 2;
}
m_fieldDetectSampleCount = 0;
m_vSyncDetectSampleCount = 0;
if (m_lineIndex > m_settings.m_nbLines / 2 + m_fieldIndex && m_interleaved)
{
m_lineIndex = 1;
m_fieldIndex = 1 - m_fieldIndex;
}
else if (m_lineIndex > m_settings.m_nbLines && !m_interleaved)
{
m_lineIndex = 1;
m_fieldIndex = 0;
}
int rowIndex = m_lineIndex - m_firstVisibleLine;
if (m_interleaved)
rowIndex = rowIndex * 2 - m_fieldIndex;
m_registeredTVScreen->selectRow(rowIndex);
}
prevSample = sample;
}
};
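As a summary of the new horizontal tracking loop above, here is a simplified, self-contained sketch (assumed numbers and a wrapped sample counter; not the plugin code). The falling edge through m_levelSynchroTop is located with sub-sample precision by linear interpolation; small shifts are averaged and applied once per frame ("slow sync", with the fractional remainder passed to renderImage as the sub-sample shift), while eight consecutive large errors force an immediate reset of the sample counter ("fast sync").

#include <cmath>
#include <cstdio>

struct HSyncTracker
{
    int samplesPerLine;
    int hTopNom;                 // nominal sync pulse width in samples
    float levelSynchroTop;       // sync threshold
    int sampleIndex = 0;         // assumed position within the line
    int sampleIndexDetected = 0; // samples since the last detected edge
    float prevSample = 1.0f;
    float shiftSum = 0.0f;
    int shiftCount = 0;
    int errorCount = 0;

    void process(float sample)
    {
        // Falling edge through the threshold, far enough from the last one
        if (prevSample >= levelSynchroTop && sample < levelSynchroTop
            && sampleIndexDetected > samplesPerLine - hTopNom)
        {
            // sub-sample position of the crossing between prevSample and sample
            float frac = (sample - levelSynchroTop) / (prevSample - sample);
            float shift = -sampleIndex - frac;
            if (shift > samplesPerLine / 2) shift -= samplesPerLine;
            else if (shift < -samplesPerLine / 2) shift += samplesPerLine;

            if (std::fabs(shift) > hTopNom)
            {
                if (++errorCount >= 8) // fast sync: jump immediately
                { sampleIndex = 0; shiftSum = 0.0f; shiftCount = 0; errorCount = 0; }
            }
            else // slow sync: accumulate, apply the average later
            { shiftSum += shift; shiftCount++; errorCount = 0; }
            sampleIndexDetected = 0;
        }
        else { sampleIndexDetected++; }

        prevSample = sample;
        if (++sampleIndex >= samplesPerLine) sampleIndex = 0;
    }

    float takeAverageShift() // applied once per frame in the real sink
    {
        float avg = shiftCount ? shiftSum / shiftCount : 0.0f;
        shiftSum = 0.0f; shiftCount = 0; errorCount = 0;
        return avg;
    }
};

int main()
{
    HSyncTracker t{ 208, 15, 0.15f };
    for (int i = 0; i < 208 * 10; i++)
        t.process((i % 208) < 15 ? 0.05f : 0.5f); // synthetic line with a sync pulse
    printf("average shift: %.2f samples\n", t.takeAverageShift());
    return 0;
}

Splitting the correction this way keeps the picture steady against noise-induced false edges while still recovering quickly after a genuine loss of synchronization.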

View File

@@ -39,11 +39,13 @@ GLShaderTVArray::GLShaderTVArray(bool blnColor) : m_blnColor(blnColor)
{
m_blnAlphaBlend = false;
m_blnAlphaReset = false;
m_blnExtraColumns = false;
m_objProgram = 0;
m_objImage = 0;
m_objTexture = 0;
m_intCols = 0;
m_intRows = 0;
m_subsampleShift = 0.0f;
m_blnInitialized = false;
m_objCurrentRow = 0;
@@ -110,7 +112,8 @@ void GLShaderTVArray::InitializeGL(int intCols, int intRows)
}
//Image container
m_objImage = new QImage(intCols, intRows, QImage::Format_RGBA8888);
int cols = intCols + (m_blnExtraColumns ? 2 : 0);
m_objImage = new QImage(cols, intRows, QImage::Format_RGBA8888);
m_objImage->fill(QColor(0, 0, 0));
m_objTexture = new QOpenGLTexture(*m_objImage);
@@ -155,11 +158,23 @@ void GLShaderTVArray::RenderPixels(unsigned char *chrData)
QMatrix4x4 objQMatrix;
float rectHalfWidth = 1.0f;
float sampleSize = 2.0f / m_intCols;
if (m_blnExtraColumns)
rectHalfWidth += sampleSize;
float xShift = sampleSize * m_subsampleShift;
GLfloat arrVertices[] =
// 2 3
// 1 4
//1 2 3 3 4 1
{ -1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f };
{
-rectHalfWidth + xShift, -1.0f, // 1
-rectHalfWidth + xShift, 1.0f, // 2
rectHalfWidth + xShift, 1.0f, // 3
rectHalfWidth + xShift, 1.0f, // 3
rectHalfWidth + xShift, -1.0f, // 4
-rectHalfWidth + xShift, -1.0f // 1
};
GLfloat arrTextureCoords[] =
// 1 4
@@ -227,8 +242,9 @@ void GLShaderTVArray::RenderPixels(unsigned char *chrData)
m_objTexture->bind();
ptrF->glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_intCols, m_intRows, GL_RGBA,
GL_UNSIGNED_BYTE, m_objImage->bits());
ptrF->glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0,
m_intCols + (m_blnExtraColumns ? 2 : 0), m_intRows, GL_RGBA,
GL_UNSIGNED_BYTE, m_objImage->bits());
ptrF->glEnableVertexAttribArray(0); // vertex
ptrF->glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, arrVertices);
@@ -318,9 +334,11 @@ bool GLShaderTVArray::SetDataColor(int intCol, QRgb objColor)
if (m_blnInitialized)
{
if ((intCol < m_intCols) && (intCol >= 0) && (m_objCurrentRow != 0))
if ((intCol < m_intCols + m_blnExtraColumns) &&
(intCol >= -m_blnExtraColumns) &&
(m_objCurrentRow != 0))
{
m_objCurrentRow[intCol] = objColor;
m_objCurrentRow[intCol + m_blnExtraColumns] = objColor;
blnRslt = true;
}
}
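The quad arithmetic above can be checked in isolation with a tiny sketch (assumed values): one sample maps to 2/cols in clip space, the two extra texture columns widen the quad by one sample on each side, and the sub-sample shift moves the whole quad by a fraction of a sample without any resampling on the CPU.

#include <cstdio>

int main()
{
    int cols = 192;                 // visible image columns
    bool extraColumns = true;       // two spare columns in the texture
    float subsampleShift = -0.37f;  // clamped to [-1, 1] by the caller

    float sampleSize = 2.0f / cols;              // one sample in clip space (-1..1)
    float rectHalfWidth = 1.0f + (extraColumns ? sampleSize : 0.0f);
    float xShift = sampleSize * subsampleShift;

    printf("quad X range: [%.4f, %.4f]\n",
        -rectHalfWidth + xShift, rectHalfWidth + xShift);
    return 0;
}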

View File

@@ -43,6 +43,8 @@ public:
GLShaderTVArray(bool blnColor);
~GLShaderTVArray();
void setExtraColumns(bool blnExtraColumns) { m_blnExtraColumns = blnExtraColumns; }
void setSubsampleShift(float subsampleShift) { m_subsampleShift = subsampleShift; }
void setColor(bool blnColor) { m_blnColor = blnColor; }
void setAlphaBlend(bool blnAlphaBlend) { m_blnAlphaBlend = blnAlphaBlend; }
void setAlphaReset() { m_blnAlphaReset = true; }
@@ -71,6 +73,7 @@ protected:
int m_intCols;
int m_intRows;
float m_subsampleShift;
QRgb * m_objCurrentRow;
@@ -78,6 +81,7 @@ protected:
bool m_blnColor;
bool m_blnAlphaBlend;
bool m_blnAlphaReset;
bool m_blnExtraColumns;
};
#endif /* INCLUDE_GUI_GLTVSHADERARRAY_H_ */

View File

@@ -28,6 +28,7 @@
#include <algorithm>
#include <QDebug>
// Note: When this object is created, QWidget* is converted to bool
TVScreen::TVScreen(bool blnColor, QWidget* parent) :
QGLWidget(parent), m_objMutex(QMutex::NonRecursive), m_objGLShaderArray(blnColor)
{
@@ -36,6 +37,7 @@ TVScreen::TVScreen(bool blnColor, QWidget* parent) :
m_objTimer.start(40); // capped at 25 FPS
m_chrLastData = NULL;
m_subsampleShift = 0.0;
m_blnConfigChanged = false;
m_blnDataChanged = false;
m_blnGLContextInitialized = false;
@@ -57,6 +59,11 @@ void TVScreen::setColor(bool blnColor)
m_objGLShaderArray.setColor(blnColor);
}
void TVScreen::setExtraColumns(bool blnExtraColumns)
{
m_objGLShaderArray.setExtraColumns(blnExtraColumns);
}
QRgb* TVScreen::getRowBuffer(int intRow)
{
if (!m_blnGLContextInitialized)
@@ -67,9 +74,10 @@ QRgb* TVScreen::getRowBuffer(int intRow)
return m_objGLShaderArray.GetRowBuffer(intRow);
}
void TVScreen::renderImage(unsigned char * objData)
void TVScreen::renderImage(unsigned char * objData, float subsampleShift)
{
m_chrLastData = objData;
m_subsampleShift = subsampleShift;
m_blnDataChanged = true;
}
@@ -176,6 +184,7 @@ void TVScreen::paintGL()
m_intAskedRows = 0;
}
m_objGLShaderArray.setSubsampleShift(m_subsampleShift);
m_objGLShaderArray.RenderPixels(m_chrLastData);
m_objMutex.unlock();

View File

@@ -46,9 +46,10 @@ public:
virtual ~TVScreen();
void setColor(bool blnColor);
void setExtraColumns(bool blnExtraColumns);
void resizeTVScreen(int intCols, int intRows);
void getSize(int& intCols, int& intRows) const;
void renderImage(unsigned char * objData);
void renderImage(unsigned char * objData, float subsampleShift = 0.0);
QRgb* getRowBuffer(int intRow);
void resetImage();
void resetImage(int alpha);
@@ -73,6 +74,7 @@ private:
bool m_blnGLContextInitialized;
int m_intAskedCols;
int m_intAskedRows;
float m_subsampleShift;
// state