/* This file is part of the KDE project.

Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).

This library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 2.1 or 3 of the License.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License
along with this library. If not, see <http://www.gnu.org/licenses/>.
*/

#include "videorenderer_soft.h"
#ifndef QT_NO_PHONON_VIDEO

#include "qmeminputpin.h"
#include "qbasefilter.h"

#include <QtGui/QPainter>
#include <QtGui/QPaintEngine>
#include <QtGui/QApplication>
#include <QtCore/QTime>

#define _USE_MATH_DEFINES //for pi
#include <QtCore/qmath.h> //for sin and cos

/* M_PI is a #define that may or may not be handled in <cmath> */
#ifndef M_PI
#define M_PI 3.14159265358979323846264338327950288419717
#endif

#include <dvdmedia.h> //for VIDEOINFOHEADER2
//this will make a display every second of how many frames were processed and actually displayed

#ifndef QT_NO_OPENGL
#include <GL/gl.h>

#ifndef GL_FRAGMENT_PROGRAM_ARB
#define GL_FRAGMENT_PROGRAM_ARB 0x8804
#define GL_PROGRAM_FORMAT_ASCII_ARB 0x8875
#endif

// support old OpenGL installations (1.2)
// assume that if TEXTURE0 isn't defined, none are
#ifndef GL_TEXTURE0
# define GL_TEXTURE0 0x84C0
# define GL_TEXTURE1 0x84C1
# define GL_TEXTURE2 0x84C2
#endif
// arbfp1 fragment program for converting yuv (YV12) to rgb
static const char yv12ToRgb[] =
    "!!ARBfp1.0"
    "PARAM c[5] = { program.local[0..1],"
    "{ 1.164, 0, 1.596, 0.5 },"
    "{ 0.0625, 1.164, -0.391, -0.81300002 },"
    "{ 1.164, 2.0179999, 0 } };"
    "TEMP R0;"
    "TEX R0.x, fragment.texcoord[0], texture[1], 2D;"
    "ADD R0.y, R0.x, -c[2].w;"
    "TEX R0.x, fragment.texcoord[0], texture[2], 2D;"
    "ADD R0.x, R0, -c[2].w;"
    "MUL R0.z, R0.y, c[0].w;"
    "MAD R0.z, R0.x, c[0], R0;"
    "MUL R0.w, R0.x, c[0];"
    "MUL R0.z, R0, c[0].y;"
    "TEX R0.x, fragment.texcoord[0], texture[0], 2D;"
    "MAD R0.y, R0, c[0].z, R0.w;"
    "ADD R0.x, R0, -c[3];"
    "MUL R0.z, R0, c[1].x;"
    "MAD R0.x, R0, c[0].y, c[0];"
    "MUL R0.y, R0, c[1].x;"
    "DP3 result.color.x, R0, c[2];"
    "DP3 result.color.y, R0, c[3].yzww;"
    "DP3 result.color.z, R0, c[4];"
    "MOV result.color.w, c[1].y;"
    "END";
// arbfp1 fragment program for converting yuv (YUY2) to rgb
static const char yuy2ToRgb[] =
    "!!ARBfp1.0"
    "PARAM c[5] = { program.local[0..1],"
    "{ 0.5, 2, 1, 0.0625 },"
    "{ 1.164, 0, 1.596, 2.0179999 },"
    "{ 1.164, -0.391, -0.81300002 } };"
    "TEMP R0;"
    "TEMP R1;"
    "TEMP R2;"
    "FLR R1.z, fragment.texcoord[0].x;"
    "ADD R0.x, R1.z, c[2];"
    "ADD R1.z, fragment.texcoord[0].x, -R1;"
    "MUL R1.x, fragment.texcoord[0].z, R0;"
    "MOV R1.y, fragment.texcoord[0];"
    "TEX R0, R1, texture[0], 2D;"
    "ADD R1.y, R0.z, -R0.x;"
    "MUL R2.x, R1.z, R1.y;"
    "MAD R0.x, R2, c[2].y, R0;"
    "MOV R1.y, fragment.texcoord[0];"
    "ADD R1.x, fragment.texcoord[0].z, R1;"
    "TEX R1.xyw, R1, texture[0], 2D;"
    "ADD R2.x, R1, -R0.z;"
    "MAD R1.x, R1.z, c[2].y, -c[2].z;"
    "MAD R0.z, R1.x, R2.x, R0;"
    "ADD R1.xy, R1.ywzw, -R0.ywzw;"
    "ADD R0.z, R0, -R0.x;"
    "SGE R1.w, R1.z, c[2].x;"
    "MAD R0.x, R1.w, R0.z, R0;"
    "MAD R0.yz, R1.z, R1.xxyw, R0.xyww;"
    "ADD R0.xyz, R0, -c[2].wxxw;"
    "MUL R0.w, R0.y, c[0];"
    "MAD R0.w, R0.z, c[0].z, R0;"
    "MUL R0.z, R0, c[0].w;"
    "MAD R0.y, R0, c[0].z, R0.z;"
    "MUL R0.w, R0, c[0].y;"
    "MUL R0.y, R0, c[0];"
    "MUL R0.z, R0.w, c[1].x;"
    "MAD R0.x, R0, c[0].y, c[0];"
    "MUL R0.y, R0, c[1].x;"
    "DP3 result.color.x, R0, c[3];"
    "DP3 result.color.y, R0, c[4];"
    "DP3 result.color.z, R0, c[3].xwyw;"
    "MOV result.color.w, c[1].y;"
    "END";
#endif //QT_NO_OPENGL

#define CLIP_SHIFT_RIGHT_8(c) ((c) < 0 ? 0 : (c) > 0xffff ? 0xff : (c) >> 8)
#define CLIP_SHIFT_LEFT_8(c) ((c) < 0 ? 0 : (c) > 0xffff ? 0xff0000 : ( ((c) << 8) & 0xff0000) )
#define CLIP_NO_SHIFT(c) ((c) < 0 ? 0 : (c) > 0xffff ? 0xff00 : ((c) & 0xff00) )
#define CLIPPED_PIXEL(base, r, g, b) (0xff000000u | CLIP_SHIFT_LEFT_8(base+r) | CLIP_NO_SHIFT(base+g) | CLIP_SHIFT_RIGHT_8(base+b))
#define CLIPPED_PIXEL2(r, g, b) (0xff000000u | CLIP_SHIFT_LEFT_8(r) | CLIP_NO_SHIFT(g) | CLIP_SHIFT_RIGHT_8(b))
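// The converters below produce each color component in 8.8 fixed point
// (0..0xffff maps to 0..255); the CLIP_* macros clamp one component and move
// it into its byte lane of an 0xAARRGGBB pixel. A worked example with
// hypothetical values: for base+r = 0x12345 (overflow), base+g = 0x8040 and
// base+b = 0x20, CLIPPED_PIXEL yields
// 0xff000000 | 0xff0000 | 0x8000 | 0x00 = 0xffff8000.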
static const QVector<AM_MEDIA_TYPE> videoMediaTypes()
{
    AM_MEDIA_TYPE mt = { MEDIATYPE_Video, MEDIASUBTYPE_YV12, 0, 0, 0, GUID_NULL, 0, 0, 0 };
    QVector<AM_MEDIA_TYPE> ret;

    //we add all the subtypes we support
    ret << mt;
    mt.subtype = MEDIASUBTYPE_YUY2;
    ret << mt;
    mt.subtype = MEDIASUBTYPE_RGB32;
    ret << mt;
    return ret;
}
class VideoRendererSoftFilter : public QBaseFilter
{
public:
    VideoRendererSoftFilter(VideoRendererSoft *renderer);
    ~VideoRendererSoftFilter();

    QSize videoSize() const;

#ifndef QT_NO_OPENGL
    void freeGLResources()
    {
        //let's reinitialize those values
        m_usingOpenGL = false;
        //to be sure we recreate it
        if (m_textureUploaded) {
            glDeleteTextures(3, m_texture);
            m_textureUploaded = false;
        }
        m_checkedPrograms = false;
    }
#endif // QT_NO_OPENGL
    void freeResources()
    {
        QMutexLocker locker(&m_mutex);
        m_sampleBuffer = ComPointer<IMediaSample>();
#ifndef QT_NO_OPENGL
        freeGLResources();
#endif // QT_NO_OPENGL
        m_textureUploaded = false;
    }

    void endOfStream()
    {
        //received from the input pin
        ::SetEvent(m_receiveCanWait); //unblocks the flow

        //we send the message to the graph
        ComPointer<IMediaEventSink> sink(graph(), IID_IMediaEventSink);
        if (sink) {
            sink->Notify(EC_COMPLETE, S_OK,
                reinterpret_cast<LONG_PTR>(static_cast<IBaseFilter*>(this)));
        }
    }
    void freeMediaSample()
    {
        QMutexLocker locker(&m_mutex);
        m_sampleBuffer = ComPointer<IMediaSample>();
    }

    void beginFlush()
    {
        ::SetEvent(m_receiveCanWait); //unblocks the flow
    }

    void endFlush()
    {
        if (m_inputPin->connected() == 0) {
            ::SetEvent(m_receiveCanWait); //unblock the flow in receive
        } else {
            ::ResetEvent(m_receiveCanWait); //block the flow again
        }
    }
    STDMETHODIMP Stop()
    {
        HRESULT hr = QBaseFilter::Stop();
        ::SetEvent(m_receiveCanWait); //unblock the flow in receive
        return hr;
    }

    STDMETHODIMP Pause()
    {
        HRESULT hr = QBaseFilter::Pause();
        if (m_inputPin->connected() == 0) {
            ::SetEvent(m_receiveCanWait); //unblock the flow in receive
        } else {
            ::ResetEvent(m_receiveCanWait); //this will block
        }
        return hr;
    }

    STDMETHODIMP Run(REFERENCE_TIME start)
    {
        HRESULT hr = QBaseFilter::Run(start);
        m_start = start;
        if (m_inputPin->connected() == 0) {
            ::SetEvent(m_receiveCanWait); //unblocks the flow (this event will block then again)
        }

        //reset the frame counters
        fpsTime.restart();
        nbFramesProcessed = 0;
        nbFramesDisplayed = 0;

        return hr;
    }
    HRESULT processSample(IMediaSample *sample);
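    //normalization of the mixer settings: Phonon passes each value nominally
    //in [-1, 1]; after the call below m_brightness is in [-128, 128],
    //m_contrast and m_saturation in [0, 2] (1 being neutral) and m_hue in
    //[-pi, pi] radians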
    void applyMixerSettings(qreal brightness, qreal contrast, qreal hue, qreal saturation)
    {
        //let's normalize the values
        m_brightness = brightness * 128;
        m_contrast = contrast + 1.;
        m_hue = hue * M_PI;
        m_saturation = saturation + 1.;
    }
    QImage currentImage() const
    {
        QMutexLocker locker(&m_mutex);
        return m_currentImage;
    }

    void setCurrentImage(const QImage &image)
    {
        QMutexLocker locker(&m_mutex);
        m_currentImage = image;
    }

    //the following function is called from the GUI thread
    void repaintCurrentFrame(QPainter &painter, const QRect &r);
private:
    static void convertYV12toRGB(const uchar *data, const QSize &s, QImage &dest,
                                 qreal brightness, qreal contrast, qreal hue, qreal saturation);
    static void convertYUY2toRGB(const uchar *data, const QSize &s, QImage &dest,
                                 qreal brightness, qreal contrast, qreal hue, qreal saturation);
    static void normalizeRGB(const uchar *data, const QSize &s, QImage &destImage);
    QPin *const m_inputPin;
    ComPointer<IMediaSample> m_sampleBuffer;
    QImage m_currentImage;
    QSize m_size;
    VideoRendererSoft *m_renderer;
    mutable QMutex m_mutex;
    REFERENCE_TIME m_start;
    HANDLE m_renderEvent, m_receiveCanWait; // Signals sample to render
    qreal m_brightness, m_contrast, m_hue, m_saturation;
    bool m_usingOpenGL;
    bool m_textureUploaded;

    //for the FPS counter
    QTime fpsTime;
    int nbFramesProcessed;
    int nbFramesDisplayed;
#ifndef QT_NO_OPENGL
    enum Program { YV12toRGB = 0, YUY2toRGB = 1, ProgramCount = 2 };

    void updateTexture();
    bool checkGLPrograms();

    // ARB_fragment_program
    typedef void (APIENTRY *_glProgramStringARB) (GLenum, GLenum, GLsizei, const GLvoid *);
    typedef void (APIENTRY *_glBindProgramARB) (GLenum, GLuint);
    typedef void (APIENTRY *_glDeleteProgramsARB) (GLsizei, const GLuint *);
    typedef void (APIENTRY *_glGenProgramsARB) (GLsizei, GLuint *);
    typedef void (APIENTRY *_glProgramLocalParameter4fARB) (GLenum, GLuint, GLfloat, GLfloat, GLfloat, GLfloat);
    typedef void (APIENTRY *_glActiveTexture) (GLenum);

    _glProgramStringARB glProgramStringARB;
    _glBindProgramARB glBindProgramARB;
    _glDeleteProgramsARB glDeleteProgramsARB;
    _glGenProgramsARB glGenProgramsARB;
    _glProgramLocalParameter4fARB glProgramLocalParameter4fARB;
    _glActiveTexture glActiveTexture;

    GLuint m_texture[3];
    GLuint m_program[2];
    bool m_checkedPrograms;
#endif // QT_NO_OPENGL
};
class VideoRendererSoftPin : public QMemInputPin
{
public:
    VideoRendererSoftPin(VideoRendererSoftFilter *parent) :
        QMemInputPin(parent, videoMediaTypes(), false /*no transformation of the samples*/),
        m_renderer(parent)
    {
    }

    STDMETHODIMP EndOfStream()
    {
        m_renderer->endOfStream();
        return QMemInputPin::EndOfStream();
    }

    STDMETHODIMP ReceiveCanBlock()
    {
        //yes, Receive can block, waiting for the rendering time
        return S_OK;
    }

    STDMETHODIMP BeginFlush()
    {
        m_renderer->beginFlush();
        return QMemInputPin::BeginFlush();
    }

    STDMETHODIMP EndFlush()
    {
        m_renderer->endFlush();
        return QMemInputPin::EndFlush();
    }
    STDMETHODIMP GetAllocatorRequirements(ALLOCATOR_PROPERTIES *prop)
    {
        if (!prop) {
            return E_POINTER;
        }
        //we need at least 2 buffers (see NotifyAllocator below)
        prop->cBuffers = 2;
        return S_OK;
    }

    STDMETHODIMP NotifyAllocator(IMemAllocator *alloc, BOOL readonly)
    {
        ALLOCATOR_PROPERTIES prop;
        HRESULT hr = alloc->GetProperties(&prop);
        if (SUCCEEDED(hr) && prop.cBuffers == 1) {
            //we ask to get 2 buffers so that we don't block the flow
            //when we addref the mediasample
            prop.cBuffers = 2;

            ALLOCATOR_PROPERTIES dummy;
            alloc->SetProperties(&prop, &dummy);
        }

        return QMemInputPin::NotifyAllocator(alloc, readonly);
    }

private:
    VideoRendererSoftFilter * const m_renderer;
};
VideoRendererSoftFilter::VideoRendererSoftFilter(VideoRendererSoft *renderer) :
    QBaseFilter(CLSID_NULL), m_inputPin(new VideoRendererSoftPin(this)),
    m_renderer(renderer), m_start(0),
    m_usingOpenGL(false), m_checkedPrograms(false), m_textureUploaded(false)
{
    m_renderEvent = ::CreateEvent(0, 0, 0, 0);
    m_receiveCanWait = ::CreateEvent(0, 0, 0, 0);
    //simply initialize the mixer settings with default values
    applyMixerSettings(0., 0., 0., 0.);
}
VideoRendererSoftFilter::~VideoRendererSoftFilter()
{
    ::CloseHandle(m_renderEvent);
    ::CloseHandle(m_receiveCanWait);
    //this frees up resources
    freeResources();
}
QSize VideoRendererSoftFilter::videoSize() const
{
    QSize ret;
    const AM_MEDIA_TYPE &mt = m_inputPin->connectedType();
    if (mt.pbFormat && mt.cbFormat) {
        if (mt.formattype == FORMAT_VideoInfo) {
            const VIDEOINFOHEADER *header = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
            const int h = qAbs(header->bmiHeader.biHeight),
                      w = qAbs(header->bmiHeader.biWidth);
            ret = QSize(w, h);
        } else if (mt.formattype == FORMAT_VideoInfo2) {
            const VIDEOINFOHEADER2 *header = reinterpret_cast<VIDEOINFOHEADER2*>(mt.pbFormat);
            const int h = qAbs(header->bmiHeader.biHeight),
                      w = qAbs(header->bmiHeader.biWidth);
            ret = QSize(w, h);
        }
    }
    return ret;
}
HRESULT VideoRendererSoftFilter::processSample(IMediaSample *sample)
{
    //update the FPS counter
    if (fpsTime.elapsed() > 1000) {
        qDebug("FPS_COUNTER: processed=%d, displayed=%d (%d)", nbFramesProcessed, nbFramesDisplayed, fpsTime.elapsed());
        nbFramesProcessed = 0;
        nbFramesDisplayed = 0;
        fpsTime.restart();
    }
    nbFramesProcessed++;

    AM_MEDIA_TYPE *type = 0;
    if (sample->GetMediaType(&type) == S_OK) {
        //let's update the media type of the input pin
        m_inputPin->setConnectedType(*type);
    }

    const AM_MEDIA_TYPE &mt = m_inputPin->connectedType();

    if (mt.pbFormat == 0 || mt.cbFormat == 0) {
        return VFW_E_INVALIDMEDIATYPE;
    }

    m_size = videoSize();
    if (!m_size.isValid()) {
        return VFW_E_INVALIDMEDIATYPE;
    }
    REFERENCE_TIME start = 0, stop = 0;
    HRESULT hr = sample->GetTime(&start, &stop);

    ComPointer<IReferenceClock> clock;
    GetSyncSource(clock.pparam());

    const bool playing = SUCCEEDED(hr) && state() == State_Running && clock;

    if (playing) {
        REFERENCE_TIME current;
        clock->GetTime(&current);

        DWORD_PTR advise;

        //let's synchronize here
        clock->AdviseTime(m_start, start,
            reinterpret_cast<HEVENT>(m_renderEvent), &advise);

        HANDLE handles[] = {m_receiveCanWait, m_renderEvent};
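        //we wait on two events at once: m_renderEvent is signaled by the
        //reference clock once the presentation time is reached, while
        //m_receiveCanWait is signaled to abort the wait (stop, flush, end of
        //stream); WAIT_OBJECT_0 means the first handle (m_receiveCanWait) woke us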
        if (::WaitForMultipleObjects(2, handles, false, INFINITE) == WAIT_OBJECT_0) {
            if (state() != State_Stopped && !m_inputPin->isFlushing()) {
                ::ResetEvent(m_receiveCanWait);
            }
        }
    }

    {
        //let's lock the sample so it can be used in the GUI thread
        QMutexLocker locker(&m_mutex);
        m_sampleBuffer = ComPointer<IMediaSample>(sample);
    }

    //image is updated: we should update the widget
    //we should never call members of the target directly: that would not be thread-safe
    QApplication::postEvent(m_renderer, new QEvent(QEvent::UpdateRequest));

    if (!playing) {
        //useless to test the return value of WaitForSingleObject: timeout can't happen
        ::WaitForSingleObject(m_receiveCanWait, INFINITE);
        if (state() != State_Stopped && !m_inputPin->isFlushing()) {
            ::ResetEvent(m_receiveCanWait);
        }
    }

    //everything should be ok
    return S_OK;
}
bool VideoRendererSoftFilter::checkGLPrograms()
{
    if (!m_checkedPrograms) {
        m_checkedPrograms = true;

        glProgramStringARB = (_glProgramStringARB) wglGetProcAddress("glProgramStringARB");
        glBindProgramARB = (_glBindProgramARB) wglGetProcAddress("glBindProgramARB");
        glDeleteProgramsARB = (_glDeleteProgramsARB) wglGetProcAddress("glDeleteProgramsARB");
        glGenProgramsARB = (_glGenProgramsARB) wglGetProcAddress("glGenProgramsARB");
        glProgramLocalParameter4fARB = (_glProgramLocalParameter4fARB) wglGetProcAddress("glProgramLocalParameter4fARB");
        glActiveTexture = (_glActiveTexture) wglGetProcAddress("glActiveTexture");

        //we check only once if the widget is drawn using opengl
        if (glProgramStringARB && glBindProgramARB && glDeleteProgramsARB &&
            glGenProgramsARB && glActiveTexture && glProgramLocalParameter4fARB) {
            glGenProgramsARB(2, m_program);

            const char *code[] = {yv12ToRgb, yuy2ToRgb};

            bool error = false;
            for (int i = 0; i < ProgramCount && !error; ++i) {
                glBindProgramARB(GL_FRAGMENT_PROGRAM_ARB, m_program[i]);

                const GLbyte *gl_src = reinterpret_cast<const GLbyte *>(code[i]);
                glProgramStringARB(GL_FRAGMENT_PROGRAM_ARB, GL_PROGRAM_FORMAT_ASCII_ARB,
                    strlen(code[i]), gl_src);

                if (glGetError() != GL_NO_ERROR) {
                    error = true;
                }
            }

            if (error) {
                //a program failed to load: fall back to software rendering
                glDeleteProgramsARB(2, m_program);
            } else {
                //everything went fine, we store the result here (we support YV12 and YUY2)
                m_usingOpenGL = m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12
                    || m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2;
                //those "textures" will be used as byte streams
                //to pass Y, U and V data to the graphics card
                glGenTextures(3, m_texture);
            }
        }
    }
    return m_usingOpenGL;
}
void VideoRendererSoftFilter::updateTexture()
{
    if (!m_sampleBuffer) {
        return; //the texture is already up to date or there is no data yet
    }

    uchar *data = 0;
    m_sampleBuffer->GetPointer(&data);

    if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) {
        int w[3] = { m_size.width(), m_size.width()/2, m_size.width()/2 };
        int h[3] = { m_size.height(), m_size.height()/2, m_size.height()/2 };
        int offs[3] = { 0, m_size.width()*m_size.height(), m_size.width()*m_size.height()*5/4 };
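        //YV12 is planar: a full-resolution Y plane (w*h bytes) is followed by
        //quarter-resolution V and U planes (w*h/4 bytes each), hence the U
        //plane starting at offset w*h*5/4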
        for (int i = 0; i < 3; ++i) {
            glBindTexture(GL_TEXTURE_2D, m_texture[i]);
            glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, w[i], h[i], 0,
                GL_LUMINANCE, GL_UNSIGNED_BYTE, data + offs[i]);

            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
        }
    } else { //m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2
        //we upload 1 texture
        glBindTexture(GL_TEXTURE_2D, m_texture[0]);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_size.width() / 2, m_size.height(), 0,
            GL_RGBA, GL_UNSIGNED_BYTE, data);
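        //each RGBA texel packs two pixels ("Y1 U Y2 V"), hence the width of
        //m_size.width() / 2; the fragment program unpacks the pair and
        //interpolates the shared chroma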
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    }

    m_sampleBuffer = ComPointer<IMediaSample>();
    m_textureUploaded = true;
}
void VideoRendererSoftFilter::repaintCurrentFrame(QPainter &painter, const QRect &r)
{
    QMutexLocker locker(&m_mutex);

#ifndef QT_NO_OPENGL
    if (painter.paintEngine() &&
        (painter.paintEngine()->type() == QPaintEngine::OpenGL || painter.paintEngine()->type() == QPaintEngine::OpenGL2)
        && checkGLPrograms()) {

        //for now we only support YUV (both YV12 and YUY2)
        updateTexture();

        if (!m_textureUploaded) {
            //we simply fill the whole video with content
            //the callee has already set the brush
            painter.drawRect(r);
            return;
        }

        //let's draw the texture
        painter.beginNativePainting();

        //let's pass the other arguments
        const Program prog = (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) ? YV12toRGB : YUY2toRGB;
        glBindProgramARB(GL_FRAGMENT_PROGRAM_ARB, m_program[prog]);
        //loading the parameters
        glProgramLocalParameter4fARB(GL_FRAGMENT_PROGRAM_ARB, 0, m_brightness / 256., m_contrast, qCos(m_hue), qSin(m_hue));
        glProgramLocalParameter4fARB(GL_FRAGMENT_PROGRAM_ARB, 1, m_saturation, painter.opacity() /*alpha*/, 0. /*dummy*/, 0. /*dummy*/);
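        //the two calls above fill program.local[0..1] (c[0] and c[1]) declared
        //by the fragment programs: c[0] = (brightness/256, contrast, cos(hue),
        //sin(hue)) and c[1] = (saturation, alpha, unused, unused)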
        glEnable(GL_FRAGMENT_PROGRAM_ARB);

        const float v_array[] = { r.left(), r.top(), r.right()+1, r.top(), r.right()+1, r.bottom()+1, r.left(), r.bottom()+1 };

        float tx_array[12] = {0., 0., 0., 1.,
                              0., 0., 1., 1.,
                              0., 0., 1., 0.};

        if (prog == YUY2toRGB) {
            //the x texture coordinate is expressed in texels (hence the
            //scaling by w) and the third coordinate carries 1/w for the shader
            const float w = m_size.width() / 2,
                        iw = 1. / w;

            tx_array[3] *= w;
            tx_array[6] *= w;
            tx_array[9] *= w;

            for (int i = 0; i < 4; ++i) {
                tx_array[3*i + 2] = iw;
            }
        }

        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, m_texture[0]);

        if (prog == YV12toRGB) {
            glActiveTexture(GL_TEXTURE1);
            glBindTexture(GL_TEXTURE_2D, m_texture[2]);
            glActiveTexture(GL_TEXTURE2);
            glBindTexture(GL_TEXTURE_2D, m_texture[1]);
            glActiveTexture(GL_TEXTURE0);
        }

        glVertexPointer(2, GL_FLOAT, 0, v_array);
        glTexCoordPointer(3, GL_FLOAT, 0, tx_array);
        glEnableClientState(GL_VERTEX_ARRAY);
        glEnableClientState(GL_TEXTURE_COORD_ARRAY);
        glDrawArrays(GL_QUADS, 0, 4);
        glDisableClientState(GL_TEXTURE_COORD_ARRAY);
        glDisableClientState(GL_VERTEX_ARRAY);

        glDisable(GL_FRAGMENT_PROGRAM_ARB);
        nbFramesDisplayed++;
        painter.endNativePainting();
        return;
    }
#endif // QT_NO_OPENGL
    if (m_sampleBuffer) {
        //we need to get the sample data
        uchar *data = 0;
        m_sampleBuffer->GetPointer(&data);

        //let's update the current image
        if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) {
            convertYV12toRGB(data, m_size, m_currentImage,
                m_brightness, m_contrast, m_hue, m_saturation);
        } else if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2) {
            convertYUY2toRGB(data, m_size, m_currentImage,
                m_brightness, m_contrast, m_hue, m_saturation);
        } else if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_RGB32) {
            normalizeRGB(data, m_size, m_currentImage);
        }
        m_sampleBuffer = ComPointer<IMediaSample>();
        nbFramesDisplayed++;
    }

    if (m_currentImage.isNull()) {
        //we simply fill the whole video with content
        //the callee has already set the brush
        painter.drawRect(r);
    } else {
        painter.drawImage(0, 0, m_currentImage);
    }
}
void VideoRendererSoftFilter::normalizeRGB(const uchar *data, const QSize &s, QImage &destImage)
{
    const int w = s.width(),
              h = s.height();
    if (destImage.size() != s) {
        destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
    }
    if (destImage.isNull()) {
        return; //the system can't allocate the memory for the image
    }

    const QRgb *rgb = reinterpret_cast<const QRgb*>(data);

    //this sets the alpha channel to 0xff and flips the image vertically
    for (int y = h - 1; y >= 0; --y) {
        QRgb *dest = reinterpret_cast<QRgb*>(destImage.scanLine(y));
        for (int i = w; i > 0; --i, ++rgb, ++dest) {
            *dest = *rgb | (0xff << 24); //we force the alpha channel to 0xff
        }
    }
}
//we render data interpreted as YV12 into destImage
void VideoRendererSoftFilter::convertYV12toRGB(const uchar *data, const QSize &s, QImage &destImage,
                                               qreal brightness, qreal contrast, qreal hue, qreal saturation)
{
    const int w = s.width(),
              h = s.height();

    //let's cache some computation
    const int cosHx256 = qRound(qCos(hue) * contrast * saturation * 256),
              sinHx256 = qRound(qSin(hue) * contrast * saturation * 256);

    int Yvalue[256];
    for (int i = 0; i < 256; ++i) {
        Yvalue[i] = qRound(((i - 16) * contrast + brightness) * 298 + 128);
    }
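    //a note on the fixed-point math: 298/256 is the BT.601 luma scale 1.164,
    //the +128 term pre-rounds the final ">> 8", and cosHx256/sinHx256 fold
    //the hue mixing of U and V together with contrast and saturation into
    //integer factors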
    if (destImage.size() != s) {
        //this will only allocate memory when needed
        destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
    }
    if (destImage.isNull()) {
        return; //the system can't allocate the memory for the image
    }

    QRgb *dest = reinterpret_cast<QRgb*>(destImage.bits());
    const uchar *dataY = data,
                *dataV = data + (w*h),
                *dataU = dataV + (w*h)/4;

    for (int l = (h >> 1); l > 0; --l) {
        //we treat 2 lines by 2 lines
        QRgb *line1 = dest,
             *line2 = dest + w;

        for (int x = (w >> 1); x > 0; --x) {
            const int u = *dataU++ - 128,
                      v = *dataV++ - 128;
            const int d = (u * cosHx256 + v * sinHx256) >> 8,
                      e = (v * cosHx256 + u * sinHx256) >> 8;

            const int compRed = 409 * e,
                      compGreen = -100 * d - 208 * e,
                      compBlue = 516 * d;

            const int y21 = Yvalue[ dataY[w] ],
                      y11 = Yvalue[ *dataY++ ],
                      y22 = Yvalue[ dataY[w] ],
                      y12 = Yvalue[ *dataY++ ];

            //1st line, 1st pixel
            *line1++ = CLIPPED_PIXEL(y11, compRed, compGreen, compBlue);
            //1st line, 2nd pixel
            *line1++ = CLIPPED_PIXEL(y12, compRed, compGreen, compBlue);
            //2nd line, 1st pixel
            *line2++ = CLIPPED_PIXEL(y21, compRed, compGreen, compBlue);
            //2nd line, 2nd pixel
            *line2++ = CLIPPED_PIXEL(y22, compRed, compGreen, compBlue);
        }

        //2 lines are done: let's skip the one we already treated
        dataY += w;
        dest += 2 * w;
    }
}
//we render data interpreted as YUY2 into destImage
void VideoRendererSoftFilter::convertYUY2toRGB(const uchar *data, const QSize &s, QImage &destImage,
                                               qreal brightness, qreal contrast, qreal hue, qreal saturation)
{
    const int w = s.width(),
              h = s.height();

    //let's cache some computation
    int Yvalue[256];
    for (int i = 0; i < 256; ++i) {
        Yvalue[i] = qRound(((i - 16) * contrast + brightness) * 298 + 128);
    }

    const int cosHx256 = qRound(qCos(hue) * contrast * saturation * 256),
              sinHx256 = qRound(qSin(hue) * contrast * saturation * 256);

    if (destImage.size() != s) {
        //this will only allocate memory when needed
        destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
    }
    if (destImage.isNull()) {
        return; //the system can't allocate the memory for the image
    }

    QRgb *dest = reinterpret_cast<QRgb*>(destImage.bits());

    //the number of iterations is width * height / 2 because we treat 2 pixels at each iteration
    for (int c = w * h / 2; c > 0; --c) {
        //the idea of this algorithm comes from
        //http://msdn2.microsoft.com/en-us/library/ms867704.aspx#yuvformats_identifying_yuv_formats_in_directshow

        //we treat 2 pixels at a time, reading a packed "Y1 U Y2 V" quadruplet
        const int y1 = Yvalue[*data++],
                  u = *data++ - 128,
                  y2 = Yvalue[*data++],
                  v = *data++ - 128;

        const int d = (u * cosHx256 + v * sinHx256) >> 8,
                  e = (v * cosHx256 + u * sinHx256) >> 8;

        const int compRed = 409 * e,
                  compGreen = -100 * d - 208 * e,
                  compBlue = 516 * d;

        //1st pixel
        *dest++ = CLIPPED_PIXEL(y1, compRed, compGreen, compBlue);
        //2nd pixel
        *dest++ = CLIPPED_PIXEL(y2, compRed, compGreen, compBlue);
    }
}
VideoRendererSoft::VideoRendererSoft(QWidget *target) :
    m_renderer(new VideoRendererSoftFilter(this)), m_target(target)
{
    m_filter = Filter(m_renderer);
}
VideoRendererSoft::~VideoRendererSoft()
{
}
bool VideoRendererSoft::isNative() const
{
    return false;
}
void VideoRendererSoft::repaintCurrentFrame(QWidget *target, const QRect &rect)
{
    QPainter painter(target);

    QColor backColor = target->palette().color(target->backgroundRole());
    painter.setBrush(backColor);
    painter.setPen(Qt::NoPen);
    if (!m_videoRect.contains(rect)) {
        //we repaint the borders only when needed
        const QVector<QRect> reg = (QRegion(rect) - m_videoRect).rects();
        for (int i = 0; i < reg.count(); ++i) {
            painter.drawRect(reg.at(i));
        }
    }

    painter.setRenderHint(QPainter::SmoothPixmapTransform);
    painter.setTransform(m_transform, true);
    QSize vsize = videoSize();
    m_renderer->repaintCurrentFrame(painter, QRect(0, 0, vsize.width(), vsize.height()));
}
void VideoRendererSoft::notifyResize(const QSize &size,
    Phonon::VideoWidget::AspectRatio aspectRatio, Phonon::VideoWidget::ScaleMode scaleMode)
{
    const QSize vsize = videoSize();
    internalNotifyResize(size, vsize, aspectRatio, scaleMode);

    m_transform.reset();

    if (vsize.isValid() && size.isValid()) {
        m_transform.translate(m_dstX, m_dstY);
        const qreal sx = qreal(m_dstWidth) / qreal(vsize.width()),
                    sy = qreal(m_dstHeight) / qreal(vsize.height());
        m_transform.scale(sx, sy);
        m_videoRect = m_transform.mapRect(QRect(0, 0, vsize.width(), vsize.height()));
    }
}
QSize VideoRendererSoft::videoSize() const
{
    if (m_renderer->pins().first()->connected()) {
        return m_renderer->videoSize();
    }
    return m_renderer->currentImage().size();
}
void VideoRendererSoft::applyMixerSettings(qreal brightness, qreal contrast, qreal hue, qreal saturation)
{
    m_renderer->applyMixerSettings(brightness, contrast, hue, saturation);
}

QImage VideoRendererSoft::snapshot() const
{
    return m_renderer->currentImage(); //not accurate (especially when using OpenGL)
}

void VideoRendererSoft::setSnapshot(const QImage &image)
{
    m_renderer->setCurrentImage(image);
}
bool VideoRendererSoft::event(QEvent *e)
{
    if (e->type() == QEvent::UpdateRequest) {
        m_target->update(m_videoRect);
        return true;
    }
    return QObject::event(e);
}

#endif //QT_NO_PHONON_VIDEO