/*  This file is part of the KDE project.

Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).

This library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 2.1 or 3 of the License.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License
along with this library.  If not, see <http://www.gnu.org/licenses/>.
*/


#include "videorenderer_soft.h"

#ifndef QT_NO_PHONON_VIDEO

#include "qmeminputpin.h"
#include "qbasefilter.h"

#include <QtGui/QPainter>
#include <QtGui/QPaintEngine>
#include <QtGui/QApplication>
#include <QtCore/QTime>

#define _USE_MATH_DEFINES //for pi
#include <QtCore/qmath.h> //for sin and cos
/* M_PI is a #define that may or may not be handled in <cmath> */
#ifndef M_PI
#define M_PI 3.14159265358979323846264338327950288419717
#endif

#include <dvdmedia.h> //for VIDEOINFOHEADER2

//when defined, this prints once per second how many frames were processed and actually displayed
//#define FPS_COUNTER

#ifdef Q_OS_WINCE
#define QT_NO_OPENGL
#endif

#ifndef QT_NO_OPENGL
#include <GL/gl.h>
#ifndef GL_FRAGMENT_PROGRAM_ARB
#define GL_FRAGMENT_PROGRAM_ARB           0x8804
#define GL_PROGRAM_FORMAT_ASCII_ARB       0x8875
#endif

// support old OpenGL installations (1.2)
// assume that if TEXTURE0 isn't defined, none are
#ifndef GL_TEXTURE0
# define GL_TEXTURE0    0x84C0
# define GL_TEXTURE1    0x84C1
# define GL_TEXTURE2    0x84C2
#endif

// arbfp1 fragment program for converting yuv (YV12) to rgb
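// Both programs below read two local parameters, set at draw time in
// repaintCurrentFrame(): program.local[0] = (brightness/256, contrast,
// cos(hue), sin(hue)) and program.local[1] = (saturation, alpha).
// The remaining PARAM constants are presumably the usual BT.601 YCbCr->RGB
// coefficients (1.164, 1.596, 0.391, 0.813, 2.018) baked into the code.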
static const char yv12ToRgb[] =
"!!ARBfp1.0"
"PARAM c[5] = { program.local[0..1],"
"{ 1.164, 0, 1.596, 0.5 },"
"{ 0.0625, 1.164, -0.391, -0.81300002 },"
"{ 1.164, 2.0179999, 0 } };"
"TEMP R0;"
"TEX R0.x, fragment.texcoord[0], texture[1], 2D;"
"ADD R0.y, R0.x, -c[2].w;"
"TEX R0.x, fragment.texcoord[0], texture[2], 2D;"
"ADD R0.x, R0, -c[2].w;"
"MUL R0.z, R0.y, c[0].w;"
"MAD R0.z, R0.x, c[0], R0;"
"MUL R0.w, R0.x, c[0];"
"MUL R0.z, R0, c[0].y;"
"TEX R0.x, fragment.texcoord[0], texture[0], 2D;"
"MAD R0.y, R0, c[0].z, R0.w;"
"ADD R0.x, R0, -c[3];"
"MUL R0.y, R0, c[0];"
"MUL R0.z, R0, c[1].x;"
"MAD R0.x, R0, c[0].y, c[0];"
"MUL R0.y, R0, c[1].x;"
"DP3 result.color.x, R0, c[2];"
"DP3 result.color.y, R0, c[3].yzww;"
"DP3 result.color.z, R0, c[4];"
"MOV result.color.w, c[1].y;"
"END";

static const char yuy2ToRgb[] =
"!!ARBfp1.0"
"PARAM c[5] = { program.local[0..1],"
"{ 0.5, 2, 1, 0.0625 },"
"{ 1.164, 0, 1.596, 2.0179999 },"
"{ 1.164, -0.391, -0.81300002 } };"
"TEMP R0;"
"TEMP R1;"
"TEMP R2;"
"FLR R1.z, fragment.texcoord[0].x;"
"ADD R0.x, R1.z, c[2];"
"ADD R1.z, fragment.texcoord[0].x, -R1;"
"MUL R1.x, fragment.texcoord[0].z, R0;"
"MOV R1.y, fragment.texcoord[0];"
"TEX R0, R1, texture[0], 2D;"
"ADD R1.y, R0.z, -R0.x;"
"MUL R2.x, R1.z, R1.y;"
"MAD R0.x, R2, c[2].y, R0;"
"MOV R1.y, fragment.texcoord[0];"
"ADD R1.x, fragment.texcoord[0].z, R1;"
"TEX R1.xyw, R1, texture[0], 2D;"
"ADD R2.x, R1, -R0.z;"
"MAD R1.x, R1.z, c[2].y, -c[2].z;"
"MAD R0.z, R1.x, R2.x, R0;"
"ADD R1.xy, R1.ywzw, -R0.ywzw;"
"ADD R0.z, R0, -R0.x;"
"SGE R1.w, R1.z, c[2].x;"
"MAD R0.x, R1.w, R0.z, R0;"
"MAD R0.yz, R1.z, R1.xxyw, R0.xyww;"
"ADD R0.xyz, R0, -c[2].wxxw;"
"MUL R0.w, R0.y, c[0];"
"MAD R0.w, R0.z, c[0].z, R0;"
"MUL R0.z, R0, c[0].w;"
"MAD R0.y, R0, c[0].z, R0.z;"
"MUL R0.w, R0, c[0].y;"
"MUL R0.y, R0, c[0];"
"MUL R0.z, R0.w, c[1].x;"
"MAD R0.x, R0, c[0].y, c[0];"
"MUL R0.y, R0, c[1].x;"
"DP3 result.color.x, R0, c[3];"
"DP3 result.color.y, R0, c[4];"
"DP3 result.color.z, R0, c[3].xwyw;"
"MOV result.color.w, c[1].y;"
"END";

#endif //QT_NO_OPENGL

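// The software YUV->RGB path below works in 8.8 fixed point: each channel is
// carried as value*256 in an int. These macros clamp such a fixed-point value
// to [0, 255] and move its byte directly into place in an 0xAARRGGBB pixel
// (red shifted left, green kept in the middle, blue shifted right), saving a
// separate clamp-and-shift per channel.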
#define CLIP_SHIFT_RIGHT_8(c) ((c) < 0 ? 0 : (c) > 0xffff ? 0xff : (c) >> 8)
#define CLIP_SHIFT_LEFT_8(c) ((c) < 0 ? 0 : (c) > 0xffff ? 0xff0000 : ( ((c) << 8) & 0xff0000) )
#define CLIP_NO_SHIFT(c) ((c) < 0 ? 0 : (c) > 0xffff ? 0xff00 : ((c) & 0xff00) )
#define CLIPPED_PIXEL(base, r, g, b) (0xff000000u | CLIP_SHIFT_LEFT_8(base+r) | CLIP_NO_SHIFT(base+g) | CLIP_SHIFT_RIGHT_8(base+b))
#define CLIPPED_PIXEL2(r, g, b) (0xff000000u | CLIP_SHIFT_LEFT_8(r) | CLIP_NO_SHIFT(g) | CLIP_SHIFT_RIGHT_8(b))

QT_BEGIN_NAMESPACE

namespace Phonon
{
    namespace DS9
    {
        static const QVector<AM_MEDIA_TYPE> videoMediaTypes()
        {
            AM_MEDIA_TYPE mt = { MEDIATYPE_Video, MEDIASUBTYPE_YV12, 0, 0, 0, GUID_NULL, 0, 0, 0 };

            QVector<AM_MEDIA_TYPE> ret;

            //we add all the subtypes we support
            ret << mt; //YV12
            mt.subtype = MEDIASUBTYPE_YUY2;
            ret << mt; //YUY2
            mt.subtype = MEDIASUBTYPE_RGB32;
            ret << mt; //RGB32

            return ret;
        }

        class VideoRendererSoftFilter : public QBaseFilter
        {
        public:
            VideoRendererSoftFilter(VideoRendererSoft *renderer);

            ~VideoRendererSoftFilter();

            QSize videoSize() const;

#ifndef QT_NO_OPENGL
            void freeGLResources()
            {
                if (m_usingOpenGL) {
                    //let's reinitialize those values
                    m_usingOpenGL = false;
                    //to be sure we recreate it
                    if (m_textureUploaded) {
                        glDeleteTextures(3, m_texture);
                        m_textureUploaded = false;
                    }
                }
                m_checkedPrograms = false;
            }
#endif // QT_NO_OPENGL

            void freeResources()
            {
                QMutexLocker locker(&m_mutex);
                m_sampleBuffer = ComPointer<IMediaSample>();
#ifndef QT_NO_OPENGL
                freeGLResources();
                m_textureUploaded = false;
#endif // QT_NO_OPENGL
            }

            void endOfStream()
            {
                //received from the input pin
                ::SetEvent(m_receiveCanWait); //unblocks the flow

                //we send the message to the graph
                ComPointer<IMediaEventSink> sink(graph(), IID_IMediaEventSink);
                if (sink) {
                    sink->Notify(EC_COMPLETE, S_OK,
                        reinterpret_cast<LONG_PTR>(static_cast<IBaseFilter*>(this)));
                }
            }

            void freeMediaSample()
            {
                QMutexLocker locker(&m_mutex);
                m_sampleBuffer = ComPointer<IMediaSample>();
            }

            void beginFlush()
            {
                freeMediaSample();
                ::SetEvent(m_receiveCanWait); //unblocks the flow
            }

            void endFlush()
            {
                if (m_inputPin->connected() == 0) {
                    ::SetEvent(m_receiveCanWait); //unblock the flow in receive
                } else {
                    ::ResetEvent(m_receiveCanWait); //block the flow again
                }
            }

            STDMETHODIMP Stop()
            {
                HRESULT hr = QBaseFilter::Stop();
                beginFlush();
                return hr;
            }

            STDMETHODIMP Pause()
            {
                HRESULT hr = QBaseFilter::Pause();
                if (m_inputPin->connected() == 0) {
                    ::SetEvent(m_receiveCanWait); //unblock the flow in receive
                } else {
                    ::ResetEvent(m_receiveCanWait); //this will block
                }
                return hr;
            }

            STDMETHODIMP Run(REFERENCE_TIME start)
            {
                HRESULT hr = QBaseFilter::Run(start);
                m_start = start;

                if (m_inputPin->connected() == 0) {
                    endOfStream();
                } else {
                    ::SetEvent(m_receiveCanWait); //unblocks the flow (the event will later be reset and block again)
                }

#ifdef FPS_COUNTER
                fpsTime.restart();
                nbFramesProcessed = 0;
                nbFramesDisplayed = 0;
#endif

                return hr;
            }

            HRESULT processSample(IMediaSample *sample);

            void applyMixerSettings(qreal brightness, qreal contrast, qreal hue, qreal saturation)
            {
                //let's normalize the values
                m_brightness = brightness * 128;
                m_contrast = contrast + 1.;
                m_hue = hue * M_PI;
                m_saturation = saturation + 1.;
            }

            QImage currentImage() const
            {
                return m_currentImage;
            }

            void setCurrentImage(const QImage &image)
            {
                QMutexLocker locker(&m_mutex);
                m_currentImage = image;
            }

            //the following function is called from the GUI thread
            void repaintCurrentFrame(QPainter &painter, const QRect &r);


        protected:
            static void convertYV12toRGB(const uchar *data, const QSize &s, QImage &dest,
                                         qreal brightness, qreal contrast, qreal hue, qreal saturation);
            static void convertYUY2toRGB(const uchar *data, const QSize &s, QImage &dest,
                                         qreal brightness, qreal contrast, qreal hue, qreal saturation);
            static void normalizeRGB(const uchar *data, const QSize &s, QImage &destImage);

        private:
            QPin *const m_inputPin;
            ComPointer<IMediaSample> m_sampleBuffer;
            QImage m_currentImage;


            VideoRendererSoft *m_renderer;
            mutable QMutex m_mutex;
            REFERENCE_TIME m_start;
            HANDLE m_renderEvent, m_receiveCanWait;         // Signals sample to render
            QSize m_size;

            //mixer settings
            qreal m_brightness,
                  m_contrast,
                  m_hue,
                  m_saturation;

#ifdef FPS_COUNTER
            QTime fpsTime;
            int nbFramesProcessed;
            int nbFramesDisplayed;
#endif

#ifndef QT_NO_OPENGL
            enum Program
            {
                YV12toRGB = 0,
                YUY2toRGB = 1,
                ProgramCount = 2
            };

            void updateTexture();
            bool checkGLPrograms();

            // ARB_fragment_program
            typedef void (APIENTRY *_glProgramStringARB) (GLenum, GLenum, GLsizei, const GLvoid *);
            typedef void (APIENTRY *_glBindProgramARB) (GLenum, GLuint);
            typedef void (APIENTRY *_glDeleteProgramsARB) (GLsizei, const GLuint *);
            typedef void (APIENTRY *_glGenProgramsARB) (GLsizei, GLuint *);
            typedef void (APIENTRY *_glProgramLocalParameter4fARB) (GLenum, GLuint, GLfloat, GLfloat, GLfloat, GLfloat);
            typedef void (APIENTRY *_glActiveTexture) (GLenum);

            _glProgramStringARB glProgramStringARB;
            _glBindProgramARB glBindProgramARB;
            _glDeleteProgramsARB glDeleteProgramsARB;
            _glGenProgramsARB glGenProgramsARB;
            _glProgramLocalParameter4fARB glProgramLocalParameter4fARB;
            _glActiveTexture glActiveTexture;

            bool m_checkedPrograms;
            bool m_usingOpenGL;
            bool m_textureUploaded;
            GLuint m_program[2];
            GLuint m_texture[3];
#endif
        };

        class VideoRendererSoftPin : public QMemInputPin
        {
        public:
            VideoRendererSoftPin(VideoRendererSoftFilter *parent) :
              QMemInputPin(parent, videoMediaTypes(), false /*no transformation of the samples*/, 0),
                  m_renderer(parent)
              {
              }

              STDMETHODIMP EndOfStream()
              {
                  m_renderer->endOfStream();
                  return QMemInputPin::EndOfStream();
              }

              STDMETHODIMP ReceiveCanBlock()
              {
                  //yes, it can block
                  return S_OK;
              }

              STDMETHODIMP BeginFlush()
              {
                  m_renderer->beginFlush();
                  return QMemInputPin::BeginFlush();
              }

              STDMETHODIMP EndFlush()
              {
                  m_renderer->endFlush();
                  return QMemInputPin::EndFlush();
              }


              STDMETHODIMP GetAllocatorRequirements(ALLOCATOR_PROPERTIES *prop)
              {
                  if (!prop) {
                      return E_POINTER;
                  }

                  //we need 2 buffers
                  prop->cBuffers = 2;
                  return S_OK;
              }


              STDMETHODIMP NotifyAllocator(IMemAllocator *alloc, BOOL readonly)
              {
                  if (!alloc) {
                      return E_POINTER;
                  }
                  ALLOCATOR_PROPERTIES prop;
                  HRESULT hr = alloc->GetProperties(&prop);
                  if (SUCCEEDED(hr) && prop.cBuffers == 1) {
                      //we ask to get 2 buffers so that we don't block the flow
                      //when we addref the mediasample
                      prop.cBuffers = 2;
                      ALLOCATOR_PROPERTIES dummy;
                      alloc->SetProperties(&prop, &dummy);
                  }

                  return QMemInputPin::NotifyAllocator(alloc, readonly);
              }



        private:
            VideoRendererSoftFilter * const m_renderer;

        };

        VideoRendererSoftFilter::VideoRendererSoftFilter(VideoRendererSoft *renderer) :
        QBaseFilter(CLSID_NULL), m_inputPin(new VideoRendererSoftPin(this)),
            m_renderer(renderer), m_start(0)
#ifndef QT_NO_OPENGL
            , m_checkedPrograms(false), m_usingOpenGL(false), m_textureUploaded(false)
#endif
        {
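            //CreateEvent(0, 0, 0, 0) creates unnamed auto-reset events that start
            //in the non-signaled state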
            m_renderEvent    = ::CreateEvent(0, 0, 0, 0);
            m_receiveCanWait = ::CreateEvent(0, 0, 0, 0);
            //initialize the mixer settings with their default (neutral) values
            applyMixerSettings(0., 0., 0., 0.);
        }

        VideoRendererSoftFilter::~VideoRendererSoftFilter()
        {
            ::CloseHandle(m_renderEvent);
            ::CloseHandle(m_receiveCanWait);
            //this frees up resources
            freeResources();
        }

        QSize VideoRendererSoftFilter::videoSize() const
        {
            QSize ret;
            const AM_MEDIA_TYPE &mt = m_inputPin->connectedType();
            if (mt.pbFormat && mt.cbFormat) {
                if (mt.formattype == FORMAT_VideoInfo) {
                    const VIDEOINFOHEADER *header = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
                    const int h = qAbs(header->bmiHeader.biHeight),
                        w = qAbs(header->bmiHeader.biWidth);
                    ret = QSize(w, h);
                } else if (mt.formattype == FORMAT_VideoInfo2) {
                    const VIDEOINFOHEADER2 *header = reinterpret_cast<VIDEOINFOHEADER2*>(mt.pbFormat);
                    const int h = qAbs(header->bmiHeader.biHeight),
                        w = qAbs(header->bmiHeader.biWidth);
                    ret = QSize(w, h);
                }
            }
            return ret;
        }


        HRESULT VideoRendererSoftFilter::processSample(IMediaSample *sample)
        {
#ifdef FPS_COUNTER
            if (fpsTime.elapsed() > 1000) {
                qDebug("FPS_COUNTER: processed=%d, displayed=%d (%d)", nbFramesProcessed, nbFramesDisplayed, fpsTime.elapsed());
                nbFramesProcessed = 0;
                nbFramesDisplayed = 0;
                fpsTime.restart();
            }
#endif

            AM_MEDIA_TYPE *type = 0;
            if (sample->GetMediaType(&type) == S_OK) {
                //let's update the media type of the input pin
                m_inputPin->setConnectedType(*type);
            }


            const AM_MEDIA_TYPE &mt = m_inputPin->connectedType();

            if (mt.pbFormat == 0 || mt.cbFormat == 0) {
                return VFW_E_INVALIDMEDIATYPE;
            }

            m_size = videoSize();
            if (!m_size.isValid()) {
                return VFW_E_INVALIDMEDIATYPE;
            }

#ifdef FPS_COUNTER
            nbFramesProcessed++;
#endif

            REFERENCE_TIME start = 0, stop = 0;
            HRESULT hr = sample->GetTime(&start, &stop);

            ComPointer<IReferenceClock> clock;
            GetSyncSource(clock.pparam());

            const bool playing = SUCCEEDED(hr) && state() == State_Running && clock;

            if (playing) {
                REFERENCE_TIME current;
                clock->GetTime(&current);

                DWORD_PTR advise;

                //let's synchronize here
                clock->AdviseTime(m_start, start,
                    reinterpret_cast<HEVENT>(m_renderEvent), &advise);

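                //AdviseTime signals m_renderEvent when the reference clock reaches
                //m_start + start (this frame's presentation time); we then wait for
                //either that deadline or m_receiveCanWait, which is set to unblock
                //us early on stop, pause, flush or end-of-stream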
                HANDLE handles[] = {m_receiveCanWait, m_renderEvent};
                if (::WaitForMultipleObjects(2, handles, false, INFINITE) == WAIT_OBJECT_0) {
                    if (state() != State_Stopped && !m_inputPin->isFlushing()) {
                        ::ResetEvent(m_receiveCanWait);
                    }
                }
            }


            //now let's lock the sample so that it can be used from the GUI thread
            {
                QMutexLocker locker(&m_mutex);
                sample->AddRef();
                m_sampleBuffer = ComPointer<IMediaSample>(sample);
            }

            //the image has been updated: we should update the widget
            //(we must never call members of the target directly, for thread-safety)
            QApplication::postEvent(m_renderer, new QEvent(QEvent::UpdateRequest));

            if (!playing) {
                //no need to test the return value of WaitForSingleObject: with an
                //INFINITE timeout it cannot time out
                ::WaitForSingleObject(m_receiveCanWait, INFINITE);
                if (state() != State_Stopped && !m_inputPin->isFlushing()) {
                    ::ResetEvent(m_receiveCanWait);
                }
            }

            //everything should be ok
            return S_OK;
        }

#ifndef QT_NO_OPENGL
        bool VideoRendererSoftFilter::checkGLPrograms()
        {
            if (!m_checkedPrograms) {
                m_checkedPrograms = true;

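                //note that wglGetProcAddress only returns usable pointers while a GL
                //context is current; we are called from repaintCurrentFrame(), i.e.
                //during a paint event, so the widget's context should be current here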
                glProgramStringARB = (_glProgramStringARB) wglGetProcAddress("glProgramStringARB");
                glBindProgramARB = (_glBindProgramARB) wglGetProcAddress("glBindProgramARB");
                glDeleteProgramsARB = (_glDeleteProgramsARB) wglGetProcAddress("glDeleteProgramsARB");
                glGenProgramsARB = (_glGenProgramsARB) wglGetProcAddress("glGenProgramsARB");
                glProgramLocalParameter4fARB = (_glProgramLocalParameter4fARB) wglGetProcAddress("glProgramLocalParameter4fARB");
                glActiveTexture = (_glActiveTexture) wglGetProcAddress("glActiveTexture");

                //we check only once if the widget is drawn using opengl
                if (glProgramStringARB && glBindProgramARB && glDeleteProgramsARB &&
                    glGenProgramsARB && glActiveTexture && glProgramLocalParameter4fARB) {
                    glGenProgramsARB(2, m_program);

                    const char *code[] = {yv12ToRgb, yuy2ToRgb};

                    bool error = false;
                    for (int i = 0; i < ProgramCount && !error; ++i) {

                        glBindProgramARB(GL_FRAGMENT_PROGRAM_ARB, m_program[i]);

                        const GLbyte *gl_src = reinterpret_cast<const GLbyte *>(code[i]);
                        glProgramStringARB(GL_FRAGMENT_PROGRAM_ARB, GL_PROGRAM_FORMAT_ASCII_ARB,
                            strlen(code[i]), gl_src);

                        if (glGetError() != GL_NO_ERROR) {
                            error = true;
                        }
                    }

                    if (error) {
                        glDeleteProgramsARB(2, m_program);
                    } else {
                        //everything went fine: we can render through OpenGL (we support YV12 and YUY2)
                        m_usingOpenGL = m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12
                            || m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2;
                        //those "textures" will be used as byte streams
                        //to pass Y, U and V data to the graphics card
                        glGenTextures(3, m_texture);
                    }
                }
            }
            return m_usingOpenGL;
        }

        void VideoRendererSoftFilter::updateTexture()
        {
            if (!m_sampleBuffer) {
                return; //the texture is already up to date or there is no data yet
            }

            uchar *data = 0;
            m_sampleBuffer->GetPointer(&data);

            if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) {
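                //YV12 is planar: a full-resolution Y plane followed by a
                //quarter-resolution V plane, then a quarter-resolution U plane,
                //hence the offsets of w*h and w*h*5/4 below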
                int w[3] = { m_size.width(), m_size.width()/2, m_size.width()/2 };
                int h[3] = { m_size.height(), m_size.height()/2, m_size.height()/2 };
                int offs[3] = { 0, m_size.width()*m_size.height(), m_size.width()*m_size.height()*5/4 };

                for (int i = 0; i < 3; ++i) {
                    glBindTexture(GL_TEXTURE_2D, m_texture[i]);
                    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, w[i], h[i], 0,
                        GL_LUMINANCE, GL_UNSIGNED_BYTE, data + offs[i]);

                    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
                    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
                }
            } else { //m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2
                //we upload 1 texture
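                //YUY2 is packed as Y0 U0 Y1 V0: each RGBA texel carries two
                //horizontally adjacent pixels, so the texture is only half the
                //video width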
                glBindTexture(GL_TEXTURE_2D, m_texture[0]);
                glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_size.width() / 2, m_size.height(), 0,
                    GL_RGBA, GL_UNSIGNED_BYTE, data);

                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);

            }
            m_sampleBuffer = ComPointer<IMediaSample>();
            m_textureUploaded = true;
        }
#endif

        void VideoRendererSoftFilter::repaintCurrentFrame(QPainter &painter, const QRect &r)
        {
            QMutexLocker locker(&m_mutex);

#ifdef FPS_COUNTER
            nbFramesDisplayed++;
#endif


#ifndef QT_NO_OPENGL
            if (painter.paintEngine() &&
                (painter.paintEngine()->type() == QPaintEngine::OpenGL || painter.paintEngine()->type() == QPaintEngine::OpenGL2)
                && checkGLPrograms()) {

                //for now we only support YUV (both YV12 and YUY2)
                updateTexture();

                if (!m_textureUploaded) {
                    //no frame yet: we simply fill the whole video rect
                    //(the caller has already set the brush)
                    painter.drawRect(r);
                    return;
                }

                //let's draw the texture
                painter.beginNativePainting();

                //Let's pass the other arguments
                const Program prog = (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) ? YV12toRGB : YUY2toRGB;
                glBindProgramARB(GL_FRAGMENT_PROGRAM_ARB, m_program[prog]);
                //loading the parameters
                glProgramLocalParameter4fARB(GL_FRAGMENT_PROGRAM_ARB, 0, m_brightness / 256., m_contrast, qCos(m_hue), qSin(m_hue));
                glProgramLocalParameter4fARB(GL_FRAGMENT_PROGRAM_ARB, 1, m_saturation, painter.opacity() /*alpha */, 0. /*dummy*/, 0. /*dummy*/);

                glEnable(GL_FRAGMENT_PROGRAM_ARB);

                const float v_array[] = { r.left(), r.top(), r.right()+1, r.top(), r.right()+1, r.bottom()+1, r.left(), r.bottom()+1 };

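                //each vertex gets a 3-component texture coordinate: (s, t) plus a
                //third component that the YUY2 program reads as 1/width; for YUY2
                //the s coordinates are also left unnormalized (0..w in texel units),
                //presumably so the program can address exact texel pairs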
                float tx_array[12] = {0., 0., 0., 1.,
                    0., 0., 1., 1.,
                    0., 0., 1., 0.};

                if (prog == YUY2toRGB) {
                    const float w = m_size.width() / 2,
                        iw = 1. / w;

                    tx_array[3] = w;
                    tx_array[6] = w;

                    for (int i = 0; i < 4; ++i) {
                        tx_array[3*i + 2] = iw;
                    }
                }

                glActiveTexture(GL_TEXTURE0);
                glBindTexture(GL_TEXTURE_2D, m_texture[0]);

                if (prog == YV12toRGB) {
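                    //note the swap: m_texture[1] was uploaded from the V plane and
                    //m_texture[2] from the U plane (YV12 stores V before U), while the
                    //fragment program presumably expects U on unit 1 and V on unit 2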
                    glActiveTexture(GL_TEXTURE1);
                    glBindTexture(GL_TEXTURE_2D, m_texture[2]);
                    glActiveTexture(GL_TEXTURE2);
                    glBindTexture(GL_TEXTURE_2D, m_texture[1]);
                    glActiveTexture(GL_TEXTURE0);
                }


                glVertexPointer(2, GL_FLOAT, 0, v_array);
                glTexCoordPointer(3, GL_FLOAT, 0, tx_array);
                glEnableClientState(GL_VERTEX_ARRAY);
                glEnableClientState(GL_TEXTURE_COORD_ARRAY);
                glDrawArrays(GL_QUADS, 0, 4);
                glDisableClientState(GL_TEXTURE_COORD_ARRAY);
                glDisableClientState(GL_VERTEX_ARRAY);

                glDisable(GL_FRAGMENT_PROGRAM_ARB);
                painter.endNativePainting();
                return;
            } else
#endif
              if (m_sampleBuffer) {
                //we need to get the sample data
                uchar *data = 0;
                m_sampleBuffer->GetPointer(&data);


                //let's update the current image
                if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) {
                    convertYV12toRGB(data, m_size, m_currentImage,
                        m_brightness, m_contrast, m_hue, m_saturation);
                } else if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2) {
                    convertYUY2toRGB(data, m_size, m_currentImage,
                        m_brightness, m_contrast, m_hue, m_saturation);
                } else if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_RGB32) {
                    normalizeRGB(data, m_size, m_currentImage);
                }
                m_sampleBuffer = ComPointer<IMediaSample>();
            }

            if (m_currentImage.isNull()) {
                //no frame available: we simply fill the whole video rect
                //(the caller has already set the brush)
                painter.drawRect(r);
            } else {
                painter.drawImage(0, 0, m_currentImage);
            }
        }


        void VideoRendererSoftFilter::normalizeRGB(const uchar *data, const QSize &s, QImage &destImage)
        {
            const int w = s.width(),
                      h = s.height();
            if (destImage.size() != s) {
                destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
            }
            if (destImage.isNull()) {
                return; //the system can't allocate the memory for the image drawing
            }

            const QRgb *rgb = reinterpret_cast<const QRgb*>(data);

            //this sets the alpha channel to 0xff and flips the image vertically
            //(uncompressed RGB samples are normally stored bottom-up)
            for (int y = h - 1; y >= 0; --y) {
                QRgb *dest = reinterpret_cast<QRgb*>(destImage.scanLine(y));
                for (int i = w; i > 0; --i, ++rgb, ++dest) {
                    *dest = *rgb | (0xff << 24); //we force the alpha channel to 0xff
                }
            }
        }


        //we render data interpreted as YV12 into destImage
        void VideoRendererSoftFilter::convertYV12toRGB(const uchar *data, const QSize &s, QImage &destImage,
            qreal brightness, qreal contrast, qreal hue, qreal saturation)
        {
            const int w = s.width(),
                      h = s.height();

            //let's cache some computation
            const int cosHx256 = qRound(qCos(hue) * contrast * saturation * 256),
                      sinHx256 = qRound(qSin(hue) * contrast * saturation * 256);
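            //cosHx256 and sinHx256 rotate the (u,v) chroma vector by the hue angle
            //and scale it by contrast * saturation, in 8.8 fixed point (hence the
            //* 256 here and the >> 8 when computing d and e below)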

            int Yvalue[256];
            for (int i = 0; i < 256; ++i) {
                Yvalue[i] = qRound(((i - 16) * contrast + brightness) * 298 + 128);
            }
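            //Yvalue precomputes the luma term of the integer BT.601 conversion from
            //the MSDN article referenced in convertYUY2toRGB:
            //  R = clip((298 * (Y - 16) + 409 * (V - 128) + 128) >> 8)
            //  G = clip((298 * (Y - 16) - 100 * (U - 128) - 208 * (V - 128) + 128) >> 8)
            //  B = clip((298 * (Y - 16) + 516 * (U - 128) + 128) >> 8)
            //contrast and brightness are folded into the table; the final clip and
            //shift happen in the CLIPPED_PIXEL macro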


            if (destImage.size() != s) {
                destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
            }

            if (destImage.isNull()) {
                return; //the system can't allocate the memory for the image drawing
            }

            QRgb *dest = reinterpret_cast<QRgb*>(destImage.bits());
            const uchar *dataY = data,
                *dataV = data + (w*h),
                *dataU = dataV + (w*h)/4;

            uint *line1 = dest,
                *line2 = dest + w;

            for (int l = (h >> 1); l > 0; --l) {
                //we process the image two lines at a time

                for (int x = (w >> 1); x > 0; --x) {

                    const int u = *dataU++ - 128,
                        v = *dataV++ - 128;
                    const int d = (u * cosHx256 + v * sinHx256) >> 8,
                        e = (v * cosHx256 + u * sinHx256) >> 8;

                    const int compRed = 409 * e,
                        compGreen = -100 * d - 208 * e,
                        compBlue = 516 * d;

                    const int y21 = Yvalue[ dataY[w] ],
                              y11 = Yvalue[ *dataY++ ],
                              y22 = Yvalue[ dataY[w] ],
                              y12 = Yvalue[ *dataY++ ];

                    //1st line, 1st pixel
                    *line1++ = CLIPPED_PIXEL(y11, compRed, compGreen, compBlue);

                    //1st line, 2nd pixel
                    *line1++ = CLIPPED_PIXEL(y12, compRed, compGreen, compBlue);

                    //2nd line, 1st pixel
                    *line2++ = CLIPPED_PIXEL(y21, compRed, compGreen, compBlue);

                    //2nd line, 2nd pixel
                    *line2++ = CLIPPED_PIXEL(y22, compRed, compGreen, compBlue);

                } //for

                //end of the line
                dataY += w;
                line1 = line2;
                line2 += w;

            } //for

        }

        //we render data interpreted as YUY2 into destImage
        void VideoRendererSoftFilter::convertYUY2toRGB(const uchar *data, const QSize &s, QImage &destImage,
                                         qreal brightness, qreal contrast, qreal hue, qreal saturation)
        {
            const int w = s.width(),
                      h = s.height();

            //let's cache some computation
            int Yvalue[256];
            for (int i = 0; i < 256; ++i) {
                Yvalue[i] = qRound(((i - 16) * contrast + brightness) * 298 + 128);
            }

            const int cosHx256 = qRound(qCos(hue) * contrast * saturation * 256),
                      sinHx256 = qRound(qSin(hue) * contrast * saturation * 256);

            if (destImage.size() != s) {
                //this will only allocate memory when needed
                destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
            }
            if (destImage.isNull()) {
                return; //the system can't allocate the memory for the image drawing
            }

            QRgb *dest = reinterpret_cast<QRgb*>(destImage.bits());

            //the number of iterations is width * height / 2 because we process 2 pixels per iteration
            for (int c = w * h / 2; c > 0; --c) {

                //the idea of that algorithm comes from
                //http://msdn2.microsoft.com/en-us/library/ms867704.aspx#yuvformats_identifying_yuv_formats_in_directshow

                //we process 2 pixels at a time, reading 4 bytes of data (i.e. "YUYV")
                const int y1 = Yvalue[*data++],
                    u = *data++ - 128,
                    y2 = Yvalue[*data++],
                    v = *data++ - 128;

                const int d = (u * cosHx256 + v * sinHx256) >> 8,
                    e = (v * cosHx256 + u * sinHx256) >> 8;

                const int compRed = 409 * e,
                    compGreen = -100 * d - 208 * e,
                    compBlue = 516 * d;

                //first pixel
                *dest++ = CLIPPED_PIXEL(y1, compRed, compGreen, compBlue);

                //second pixel
                *dest++ = CLIPPED_PIXEL(y2, compRed, compGreen, compBlue);
            }
        }


        VideoRendererSoft::VideoRendererSoft(QWidget *target) :
        m_renderer(new VideoRendererSoftFilter(this)), m_target(target)
        {
            m_filter = Filter(m_renderer);
        }

        VideoRendererSoft::~VideoRendererSoft()
        {
        }


        bool VideoRendererSoft::isNative() const
        {
            return false;
        }


        void VideoRendererSoft::repaintCurrentFrame(QWidget *target, const QRect &rect)
        {
            QPainter painter(target);

            QColor backColor = target->palette().color(target->backgroundRole());
            painter.setBrush(backColor);
            painter.setPen(Qt::NoPen);
            if (!m_videoRect.contains(rect)) {
                //we repaint the borders only when needed
                const QVector<QRect> reg = (QRegion(rect) - m_videoRect).rects();
                for (int i = 0; i < reg.count(); ++i) {
                    painter.drawRect(reg.at(i));
                }
            }

            painter.setRenderHint(QPainter::SmoothPixmapTransform);
            painter.setTransform(m_transform, true);
            QSize vsize = videoSize();
            m_renderer->repaintCurrentFrame(painter, QRect(0, 0, vsize.width(), vsize.height()));
        }

        void VideoRendererSoft::notifyResize(const QSize &size,
            Phonon::VideoWidget::AspectRatio aspectRatio, Phonon::VideoWidget::ScaleMode scaleMode)
        {
            const QSize vsize = videoSize();
            internalNotifyResize(size, vsize, aspectRatio, scaleMode);

            m_transform.reset();

            if (vsize.isValid() && size.isValid()) {
                m_transform.translate(m_dstX, m_dstY);
                const qreal sx = qreal(m_dstWidth) / qreal(vsize.width()),
                    sy = qreal(m_dstHeight) / qreal(vsize.height());
                m_transform.scale(sx, sy);
                m_videoRect = m_transform.mapRect(QRect(0, 0, vsize.width(), vsize.height()));
            }
        }

        QSize VideoRendererSoft::videoSize() const
        {
            if (m_renderer->pins().first()->connected()) {
                return m_renderer->videoSize();
            } else {
                return m_renderer->currentImage().size();
            }
        }

        void VideoRendererSoft::applyMixerSettings(qreal brightness, qreal contrast, qreal hue, qreal saturation)
        {
            m_renderer->applyMixerSettings(brightness, contrast, hue, saturation);
        }

        QImage VideoRendererSoft::snapshot() const
        {
            return m_renderer->currentImage(); //not accurate (especially when using opengl...)
        }

        void VideoRendererSoft::setSnapshot(const QImage &image)
        {
            m_renderer->setCurrentImage(image);
        }

        bool VideoRendererSoft::event(QEvent *e)
        {
            if (e->type() == QEvent::UpdateRequest) {
                m_target->update(m_videoRect);
                return true;
            }
            return QObject::event(e);
        }


    }
}

QT_END_NAMESPACE

#endif //QT_NO_PHONON_VIDEO