/* This file is part of the KDE project.

Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).

This library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 2.1 or 3 of the License.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License
along with this library. If not, see <http://www.gnu.org/licenses/>.
*/


#include "videorenderer_soft.h"

#ifndef QT_NO_PHONON_VIDEO

#include "qmeminputpin.h"
#include "qbasefilter.h"

#include <QtGui/QPainter>
#include <QtGui/QPaintEngine>
#include <QtGui/QApplication>
#include <QtCore/QTime>

#define _USE_MATH_DEFINES //for pi
#include <QtCore/qmath.h> //for sin and cos
/* M_PI is a #define that may or may not be handled in <cmath> */
#ifndef M_PI
#define M_PI 3.14159265358979323846264338327950288419717
#endif

#include <dvdmedia.h> //for VIDEOINFOHEADER2

//this prints, once per second, how many frames were processed and how many were actually displayed
//#define FPS_COUNTER

#ifdef Q_OS_WINCE
#define QT_NO_OPENGL
#endif

#ifndef QT_NO_OPENGL
#include <gl/gl.h>
#ifndef GL_FRAGMENT_PROGRAM_ARB
#define GL_FRAGMENT_PROGRAM_ARB 0x8804
#define GL_PROGRAM_FORMAT_ASCII_ARB 0x8875
#endif

// support old OpenGL installations (1.2)
// assume that if TEXTURE0 isn't defined, none are
#ifndef GL_TEXTURE0
# define GL_TEXTURE0 0x84C0
# define GL_TEXTURE1 0x84C1
# define GL_TEXTURE2 0x84C2
#endif

// arbfp1 fragment program for converting yuv (YV12) to rgb
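// The two precompiled programs below appear to be machine-generated
// (presumably compiled offline from higher-level YUV->RGB shaders).
// Their local parameters are filled in at draw time in repaintCurrentFrame():
//   program.local[0] = (brightness / 256, contrast, cos(hue), sin(hue))
//   program.local[1] = (saturation, painter opacity, unused, unused)
// The literal PARAM rows hold the usual BT.601 constants (1.164, 1.596,
// 2.018, -0.391, -0.813) plus the 16/255 = 0.0625 luma offset.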
static const char yv12ToRgb[] =
    "!!ARBfp1.0"
    "PARAM c[5] = { program.local[0..1],"
    "{ 1.164, 0, 1.596, 0.5 },"
    "{ 0.0625, 1.164, -0.391, -0.81300002 },"
    "{ 1.164, 2.0179999, 0 } };"
    "TEMP R0;"
    "TEX R0.x, fragment.texcoord[0], texture[1], 2D;"
    "ADD R0.y, R0.x, -c[2].w;"
    "TEX R0.x, fragment.texcoord[0], texture[2], 2D;"
    "ADD R0.x, R0, -c[2].w;"
    "MUL R0.z, R0.y, c[0].w;"
    "MAD R0.z, R0.x, c[0], R0;"
    "MUL R0.w, R0.x, c[0];"
    "MUL R0.z, R0, c[0].y;"
    "TEX R0.x, fragment.texcoord[0], texture[0], 2D;"
    "MAD R0.y, R0, c[0].z, R0.w;"
    "ADD R0.x, R0, -c[3];"
    "MUL R0.y, R0, c[0];"
    "MUL R0.z, R0, c[1].x;"
    "MAD R0.x, R0, c[0].y, c[0];"
    "MUL R0.y, R0, c[1].x;"
    "DP3 result.color.x, R0, c[2];"
    "DP3 result.color.y, R0, c[3].yzww;"
    "DP3 result.color.z, R0, c[4];"
    "MOV result.color.w, c[1].y;"
    "END";

static const char yuy2ToRgb[] =
    "!!ARBfp1.0"
    "PARAM c[5] = { program.local[0..1],"
    "{ 0.5, 2, 1, 0.0625 },"
    "{ 1.164, 0, 1.596, 2.0179999 },"
    "{ 1.164, -0.391, -0.81300002 } };"
    "TEMP R0;"
    "TEMP R1;"
    "TEMP R2;"
    "FLR R1.z, fragment.texcoord[0].x;"
    "ADD R0.x, R1.z, c[2];"
    "ADD R1.z, fragment.texcoord[0].x, -R1;"
    "MUL R1.x, fragment.texcoord[0].z, R0;"
    "MOV R1.y, fragment.texcoord[0];"
    "TEX R0, R1, texture[0], 2D;"
    "ADD R1.y, R0.z, -R0.x;"
    "MUL R2.x, R1.z, R1.y;"
    "MAD R0.x, R2, c[2].y, R0;"
    "MOV R1.y, fragment.texcoord[0];"
    "ADD R1.x, fragment.texcoord[0].z, R1;"
    "TEX R1.xyw, R1, texture[0], 2D;"
    "ADD R2.x, R1, -R0.z;"
    "MAD R1.x, R1.z, c[2].y, -c[2].z;"
    "MAD R0.z, R1.x, R2.x, R0;"
    "ADD R1.xy, R1.ywzw, -R0.ywzw;"
    "ADD R0.z, R0, -R0.x;"
    "SGE R1.w, R1.z, c[2].x;"
    "MAD R0.x, R1.w, R0.z, R0;"
    "MAD R0.yz, R1.z, R1.xxyw, R0.xyww;"
    "ADD R0.xyz, R0, -c[2].wxxw;"
    "MUL R0.w, R0.y, c[0];"
    "MAD R0.w, R0.z, c[0].z, R0;"
    "MUL R0.z, R0, c[0].w;"
    "MAD R0.y, R0, c[0].z, R0.z;"
    "MUL R0.w, R0, c[0].y;"
    "MUL R0.y, R0, c[0];"
    "MUL R0.z, R0.w, c[1].x;"
    "MAD R0.x, R0, c[0].y, c[0];"
    "MUL R0.y, R0, c[1].x;"
    "DP3 result.color.x, R0, c[3];"
    "DP3 result.color.y, R0, c[4];"
    "DP3 result.color.z, R0, c[3].xwyw;"
    "MOV result.color.w, c[1].y;"
    "END";

#endif //QT_NO_OPENGL

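//The software converters below compute each color component in 8.8 fixed
//point (0xff00 means 255). These macros clamp such a component to
//[0, 0xffff] and move it into the right byte of a 0xAARRGGBB QRgb: red is
//shifted up into bits 16-23, green already sits in bits 8-15, and blue is
//shifted down into bits 0-7. The alpha channel is forced to 0xff.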
#define CLIP_SHIFT_RIGHT_8(c) ((c) < 0 ? 0 : (c) > 0xffff ? 0xff : (c) >> 8)
#define CLIP_SHIFT_LEFT_8(c) ((c) < 0 ? 0 : (c) > 0xffff ? 0xff0000 : ( ((c) << 8) & 0xff0000) )
#define CLIP_NO_SHIFT(c) ((c) < 0 ? 0 : (c) > 0xffff ? 0xff00 : ((c) & 0xff00) )
#define CLIPPED_PIXEL(base, r, g, b) (0xff000000u | CLIP_SHIFT_LEFT_8(base+r) | CLIP_NO_SHIFT(base+g) | CLIP_SHIFT_RIGHT_8(base+b))
#define CLIPPED_PIXEL2(r, g, b) (0xff000000u | CLIP_SHIFT_LEFT_8(r) | CLIP_NO_SHIFT(g) | CLIP_SHIFT_RIGHT_8(b))

QT_BEGIN_NAMESPACE

namespace Phonon
{
namespace DS9
{
    static const QVector<AM_MEDIA_TYPE> videoMediaTypes()
    {
        AM_MEDIA_TYPE mt = { MEDIATYPE_Video, MEDIASUBTYPE_YV12, 0, 0, 0, GUID_NULL, 0, 0, 0 };

        QVector<AM_MEDIA_TYPE> ret;

        //we add all the subtypes we support
        ret << mt; //YV12
        mt.subtype = MEDIASUBTYPE_YUY2;
        ret << mt; //YUY2
        mt.subtype = MEDIASUBTYPE_RGB32;
        ret << mt; //RGB32

        return ret;
    }

    class VideoRendererSoftFilter : public QBaseFilter
    {
    public:
        VideoRendererSoftFilter(VideoRendererSoft *renderer);

        ~VideoRendererSoftFilter();

        QSize videoSize() const;

#ifndef QT_NO_OPENGL
        void freeGLResources()
        {
            if (m_usingOpenGL) {
                //let's reinitialize those values
                m_usingOpenGL = false;
                //to be sure we recreate it
                if (m_textureUploaded) {
                    glDeleteTextures(3, m_texture);
                    m_textureUploaded = false;
                }
            }
            m_checkedPrograms = false;
        }
#endif // QT_NO_OPENGL

        void freeResources()
        {
            QMutexLocker locker(&m_mutex);
            m_sampleBuffer = ComPointer<IMediaSample>();
#ifndef QT_NO_OPENGL
            freeGLResources();
            m_textureUploaded = false;
#endif // QT_NO_OPENGL
        }

        void endOfStream()
        {
            //received from the input pin
            ::SetEvent(m_receiveCanWait); //unblocks the flow

            //we send the message to the graph
            ComPointer<IMediaEventSink> sink(graph(), IID_IMediaEventSink);
            if (sink) {
                sink->Notify(EC_COMPLETE, S_OK,
                             reinterpret_cast<LONG_PTR>(static_cast<IBaseFilter*>(this)));
            }
        }

        void freeMediaSample()
        {
            QMutexLocker locker(&m_mutex);
            m_sampleBuffer = ComPointer<IMediaSample>();
        }

        void beginFlush()
        {
            freeMediaSample();
            ::SetEvent(m_receiveCanWait); //unblocks the flow
        }

        void endFlush()
        {
            if (m_inputPin->connected() == 0) {
                ::SetEvent(m_receiveCanWait); //unblock the flow in receive
            } else {
                ::ResetEvent(m_receiveCanWait); //block the flow again
            }
        }

        STDMETHODIMP Stop()
        {
            HRESULT hr = QBaseFilter::Stop();
            beginFlush();
            return hr;
        }

        STDMETHODIMP Pause()
        {
            HRESULT hr = QBaseFilter::Pause();
            if (m_inputPin->connected() == 0) {
                ::SetEvent(m_receiveCanWait); //unblock the flow in receive
            } else {
                ::ResetEvent(m_receiveCanWait); //this will block
            }
            return hr;
        }

        STDMETHODIMP Run(REFERENCE_TIME start)
        {
            HRESULT hr = QBaseFilter::Run(start);
            m_start = start;

            if (m_inputPin->connected() == 0) {
                endOfStream();
            } else {
                ::SetEvent(m_receiveCanWait); //unblocks the flow (this event will block then again)
            }

#ifdef FPS_COUNTER
            fpsTime.restart();
            nbFramesProcessed = 0;
            nbFramesDisplayed = 0;
#endif

            return hr;
        }

        HRESULT processSample(IMediaSample *sample);

        void applyMixerSettings(qreal brightness, qreal contrast, qreal hue, qreal saturation)
        {
            //let's normalize the values
            m_brightness = brightness * 128;
            m_contrast = contrast + 1.;
            m_hue = hue * M_PI;
            m_saturation = saturation + 1.;
        }
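        //Note (assumption based on the Phonon::VideoWidget API): the four
        //parameters come in as qreal values in [-1, 1] with 0 being neutral.
        //After normalization, m_contrast and m_saturation are multipliers
        //around 1, m_hue is an angle in radians and m_brightness an additive
        //luma offset.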

        QImage currentImage() const
        {
            return m_currentImage;
        }

        void setCurrentImage(const QImage &image)
        {
            QMutexLocker locker(&m_mutex);
            m_currentImage = image;
        }

        //the following function is called from the GUI thread
        void repaintCurrentFrame(QPainter &painter, const QRect &r);


    protected:
        static void convertYV12toRGB(const uchar *data, const QSize &s, QImage &dest,
                                     qreal brightness, qreal contrast, qreal hue, qreal saturation);
        static void convertYUY2toRGB(const uchar *data, const QSize &s, QImage &dest,
                                     qreal brightness, qreal contrast, qreal hue, qreal saturation);
        static void normalizeRGB(const uchar *data, const QSize &s, QImage &destImage);

    private:
        QPin *const m_inputPin;
        ComPointer<IMediaSample> m_sampleBuffer;
        QImage m_currentImage;


        VideoRendererSoft *m_renderer;
        mutable QMutex m_mutex;
        REFERENCE_TIME m_start;
        HANDLE m_renderEvent,    //signaled by the reference clock when the current sample is due
               m_receiveCanWait; //signaled to unblock a waiting Receive (run, flush, stop, EOS)
        QSize m_size;

        //mixer settings
        qreal m_brightness,
              m_contrast,
              m_hue,
              m_saturation;

#ifdef FPS_COUNTER
        QTime fpsTime;
        int nbFramesProcessed;
        int nbFramesDisplayed;
#endif

#ifndef QT_NO_OPENGL
        enum Program
        {
            YV12toRGB = 0,
            YUY2toRGB = 1,
            ProgramCount = 2
        };

        void updateTexture();
        bool checkGLPrograms();

        // ARB_fragment_program
        typedef void (APIENTRY *_glProgramStringARB) (GLenum, GLenum, GLsizei, const GLvoid *);
        typedef void (APIENTRY *_glBindProgramARB) (GLenum, GLuint);
        typedef void (APIENTRY *_glDeleteProgramsARB) (GLsizei, const GLuint *);
        typedef void (APIENTRY *_glGenProgramsARB) (GLsizei, GLuint *);
        typedef void (APIENTRY *_glProgramLocalParameter4fARB) (GLenum, GLuint, GLfloat, GLfloat, GLfloat, GLfloat);
        typedef void (APIENTRY *_glActiveTexture) (GLenum);

        _glProgramStringARB glProgramStringARB;
        _glBindProgramARB glBindProgramARB;
        _glDeleteProgramsARB glDeleteProgramsARB;
        _glGenProgramsARB glGenProgramsARB;
        _glProgramLocalParameter4fARB glProgramLocalParameter4fARB;
        _glActiveTexture glActiveTexture;

        bool m_checkedPrograms;
        bool m_usingOpenGL;
        bool m_textureUploaded;
        GLuint m_program[2];
        GLuint m_texture[3];
#endif
    };

    class VideoRendererSoftPin : public QMemInputPin
    {
    public:
        VideoRendererSoftPin(VideoRendererSoftFilter *parent) :
            QMemInputPin(parent, videoMediaTypes(), false /*no transformation of the samples*/, 0),
            m_renderer(parent)
        {
        }

        STDMETHODIMP EndOfStream()
        {
            m_renderer->endOfStream();
            return QMemInputPin::EndOfStream();
        }

        STDMETHODIMP ReceiveCanBlock()
        {
            //yes, it can block
            return S_OK;
        }

        STDMETHODIMP BeginFlush()
        {
            m_renderer->beginFlush();
            return QMemInputPin::BeginFlush();
        }

        STDMETHODIMP EndFlush()
        {
            m_renderer->endFlush();
            return QMemInputPin::EndFlush();
        }


        STDMETHODIMP GetAllocatorRequirements(ALLOCATOR_PROPERTIES *prop)
        {
            if (!prop) {
                return E_POINTER;
            }

            //we need 2 buffers: the renderer keeps a reference to the current
            //sample, so a second buffer lets the upstream filter keep delivering
            prop->cBuffers = 2;
            return S_OK;
        }


        STDMETHODIMP NotifyAllocator(IMemAllocator *alloc, BOOL readonly)
        {
            if (!alloc) {
                return E_POINTER;
            }
            ALLOCATOR_PROPERTIES prop;
            HRESULT hr = alloc->GetProperties(&prop);
            if (SUCCEEDED(hr) && prop.cBuffers == 1) {
                //we ask to get 2 buffers so that we don't block the flow
                //when we addref the mediasample
                prop.cBuffers = 2;
                ALLOCATOR_PROPERTIES dummy;
                alloc->SetProperties(&prop, &dummy);
            }

            return QMemInputPin::NotifyAllocator(alloc, readonly);
        }



    private:
        VideoRendererSoftFilter * const m_renderer;

    };

    VideoRendererSoftFilter::VideoRendererSoftFilter(VideoRendererSoft *renderer) :
        QBaseFilter(CLSID_NULL), m_inputPin(new VideoRendererSoftPin(this)),
        m_renderer(renderer), m_start(0)
#ifndef QT_NO_OPENGL
        , m_checkedPrograms(false), m_usingOpenGL(false), m_textureUploaded(false)
#endif
    {
        m_renderEvent = ::CreateEvent(0, 0, 0, 0);
        m_receiveCanWait = ::CreateEvent(0, 0, 0, 0);
        //initialize the mixer settings with their neutral default values
        applyMixerSettings(0., 0., 0., 0.);
    }

    VideoRendererSoftFilter::~VideoRendererSoftFilter()
    {
        ::CloseHandle(m_renderEvent);
        ::CloseHandle(m_receiveCanWait);
        //this frees up resources
        freeResources();
    }

    QSize VideoRendererSoftFilter::videoSize() const
    {
        QSize ret;
        const AM_MEDIA_TYPE &mt = m_inputPin->connectedType();
        if (mt.pbFormat && mt.cbFormat) {
            if (mt.formattype == FORMAT_VideoInfo) {
                const VIDEOINFOHEADER *header = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
                const int h = qAbs(header->bmiHeader.biHeight),
                          w = qAbs(header->bmiHeader.biWidth);
                ret = QSize(w, h);
            } else if (mt.formattype == FORMAT_VideoInfo2) {
                const VIDEOINFOHEADER2 *header = reinterpret_cast<VIDEOINFOHEADER2*>(mt.pbFormat);
                const int h = qAbs(header->bmiHeader.biHeight),
                          w = qAbs(header->bmiHeader.biWidth);
                ret = QSize(w, h);
            }
        }
        return ret;
    }


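    //processSample() runs on the upstream filter's streaming thread (our pin
    //returns S_OK from ReceiveCanBlock). It may block below until either the
    //sample's presentation time is reached (m_renderEvent) or the flow is
    //released by Run/flush/stop/end-of-stream (m_receiveCanWait).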
    HRESULT VideoRendererSoftFilter::processSample(IMediaSample *sample)
    {
#ifdef FPS_COUNTER
        if (fpsTime.elapsed() > 1000) {
            qDebug("FPS_COUNTER: processed=%d, displayed=%d (%d)", nbFramesProcessed, nbFramesDisplayed, fpsTime.elapsed());
            nbFramesProcessed = 0;
            nbFramesDisplayed = 0;
            fpsTime.restart();
        }
#endif

        AM_MEDIA_TYPE *type = 0;
        if (sample->GetMediaType(&type) == S_OK) {
            //let's update the media type of the input pin
            m_inputPin->setConnectedType(*type);
        }


        const AM_MEDIA_TYPE &mt = m_inputPin->connectedType();

        if (mt.pbFormat == 0 || mt.cbFormat == 0) {
            return VFW_E_INVALIDMEDIATYPE;
        }

        m_size = videoSize();
        if (!m_size.isValid()) {
            return VFW_E_INVALIDMEDIATYPE;
        }

#ifdef FPS_COUNTER
        nbFramesProcessed++;
#endif

        REFERENCE_TIME start = 0, stop = 0;
        HRESULT hr = sample->GetTime(&start, &stop);

        ComPointer<IReferenceClock> clock;
        GetSyncSource(clock.pparam());

        const bool playing = SUCCEEDED(hr) && state() == State_Running && clock;

        if (playing) {
            REFERENCE_TIME current;
            clock->GetTime(&current);

            DWORD_PTR advise;

            //ask the reference clock to signal m_renderEvent when the
            //sample's presentation time (m_start + start) is reached
            clock->AdviseTime(m_start, start,
                              reinterpret_cast<HEVENT>(m_renderEvent), &advise);

            HANDLE handles[] = {m_receiveCanWait, m_renderEvent};
            if (::WaitForMultipleObjects(2, handles, false, INFINITE) == WAIT_OBJECT_0) {
                if (state() != State_Stopped && !m_inputPin->isFlushing()) {
                    ::ResetEvent(m_receiveCanWait);
                }
            }
        }


        //now let's lock the sample so that it can be used in the GUI thread
        {
            QMutexLocker locker(&m_mutex);
            sample->AddRef();
            m_sampleBuffer = ComPointer<IMediaSample>(sample);
        }

        //image is updated: we should update the widget
        //we should never call members of the target directly, for thread-safety
        QApplication::postEvent(m_renderer, new QEvent(QEvent::UpdateRequest));

        if (!playing) {
            //useless to test the return value of WaitForSingleObject: a timeout can't happen
            ::WaitForSingleObject(m_receiveCanWait, INFINITE);
            if (state() != State_Stopped && !m_inputPin->isFlushing()) {
                ::ResetEvent(m_receiveCanWait);
            }
        }

        //everything should be ok
        return S_OK;
    }

#ifndef QT_NO_OPENGL
    bool VideoRendererSoftFilter::checkGLPrograms()
    {
        if (!m_checkedPrograms) {
            m_checkedPrograms = true;

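            //note: wglGetProcAddress only returns usable pointers while an
            //OpenGL context is current; we rely on being called from
            //repaintCurrentFrame() with the QPainter's OpenGL engine active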
            glProgramStringARB = (_glProgramStringARB) wglGetProcAddress("glProgramStringARB");
            glBindProgramARB = (_glBindProgramARB) wglGetProcAddress("glBindProgramARB");
            glDeleteProgramsARB = (_glDeleteProgramsARB) wglGetProcAddress("glDeleteProgramsARB");
            glGenProgramsARB = (_glGenProgramsARB) wglGetProcAddress("glGenProgramsARB");
            glProgramLocalParameter4fARB = (_glProgramLocalParameter4fARB) wglGetProcAddress("glProgramLocalParameter4fARB");
            glActiveTexture = (_glActiveTexture) wglGetProcAddress("glActiveTexture");

            //we check only once if the widget is drawn using opengl
            if (glProgramStringARB && glBindProgramARB && glDeleteProgramsARB &&
                glGenProgramsARB && glActiveTexture && glProgramLocalParameter4fARB) {
                glGenProgramsARB(2, m_program);

                const char *code[] = {yv12ToRgb, yuy2ToRgb};

                bool error = false;
                for (int i = 0; i < ProgramCount && !error; ++i) {

                    glBindProgramARB(GL_FRAGMENT_PROGRAM_ARB, m_program[i]);

                    const GLbyte *gl_src = reinterpret_cast<const GLbyte *>(code[i]);
                    glProgramStringARB(GL_FRAGMENT_PROGRAM_ARB, GL_PROGRAM_FORMAT_ASCII_ARB,
                                       strlen(code[i]), gl_src);

                    if (glGetError() != GL_NO_ERROR) {
                        error = true;
                    }
                }

                if (error) {
                    glDeleteProgramsARB(2, m_program);
                } else {
                    //everything went fine: we can use OpenGL (we support YV12 and YUY2)
                    m_usingOpenGL = m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12
                                    || m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2;
                    //those "textures" will be used as byte streams
                    //to pass Y, U and V data to the graphics card
                    glGenTextures(3, m_texture);
                }
            }
        }
        return m_usingOpenGL;
    }

    void VideoRendererSoftFilter::updateTexture()
    {
        if (!m_sampleBuffer) {
            return; //the texture is already up to date, or there is no data yet
        }

        uchar *data = 0;
        m_sampleBuffer->GetPointer(&data);

        if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) {
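            //YV12 is planar: a full-resolution Y plane is followed by the V
            //and then the U plane, each at half resolution both horizontally
            //and vertically, hence the three sizes and offsets below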
            int w[3] = { m_size.width(), m_size.width()/2, m_size.width()/2 };
            int h[3] = { m_size.height(), m_size.height()/2, m_size.height()/2 };
            int offs[3] = { 0, m_size.width()*m_size.height(), m_size.width()*m_size.height()*5/4 };

            for (int i = 0; i < 3; ++i) {
                glBindTexture(GL_TEXTURE_2D, m_texture[i]);
                glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, w[i], h[i], 0,
                             GL_LUMINANCE, GL_UNSIGNED_BYTE, data + offs[i]);

                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
            }
        } else { //m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2
            //we upload 1 texture
            glBindTexture(GL_TEXTURE_2D, m_texture[0]);
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_size.width() / 2, m_size.height(), 0,
                         GL_RGBA, GL_UNSIGNED_BYTE, data);

            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);

        }
        m_sampleBuffer = ComPointer<IMediaSample>();
        m_textureUploaded = true;
    }
#endif

    void VideoRendererSoftFilter::repaintCurrentFrame(QPainter &painter, const QRect &r)
    {
        QMutexLocker locker(&m_mutex);

#ifdef FPS_COUNTER
        nbFramesDisplayed++;
#endif


#ifndef QT_NO_OPENGL
        if (painter.paintEngine() && painter.paintEngine()->type() == QPaintEngine::OpenGL && checkGLPrograms()) {
            //for now we only support YUV (both YV12 and YUY2)
            updateTexture();

            if (!m_textureUploaded) {
                //we simply fill the whole video with content
                //the caller has already set the brush
                painter.drawRect(r);
                return;
            }

            //let's draw the texture

            //Let's pass the other arguments
            const Program prog = (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) ? YV12toRGB : YUY2toRGB;
            glBindProgramARB(GL_FRAGMENT_PROGRAM_ARB, m_program[prog]);
            //loading the parameters
            glProgramLocalParameter4fARB(GL_FRAGMENT_PROGRAM_ARB, 0, m_brightness / 256., m_contrast, qCos(m_hue), qSin(m_hue));
            glProgramLocalParameter4fARB(GL_FRAGMENT_PROGRAM_ARB, 1, m_saturation, painter.opacity() /*alpha*/, 0. /*dummy*/, 0. /*dummy*/);

            glEnable(GL_FRAGMENT_PROGRAM_ARB);

            const float v_array[] = { r.left(), r.top(), r.right()+1, r.top(), r.right()+1, r.bottom()+1, r.left(), r.bottom()+1 };

            float tx_array[12] = {0., 0., 0., 1.,
                                  0., 0., 1., 1.,
                                  0., 0., 1., 0.};

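            //tx_array holds 3 texture-coordinate components per vertex. For
            //YV12 the normalized [0, 1] range is all we need. For YUY2 (see
            //below) the x component is rewritten in texel units (the texture
            //is width/2 texels wide) and the z component carries the
            //reciprocal of that width, so the fragment program can locate and
            //weight neighboring texels when reconstructing the chroma of odd
            //pixels.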
            if (prog == YUY2toRGB) {
                const float w = m_size.width() / 2,
                            iw = 1. / w;

                tx_array[3] = w;
                tx_array[6] = w;

                for (int i = 0; i < 4; ++i) {
                    tx_array[3*i + 2] = iw;
                }
            }

            glActiveTexture(GL_TEXTURE0);
            glBindTexture(GL_TEXTURE_2D, m_texture[0]);

            if (prog == YV12toRGB) {
                glActiveTexture(GL_TEXTURE1);
                glBindTexture(GL_TEXTURE_2D, m_texture[2]);
                glActiveTexture(GL_TEXTURE2);
                glBindTexture(GL_TEXTURE_2D, m_texture[1]);
                glActiveTexture(GL_TEXTURE0);
            }


            glVertexPointer(2, GL_FLOAT, 0, v_array);
            glTexCoordPointer(3, GL_FLOAT, 0, tx_array);
            glEnableClientState(GL_VERTEX_ARRAY);
            glEnableClientState(GL_TEXTURE_COORD_ARRAY);
            glDrawArrays(GL_QUADS, 0, 4);
            glDisableClientState(GL_TEXTURE_COORD_ARRAY);
            glDisableClientState(GL_VERTEX_ARRAY);

            glDisable(GL_FRAGMENT_PROGRAM_ARB);
            return;
        } else
#endif
        if (m_sampleBuffer) {
            //we need to get the sample data
            uchar *data = 0;
            m_sampleBuffer->GetPointer(&data);


            //let's update the current image
            if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YV12) {
                convertYV12toRGB(data, m_size, m_currentImage,
                                 m_brightness, m_contrast, m_hue, m_saturation);
            } else if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_YUY2) {
                convertYUY2toRGB(data, m_size, m_currentImage,
                                 m_brightness, m_contrast, m_hue, m_saturation);
            } else if (m_inputPin->connectedType().subtype == MEDIASUBTYPE_RGB32) {
                normalizeRGB(data, m_size, m_currentImage);
            }
            m_sampleBuffer = ComPointer<IMediaSample>();
        }

        if (m_currentImage.isNull()) {
            //we simply fill the whole video with content
            //the caller has already set the brush
            painter.drawRect(r);
        } else {
            painter.drawImage(0, 0, m_currentImage);
        }
    }

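    //note (assumption based on the usual DirectShow RGB32 layout): RGB32
    //samples arrive as bottom-up DIBs, which is why normalizeRGB() below
    //writes the destination scanlines in reverse order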
    void VideoRendererSoftFilter::normalizeRGB(const uchar *data, const QSize &s, QImage &destImage)
    {
        const int w = s.width(),
                  h = s.height();
        if (destImage.size() != s) {
            destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
        }
        if (destImage.isNull()) {
            return; //the system can't allocate the memory for the image drawing
        }

        const QRgb *rgb = reinterpret_cast<const QRgb*>(data);

        //this sets the alpha channel to 0xff and flips the image vertically
        for (int y = h - 1; y >= 0; --y) {
            QRgb *dest = reinterpret_cast<QRgb*>(destImage.scanLine(y));
            for (int i = w; i > 0; --i, ++rgb, ++dest) {
                *dest = *rgb | (0xff << 24); //we force the alpha channel to 0xff
            }
        }
    }


    //we convert data interpreted as YV12 into destImage
    void VideoRendererSoftFilter::convertYV12toRGB(const uchar *data, const QSize &s, QImage &destImage,
                                                   qreal brightness, qreal contrast, qreal hue, qreal saturation)
    {
        const int w = s.width(),
                  h = s.height();

        //let's cache some computation
        const int cosHx256 = qRound(qCos(hue) * contrast * saturation * 256),
                  sinHx256 = qRound(qSin(hue) * contrast * saturation * 256);

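        //this is the standard BT.601 integer conversion in 8.8 fixed point:
        //  R = clip((298 * (Y - 16) + 409 * Cr            + 128) >> 8)
        //  G = clip((298 * (Y - 16) - 100 * Cb - 208 * Cr + 128) >> 8)
        //  B = clip((298 * (Y - 16) + 516 * Cb            + 128) >> 8)
        //(Cb and Cr centered on 128); contrast and brightness are folded into
        //the Yvalue lookup table, and hue/saturation into the rotated chroma
        //terms d and e computed in the loop below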
        int Yvalue[256];
        for (int i = 0; i < 256; ++i) {
            Yvalue[i] = qRound(((i - 16) * contrast + brightness) * 298 + 128);
        }


        if (destImage.size() != s) {
            destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
        }

        if (destImage.isNull()) {
            return; //the system can't allocate the memory for the image drawing
        }

        QRgb *dest = reinterpret_cast<QRgb*>(destImage.bits());
        const uchar *dataY = data,
                    *dataV = data + (w*h),
                    *dataU = dataV + (w*h)/4;

        uint *line1 = dest,
             *line2 = dest + w;

        for (int l = (h >> 1); l > 0; --l) {
            //we process 2 lines at a time

            for (int x = (w >> 1); x > 0; --x) {

                const int u = *dataU++ - 128,
                          v = *dataV++ - 128;
                const int d = (u * cosHx256 + v * sinHx256) >> 8,
                          e = (v * cosHx256 + u * sinHx256) >> 8;

                const int compRed = 409 * e,
                          compGreen = -100 * d - 208 * e,
                          compBlue = 516 * d;

                const int y21 = Yvalue[ dataY[w] ],
                          y11 = Yvalue[ *dataY++ ],
                          y22 = Yvalue[ dataY[w] ],
                          y12 = Yvalue[ *dataY++ ];

                //1st line, 1st pixel
                *line1++ = CLIPPED_PIXEL(y11, compRed, compGreen, compBlue);

                //1st line, 2nd pixel
                *line1++ = CLIPPED_PIXEL(y12, compRed, compGreen, compBlue);

                //2nd line, 1st pixel
                *line2++ = CLIPPED_PIXEL(y21, compRed, compGreen, compBlue);

                //2nd line, 2nd pixel
                *line2++ = CLIPPED_PIXEL(y22, compRed, compGreen, compBlue);

            } //for

            //end of the line
            dataY += w;
            line1 = line2;
            line2 += w;

        } //for

    }

    //we convert data interpreted as YUY2 into destImage
    void VideoRendererSoftFilter::convertYUY2toRGB(const uchar *data, const QSize &s, QImage &destImage,
                                                   qreal brightness, qreal contrast, qreal hue, qreal saturation)
    {
        const int w = s.width(),
                  h = s.height();

        //let's cache some computation
        int Yvalue[256];
        for (int i = 0; i < 256; ++i) {
            Yvalue[i] = qRound(((i - 16) * contrast + brightness) * 298 + 128);
        }

        const int cosHx256 = qRound(qCos(hue) * contrast * saturation * 256),
                  sinHx256 = qRound(qSin(hue) * contrast * saturation * 256);

        if (destImage.size() != s) {
            //this will only allocate memory when needed
            destImage = QImage(w, h, QImage::Format_ARGB32_Premultiplied);
        }
        if (destImage.isNull()) {
            return; //the system can't allocate the memory for the image drawing
        }

        QRgb *dest = reinterpret_cast<QRgb*>(destImage.bits());

        //the number of iterations is width * height / 2 because we process 2 pixels at each iteration
        for (int c = w * h / 2; c > 0 ; --c) {

            //the idea of this algorithm comes from
            //http://msdn2.microsoft.com/en-us/library/ms867704.aspx#yuvformats_identifying_yuv_formats_in_directshow

            //we process 2 pixels at a time: their 4 bytes are laid out as "Y0 U Y1 V"
            const int y1 = Yvalue[*data++],
                      u = *data++ - 128,
                      y2 = Yvalue[*data++],
                      v = *data++ - 128;

            const int d = (u * cosHx256 + v * sinHx256) >> 8,
                      e = (v * cosHx256 + u * sinHx256) >> 8;

            const int compRed = 409 * e,
                      compGreen = -100 * d - 208 * e,
                      compBlue = 516 * d;

            //first pixel
            *dest++ = CLIPPED_PIXEL(y1, compRed, compGreen, compBlue);

            //second pixel
            *dest++ = CLIPPED_PIXEL(y2, compRed, compGreen, compBlue);
        }
    }


    VideoRendererSoft::VideoRendererSoft(QWidget *target) :
        m_renderer(new VideoRendererSoftFilter(this)), m_target(target)
    {
        m_filter = Filter(m_renderer);
    }

    VideoRendererSoft::~VideoRendererSoft()
    {
    }


    bool VideoRendererSoft::isNative() const
    {
        return false;
    }


    void VideoRendererSoft::repaintCurrentFrame(QWidget *target, const QRect &rect)
    {
        QPainter painter(target);

        QColor backColor = target->palette().color(target->backgroundRole());
        painter.setBrush(backColor);
        painter.setPen(Qt::NoPen);
        if (!m_videoRect.contains(rect)) {
            //we repaint the borders only when needed
            const QVector<QRect> reg = (QRegion(rect) - m_videoRect).rects();
            for (int i = 0; i < reg.count(); ++i) {
                painter.drawRect(reg.at(i));
            }
        }

        painter.setRenderHint(QPainter::SmoothPixmapTransform);
        painter.setTransform(m_transform, true);
        QSize vsize = videoSize();
        m_renderer->repaintCurrentFrame(painter, QRect(0, 0, vsize.width(), vsize.height()));
    }

    void VideoRendererSoft::notifyResize(const QSize &size,
                                         Phonon::VideoWidget::AspectRatio aspectRatio, Phonon::VideoWidget::ScaleMode scaleMode)
    {
        const QSize vsize = videoSize();
        internalNotifyResize(size, vsize, aspectRatio, scaleMode);

        m_transform.reset();

        if (vsize.isValid() && size.isValid()) {
            m_transform.translate(m_dstX, m_dstY);
            const qreal sx = qreal(m_dstWidth) / qreal(vsize.width()),
                        sy = qreal(m_dstHeight) / qreal(vsize.height());
            m_transform.scale(sx, sy);
            m_videoRect = m_transform.mapRect(QRect(0, 0, vsize.width(), vsize.height()));
        }
    }

    QSize VideoRendererSoft::videoSize() const
    {
        if (m_renderer->pins().first()->connected()) {
            return m_renderer->videoSize();
        } else {
            return m_renderer->currentImage().size();
        }
    }

    void VideoRendererSoft::applyMixerSettings(qreal brightness, qreal contrast, qreal hue, qreal saturation)
    {
        m_renderer->applyMixerSettings(brightness, contrast, hue, saturation);
    }

    QImage VideoRendererSoft::snapshot() const
    {
        return m_renderer->currentImage(); //not accurate (especially when using opengl...)
    }

    void VideoRendererSoft::setSnapshot(const QImage &image)
    {
        m_renderer->setCurrentImage(image);
    }

    bool VideoRendererSoft::event(QEvent *e)
    {
        if (e->type() == QEvent::UpdateRequest) {
            m_target->update(m_videoRect);
            return true;
        }
        return QObject::event(e);
    }


}
}

QT_END_NAMESPACE

#endif //QT_NO_PHONON_VIDEO