/* This file is part of the KDE project.

    Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).

    This library is free software: you can redistribute it and/or modify
    it under the terms of the GNU Lesser General Public License as published by
    the Free Software Foundation, either version 2.1 or 3 of the License.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU Lesser General Public License for more details.

    You should have received a copy of the GNU Lesser General Public License
    along with this library. If not, see <http://www.gnu.org/licenses/>.
*/

#include <QtGui/QPainter>
#include <QtGui/QResizeEvent>

#ifndef QT_NO_OPENGL

#include "common.h"
#include "message.h"
#include "mediaobject.h"
#include "qwidgetvideosink.h"
#include "glrenderer.h"
#include "qrgb.h"

#if !defined(QT_OPENGL_ES)

#include <gst/gst.h>

// support old OpenGL installations (1.2)
// assume that if TEXTURE0 isn't defined, none are
#ifndef GL_TEXTURE0
# define GL_TEXTURE0 0x84C0
# define GL_TEXTURE1 0x84C1
# define GL_TEXTURE2 0x84C2
#endif

QT_BEGIN_NAMESPACE

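// Prints a rough frames-per-second estimate to stdout roughly every two
// seconds when the PHONON_GST_FPS environment variable is set; called once
// from paintEvent() for every frame drawn.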
static void frameRendered()
{
    static QString displayFps = qgetenv("PHONON_GST_FPS");
    if (displayFps.isEmpty())
        return;

    static int frames = 0;
    static QTime lastTime = QTime::currentTime();
    QTime time = QTime::currentTime();

    int delta = lastTime.msecsTo(time);
    if (delta > 2000) {
        printf("FPS: %f\n", 1000.0 * frames / qreal(delta));
        lastTime = time;
        frames = 0;
    }

    ++frames;
}

namespace Phonon
{
namespace Gstreamer
{

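// Creates the QGLWidget-based render surface and a GStreamer video sink that
// forwards decoded frames to it (see eventFilter() below). If no YUV-capable
// sink can be created, m_videoSink stays 0, presumably so the backend can fall
// back to a different renderer.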
GLRenderer::GLRenderer(VideoWidget *videoWidget) :
        AbstractRenderer(videoWidget)
        , m_glWindow(0)
{
    videoWidget->backend()->logMessage("Creating OpenGL renderer");
    QGLFormat format = QGLFormat::defaultFormat();
    format.setSwapInterval(1); // Enable vertical sync on draw to avoid tearing
    m_glWindow = new GLRenderWidgetImplementation(videoWidget, format);

    if ((m_videoSink = m_glWindow->createVideoSink())) {
        gst_object_ref(GST_OBJECT(m_videoSink)); // Take ownership
        gst_object_sink(GST_OBJECT(m_videoSink));

        QWidgetVideoSinkBase *sink = reinterpret_cast<QWidgetVideoSinkBase *>(m_videoSink);
        // Let the video sink know which widget to direct frame updates to
        sink->renderWidget = videoWidget;
    }
}

GLRenderer::~GLRenderer()
{
    if (m_videoSink) {
        gst_object_unref(GST_OBJECT(m_videoSink));
        m_videoSink = 0;
    }
}

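// Frames are delivered from the video sink as QEvent::User (NewFrameEvent)
// events, presumably posted from the GStreamer streaming thread, so the
// texture upload / QImage construction in setNextFrame() happens on the GUI
// thread. Resize events keep the GL widget's geometry in sync with the video
// widget.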
bool GLRenderer::eventFilter(QEvent *event)
{
    if (event->type() == QEvent::User) {
        NewFrameEvent *frameEvent = static_cast<NewFrameEvent *>(event);
        m_glWindow->setNextFrame(frameEvent->frame, frameEvent->width, frameEvent->height);
        return true;
    } else if (event->type() == QEvent::Resize) {
        m_glWindow->setGeometry(m_videoWidget->geometry());
        return true;
    }
    return false;
}

void GLRenderer::handleMediaNodeEvent(const MediaNodeEvent *event)
{
    switch (event->type()) {
    case MediaNodeEvent::SourceChanged:
    {
        Q_ASSERT(m_glWindow);
        m_glWindow->clearFrame();
        break;
    }
    default:
        break;
    }
}

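// Instantiates the YUV variant of the QWidget video sink (a GObject type
// presumably registered by qwidgetvideosink) only when the fragment-program
// based YUV path is available; returns 0 otherwise so the caller can fall back
// to another renderer.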
GstElement *GLRenderWidgetImplementation::createVideoSink()
{
    if (hasYUVSupport())
        return GST_ELEMENT(g_object_new(get_type_YUV(), NULL));
    return 0;
}

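// Stores the most recent frame. In the YUV path the planes are uploaded to
// textures right away; in the RGB fallback the QImage constructed here does
// not copy the pixel data, so keeping m_array alive is what keeps the image
// valid until the next frame arrives.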
void GLRenderWidgetImplementation::setNextFrame(const QByteArray &array, int w, int h)
{
    if (m_videoWidget->root()->state() == Phonon::LoadingState)
        return;

    m_frame = QImage();

    if (hasYUVSupport())
        updateTexture(array, w, h);
    else
        m_frame = QImage((uchar *)array.constData(), w, h, QImage::Format_RGB32);

    m_array = array;
    m_width = w;
    m_height = h;

    update();
}

void GLRenderWidgetImplementation::clearFrame()
{
    m_frame = QImage();
    m_array = QByteArray();
    update();
}

bool GLRenderWidgetImplementation::hasYUVSupport() const
{
    return m_yuvSupport;
}

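// CPU fallback: converts a planar YUV 4:2:0 frame (full-size Y plane followed
// by two quarter-size U and V planes, i.e. an I420-style layout) to RGB32.
// The constants are the standard ITU-R BT.601 coefficients for limited-range
// YCbCr (Y offset 16, chroma offset 128), matching the GPU program below.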
static QImage convertFromYUV(const QByteArray &array, int w, int h)
{
    QImage result(w, h, QImage::Format_RGB32);

    // TODO: bilinearly interpolate the U and V channels for better result

    for (int y = 0; y < h; ++y) {
        uint *sp = (uint *)result.scanLine(y);

        const uchar *yp = (const uchar *)(array.constData() + y * w);
        const uchar *up = (const uchar *)(array.constData() + w * h + (y / 2) * (w / 2));
        const uchar *vp = (const uchar *)(array.constData() + w * h * 5 / 4 + (y / 2) * (w / 2));

        for (int x = 0; x < w; ++x) {
            const int sy = *yp;
            const int su = *up;
            const int sv = *vp;

            const int R = int(1.164 * (sy - 16) + 1.596 * (sv - 128));
            const int G = int(1.164 * (sy - 16) - 0.813 * (sv - 128) - 0.391 * (su - 128));
            const int B = int(1.164 * (sy - 16) + 2.018 * (su - 128));

            *sp = qRgb(qBound(0, R, 255),
                       qBound(0, G, 255),
                       qBound(0, B, 255));

            ++yp;
            ++sp;
            if (x & 1) {
                ++up;
                ++vp;
            }
        }
    }
    return result;
}

const QImage &GLRenderWidgetImplementation::currentFrame() const
{
    if (m_frame.isNull() && !m_array.isNull())
        m_frame = convertFromYUV(m_array, m_width, m_height);

    return m_frame;
}

#ifndef GL_FRAGMENT_PROGRAM_ARB
#define GL_FRAGMENT_PROGRAM_ARB 0x8804
#define GL_PROGRAM_FORMAT_ASCII_ARB 0x8875
#endif

// arbfp1 fragment program for converting yuv to rgb
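// The program samples the Y, U and V planes from texture units 0, 1 and 2,
// subtracts the limited-range offsets (0.0625 and 0.5, i.e. the 16 luma / 128
// chroma offsets scaled to [0, 1]), and applies the same BT.601 matrix as
// convertFromYUV() above via three dot products (c[1].xwyw selects
// { 1.164, 2.018, 0 } for the blue row).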
const char *const yuvToRgb =
    "!!ARBfp1.0"
    "PARAM c[3] = { { 0.5, 0.0625 },"
    "{ 1.164, 0, 1.596, 2.0179999 },"
    "{ 1.164, -0.391, -0.81300002 } };"
    "TEMP R0;"
    "TEMP R1;"
    "TEX R0.x, fragment.texcoord[0], texture[2], 2D;"
    "ADD R1.z, R0.x, -c[0].x;"
    "TEX R1.x, fragment.texcoord[0], texture[0], 2D;"
    "TEX R0.x, fragment.texcoord[0], texture[1], 2D;"
    "ADD R1.x, R1, -c[0].y;"
    "ADD R1.y, R0.x, -c[0].x;"
    "DP3 result.color.x, R1, c[1];"
    "DP3 result.color.y, R1, c[2];"
    "DP3 result.color.z, R1, c[1].xwyw;"
    "END";

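// Resolves the ARB_fragment_program and multitexture entry points at runtime
// via QGLContext::getProcAddress(). YUV support is enabled only if all entry
// points are present and the program above assembles without error; otherwise
// the widget falls back to the QPainter/convertFromYUV() path.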
GLRenderWidgetImplementation::GLRenderWidgetImplementation(VideoWidget *videoWidget, const QGLFormat &format) :
        QGLWidget(format, videoWidget)
        , m_program(0)
        , m_yuvSupport(false)
        , m_videoWidget(videoWidget)
{
    makeCurrent();
    glGenTextures(3, m_texture);

    glProgramStringARB = (_glProgramStringARB) context()->getProcAddress(QLatin1String("glProgramStringARB"));
    glBindProgramARB = (_glBindProgramARB) context()->getProcAddress(QLatin1String("glBindProgramARB"));
    glDeleteProgramsARB = (_glDeleteProgramsARB) context()->getProcAddress(QLatin1String("glDeleteProgramsARB"));
    glGenProgramsARB = (_glGenProgramsARB) context()->getProcAddress(QLatin1String("glGenProgramsARB"));
    glActiveTexture = (_glActiveTexture) context()->getProcAddress(QLatin1String("glActiveTexture"));

    m_hasPrograms = glProgramStringARB && glBindProgramARB && glDeleteProgramsARB && glGenProgramsARB && glActiveTexture;

    if (m_hasPrograms) {
        glGenProgramsARB(1, &m_program);
        glBindProgramARB(GL_FRAGMENT_PROGRAM_ARB, m_program);

        const GLbyte *gl_src = reinterpret_cast<const GLbyte *>(yuvToRgb);
        glProgramStringARB(GL_FRAGMENT_PROGRAM_ARB, GL_PROGRAM_FORMAT_ASCII_ARB,
                           int(strlen(yuvToRgb)), gl_src);

        if (glGetError() != GL_NO_ERROR) {
            glDeleteProgramsARB(1, &m_program);
            m_hasPrograms = false;
        } else {
            m_yuvSupport = true;
        }
    }

    QPalette palette;
    palette.setColor(QPalette::Background, Qt::black);
    setPalette(palette);
    setAutoFillBackground(true);
    // The video widget always has this property set to allow hiding the mouse cursor
    setMouseTracking(true);
}

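// Uploads the three planes of a YUV 4:2:0 frame into separate GL_LUMINANCE
// textures (full-resolution Y, half-resolution U and V). The plane offsets
// match the layout assumed by convertFromYUV() above.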
void GLRenderWidgetImplementation::updateTexture(const QByteArray &array, int width, int height)
{
    m_width = width;
    m_height = height;

    makeCurrent();

    int w[3] = { width, width / 2, width / 2 };
    int h[3] = { height, height / 2, height / 2 };
    int offs[3] = { 0, width * height, width * height * 5 / 4 };

    for (int i = 0; i < 3; ++i) {
        glBindTexture(GL_TEXTURE_2D, m_texture[i]);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, w[i], h[i], 0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, array.data() + offs[i]);

        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
    }
}

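// If the ARB fragment program is available, draws a textured quad and lets the
// GPU do the YUV-to-RGB conversion; otherwise falls back to converting on the
// CPU via currentFrame() and drawing the result with QPainter.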
void GLRenderWidgetImplementation::paintEvent(QPaintEvent *)
{
    QPainter painter(this);
    m_drawFrameRect = m_videoWidget->calculateDrawFrameRect();
    if (m_yuvSupport && frameIsSet()) {
        glEnable(GL_FRAGMENT_PROGRAM_ARB);
        glBindProgramARB(GL_FRAGMENT_PROGRAM_ARB, m_program);
        const float tx_array[] = { 0, 0, 1, 0, 1, 1, 0, 1 };
        const QRectF r = drawFrameRect();

        const float v_array[] = { r.left(), r.top(), r.right(), r.top(),
                                  r.right(), r.bottom(), r.left(), r.bottom() };

        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, m_texture[0]);
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, m_texture[1]);
        glActiveTexture(GL_TEXTURE2);
        glBindTexture(GL_TEXTURE_2D, m_texture[2]);
        glActiveTexture(GL_TEXTURE0);

        glVertexPointer(2, GL_FLOAT, 0, v_array);
        glTexCoordPointer(2, GL_FLOAT, 0, tx_array);
        glEnableClientState(GL_VERTEX_ARRAY);
        glEnableClientState(GL_TEXTURE_COORD_ARRAY);
        glDrawArrays(GL_QUADS, 0, 4);
        glDisableClientState(GL_TEXTURE_COORD_ARRAY);
        glDisableClientState(GL_VERTEX_ARRAY);

        glDisable(GL_FRAGMENT_PROGRAM_ARB);
    } else {
        painter.setRenderHint(QPainter::SmoothPixmapTransform);
        painter.drawImage(drawFrameRect(), currentFrame());
    }

    frameRendered();
}
}
} //namespace Phonon::Gstreamer

QT_END_NAMESPACE

#endif // QT_OPENGL_ES
#endif // QT_NO_OPENGL