|
1 /**************************************************************************** |
|
2 ** |
|
3 ** Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies). |
|
4 ** All rights reserved. |
|
5 ** Contact: Nokia Corporation (qt-info@nokia.com) |
|
6 ** |
|
7 ** This file is part of the Qt Mobility Components. |
|
8 ** |
|
9 ** $QT_BEGIN_LICENSE:LGPL$ |
|
10 ** No Commercial Usage |
|
11 ** This file contains pre-release code and may not be distributed. |
|
12 ** You may use this file in accordance with the terms and conditions |
|
13 ** contained in the Technology Preview License Agreement accompanying |
|
14 ** this package. |
|
15 ** |
|
16 ** GNU Lesser General Public License Usage |
|
17 ** Alternatively, this file may be used under the terms of the GNU Lesser |
|
18 ** General Public License version 2.1 as published by the Free Software |
|
19 ** Foundation and appearing in the file LICENSE.LGPL included in the |
|
20 ** packaging of this file. Please review the following information to |
|
21 ** ensure the GNU Lesser General Public License version 2.1 requirements |
|
22 ** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. |
|
23 ** |
|
24 ** In addition, as a special exception, Nokia gives you certain additional |
|
25 ** rights. These rights are described in the Nokia Qt LGPL Exception |
|
26 ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. |
|
27 ** |
|
28 ** If you have questions regarding the use of this file, please contact |
|
29 ** Nokia at qt-info@nokia.com. |
|
30 ** |
|
31 ** |
|
32 ** |
|
33 ** |
|
34 ** |
|
35 ** |
|
36 ** |
|
37 ** |
|
38 ** $QT_END_LICENSE$ |
|
39 ** |
|
40 ****************************************************************************/ |
|
41 |
|
42 #include <qabstractvideosurface.h> |
|
43 #include <qvideoframe.h> |
|
44 #include <QDebug> |
|
45 #include <QMap> |
|
46 #include <QDebug> |
|
47 #include <QThread> |
|
48 |
|
49 #include "qgstvideobuffer.h" |
|
50 |
|
51 #if defined(Q_WS_X11) && !defined(QT_NO_XVIDEO) |
|
52 #include <QtGui/qx11info_x11.h> |
|
53 #include "qgstxvimagebuffer.h" |
|
54 #endif |
|
55 |
|
56 #include "qvideosurfacegstsink.h" |
|
57 |
|
58 |
|
59 |
|
// Register QVideoSurfaceFormat with the Qt meta-type system so it can be
// stored in QVariant and passed through queued signal/slot connections.
Q_DECLARE_METATYPE(QVideoSurfaceFormat)
|
61 |
|
62 QVideoSurfaceGstDelegate::QVideoSurfaceGstDelegate(QAbstractVideoSurface *surface) |
|
63 : m_surface(surface) |
|
64 , m_renderReturn(GST_FLOW_ERROR) |
|
65 , m_bytesPerLine(0) |
|
66 { |
|
67 if (m_surface) { |
|
68 m_supportedPixelFormats = m_surface->supportedPixelFormats(); |
|
69 m_supportedXVideoPixelFormats = m_surface->supportedPixelFormats(QAbstractVideoBuffer::XvShmImageHandle); |
|
70 connect(m_surface, SIGNAL(supportedFormatsChanged()), this, SLOT(supportedFormatsChanged())); |
|
71 } |
|
72 } |
|
73 |
|
74 QList<QVideoFrame::PixelFormat> QVideoSurfaceGstDelegate::supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const |
|
75 { |
|
76 QMutexLocker locker(const_cast<QMutex *>(&m_mutex)); |
|
77 |
|
78 if (handleType == QAbstractVideoBuffer::NoHandle || !m_surface) |
|
79 return m_supportedPixelFormats; |
|
80 else if (handleType == QAbstractVideoBuffer::XvShmImageHandle) |
|
81 return m_supportedXVideoPixelFormats; |
|
82 else |
|
83 return m_surface->supportedPixelFormats(handleType); |
|
84 } |
|
85 |
|
86 QVideoSurfaceFormat QVideoSurfaceGstDelegate::surfaceFormat() const |
|
87 { |
|
88 QMutexLocker locker(const_cast<QMutex *>(&m_mutex)); |
|
89 return m_format; |
|
90 } |
|
91 |
|
92 bool QVideoSurfaceGstDelegate::start(const QVideoSurfaceFormat &format, int bytesPerLine) |
|
93 { |
|
94 if (!m_surface) |
|
95 return false; |
|
96 |
|
97 QMutexLocker locker(&m_mutex); |
|
98 |
|
99 m_format = format; |
|
100 m_bytesPerLine = bytesPerLine; |
|
101 |
|
102 if (QThread::currentThread() == thread()) { |
|
103 m_started = !m_surface.isNull() ? m_surface->start(m_format) : false; |
|
104 } else { |
|
105 QMetaObject::invokeMethod(this, "queuedStart", Qt::QueuedConnection); |
|
106 |
|
107 m_setupCondition.wait(&m_mutex); |
|
108 } |
|
109 |
|
110 m_format = m_surface->surfaceFormat(); |
|
111 |
|
112 return m_started; |
|
113 } |
|
114 |
|
115 void QVideoSurfaceGstDelegate::stop() |
|
116 { |
|
117 if (!m_surface) |
|
118 return; |
|
119 |
|
120 QMutexLocker locker(&m_mutex); |
|
121 |
|
122 if (QThread::currentThread() == thread()) { |
|
123 if (!m_surface.isNull()) |
|
124 m_surface->stop(); |
|
125 } else { |
|
126 QMetaObject::invokeMethod(this, "queuedStop", Qt::QueuedConnection); |
|
127 |
|
128 m_setupCondition.wait(&m_mutex); |
|
129 } |
|
130 |
|
131 m_started = false; |
|
132 } |
|
133 |
|
134 bool QVideoSurfaceGstDelegate::isActive() |
|
135 { |
|
136 QMutexLocker locker(&m_mutex); |
|
137 return m_surface->isActive(); |
|
138 } |
|
139 |
|
// Called on the GStreamer streaming thread for every buffer. Wraps the
// GstBuffer in a QVideoFrame, queues presentation on the surface's thread
// (queuedRender()), and blocks until it has been presented or a 300 ms
// timeout expires. On timeout the frame is dropped but GST_FLOW_OK is
// returned so the pipeline keeps running.
GstFlowReturn QVideoSurfaceGstDelegate::render(GstBuffer *buffer)
{
    if (!m_surface)
        return GST_FLOW_NOT_NEGOTIATED;

    QMutexLocker locker(&m_mutex);

    QGstVideoBuffer *videoBuffer = 0;

#if defined(Q_WS_X11) && !defined(QT_NO_XVIDEO)
    // Buffers allocated from the Xv pool carry an XvImage handle that the
    // surface can render directly.
    if (G_TYPE_CHECK_INSTANCE_TYPE(buffer, QGstXvImageBuffer::get_type())) {
        QGstXvImageBuffer *xvBuffer = reinterpret_cast<QGstXvImageBuffer *>(buffer);
        QVariant handle = QVariant::fromValue(xvBuffer->xvImage);
        videoBuffer = new QGstVideoBuffer(buffer, m_bytesPerLine, QAbstractVideoBuffer::XvShmImageHandle, handle);
    } else
#endif
    videoBuffer = new QGstVideoBuffer(buffer, m_bytesPerLine);

    // QVideoFrame takes ownership of videoBuffer.
    m_frame = QVideoFrame(
            videoBuffer,
            m_format.frameSize(),
            m_format.pixelFormat());

    // GStreamer timestamps are in nanoseconds; an unset timestamp
    // (GST_CLOCK_TIME_NONE, all bits set) becomes negative as qint64 and is
    // skipped by the >= 0 check below.
    qint64 startTime = GST_BUFFER_TIMESTAMP(buffer);

    if (startTime >= 0) {
        // Divide by 1e6: nanoseconds -> milliseconds.
        m_frame.setStartTime(startTime/G_GINT64_CONSTANT (1000000));

        qint64 duration = GST_BUFFER_DURATION(buffer);

        if (duration >= 0)
            m_frame.setEndTime((startTime + duration)/G_GINT64_CONSTANT (1000000));
    }

    QMetaObject::invokeMethod(this, "queuedRender", Qt::QueuedConnection);

    // Wait (up to 300 ms) for queuedRender() to present the frame and set
    // m_renderReturn; on timeout release the frame and report success.
    if (!m_renderCondition.wait(&m_mutex, 300)) {
        m_frame = QVideoFrame();

        return GST_FLOW_OK;
    } else {
        return m_renderReturn;
    }
}
|
184 |
|
185 void QVideoSurfaceGstDelegate::queuedStart() |
|
186 { |
|
187 QMutexLocker locker(&m_mutex); |
|
188 |
|
189 m_started = m_surface->start(m_format); |
|
190 |
|
191 m_setupCondition.wakeAll(); |
|
192 } |
|
193 |
|
194 void QVideoSurfaceGstDelegate::queuedStop() |
|
195 { |
|
196 QMutexLocker locker(&m_mutex); |
|
197 |
|
198 m_surface->stop(); |
|
199 |
|
200 m_setupCondition.wakeAll(); |
|
201 } |
|
202 |
|
203 void QVideoSurfaceGstDelegate::queuedRender() |
|
204 { |
|
205 QMutexLocker locker(&m_mutex); |
|
206 |
|
207 if (m_surface.isNull()) { |
|
208 m_renderReturn = GST_FLOW_ERROR; |
|
209 } else if (m_surface->present(m_frame)) { |
|
210 m_renderReturn = GST_FLOW_OK; |
|
211 } else { |
|
212 switch (m_surface->error()) { |
|
213 case QAbstractVideoSurface::NoError: |
|
214 m_renderReturn = GST_FLOW_OK; |
|
215 break; |
|
216 case QAbstractVideoSurface::StoppedError: |
|
217 m_renderReturn = GST_FLOW_NOT_NEGOTIATED; |
|
218 break; |
|
219 default: |
|
220 m_renderReturn = GST_FLOW_ERROR; |
|
221 break; |
|
222 } |
|
223 } |
|
224 |
|
225 m_renderCondition.wakeAll(); |
|
226 } |
|
227 |
|
228 void QVideoSurfaceGstDelegate::supportedFormatsChanged() |
|
229 { |
|
230 QMutexLocker locker(&m_mutex); |
|
231 |
|
232 m_supportedPixelFormats.clear(); |
|
233 if (m_surface) |
|
234 m_supportedPixelFormats = m_surface->supportedPixelFormats(); |
|
235 } |
|
236 |
|
// Associates a QVideoFrame YUV pixel format with its GStreamer FOURCC code
// and the per-pixel bit count used for line-stride computation in
// formatForCaps().
struct YuvFormat
{
    QVideoFrame::PixelFormat pixelFormat;
    guint32 fourcc;
    int bitsPerPixel;
};
|
243 |
|
// Lookup table of the YUV formats this sink can translate between Qt and
// GStreamer. Planar formats list 8 bits per pixel (luma plane only —
// presumably what the stride computation expects; verify against
// formatForCaps() callers), packed formats 16 or 32.
static const YuvFormat qt_yuvColorLookup[] =
{
    { QVideoFrame::Format_YUV420P, GST_MAKE_FOURCC('I','4','2','0'), 8 },
    { QVideoFrame::Format_YV12,    GST_MAKE_FOURCC('Y','V','1','2'), 8 },
    { QVideoFrame::Format_UYVY,    GST_MAKE_FOURCC('U','Y','V','Y'), 16 },
    { QVideoFrame::Format_YUYV,    GST_MAKE_FOURCC('Y','U','Y','2'), 16 },
    { QVideoFrame::Format_NV12,    GST_MAKE_FOURCC('N','V','1','2'), 8 },
    { QVideoFrame::Format_NV21,    GST_MAKE_FOURCC('N','V','2','1'), 8 },
    { QVideoFrame::Format_AYUV444, GST_MAKE_FOURCC('A','Y','U','V'), 32 }
};
|
254 |
|
255 static int indexOfYuvColor(QVideoFrame::PixelFormat format) |
|
256 { |
|
257 const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat); |
|
258 |
|
259 for (int i = 0; i < count; ++i) |
|
260 if (qt_yuvColorLookup[i].pixelFormat == format) |
|
261 return i; |
|
262 |
|
263 return -1; |
|
264 } |
|
265 |
|
266 static int indexOfYuvColor(guint32 fourcc) |
|
267 { |
|
268 const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat); |
|
269 |
|
270 for (int i = 0; i < count; ++i) |
|
271 if (qt_yuvColorLookup[i].fourcc == fourcc) |
|
272 return i; |
|
273 |
|
274 return -1; |
|
275 } |
|
276 |
|
// Describes an RGB layout as GStreamer's video/x-raw-rgb caps express it:
// bits per pixel, color depth, byte order (1234 = little endian,
// 4321 = big endian) and the per-channel bit masks. An alpha mask of 0
// means the format has no alpha channel.
struct RgbFormat
{
    QVideoFrame::PixelFormat pixelFormat;
    int bitsPerPixel;
    int depth;
    int endianness;
    int red;
    int green;
    int blue;
    int alpha;
};
|
288 |
|
// Lookup table mapping QVideoFrame RGB formats to their GStreamer caps
// field values. Formats whose byte order matters appear twice, once with
// big-endian (4321) masks and once with little-endian (1234) masks, so
// get_caps() advertises both and indexOfRgbColor() matches either.
static const RgbFormat qt_rgbColorLookup[] =
{
    { QVideoFrame::Format_RGB32 , 32, 24, 4321, 0x0000FF00, 0x00FF0000, 0xFF000000, 0x00000000 },
    { QVideoFrame::Format_RGB32 , 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
    { QVideoFrame::Format_BGR32 , 32, 24, 4321, 0xFF000000, 0x00FF0000, 0x0000FF00, 0x00000000 },
    { QVideoFrame::Format_BGR32 , 32, 24, 1234, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
    { QVideoFrame::Format_ARGB32, 32, 24, 4321, 0x0000FF00, 0x00FF0000, 0xFF000000, 0x000000FF },
    { QVideoFrame::Format_ARGB32, 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, 0xFF000000 },
    { QVideoFrame::Format_RGB24 , 24, 24, 4321, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
    { QVideoFrame::Format_BGR24 , 24, 24, 4321, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
    { QVideoFrame::Format_RGB565, 16, 16, 1234, 0x0000F800, 0x000007E0, 0x0000001F, 0x00000000 }
};
|
301 |
|
302 static int indexOfRgbColor( |
|
303 int bits, int depth, int endianness, int red, int green, int blue, int alpha) |
|
304 { |
|
305 const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat); |
|
306 |
|
307 for (int i = 0; i < count; ++i) { |
|
308 if (qt_rgbColorLookup[i].bitsPerPixel == bits |
|
309 && qt_rgbColorLookup[i].depth == depth |
|
310 && qt_rgbColorLookup[i].endianness == endianness |
|
311 && qt_rgbColorLookup[i].red == red |
|
312 && qt_rgbColorLookup[i].green == green |
|
313 && qt_rgbColorLookup[i].blue == blue |
|
314 && qt_rgbColorLookup[i].alpha == alpha) { |
|
315 return i; |
|
316 } |
|
317 } |
|
318 return -1; |
|
319 } |
|
320 |
|
// Parent class vtable, captured in class_init() so overridden virtuals
// (e.g. change_state()) can chain up.
static GstVideoSinkClass *sink_parent_class;

// Convenience macro: declares a local `sink` pointing at the instance `s`,
// cast to QVideoSurfaceGstSink.
#define VO_SINK(s) QVideoSurfaceGstSink *sink(reinterpret_cast<QVideoSurfaceGstSink *>(s))
|
324 |
|
325 QVideoSurfaceGstSink *QVideoSurfaceGstSink::createSink(QAbstractVideoSurface *surface) |
|
326 { |
|
327 QVideoSurfaceGstSink *sink = reinterpret_cast<QVideoSurfaceGstSink *>( |
|
328 g_object_new(QVideoSurfaceGstSink::get_type(), 0)); |
|
329 |
|
330 sink->delegate = new QVideoSurfaceGstDelegate(surface); |
|
331 |
|
332 return sink; |
|
333 } |
|
334 |
|
// Registers (once) and returns the GType for this sink, derived from
// GST_TYPE_VIDEO_SINK.
// NOTE(review): the lazy registration is not guarded against concurrent
// first calls — presumably first use happens on a single thread; confirm.
GType QVideoSurfaceGstSink::get_type()
{
    static GType type = 0;

    if (type == 0) {
        static const GTypeInfo info =
        {
            sizeof(QVideoSurfaceGstSinkClass),                 // class_size
            base_init,                                         // base_init
            NULL,                                              // base_finalize
            class_init,                                        // class_init
            NULL,                                              // class_finalize
            NULL,                                              // class_data
            sizeof(QVideoSurfaceGstSink),                      // instance_size
            0,                                                 // n_preallocs
            instance_init,                                     // instance_init
            0                                                  // value_table
        };

        type = g_type_register_static(
                GST_TYPE_VIDEO_SINK, "QVideoSurfaceGstSink", &info, GTypeFlags(0));
    }

    return type;
}
|
360 |
|
// GObject class initializer: installs this sink's overrides on the
// GstBaseSink, GstElement and GObject class vtables.
void QVideoSurfaceGstSink::class_init(gpointer g_class, gpointer class_data)
{
    Q_UNUSED(class_data);

    // Keep the parent vtable for chaining up (used by change_state()).
    sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));

    GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
    base_sink_class->get_caps = QVideoSurfaceGstSink::get_caps;
    base_sink_class->set_caps = QVideoSurfaceGstSink::set_caps;
    base_sink_class->buffer_alloc = QVideoSurfaceGstSink::buffer_alloc;
    base_sink_class->start = QVideoSurfaceGstSink::start;
    base_sink_class->stop = QVideoSurfaceGstSink::stop;
    // base_sink_class->unlock = QVideoSurfaceGstSink::unlock; // Not implemented.
    // base_sink_class->event = QVideoSurfaceGstSink::event; // Not implemented.
    base_sink_class->preroll = QVideoSurfaceGstSink::preroll;
    base_sink_class->render = QVideoSurfaceGstSink::render;

    GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
    element_class->change_state = QVideoSurfaceGstSink::change_state;

    GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
    object_class->finalize = QVideoSurfaceGstSink::finalize;
}
|
384 |
|
// Registers the static sink pad template: raw RGB and raw YUV video of any
// size and frame rate. get_caps() later narrows this to the formats the
// attached surface actually supports.
void QVideoSurfaceGstSink::base_init(gpointer g_class)
{
    static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
            "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(
                    "video/x-raw-rgb, "
                    "framerate = (fraction) [ 0, MAX ], "
                    "width = (int) [ 1, MAX ], "
                    "height = (int) [ 1, MAX ]; "
                    "video/x-raw-yuv, "
                    "framerate = (fraction) [ 0, MAX ], "
                    "width = (int) [ 1, MAX ], "
                    "height = (int) [ 1, MAX ]"));

    gst_element_class_add_pad_template(
            GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
}
|
401 |
|
// Per-instance initializer: zeroes the caps caches and allocates the Xv
// buffer pool (X11 builds only). The delegate is attached afterwards by
// createSink().
void QVideoSurfaceGstSink::instance_init(GTypeInstance *instance, gpointer g_class)
{
    VO_SINK(instance);

    Q_UNUSED(g_class);

    sink->delegate = 0;
#if defined(Q_WS_X11) && !defined(QT_NO_XVIDEO)
    sink->pool = new QGstXvImageBufferPool();
#endif
    sink->lastRequestedCaps = 0;
    sink->lastBufferCaps = 0;
    sink->lastSurfaceFormat = new QVideoSurfaceFormat;
}
|
416 |
|
417 void QVideoSurfaceGstSink::finalize(GObject *object) |
|
418 { |
|
419 VO_SINK(object); |
|
420 #if defined(Q_WS_X11) && !defined(QT_NO_XVIDEO) |
|
421 delete sink->pool; |
|
422 sink->pool = 0; |
|
423 #endif |
|
424 |
|
425 delete sink->lastSurfaceFormat; |
|
426 sink->lastSurfaceFormat = 0; |
|
427 |
|
428 if (sink->lastBufferCaps) |
|
429 gst_caps_unref(sink->lastBufferCaps); |
|
430 sink->lastBufferCaps = 0; |
|
431 |
|
432 if (sink->lastRequestedCaps) |
|
433 gst_caps_unref(sink->lastRequestedCaps); |
|
434 sink->lastRequestedCaps = 0; |
|
435 } |
|
436 |
|
437 GstStateChangeReturn QVideoSurfaceGstSink::change_state( |
|
438 GstElement *element, GstStateChange transition) |
|
439 { |
|
440 Q_UNUSED(element); |
|
441 |
|
442 return GST_ELEMENT_CLASS(sink_parent_class)->change_state( |
|
443 element, transition); |
|
444 } |
|
445 |
|
// Builds the caps this sink can currently accept by translating each pixel
// format the surface supports into a GStreamer caps structure: YUV formats
// become video/x-raw-yuv entries keyed by FOURCC, RGB formats become
// video/x-raw-rgb entries with bpp/depth/endianness/masks. The caller owns
// the returned caps.
GstCaps *QVideoSurfaceGstSink::get_caps(GstBaseSink *base)
{
    VO_SINK(base);

    GstCaps *caps = gst_caps_new_empty();

    foreach (QVideoFrame::PixelFormat format, sink->delegate->supportedPixelFormats()) {
        int index = indexOfYuvColor(format);

        if (index != -1) {
            gst_caps_append_structure(caps, gst_structure_new(
                    "video/x-raw-yuv",
                    "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, INT_MAX, 1,
                    "width"    , GST_TYPE_INT_RANGE, 1, INT_MAX,
                    "height"   , GST_TYPE_INT_RANGE, 1, INT_MAX,
                    "format"   , GST_TYPE_FOURCC, qt_yuvColorLookup[index].fourcc,
                    NULL));
            continue;
        }

        const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);

        // Deliberately no break: formats listed twice in the table (both
        // endiannesses) produce one caps structure per entry.
        for (int i = 0; i < count; ++i) {
            if (qt_rgbColorLookup[i].pixelFormat == format) {
                GstStructure *structure = gst_structure_new(
                        "video/x-raw-rgb",
                        "framerate" , GST_TYPE_FRACTION_RANGE, 0, 1, INT_MAX, 1,
                        "width"     , GST_TYPE_INT_RANGE, 1, INT_MAX,
                        "height"    , GST_TYPE_INT_RANGE, 1, INT_MAX,
                        "bpp"       , G_TYPE_INT, qt_rgbColorLookup[i].bitsPerPixel,
                        "depth"     , G_TYPE_INT, qt_rgbColorLookup[i].depth,
                        "endianness", G_TYPE_INT, qt_rgbColorLookup[i].endianness,
                        "red_mask"  , G_TYPE_INT, qt_rgbColorLookup[i].red,
                        "green_mask", G_TYPE_INT, qt_rgbColorLookup[i].green,
                        "blue_mask" , G_TYPE_INT, qt_rgbColorLookup[i].blue,
                        NULL);

                // Only advertise an alpha mask for formats that have one.
                if (qt_rgbColorLookup[i].alpha != 0) {
                    gst_structure_set(
                            structure, "alpha_mask", G_TYPE_INT, qt_rgbColorLookup[i].alpha, NULL);
                }
                gst_caps_append_structure(caps, structure);
            }
        }
    }

    return caps;
}
|
494 |
|
495 gboolean QVideoSurfaceGstSink::set_caps(GstBaseSink *base, GstCaps *caps) |
|
496 { |
|
497 VO_SINK(base); |
|
498 |
|
499 //qDebug() << "set_caps"; |
|
500 //qDebug() << gst_caps_to_string(caps); |
|
501 |
|
502 if (!caps) { |
|
503 sink->delegate->stop(); |
|
504 |
|
505 return TRUE; |
|
506 } else { |
|
507 int bytesPerLine = 0; |
|
508 QVideoSurfaceFormat format = formatForCaps(caps, &bytesPerLine); |
|
509 |
|
510 if (sink->delegate->isActive()) { |
|
511 QVideoSurfaceFormat surfaceFormst = sink->delegate->surfaceFormat(); |
|
512 |
|
513 if (format.pixelFormat() == surfaceFormst.pixelFormat() && |
|
514 format.frameSize() == surfaceFormst.frameSize()) |
|
515 return TRUE; |
|
516 else |
|
517 sink->delegate->stop(); |
|
518 } |
|
519 |
|
520 if (sink->lastRequestedCaps) |
|
521 gst_caps_unref(sink->lastRequestedCaps); |
|
522 sink->lastRequestedCaps = 0; |
|
523 |
|
524 //qDebug() << "Staring video surface:"; |
|
525 //qDebug() << format; |
|
526 //qDebug() << bytesPerLine; |
|
527 |
|
528 if (sink->delegate->start(format, bytesPerLine)) |
|
529 return TRUE; |
|
530 |
|
531 } |
|
532 |
|
533 return FALSE; |
|
534 } |
|
535 |
|
// Translates the first structure of \a caps into a QVideoSurfaceFormat.
// Recognizes video/x-raw-yuv (via FOURCC lookup) and video/x-raw-rgb (via
// bpp/depth/endianness/mask lookup). If \a bytesPerLine is non-null it
// receives the line stride: width * bitsPerPixel / 8, rounded up to a
// multiple of 4. Returns a default-constructed (invalid) format when the
// caps describe no format this sink knows.
QVideoSurfaceFormat QVideoSurfaceGstSink::formatForCaps(GstCaps *caps, int *bytesPerLine)
{
    const GstStructure *structure = gst_caps_get_structure(caps, 0);

    QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid;
    int bitsPerPixel = 0;

    QSize size;
    gst_structure_get_int(structure, "width", &size.rwidth());
    gst_structure_get_int(structure, "height", &size.rheight());

    if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
        guint32 fourcc = 0;
        gst_structure_get_fourcc(structure, "format", &fourcc);

        int index = indexOfYuvColor(fourcc);
        if (index != -1) {
            pixelFormat = qt_yuvColorLookup[index].pixelFormat;
            bitsPerPixel = qt_yuvColorLookup[index].bitsPerPixel;
        }
    } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
        int depth = 0;
        int endianness = 0;
        int red = 0;
        int green = 0;
        int blue = 0;
        int alpha = 0;

        gst_structure_get_int(structure, "bpp", &bitsPerPixel);
        gst_structure_get_int(structure, "depth", &depth);
        gst_structure_get_int(structure, "endianness", &endianness);
        gst_structure_get_int(structure, "red_mask", &red);
        gst_structure_get_int(structure, "green_mask", &green);
        gst_structure_get_int(structure, "blue_mask", &blue);
        gst_structure_get_int(structure, "alpha_mask", &alpha);

        // All fields must match a table entry exactly (alpha stays 0 when
        // the caps carry no alpha_mask, matching the alpha-less entries).
        int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha);

        if (index != -1)
            pixelFormat = qt_rgbColorLookup[index].pixelFormat;
    }

    if (pixelFormat != QVideoFrame::Format_Invalid) {
        QVideoSurfaceFormat format(size, pixelFormat);

        QPair<int, int> rate;
        gst_structure_get_fraction(structure, "framerate", &rate.first, &rate.second);

        if (rate.second)
            format.setFrameRate(qreal(rate.first)/rate.second);

        gint aspectNum = 0;
        gint aspectDenum = 0;
        if (gst_structure_get_fraction(
                structure, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
            if (aspectDenum > 0)
                format.setPixelAspectRatio(aspectNum, aspectDenum);
        }

        // Round the stride up to a 4-byte boundary.
        if (bytesPerLine)
            *bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3;

        return format;
    }

    return QVideoSurfaceFormat();
}
|
603 |
|
604 |
|
// Upstream buffer allocation hook. On X11 builds with Xv support it hands
// out buffers from the XvImage pool so frames can be rendered zero-copy;
// in every other case (or on any soft failure) it leaves *buffer null and
// returns GST_FLOW_OK, letting upstream allocate a plain buffer itself.
GstFlowReturn QVideoSurfaceGstSink::buffer_alloc(
        GstBaseSink *base, guint64 offset, guint size, GstCaps *caps, GstBuffer **buffer)
{
    VO_SINK(base);

    Q_UNUSED(offset);
    Q_UNUSED(size);

    *buffer = 0;

#if defined(Q_WS_X11) && !defined(QT_NO_XVIDEO)

    // Fast path: same caps as the previous request, reuse the cached
    // negotiated caps/format and serve straight from the pool.
    if (sink->lastRequestedCaps && gst_caps_is_equal(sink->lastRequestedCaps, caps)) {
        //qDebug() << "reusing last caps";
        *buffer = GST_BUFFER(sink->pool->takeBuffer(*sink->lastSurfaceFormat, sink->lastBufferCaps));
        return GST_FLOW_OK;
    }

    if (sink->delegate->supportedPixelFormats(QAbstractVideoBuffer::XvShmImageHandle).isEmpty()) {
        //qDebug() << "sink doesn't support Xv buffers, skip buffers allocation";
        return GST_FLOW_OK;
    }

    // NOTE(review): get_caps() returns a new caps object that is never
    // unreffed here, and `intersection` looks over-referenced on the
    // success path (the intersect ref plus an explicit ref) — possible
    // caps leaks; confirm against GStreamer 0.10 ownership rules.
    GstCaps *intersection = gst_caps_intersect(get_caps(GST_BASE_SINK(sink)), caps);

    if (gst_caps_is_empty (intersection)) {
        gst_caps_unref(intersection);
        return GST_FLOW_NOT_NEGOTIATED;
    }

    if (sink->delegate->isActive()) {
        //if format was changed, restart the surface
        QVideoSurfaceFormat format = formatForCaps(intersection);
        QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat();

        if (format.pixelFormat() != surfaceFormat.pixelFormat() ||
            format.frameSize() != surfaceFormat.frameSize()) {
            //qDebug() << "new format requested, restart video surface";
            sink->delegate->stop();
        }
    }

    if (!sink->delegate->isActive()) {
        int bytesPerLine = 0;
        QVideoSurfaceFormat format = formatForCaps(intersection, &bytesPerLine);

        if (!sink->delegate->start(format, bytesPerLine)) {
            //qDebug() << "failed to start video surface";
            return GST_FLOW_NOT_NEGOTIATED;
        }
    }

    QVideoSurfaceFormat surfaceFormat = sink->delegate->surfaceFormat();

    if (!sink->pool->isFormatSupported(surfaceFormat)) {
        //qDebug() << "sink doesn't provide Xv buffer details, skip buffers allocation";
        return GST_FLOW_OK;
    }

    // Cache the request so the fast path above can short-circuit next time.
    if (sink->lastRequestedCaps)
        gst_caps_unref(sink->lastRequestedCaps);
    sink->lastRequestedCaps = caps;
    gst_caps_ref(sink->lastRequestedCaps);

    if (sink->lastBufferCaps)
        gst_caps_unref(sink->lastBufferCaps);
    sink->lastBufferCaps = intersection;
    gst_caps_ref(sink->lastBufferCaps);

    *sink->lastSurfaceFormat = surfaceFormat;

    *buffer = GST_BUFFER(sink->pool->takeBuffer(surfaceFormat, intersection));

#endif
    return GST_FLOW_OK;
}
|
681 |
|
682 gboolean QVideoSurfaceGstSink::start(GstBaseSink *base) |
|
683 { |
|
684 Q_UNUSED(base); |
|
685 |
|
686 return TRUE; |
|
687 } |
|
688 |
|
689 gboolean QVideoSurfaceGstSink::stop(GstBaseSink *base) |
|
690 { |
|
691 Q_UNUSED(base); |
|
692 |
|
693 return TRUE; |
|
694 } |
|
695 |
|
696 gboolean QVideoSurfaceGstSink::unlock(GstBaseSink *base) |
|
697 { |
|
698 Q_UNUSED(base); |
|
699 |
|
700 return TRUE; |
|
701 } |
|
702 |
|
703 gboolean QVideoSurfaceGstSink::event(GstBaseSink *base, GstEvent *event) |
|
704 { |
|
705 Q_UNUSED(base); |
|
706 Q_UNUSED(event); |
|
707 |
|
708 return TRUE; |
|
709 } |
|
710 |
|
711 GstFlowReturn QVideoSurfaceGstSink::preroll(GstBaseSink *base, GstBuffer *buffer) |
|
712 { |
|
713 VO_SINK(base); |
|
714 |
|
715 return sink->delegate->render(buffer); |
|
716 } |
|
717 |
|
718 GstFlowReturn QVideoSurfaceGstSink::render(GstBaseSink *base, GstBuffer *buffer) |
|
719 { |
|
720 VO_SINK(base); |
|
721 return sink->delegate->render(buffer); |
|
722 } |
|
723 |