/****************************************************************************
**
** Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the QtMultimedia module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** No Commercial Usage
** This file contains pre-release code and may not be distributed.
** You may use this file in accordance with the terms and conditions
** contained in the Technology Preview License Agreement accompanying
** this package.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** If you have questions regarding the use of this file, please contact
** Nokia at qt-info@nokia.com.
**
**
**
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include "qvideosurfaceformat.h"

#include <qdebug.h>
#include <qmetatype.h>
#include <qpair.h>
#include <qvariant.h>
#include <qvector.h>

QT_BEGIN_NAMESPACE

class QVideoSurfaceFormatPrivate : public QSharedData
{
public:
    QVideoSurfaceFormatPrivate()
        : pixelFormat(QVideoFrame::Format_Invalid)
        , handleType(QAbstractVideoBuffer::NoHandle)
        , scanLineDirection(QVideoSurfaceFormat::TopToBottom)
        , pixelAspectRatio(1, 1)
        , yuvColorSpace(QVideoSurfaceFormat::YCbCr_Undefined)
        , frameRate(0.0)
    {
    }

    QVideoSurfaceFormatPrivate(
            const QSize &size,
            QVideoFrame::PixelFormat format,
            QAbstractVideoBuffer::HandleType type)
        : pixelFormat(format)
        , handleType(type)
        , scanLineDirection(QVideoSurfaceFormat::TopToBottom)
        , frameSize(size)
        , pixelAspectRatio(1, 1)
        , yuvColorSpace(QVideoSurfaceFormat::YCbCr_Undefined)
        , viewport(QPoint(0, 0), size)
        , frameRate(0.0)
    {
    }

    QVideoSurfaceFormatPrivate(const QVideoSurfaceFormatPrivate &other)
        : QSharedData(other)
        , pixelFormat(other.pixelFormat)
        , handleType(other.handleType)
        , scanLineDirection(other.scanLineDirection)
        , frameSize(other.frameSize)
        , pixelAspectRatio(other.pixelAspectRatio)
        , yuvColorSpace(other.yuvColorSpace)
        , viewport(other.viewport)
        , frameRate(other.frameRate)
        , propertyNames(other.propertyNames)
        , propertyValues(other.propertyValues)
    {
    }

    bool operator ==(const QVideoSurfaceFormatPrivate &other) const
    {
        if (pixelFormat == other.pixelFormat
            && handleType == other.handleType
            && scanLineDirection == other.scanLineDirection
            && frameSize == other.frameSize
            && pixelAspectRatio == other.pixelAspectRatio
            && viewport == other.viewport
            && frameRatesEqual(frameRate, other.frameRate)
            && yuvColorSpace == other.yuvColorSpace
            && propertyNames.count() == other.propertyNames.count()) {
            for (int i = 0; i < propertyNames.count(); ++i) {
                int j = other.propertyNames.indexOf(propertyNames.at(i));

                if (j == -1 || propertyValues.at(i) != other.propertyValues.at(j))
                    return false;
            }
            return true;
        } else {
            return false;
        }
    }

    inline static bool frameRatesEqual(qreal r1, qreal r2)
    {
        return qAbs(r1 - r2) <= 0.00001 * qMin(qAbs(r1), qAbs(r2));
    }

    QVideoFrame::PixelFormat pixelFormat;
    QAbstractVideoBuffer::HandleType handleType;
    QVideoSurfaceFormat::Direction scanLineDirection;
    QSize frameSize;
    QSize pixelAspectRatio;
    QVideoSurfaceFormat::YuvColorSpace yuvColorSpace;
    QRect viewport;
    qreal frameRate;
    QList<QByteArray> propertyNames;
    QList<QVariant> propertyValues;
};

/*!
    \class QVideoSurfaceFormat
    \brief The QVideoSurfaceFormat class specifies the stream format of a video presentation
    surface.
    \preliminary
    \since 4.6

    A video surface presents a stream of video frames. The surface's format describes the type
    of the frames and determines how they should be presented.

    The core properties of a video stream required to set up a video surface are the pixel
    format given by pixelFormat(), and the frame dimensions given by frameSize().

    If the surface is to present frames using a frame's handle, a surface format will also
    include a handle type, which is given by the handleType() function.

    The region of a frame that is actually displayed on a video surface is given by the
    viewport(). A stream may have a viewport that covers less than the entire area of a frame,
    to allow for frames whose allocated size is larger than the visible video. For example, the
    width of a frame may be extended so that the start of each scan line is eight-byte aligned.

    Other common properties are the pixelAspectRatio(), scanLineDirection(), and frameRate().
    A stream may also have some type-specific properties, which are listed by the
    propertyNames() function and can be accessed using the property() and setProperty()
    functions.
*/
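
/*
    A minimal usage sketch, assuming a QAbstractVideoSurface implementation is
    available through the (hypothetical) pointer "surface": construct a format
    describing 640x480 32-bit RGB frames, adjust its optional properties, and
    use it to start the surface.

    QVideoSurfaceFormat format(QSize(640, 480), QVideoFrame::Format_RGB32);
    format.setScanLineDirection(QVideoSurfaceFormat::TopToBottom);
    format.setFrameRate(30.0);
    format.setPixelAspectRatio(1, 1);

    if (surface->isFormatSupported(format))
        surface->start(format);
*/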

/*!
    \enum QVideoSurfaceFormat::Direction

    Enumerates the layout direction of video scan lines.

    \value TopToBottom Scan lines are arranged from the top of the frame to the bottom.
    \value BottomToTop Scan lines are arranged from the bottom of the frame to the top.
*/

/*!
    \enum QVideoSurfaceFormat::ViewportMode

    Enumerates the methods for updating the stream viewport when the frame size is changed.

    \value ResetViewport The viewport is reset to cover an entire frame.
    \value KeepViewport The viewport is kept within the bounds of the frame.
*/

/*!
    \enum QVideoSurfaceFormat::YuvColorSpace

    Enumerates the YUV color space of video frames.

    \value YCbCr_Undefined
    No color space is specified.

    \value YCbCr_BT601
    A Y'CbCr color space defined by ITU-R recommendation BT.601
    with Y value range from 16 to 235, and Cb/Cr range from 16 to 240.
    Used in standard definition video.

    \value YCbCr_BT709
    A Y'CbCr color space defined by ITU-R BT.709 with the same value range as YCbCr_BT601.
    Used for HDTV.

    \value YCbCr_xvYCC601
    The BT.601 color space with the value range extended to 0 to 255.
    It is backward compatible with BT.601 and uses values outside the BT.601 range to
    represent a wider range of colors.

    \value YCbCr_xvYCC709
    The BT.709 color space with the value range extended to 0 to 255.

    \value YCbCr_JPEG
    The full range Y'CbCr color space used in JPEG files.
*/

/*!
    Constructs a null video stream format.
*/

QVideoSurfaceFormat::QVideoSurfaceFormat()
    : d(new QVideoSurfaceFormatPrivate)
{
}

/*!
    Constructs a video stream format with the given frame \a size, pixel \a format, and buffer
    handle \a type.
*/

QVideoSurfaceFormat::QVideoSurfaceFormat(
        const QSize& size, QVideoFrame::PixelFormat format, QAbstractVideoBuffer::HandleType type)
    : d(new QVideoSurfaceFormatPrivate(size, format, type))
{
}

/*!
    Constructs a copy of \a other.
*/

QVideoSurfaceFormat::QVideoSurfaceFormat(const QVideoSurfaceFormat &other)
    : d(other.d)
{
}

/*!
    Assigns the values of \a other to a video stream description.
*/

QVideoSurfaceFormat &QVideoSurfaceFormat::operator =(const QVideoSurfaceFormat &other)
{
    d = other.d;

    return *this;
}

/*!
    Destroys a video stream description.
*/

QVideoSurfaceFormat::~QVideoSurfaceFormat()
{
}

/*!
    Identifies if a video surface format has a valid pixel format and frame size.

    Returns true if the format is valid, and false otherwise.
*/

bool QVideoSurfaceFormat::isValid() const
{
    return d->pixelFormat != QVideoFrame::Format_Invalid && d->frameSize.isValid();
}

/*!
    Returns true if \a other is the same as this video format, and false if they are different.
*/

bool QVideoSurfaceFormat::operator ==(const QVideoSurfaceFormat &other) const
{
    return d == other.d || *d == *other.d;
}

/*!
    Returns true if \a other is different from this video format, and false if they are the same.
*/

bool QVideoSurfaceFormat::operator !=(const QVideoSurfaceFormat &other) const
{
    return d != other.d && !(*d == *other.d);
}

/*!
    Returns the pixel format of frames in a video stream.
*/

QVideoFrame::PixelFormat QVideoSurfaceFormat::pixelFormat() const
{
    return d->pixelFormat;
}

/*!
    Returns the type of handle the surface uses to present the frame data.

    If the handle type is QAbstractVideoBuffer::NoHandle, buffers with any handle type are valid
    provided they can be \l {QAbstractVideoBuffer::map()}{mapped} with the
    QAbstractVideoBuffer::ReadOnly flag. If the handleType() is not QAbstractVideoBuffer::NoHandle
    then the handle type of the buffer must be the same as that of the surface format.
*/

QAbstractVideoBuffer::HandleType QVideoSurfaceFormat::handleType() const
{
    return d->handleType;
}
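
/*
    A minimal sketch of the compatibility rule described above, as it might be
    applied in a hypothetical QAbstractVideoSurface subclass (MySurface and
    m_format are assumed names; m_format holds the format the surface was
    started with).

    bool MySurface::present(const QVideoFrame &frame)
    {
        if (m_format.handleType() != QAbstractVideoBuffer::NoHandle
                && frame.handleType() != m_format.handleType()) {
            setError(IncorrectFormatError);
            return false;
        }
        // ... map the frame and paint it ...
        return true;
    }
*/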

/*!
    Returns the size of frames in a video stream.

    \sa frameWidth(), frameHeight()
*/

QSize QVideoSurfaceFormat::frameSize() const
{
    return d->frameSize;
}

/*!
    Returns the width of frames in a video stream.

    \sa frameSize(), frameHeight()
*/

int QVideoSurfaceFormat::frameWidth() const
{
    return d->frameSize.width();
}

/*!
    Returns the height of frames in a video stream.

    \sa frameSize(), frameWidth()
*/

int QVideoSurfaceFormat::frameHeight() const
{
    return d->frameSize.height();
}

/*!
    Sets the size of frames in a video stream to \a size.

    The viewport \a mode indicates how the viewport should be updated.
*/

void QVideoSurfaceFormat::setFrameSize(const QSize &size, ViewportMode mode)
{
    d->frameSize = size;

    switch (mode) {
    case ResetViewport:
        d->viewport = QRect(QPoint(0, 0), size);
        break;
    case KeepViewport:
        d->viewport = QRect(QPoint(0, 0), size).intersected(d->viewport);
        break;
    }
}
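
/*
    A short sketch of how the two ViewportMode values behave, assuming a format
    that starts with 640x480 frames and the default full-frame viewport.

    QVideoSurfaceFormat format(QSize(640, 480), QVideoFrame::Format_RGB32);

    format.setFrameSize(QSize(720, 480), QVideoSurfaceFormat::KeepViewport);
    // format.viewport() is still QRect(0, 0, 640, 480).

    format.setFrameSize(QSize(720, 480), QVideoSurfaceFormat::ResetViewport);
    // format.viewport() is now QRect(0, 0, 720, 480).
*/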

/*!
    \overload

    Sets the \a width and \a height of frames in a video stream.

    The viewport \a mode indicates how the viewport should be updated.
*/

void QVideoSurfaceFormat::setFrameSize(int width, int height, ViewportMode mode)
{
    setFrameSize(QSize(width, height), mode);
}

/*!
    Returns the viewport of a video stream.

    The viewport is the region of a video frame that is actually displayed.

    By default the viewport covers an entire frame.
*/

QRect QVideoSurfaceFormat::viewport() const
{
    return d->viewport;
}

/*!
    Sets the viewport of a video stream to \a viewport.
*/

void QVideoSurfaceFormat::setViewport(const QRect &viewport)
{
    d->viewport = viewport;
}

/*!
    Returns the direction of scan lines.
*/

QVideoSurfaceFormat::Direction QVideoSurfaceFormat::scanLineDirection() const
{
    return d->scanLineDirection;
}

/*!
    Sets the \a direction of scan lines.
*/

void QVideoSurfaceFormat::setScanLineDirection(Direction direction)
{
    d->scanLineDirection = direction;
}

/*!
    Returns the frame rate of a video stream in frames per second.
*/

qreal QVideoSurfaceFormat::frameRate() const
{
    return d->frameRate;
}

/*!
    Sets the frame \a rate of a video stream in frames per second.
*/

void QVideoSurfaceFormat::setFrameRate(qreal rate)
{
    d->frameRate = rate;
}

/*!
    Returns a video stream's pixel aspect ratio.
*/

QSize QVideoSurfaceFormat::pixelAspectRatio() const
{
    return d->pixelAspectRatio;
}

/*!
    Sets a video stream's pixel aspect \a ratio.
*/

void QVideoSurfaceFormat::setPixelAspectRatio(const QSize &ratio)
{
    d->pixelAspectRatio = ratio;
}

/*!
    \overload

    Sets the \a horizontal and \a vertical elements of a video stream's pixel aspect ratio.
*/

void QVideoSurfaceFormat::setPixelAspectRatio(int horizontal, int vertical)
{
    d->pixelAspectRatio = QSize(horizontal, vertical);
}

/*!
    Returns the YUV color space of a video stream.
*/

QVideoSurfaceFormat::YuvColorSpace QVideoSurfaceFormat::yuvColorSpace() const
{
    return d->yuvColorSpace;
}

/*!
    Sets the YUV color \a space of a video stream.
    It is only used with raw YUV frame types.
*/

void QVideoSurfaceFormat::setYuvColorSpace(QVideoSurfaceFormat::YuvColorSpace space)
{
    d->yuvColorSpace = space;
}

/*!
    Returns a suggested size in pixels for the video stream.

    This is the size of the viewport scaled according to the pixel aspect ratio.
*/

QSize QVideoSurfaceFormat::sizeHint() const
{
    QSize size = d->viewport.size();

    if (d->pixelAspectRatio.height() != 0)
        size.setWidth(size.width() * d->pixelAspectRatio.width() / d->pixelAspectRatio.height());

    return size;
}
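
/*
    A worked example of the calculation above, using illustrative numbers: a
    720x480 viewport with a pixel aspect ratio of 8:9 gives a width of
    720 * 8 / 9 = 640, so sizeHint() returns QSize(640, 480); the height is
    left unscaled.

    QVideoSurfaceFormat format(QSize(720, 480), QVideoFrame::Format_YV12);
    format.setPixelAspectRatio(8, 9);
    // format.sizeHint() == QSize(640, 480)
*/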

/*!
    Returns a list of video format dynamic property names.
*/

QList<QByteArray> QVideoSurfaceFormat::propertyNames() const
{
    return (QList<QByteArray>()
            << "handleType"
            << "pixelFormat"
            << "frameSize"
            << "frameWidth"
            << "frameHeight"
            << "viewport"
            << "scanLineDirection"
            << "frameRate"
            << "pixelAspectRatio"
            << "sizeHint"
            << "yuvColorSpace")
            + d->propertyNames;
}

/*!
    Returns the value of the video format's \a name property.
*/

QVariant QVideoSurfaceFormat::property(const char *name) const
{
    if (qstrcmp(name, "handleType") == 0) {
        return qVariantFromValue(d->handleType);
    } else if (qstrcmp(name, "pixelFormat") == 0) {
        return qVariantFromValue(d->pixelFormat);
    } else if (qstrcmp(name, "frameSize") == 0) {
        return d->frameSize;
    } else if (qstrcmp(name, "frameWidth") == 0) {
        return d->frameSize.width();
    } else if (qstrcmp(name, "frameHeight") == 0) {
        return d->frameSize.height();
    } else if (qstrcmp(name, "viewport") == 0) {
        return d->viewport;
    } else if (qstrcmp(name, "scanLineDirection") == 0) {
        return qVariantFromValue(d->scanLineDirection);
    } else if (qstrcmp(name, "frameRate") == 0) {
        return qVariantFromValue(d->frameRate);
    } else if (qstrcmp(name, "pixelAspectRatio") == 0) {
        return qVariantFromValue(d->pixelAspectRatio);
    } else if (qstrcmp(name, "sizeHint") == 0) {
        return sizeHint();
    } else if (qstrcmp(name, "yuvColorSpace") == 0) {
        return qVariantFromValue(d->yuvColorSpace);
    } else {
        int id = 0;
        for (; id < d->propertyNames.count() && d->propertyNames.at(id) != name; ++id) {}

        return id < d->propertyValues.count()
                ? d->propertyValues.at(id)
                : QVariant();
    }
}

/*!
    Sets the video format's \a name property to \a value.
*/

void QVideoSurfaceFormat::setProperty(const char *name, const QVariant &value)
{
    if (qstrcmp(name, "handleType") == 0) {
        // read only.
    } else if (qstrcmp(name, "pixelFormat") == 0) {
        // read only.
    } else if (qstrcmp(name, "frameSize") == 0) {
        if (qVariantCanConvert<QSize>(value)) {
            d->frameSize = qvariant_cast<QSize>(value);
            d->viewport = QRect(QPoint(0, 0), d->frameSize);
        }
    } else if (qstrcmp(name, "frameWidth") == 0) {
        // read only.
    } else if (qstrcmp(name, "frameHeight") == 0) {
        // read only.
    } else if (qstrcmp(name, "viewport") == 0) {
        if (qVariantCanConvert<QRect>(value))
            d->viewport = qvariant_cast<QRect>(value);
    } else if (qstrcmp(name, "scanLineDirection") == 0) {
        if (qVariantCanConvert<Direction>(value))
            d->scanLineDirection = qvariant_cast<Direction>(value);
    } else if (qstrcmp(name, "frameRate") == 0) {
        if (qVariantCanConvert<qreal>(value))
            d->frameRate = qvariant_cast<qreal>(value);
    } else if (qstrcmp(name, "pixelAspectRatio") == 0) {
        if (qVariantCanConvert<QSize>(value))
            d->pixelAspectRatio = qvariant_cast<QSize>(value);
    } else if (qstrcmp(name, "sizeHint") == 0) {
        // read only.
    } else if (qstrcmp(name, "yuvColorSpace") == 0) {
        if (qVariantCanConvert<YuvColorSpace>(value))
            d->yuvColorSpace = qvariant_cast<YuvColorSpace>(value);
    } else {
        int id = 0;
        for (; id < d->propertyNames.count() && d->propertyNames.at(id) != name; ++id) {}

        if (id < d->propertyValues.count()) {
            if (value.isNull()) {
                d->propertyNames.removeAt(id);
                d->propertyValues.removeAt(id);
            } else {
                d->propertyValues[id] = value;
            }
        } else if (!value.isNull()) {
            d->propertyNames.append(QByteArray(name));
            d->propertyValues.append(value);
        }
    }
}
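
/*
    A brief sketch of the dynamic property mechanism implemented above: names
    outside the fixed set are stored in the private lists, and assigning a null
    QVariant removes the entry again. The "colorFormat" name is purely
    illustrative.

    QVideoSurfaceFormat format(QSize(640, 480), QVideoFrame::Format_YV12);

    format.setProperty("colorFormat", QString::fromLatin1("YV12"));
    QVariant value = format.property("colorFormat");    // QString "YV12"

    format.setProperty("colorFormat", QVariant());      // removes the property
*/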


#ifndef QT_NO_DEBUG_STREAM
QDebug operator<<(QDebug dbg, const QVideoSurfaceFormat &f)
{
    QString typeName;
    switch (f.pixelFormat()) {
    case QVideoFrame::Format_Invalid:
        typeName = QLatin1String("Format_Invalid");
        break;
    case QVideoFrame::Format_ARGB32:
        typeName = QLatin1String("Format_ARGB32");
        break;
    case QVideoFrame::Format_ARGB32_Premultiplied:
        typeName = QLatin1String("Format_ARGB32_Premultiplied");
        break;
    case QVideoFrame::Format_RGB32:
        typeName = QLatin1String("Format_RGB32");
        break;
    case QVideoFrame::Format_RGB24:
        typeName = QLatin1String("Format_RGB24");
        break;
    case QVideoFrame::Format_RGB565:
        typeName = QLatin1String("Format_RGB565");
        break;
    case QVideoFrame::Format_RGB555:
        typeName = QLatin1String("Format_RGB555");
        break;
    case QVideoFrame::Format_ARGB8565_Premultiplied:
        typeName = QLatin1String("Format_ARGB8565_Premultiplied");
        break;
    case QVideoFrame::Format_BGRA32:
        typeName = QLatin1String("Format_BGRA32");
        break;
    case QVideoFrame::Format_BGRA32_Premultiplied:
        typeName = QLatin1String("Format_BGRA32_Premultiplied");
        break;
    case QVideoFrame::Format_BGR32:
        typeName = QLatin1String("Format_BGR32");
        break;
    case QVideoFrame::Format_BGR24:
        typeName = QLatin1String("Format_BGR24");
        break;
    case QVideoFrame::Format_BGR565:
        typeName = QLatin1String("Format_BGR565");
        break;
    case QVideoFrame::Format_BGR555:
        typeName = QLatin1String("Format_BGR555");
        break;
    case QVideoFrame::Format_BGRA5658_Premultiplied:
        typeName = QLatin1String("Format_BGRA5658_Premultiplied");
        break;
    case QVideoFrame::Format_AYUV444:
        typeName = QLatin1String("Format_AYUV444");
        break;
    case QVideoFrame::Format_AYUV444_Premultiplied:
        typeName = QLatin1String("Format_AYUV444_Premultiplied");
        break;
    case QVideoFrame::Format_YUV444:
        typeName = QLatin1String("Format_YUV444");
        break;
    case QVideoFrame::Format_YUV420P:
        typeName = QLatin1String("Format_YUV420P");
        break;
    case QVideoFrame::Format_YV12:
        typeName = QLatin1String("Format_YV12");
        break;
    case QVideoFrame::Format_UYVY:
        typeName = QLatin1String("Format_UYVY");
        break;
    case QVideoFrame::Format_YUYV:
        typeName = QLatin1String("Format_YUYV");
        break;
    case QVideoFrame::Format_NV12:
        typeName = QLatin1String("Format_NV12");
        break;
    case QVideoFrame::Format_NV21:
        typeName = QLatin1String("Format_NV21");
        break;
    case QVideoFrame::Format_IMC1:
        typeName = QLatin1String("Format_IMC1");
        break;
    case QVideoFrame::Format_IMC2:
        typeName = QLatin1String("Format_IMC2");
        break;
    case QVideoFrame::Format_IMC3:
        typeName = QLatin1String("Format_IMC3");
        break;
    case QVideoFrame::Format_IMC4:
        typeName = QLatin1String("Format_IMC4");
        break;
    case QVideoFrame::Format_Y8:
        typeName = QLatin1String("Format_Y8");
        break;
    case QVideoFrame::Format_Y16:
        typeName = QLatin1String("Format_Y16");
        break;
    default:
        typeName = QString(QLatin1String("UserType(%1)")).arg(int(f.pixelFormat()));
    }

    dbg.nospace() << "QVideoSurfaceFormat(" << typeName;
    dbg.nospace() << ", " << f.frameSize();
    dbg.nospace() << ", viewport=" << f.viewport();
    dbg.nospace() << ", pixelAspectRatio=" << f.pixelAspectRatio();
    dbg.nospace() << ")";

    foreach(const QByteArray& propertyName, f.propertyNames())
        dbg << "\n " << propertyName.data() << " = " << f.property(propertyName.data());

    return dbg.space();
}
#endif

QT_END_NAMESPACE