|
1 /* GStreamer |
|
2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu> |
|
3 * This file: |
|
4 * Copyright (c) 2002-2004 Ronald Bultje <rbultje@ronald.bitfreak.net> |
|
5 * |
|
6 * This library is free software; you can redistribute it and/or |
|
7 * modify it under the terms of the GNU Library General Public |
|
8 * License as published by the Free Software Foundation; either |
|
9 * version 2 of the License, or (at your option) any later version. |
|
10 * |
|
11 * This library is distributed in the hope that it will be useful, |
|
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
|
14 * Library General Public License for more details. |
|
15 * |
|
16 * You should have received a copy of the GNU Library General Public |
|
17 * License along with this library; if not, write to the |
|
18 * Free Software Foundation, Inc., 59 Temple Place - Suite 330, |
|
19 * Boston, MA 02111-1307, USA. |
|
20 */ |
|
21 |
|
22 #ifdef HAVE_CONFIG_H |
|
23 #include "config.h" |
|
24 #endif |
|
25 |
|
26 #include <string.h> |
|
27 |
|
28 #include <gst/gst.h> |
|
29 |
|
30 #include "avcodec.h" |
|
31 #include "gstffmpegcodecmap.h" |
|
32 |
|
/* Forward declarations for the caps-construction helpers defined below.
 * Both take a NULL-terminated list of extra field name/value pairs that
 * is appended to the generated caps structure. */
static GstCaps *
gst_ff_vid_caps_new (AVCodecContext * context,
    const char *mimetype, const char *fieldname, ...)
    G_GNUC_NULL_TERMINATED;
static GstCaps *gst_ff_aud_caps_new (AVCodecContext * context,
    const char *mimetype, const char *fieldname, ...) G_GNUC_NULL_TERMINATED;
|
39 |
|
40 /* |
|
41 * Read a palette from a caps. |
|
42 */ |
|
43 |
|
44 static void |
|
45 gst_ffmpeg_get_palette (const GstCaps * caps, AVCodecContext * context) |
|
46 { |
|
47 GstStructure *str = gst_caps_get_structure (caps, 0); |
|
48 const GValue *palette_v; |
|
49 |
|
50 /* do we have a palette? */ |
|
51 if ((palette_v = gst_structure_get_value (str, "palette_data")) && context) { |
|
52 const GstBuffer *palette; |
|
53 |
|
54 palette = gst_value_get_buffer (palette_v); |
|
55 if (palette && GST_BUFFER_SIZE (palette) >= 256 * 4) { |
|
56 if (context->palctrl) |
|
57 av_free (context->palctrl); |
|
58 context->palctrl = av_malloc (sizeof (AVPaletteControl)); |
|
59 context->palctrl->palette_changed = 1; |
|
60 memcpy (context->palctrl->palette, GST_BUFFER_DATA (palette), |
|
61 AVPALETTE_SIZE); |
|
62 } |
|
63 } |
|
64 } |
|
65 |
|
66 static void |
|
67 gst_ffmpeg_set_palette (GstCaps * caps, AVCodecContext * context) |
|
68 { |
|
69 if (context->palctrl) { |
|
70 GstBuffer *palette = gst_buffer_new_and_alloc (256 * 4); |
|
71 |
|
72 memcpy (GST_BUFFER_DATA (palette), context->palctrl->palette, |
|
73 AVPALETTE_SIZE); |
|
74 gst_caps_set_simple (caps, "palette_data", GST_TYPE_BUFFER, palette, NULL); |
|
75 gst_buffer_unref (palette); |
|
76 } |
|
77 } |
|
78 |
|
79 /* this function creates caps with fixed or unfixed width/height |
|
80 * properties depending on whether we've got a context. |
|
81 * |
|
82 * See below for why we use this. |
|
83 * |
|
84 * We should actually do this stuff at the end, like in riff-media.c, |
|
85 * but I'm too lazy today. Maybe later. |
|
86 */ |
|
87 |
|
88 static GstCaps * |
|
89 gst_ff_vid_caps_new (AVCodecContext * context, const char *mimetype, |
|
90 const char *fieldname, ...) |
|
91 { |
|
92 GstStructure *structure = NULL; |
|
93 GstCaps *caps = NULL; |
|
94 va_list var_args; |
|
95 |
|
96 if (context != NULL) { |
|
97 caps = gst_caps_new_simple (mimetype, |
|
98 "width", G_TYPE_INT, context->width, |
|
99 "height", G_TYPE_INT, context->height, |
|
100 "framerate", GST_TYPE_FRACTION, |
|
101 (gint) context->frame_rate, (gint) context->frame_rate_base, NULL); |
|
102 } else { |
|
103 caps = gst_caps_new_simple (mimetype, |
|
104 "width", GST_TYPE_INT_RANGE, 1, G_MAXINT, |
|
105 "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, |
|
106 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL); |
|
107 } |
|
108 |
|
109 structure = gst_caps_get_structure (caps, 0); |
|
110 |
|
111 if (structure) { |
|
112 va_start (var_args, fieldname); |
|
113 gst_structure_set_valist (structure, fieldname, var_args); |
|
114 va_end (var_args); |
|
115 } |
|
116 |
|
117 return caps; |
|
118 } |
|
119 |
|
120 /* same for audio - now with channels/sample rate |
|
121 */ |
|
122 |
|
123 static GstCaps * |
|
124 gst_ff_aud_caps_new (AVCodecContext * context, const char *mimetype, |
|
125 const char *fieldname, ...) |
|
126 { |
|
127 GstCaps *caps = NULL; |
|
128 GstStructure *structure = NULL; |
|
129 va_list var_args; |
|
130 |
|
131 if (context != NULL) { |
|
132 caps = gst_caps_new_simple (mimetype, |
|
133 "rate", G_TYPE_INT, context->sample_rate, |
|
134 "channels", G_TYPE_INT, context->channels, NULL); |
|
135 } else { |
|
136 caps = gst_caps_new_simple (mimetype, NULL); |
|
137 } |
|
138 |
|
139 structure = gst_caps_get_structure (caps, 0); |
|
140 |
|
141 if (structure) { |
|
142 va_start (var_args, fieldname); |
|
143 gst_structure_set_valist (structure, fieldname, var_args); |
|
144 va_end (var_args); |
|
145 } |
|
146 |
|
147 return caps; |
|
148 } |
|
149 |
|
150 /* Convert a FFMPEG Pixel Format and optional AVCodecContext |
|
151 * to a GstCaps. If the context is ommitted, no fixed values |
|
152 * for video/audio size will be included in the GstCaps |
|
153 * |
|
154 * See below for usefulness |
|
155 */ |
|
156 |
|
/* Map an ffmpeg PixelFormat to GstCaps.
 *
 * Each switch case fills in EITHER a YUV fourcc (fmt) OR an RGB/gray
 * description (bpp/depth/masks/endianness); the caps are then built
 * from whichever set was populated. PAL8 gets plain RGB caps with a
 * palette attached from the context (if any). Returns NULL for
 * unknown formats. */
static GstCaps *
gst_ffmpeg_pixfmt_to_caps (enum PixelFormat pix_fmt, AVCodecContext * context)
{
  GstCaps *caps = NULL;

  int bpp = 0, depth = 0, endianness = 0;
  gulong g_mask = 0, r_mask = 0, b_mask = 0, a_mask = 0;
  guint32 fmt = 0;

  switch (pix_fmt) {
    case PIX_FMT_YUV420P:
      fmt = GST_MAKE_FOURCC ('I', '4', '2', '0');
      break;
    case PIX_FMT_YVU420P:
      fmt = GST_MAKE_FOURCC ('Y', 'V', '1', '2');
      break;
    case PIX_FMT_YUV422:
      fmt = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
      break;
    case PIX_FMT_UYVY422:
      fmt = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
      break;
    case PIX_FMT_UYVY411:
      fmt = GST_MAKE_FOURCC ('I', 'Y', 'U', '1');
      break;
    case PIX_FMT_RGB24:
      bpp = depth = 24;
      endianness = G_BIG_ENDIAN;
      r_mask = 0xff0000;
      g_mask = 0x00ff00;
      b_mask = 0x0000ff;
      break;
    case PIX_FMT_BGR24:
      bpp = depth = 24;
      endianness = G_BIG_ENDIAN;
      r_mask = 0x0000ff;
      g_mask = 0x00ff00;
      b_mask = 0xff0000;
      break;
    case PIX_FMT_YUV422P:
      fmt = GST_MAKE_FOURCC ('Y', '4', '2', 'B');
      break;
    case PIX_FMT_YUV444P:
      fmt = GST_MAKE_FOURCC ('Y', '4', '4', '4');
      break;
      /* For the 32-bit formats the GStreamer masks are expressed in
       * big-endian order, so the masks differ per host byte order. */
    case PIX_FMT_RGB32:
      bpp = 32;
      depth = 24;
      endianness = G_BIG_ENDIAN;
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
      r_mask = 0x00ff0000;
      g_mask = 0x0000ff00;
      b_mask = 0x000000ff;
#else
      r_mask = 0x0000ff00;
      g_mask = 0x00ff0000;
      b_mask = 0xff000000;
#endif
      break;
    case PIX_FMT_BGR32:
      bpp = 32;
      depth = 24;
      endianness = G_BIG_ENDIAN;
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
      r_mask = 0x0000ff00;
      g_mask = 0x00ff0000;
      b_mask = 0xff000000;
#else
      r_mask = 0x00ff0000;
      g_mask = 0x0000ff00;
      b_mask = 0x000000ff;
#endif
      break;
    case PIX_FMT_xRGB32:
      bpp = 32;
      depth = 24;
      endianness = G_BIG_ENDIAN;
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
      r_mask = 0xff000000;
      g_mask = 0x00ff0000;
      b_mask = 0x0000ff00;
#else
      r_mask = 0x000000ff;
      g_mask = 0x0000ff00;
      b_mask = 0x00ff0000;
#endif
      break;
    case PIX_FMT_BGRx32:
      bpp = 32;
      depth = 24;
      endianness = G_BIG_ENDIAN;
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
      r_mask = 0x000000ff;
      g_mask = 0x0000ff00;
      b_mask = 0x00ff0000;
#else
      r_mask = 0xff000000;
      g_mask = 0x00ff0000;
      b_mask = 0x0000ff00;
#endif
      break;
    case PIX_FMT_RGBA32:
      bpp = 32;
      depth = 32;
      endianness = G_BIG_ENDIAN;
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
      r_mask = 0x00ff0000;
      g_mask = 0x0000ff00;
      b_mask = 0x000000ff;
      a_mask = 0xff000000;
#else
      r_mask = 0x0000ff00;
      g_mask = 0x00ff0000;
      b_mask = 0xff000000;
      a_mask = 0x000000ff;
#endif
      break;
    case PIX_FMT_BGRA32:
      bpp = 32;
      depth = 32;
      endianness = G_BIG_ENDIAN;
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
      r_mask = 0x0000ff00;
      g_mask = 0x00ff0000;
      b_mask = 0xff000000;
      a_mask = 0x000000ff;
#else
      r_mask = 0x00ff0000;
      g_mask = 0x0000ff00;
      b_mask = 0x000000ff;
      a_mask = 0xff000000;
#endif
      break;
    case PIX_FMT_ARGB32:
      bpp = 32;
      depth = 32;
      endianness = G_BIG_ENDIAN;
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
      r_mask = 0xff000000;
      g_mask = 0x00ff0000;
      b_mask = 0x0000ff00;
      a_mask = 0x000000ff;
#else
      r_mask = 0x000000ff;
      g_mask = 0x0000ff00;
      b_mask = 0x00ff0000;
      a_mask = 0xff000000;
#endif
      break;
    case PIX_FMT_ABGR32:
      bpp = 32;
      depth = 32;
      endianness = G_BIG_ENDIAN;
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
      r_mask = 0x000000ff;
      g_mask = 0x0000ff00;
      b_mask = 0x00ff0000;
      a_mask = 0xff000000;
#else
      r_mask = 0xff000000;
      g_mask = 0x00ff0000;
      b_mask = 0x0000ff00;
      a_mask = 0x000000ff;
#endif
      break;
    case PIX_FMT_YUV410P:
      fmt = GST_MAKE_FOURCC ('Y', 'U', 'V', '9');
      break;
    case PIX_FMT_YVU410P:
      fmt = GST_MAKE_FOURCC ('Y', 'V', 'U', '9');
      break;
    case PIX_FMT_YUV411P:
      fmt = GST_MAKE_FOURCC ('Y', '4', '1', 'B');
      break;
    case PIX_FMT_RGB565:
      bpp = depth = 16;
      endianness = G_BYTE_ORDER;
      r_mask = 0xf800;
      g_mask = 0x07e0;
      b_mask = 0x001f;
      break;
    case PIX_FMT_RGB555:
      bpp = 16;
      depth = 15;
      endianness = G_BYTE_ORDER;
      r_mask = 0x7c00;
      g_mask = 0x03e0;
      b_mask = 0x001f;
      break;
    case PIX_FMT_PAL8:
      /* no masks: falls through to the paletted-RGB branch below */
      bpp = depth = 8;
      endianness = G_BYTE_ORDER;
      break;
    case PIX_FMT_AYUV4444:
      fmt = GST_MAKE_FOURCC ('A', 'Y', 'U', 'V');
      break;
    case PIX_FMT_GRAY8:
      /* gray builds its caps directly; the generic code below is skipped */
      bpp = depth = 8;
      caps = gst_ff_vid_caps_new (context, "video/x-raw-gray",
          "bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, depth, NULL);
      break;
    default:
      /* give up ... */
      break;
  }

  if (caps == NULL) {
    if (bpp != 0) {
      if (a_mask != 0) {
        /* RGB with alpha channel */
        caps = gst_ff_vid_caps_new (context, "video/x-raw-rgb",
            "bpp", G_TYPE_INT, bpp,
            "depth", G_TYPE_INT, depth,
            "red_mask", G_TYPE_INT, r_mask,
            "green_mask", G_TYPE_INT, g_mask,
            "blue_mask", G_TYPE_INT, b_mask,
            "alpha_mask", G_TYPE_INT, a_mask,
            "endianness", G_TYPE_INT, endianness, NULL);
      } else if (r_mask != 0) {
        /* plain RGB */
        caps = gst_ff_vid_caps_new (context, "video/x-raw-rgb",
            "bpp", G_TYPE_INT, bpp,
            "depth", G_TYPE_INT, depth,
            "red_mask", G_TYPE_INT, r_mask,
            "green_mask", G_TYPE_INT, g_mask,
            "blue_mask", G_TYPE_INT, b_mask,
            "endianness", G_TYPE_INT, endianness, NULL);
      } else {
        /* paletted RGB (PAL8): attach the context's palette if present */
        caps = gst_ff_vid_caps_new (context, "video/x-raw-rgb",
            "bpp", G_TYPE_INT, bpp,
            "depth", G_TYPE_INT, depth,
            "endianness", G_TYPE_INT, endianness, NULL);
        if (context) {
          gst_ffmpeg_set_palette (caps, context);
        }
      }
    } else if (fmt) {
      caps = gst_ff_vid_caps_new (context, "video/x-raw-yuv",
          "format", GST_TYPE_FOURCC, fmt, NULL);
    }
  }

  if (caps != NULL) {
    char *str = gst_caps_to_string (caps);

    GST_DEBUG ("caps for pix_fmt=%d: %s", pix_fmt, str);
    g_free (str);
  } else {
    GST_LOG ("No caps found for pix_fmt=%d", pix_fmt);
  }

  return caps;
}
|
408 |
|
409 /* Convert a FFMPEG Sample Format and optional AVCodecContext |
|
410 * to a GstCaps. If the context is ommitted, no fixed values |
|
411 * for video/audio size will be included in the GstCaps |
|
412 * |
|
413 * See below for usefulness |
|
414 */ |
|
415 |
|
416 static GstCaps * |
|
417 gst_ffmpeg_smpfmt_to_caps (enum SampleFormat sample_fmt, |
|
418 AVCodecContext * context) |
|
419 { |
|
420 GstCaps *caps = NULL; |
|
421 |
|
422 int bpp = 0; |
|
423 gboolean signedness = FALSE; |
|
424 |
|
425 switch (sample_fmt) { |
|
426 case SAMPLE_FMT_S16: |
|
427 signedness = TRUE; |
|
428 bpp = 16; |
|
429 break; |
|
430 |
|
431 default: |
|
432 /* .. */ |
|
433 break; |
|
434 } |
|
435 |
|
436 if (bpp) { |
|
437 caps = gst_ff_aud_caps_new (context, "audio/x-raw-int", |
|
438 "signed", G_TYPE_BOOLEAN, signedness, |
|
439 "endianness", G_TYPE_INT, G_BYTE_ORDER, |
|
440 "width", G_TYPE_INT, bpp, "depth", G_TYPE_INT, bpp, NULL); |
|
441 } |
|
442 |
|
443 if (caps != NULL) { |
|
444 char *str = gst_caps_to_string (caps); |
|
445 |
|
446 GST_DEBUG ("caps for sample_fmt=%d: %s", sample_fmt, str); |
|
447 g_free (str); |
|
448 } else { |
|
449 GST_LOG ("No caps found for sample_fmt=%d", sample_fmt); |
|
450 } |
|
451 |
|
452 return caps; |
|
453 } |
|
454 |
|
455 /* Convert a FFMPEG codec Type and optional AVCodecContext |
|
456 * to a GstCaps. If the context is ommitted, no fixed values |
|
457 * for video/audio size will be included in the GstCaps |
|
458 * |
|
459 * CodecType is primarily meant for uncompressed data GstCaps! |
|
460 */ |
|
461 #ifdef __SYMBIAN32__ |
|
462 EXPORT_C |
|
463 #endif |
|
464 |
|
465 |
|
466 GstCaps * |
|
467 gst_ffmpegcsp_codectype_to_caps (enum CodecType codec_type, |
|
468 AVCodecContext * context) |
|
469 { |
|
470 GstCaps *caps; |
|
471 |
|
472 switch (codec_type) { |
|
473 case CODEC_TYPE_VIDEO: |
|
474 if (context) { |
|
475 caps = gst_ffmpeg_pixfmt_to_caps (context->pix_fmt, |
|
476 context->width == -1 ? NULL : context); |
|
477 } else { |
|
478 GstCaps *temp; |
|
479 enum PixelFormat i; |
|
480 |
|
481 caps = gst_caps_new_empty (); |
|
482 for (i = 0; i < PIX_FMT_NB; i++) { |
|
483 temp = gst_ffmpeg_pixfmt_to_caps (i, NULL); |
|
484 if (temp != NULL) { |
|
485 gst_caps_append (caps, temp); |
|
486 } |
|
487 } |
|
488 } |
|
489 break; |
|
490 |
|
491 case CODEC_TYPE_AUDIO: |
|
492 if (context) { |
|
493 caps = gst_ffmpeg_smpfmt_to_caps (context->sample_fmt, context); |
|
494 } else { |
|
495 GstCaps *temp; |
|
496 enum SampleFormat i; |
|
497 |
|
498 caps = gst_caps_new_empty (); |
|
499 for (i = 0; i <= SAMPLE_FMT_S16; i++) { |
|
500 temp = gst_ffmpeg_smpfmt_to_caps (i, NULL); |
|
501 if (temp != NULL) { |
|
502 gst_caps_append (caps, temp); |
|
503 } |
|
504 } |
|
505 } |
|
506 break; |
|
507 |
|
508 default: |
|
509 /* .. */ |
|
510 caps = NULL; |
|
511 break; |
|
512 } |
|
513 |
|
514 return caps; |
|
515 } |
|
516 |
|
517 /* Convert a GstCaps (audio/raw) to a FFMPEG SampleFmt |
|
518 * and other audio properties in a AVCodecContext. |
|
519 * |
|
520 * For usefulness, see below |
|
521 */ |
|
522 |
|
523 static void |
|
524 gst_ffmpeg_caps_to_smpfmt (const GstCaps * caps, |
|
525 AVCodecContext * context, gboolean raw) |
|
526 { |
|
527 GstStructure *structure; |
|
528 gint depth = 0, width = 0, endianness = 0; |
|
529 gboolean signedness = FALSE; |
|
530 |
|
531 g_return_if_fail (gst_caps_get_size (caps) == 1); |
|
532 structure = gst_caps_get_structure (caps, 0); |
|
533 |
|
534 gst_structure_get_int (structure, "channels", &context->channels); |
|
535 gst_structure_get_int (structure, "rate", &context->sample_rate); |
|
536 |
|
537 if (!raw) |
|
538 return; |
|
539 |
|
540 if (gst_structure_get_int (structure, "width", &width) && |
|
541 gst_structure_get_int (structure, "depth", &depth) && |
|
542 gst_structure_get_boolean (structure, "signed", &signedness) && |
|
543 gst_structure_get_int (structure, "endianness", &endianness)) { |
|
544 if (width == 16 && depth == 16 && |
|
545 endianness == G_BYTE_ORDER && signedness == TRUE) { |
|
546 context->sample_fmt = SAMPLE_FMT_S16; |
|
547 } |
|
548 } |
|
549 } |
|
550 |
|
551 |
|
552 /* Convert a GstCaps (video/raw) to a FFMPEG PixFmt |
|
553 * and other video properties in a AVCodecContext. |
|
554 * |
|
555 * For usefulness, see below |
|
556 */ |
|
557 |
|
/* Fill video properties of an AVCodecContext from (video/raw) caps.
 *
 * width/height/framerate are mandatory (the function bails out via
 * g_return_if_fail otherwise). For raw caps, pix_fmt is derived from
 * the caps name plus fourcc (YUV), bpp/depth/masks (RGB), or bpp
 * (gray). Unrecognized combinations leave pix_fmt untouched. */
static void
gst_ffmpeg_caps_to_pixfmt (const GstCaps * caps,
    AVCodecContext * context, gboolean raw)
{
  GstStructure *structure;
  const GValue *fps;
  gboolean ret;

  g_return_if_fail (gst_caps_get_size (caps) == 1);
  structure = gst_caps_get_structure (caps, 0);

  ret = gst_structure_get_int (structure, "width", &context->width);
  ret &= gst_structure_get_int (structure, "height", &context->height);
  g_return_if_fail (ret == TRUE);

  fps = gst_structure_get_value (structure, "framerate");
  g_return_if_fail (GST_VALUE_HOLDS_FRACTION (fps));

  /* framerate does not really matter */
  context->frame_rate = gst_value_get_fraction_numerator (fps);
  context->frame_rate_base = gst_value_get_fraction_denominator (fps);

  if (!raw)
    return;

  if (gst_structure_has_name (structure, "video/x-raw-yuv")) {
    guint32 fourcc;

    /* straight fourcc -> PixelFormat mapping */
    if (gst_structure_get_fourcc (structure, "format", &fourcc)) {
      switch (fourcc) {
        case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
          context->pix_fmt = PIX_FMT_YUV422;
          break;
        case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
          context->pix_fmt = PIX_FMT_UYVY422;
          break;
        case GST_MAKE_FOURCC ('I', 'Y', 'U', '1'):
          context->pix_fmt = PIX_FMT_UYVY411;
          break;
        case GST_MAKE_FOURCC ('I', '4', '2', '0'):
          context->pix_fmt = PIX_FMT_YUV420P;
          break;
        case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
          context->pix_fmt = PIX_FMT_YVU420P;
          break;
        case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
          context->pix_fmt = PIX_FMT_YUV411P;
          break;
        case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
          context->pix_fmt = PIX_FMT_YUV422P;
          break;
        case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'):
          context->pix_fmt = PIX_FMT_YUV410P;
          break;
        case GST_MAKE_FOURCC ('Y', 'V', 'U', '9'):
          context->pix_fmt = PIX_FMT_YVU410P;
          break;
        case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
          context->pix_fmt = PIX_FMT_AYUV4444;
          break;
        case GST_MAKE_FOURCC ('Y', '4', '4', '4'):
          context->pix_fmt = PIX_FMT_YUV444P;
          break;
      }
    }
  } else if (gst_structure_has_name (structure, "video/x-raw-rgb")) {
    gint bpp = 0, rmask = 0, endianness = 0, amask = 0, depth = 0;

    if (gst_structure_get_int (structure, "bpp", &bpp) &&
        gst_structure_get_int (structure, "endianness", &endianness)) {
      if (gst_structure_get_int (structure, "red_mask", &rmask)) {
        switch (bpp) {
          case 32:
            /* the red mask disambiguates the channel order; with an
             * alpha_mask present it is one of the alpha formats. Masks
             * are byte-order dependent, hence the #if branches. */
            if (gst_structure_get_int (structure, "alpha_mask", &amask)) {
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
              if (rmask == 0x0000ff00)
                context->pix_fmt = PIX_FMT_BGRA32;
              else if (rmask == 0x00ff0000)
                context->pix_fmt = PIX_FMT_RGBA32;
              else if (rmask == 0xff000000)
                context->pix_fmt = PIX_FMT_ARGB32;
              else              // if (r_mask = 0x000000ff)
                context->pix_fmt = PIX_FMT_ABGR32;
#else
              if (rmask == 0x00ff0000)
                context->pix_fmt = PIX_FMT_BGRA32;
              else if (rmask == 0x0000ff00)
                context->pix_fmt = PIX_FMT_RGBA32;
              else if (rmask == 0x000000ff)
                context->pix_fmt = PIX_FMT_ARGB32;
              else              // if (rmask == 0xff000000)
                context->pix_fmt = PIX_FMT_ABGR32;
#endif
            } else {
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
              if (rmask == 0x00ff0000)
                context->pix_fmt = PIX_FMT_RGB32;
              else if (rmask == 0x0000ff00)
                context->pix_fmt = PIX_FMT_BGR32;
              else if (rmask == 0xff000000)
                context->pix_fmt = PIX_FMT_xRGB32;
              else              // if (rmask == 0x000000ff)
                context->pix_fmt = PIX_FMT_BGRx32;
#else
              if (rmask == 0x0000ff00)
                context->pix_fmt = PIX_FMT_RGB32;
              else if (rmask == 0x00ff0000)
                context->pix_fmt = PIX_FMT_BGR32;
              else if (rmask == 0x000000ff)
                context->pix_fmt = PIX_FMT_xRGB32;
              else              // if (rmask == 0xff000000)
                context->pix_fmt = PIX_FMT_BGRx32;
#endif
            }
            break;
          case 24:
            if (rmask == 0x0000FF)
              context->pix_fmt = PIX_FMT_BGR24;
            else
              context->pix_fmt = PIX_FMT_RGB24;
            break;
          case 16:
            /* depth 15 in a 16 bpp buffer is RGB555, otherwise RGB565 */
            if (endianness == G_BYTE_ORDER) {
              context->pix_fmt = PIX_FMT_RGB565;
              if (gst_structure_get_int (structure, "depth", &depth)) {
                if (depth == 15)
                  context->pix_fmt = PIX_FMT_RGB555;
              }
            }
            break;
          case 15:
            if (endianness == G_BYTE_ORDER)
              context->pix_fmt = PIX_FMT_RGB555;
            break;
          default:
            /* nothing */
            break;
        }
      } else {
        /* no red_mask: 8 bpp RGB is paletted */
        if (bpp == 8) {
          context->pix_fmt = PIX_FMT_PAL8;
          gst_ffmpeg_get_palette (caps, context);
        }
      }
    }
  } else if (gst_structure_has_name (structure, "video/x-raw-gray")) {
    gint bpp = 0;

    if (gst_structure_get_int (structure, "bpp", &bpp)) {
      switch (bpp) {
        case 8:
          context->pix_fmt = PIX_FMT_GRAY8;
          break;
      }
    }
  }
}
|
715 |
|
716 /* Convert a GstCaps and a FFMPEG codec Type to a |
|
717 * AVCodecContext. If the context is ommitted, no fixed values |
|
718 * for video/audio size will be included in the context |
|
719 * |
|
720 * CodecType is primarily meant for uncompressed data GstCaps! |
|
721 */ |
|
722 #ifdef __SYMBIAN32__ |
|
723 EXPORT_C |
|
724 #endif |
|
725 |
|
726 |
|
727 void |
|
728 gst_ffmpegcsp_caps_with_codectype (enum CodecType type, |
|
729 const GstCaps * caps, AVCodecContext * context) |
|
730 { |
|
731 if (context == NULL) |
|
732 return; |
|
733 |
|
734 switch (type) { |
|
735 case CODEC_TYPE_VIDEO: |
|
736 gst_ffmpeg_caps_to_pixfmt (caps, context, TRUE); |
|
737 break; |
|
738 |
|
739 case CODEC_TYPE_AUDIO: |
|
740 gst_ffmpeg_caps_to_smpfmt (caps, context, TRUE); |
|
741 break; |
|
742 |
|
743 default: |
|
744 /* unknown */ |
|
745 break; |
|
746 } |
|
747 } |
|
748 |
|
/* GEN_MASK(x): bitmask with the low x bits set, e.g. GEN_MASK(2) == 0x3.
 * NOTE(review): 1<<(x) is int-typed, so x must stay well below the int
 * bit width; callers here only pass small chroma-shift values. */
#define GEN_MASK(x) ((1<<(x))-1)
/* ROUND_UP_X(v,x): round v up to the next multiple of 2^x */
#define ROUND_UP_X(v,x) (((v) + GEN_MASK(x)) & ~GEN_MASK(x))
/* DIV_ROUND_UP_X(v,x): divide v by 2^x, rounding upwards */
#define DIV_ROUND_UP_X(v,x) (((v) + GEN_MASK(x)) >> (x))
|
752 |
|
753 /* |
|
754 * Fill in pointers to memory in a AVPicture, where |
|
755 * everything is aligned by 4 (as required by X). |
|
756 * This is mostly a copy from imgconvert.c with some |
|
757 * small changes. |
|
758 */ |
|
#ifdef __SYMBIAN32__
EXPORT_C
#endif
/* Fill an AVPicture's plane pointers/strides for a frame of the given
 * format and size stored contiguously at ptr. Strides are rounded up
 * to 4-byte alignment (as required by X). Returns the total byte size
 * of the frame, or -1 for unsupported formats. */
int
gst_ffmpegcsp_avpicture_fill (AVPicture * picture,
    uint8_t * ptr, enum PixelFormat pix_fmt, int width, int height)
{
  int size, w2, h2, size2;
  int stride, stride2;
  PixFmtInfo *pinfo;

  pinfo = get_pix_fmt_info (pix_fmt);

  switch (pix_fmt) {
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV444P:
    case PIX_FMT_YUV410P:
    case PIX_FMT_YUV411P:
    case PIX_FMT_YUVJ420P:
    case PIX_FMT_YUVJ422P:
    case PIX_FMT_YUVJ444P:
      /* planar YUV: luma plane followed by two chroma planes whose
       * dimensions are scaled down by the format's chroma shifts */
      stride = GST_ROUND_UP_4 (width);
      h2 = ROUND_UP_X (height, pinfo->y_chroma_shift);
      size = stride * h2;
      w2 = DIV_ROUND_UP_X (width, pinfo->x_chroma_shift);
      stride2 = GST_ROUND_UP_4 (w2);
      h2 = DIV_ROUND_UP_X (height, pinfo->y_chroma_shift);
      size2 = stride2 * h2;
      picture->data[0] = ptr;
      picture->data[1] = picture->data[0] + size;
      picture->data[2] = picture->data[1] + size2;
      picture->linesize[0] = stride;
      picture->linesize[1] = stride2;
      picture->linesize[2] = stride2;
      return size + 2 * size2;
      /* PIX_FMT_YVU420P = YV12: same as PIX_FMT_YUV420P, but
       * with U and V plane swapped. Strides as in videotestsrc */
    case PIX_FMT_YVU410P:
    case PIX_FMT_YVU420P:
      stride = GST_ROUND_UP_4 (width);
      h2 = ROUND_UP_X (height, pinfo->y_chroma_shift);
      size = stride * h2;
      w2 = DIV_ROUND_UP_X (width, pinfo->x_chroma_shift);
      stride2 = GST_ROUND_UP_4 (w2);
      h2 = DIV_ROUND_UP_X (height, pinfo->y_chroma_shift);
      size2 = stride2 * h2;
      picture->data[0] = ptr;
      picture->data[2] = picture->data[0] + size;
      picture->data[1] = picture->data[2] + size2;
      picture->linesize[0] = stride;
      picture->linesize[1] = stride2;
      picture->linesize[2] = stride2;
      return size + 2 * size2;
    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
      /* packed 3 bytes/pixel, single plane */
      stride = GST_ROUND_UP_4 (width * 3);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->linesize[0] = stride;
      return size;
    case PIX_FMT_AYUV4444:
    case PIX_FMT_RGB32:
    case PIX_FMT_RGBA32:
    case PIX_FMT_ARGB32:
    case PIX_FMT_BGR32:
    case PIX_FMT_BGRA32:
    case PIX_FMT_ABGR32:
    case PIX_FMT_xRGB32:
    case PIX_FMT_BGRx32:
      /* packed 4 bytes/pixel: already 4-aligned, no rounding needed */
      stride = width * 4;
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->linesize[0] = stride;
      return size;
    case PIX_FMT_RGB555:
    case PIX_FMT_RGB565:
    case PIX_FMT_YUV422:
    case PIX_FMT_UYVY422:
      /* packed 2 bytes/pixel */
      stride = GST_ROUND_UP_4 (width * 2);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->linesize[0] = stride;
      return size;
    case PIX_FMT_UYVY411:
      /* FIXME, probably not the right stride */
      stride = GST_ROUND_UP_4 (width);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->linesize[0] = width + width / 2;
      return size + size / 2;
    case PIX_FMT_GRAY8:
      stride = GST_ROUND_UP_4 (width);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->linesize[0] = stride;
      return size;
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
      /* 1 bit/pixel, packed 8 pixels per byte */
      stride = GST_ROUND_UP_4 ((width + 7) >> 3);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->linesize[0] = stride;
      return size;
    case PIX_FMT_PAL8:
      /* already forced to be with stride, so same result as other function */
      stride = GST_ROUND_UP_4 (width);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = ptr + size;    /* palette is stored here as 256 32 bit words */
      picture->data[2] = NULL;
      picture->linesize[0] = stride;
      picture->linesize[1] = 4;
      return size + 256 * 4;
    default:
      picture->data[0] = NULL;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->data[3] = NULL;
      return -1;
  }

  /* NOTE(review): not reached — every switch case returns above */
  return 0;
}