|
1 /* GStreamer |
|
2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu> |
|
3 * Copyright (C) 2005 David Schleef <ds@schleef.org> |
|
4 * |
|
5 * This library is free software; you can redistribute it and/or |
|
6 * modify it under the terms of the GNU Library General Public |
|
7 * License as published by the Free Software Foundation; either |
|
8 * version 2 of the License, or (at your option) any later version. |
|
9 * |
|
10 * This library is distributed in the hope that it will be useful, |
|
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
|
13 * Library General Public License for more details. |
|
14 * |
|
15 * You should have received a copy of the GNU Library General Public |
|
16 * License along with this library; if not, write to the |
|
17 * Free Software Foundation, Inc., 59 Temple Place - Suite 330, |
|
18 * Boston, MA 02111-1307, USA. |
|
19 */ |
|
20 |
|
21 /** |
|
22 * SECTION:element-videoscale |
|
23 * @see_also: videorate, ffmpegcolorspace |
|
24 * |
|
25 * <refsect2> |
|
26 * <para> |
|
27 * This element resizes video frames. By default the element will try to |
|
28 * negotiate to the same size on the source and sinkpad so that no scaling |
|
29 * is needed. It is therefore safe to insert this element in a pipeline to |
|
30 * get more robust behaviour without any cost if no scaling is needed. |
|
31 * </para> |
|
32 * <para> |
|
33 * This element supports a wide range of color spaces including various YUV and |
|
34 * RGB formats and is therefore generally able to operate anywhere in a |
|
35 * pipeline. |
|
36 * </para> |
|
37 * <title>Example pipelines</title> |
|
38 * <para> |
|
39 * <programlisting> |
|
40 * gst-launch -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! ffmpegcolorspace ! videoscale ! ximagesink |
|
41 * </programlisting> |
|
 * Decode an Ogg/Theora file and display the video using ximagesink. Since
|
43 * ximagesink cannot perform scaling, the video scaling will be performed by |
|
44 * videoscale when you resize the video window. |
|
45 * To create the test Ogg/Theora file refer to the documentation of theoraenc. |
|
46 * </para> |
|
47 * <para> |
|
48 * <programlisting> |
|
49 * gst-launch -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videoscale ! video/x-raw-yuv, width=50 ! xvimagesink |
|
50 * </programlisting> |
|
 * Decode an Ogg/Theora file and display the video using xvimagesink with a width of
|
52 * 50. |
|
53 * </para> |
|
54 * </refsect2> |
|
55 * |
|
56 * Last reviewed on 2006-03-02 (0.10.4) |
|
57 */ |
|
58 |
|
59 #ifdef HAVE_CONFIG_H |
|
60 #include "config.h" |
|
61 #endif |
|
62 |
|
63 #include <string.h> |
|
64 |
|
65 #include <gst/video/video.h> |
|
66 #include <gst/liboil.h> |
|
67 |
|
68 |
|
69 #include "gstvideoscale.h" |
|
70 #include "vs_image.h" |
|
71 #include "vs_4tap.h" |
|
72 |
|
73 #include <glib_global.h> |
|
74 |
|
75 |
|
/* debug variable definition */
GST_DEBUG_CATEGORY (video_scale_debug);

/* elementfactory information */
static const GstElementDetails video_scale_details =
GST_ELEMENT_DETAILS ("Video scaler",
    "Filter/Effect/Video",
    "Resizes video",
    "Wim Taymans <wim.taymans@chello.be>");

/* default value for the "method" property */
#define DEFAULT_PROP_METHOD	GST_VIDEO_SCALE_NEAREST

/* GObject property ids */
enum
{
  PROP_0,
  PROP_METHOD
  /* FILL ME */
};

/* can't handle width/height of 1 yet, since we divide a lot by (n-1) */
#undef GST_VIDEO_SIZE_RANGE
#define GST_VIDEO_SIZE_RANGE "(int) [ 2, MAX ]"
|
/* Caps for every pixel format this element can scale.
 * NOTE: the order of the entries must stay in sync with the
 * GST_VIDEO_SCALE_* enum below — the array index is used as the
 * format id (see gst_video_scale_get_format). */
static GstStaticCaps gst_video_scale_format_caps[] = {
  GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_xRGB),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_xBGR),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBA),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_ARGB),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRA),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_ABGR),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_BGR),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV")),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("YUY2")),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("YVYU")),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("UYVY")),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("Y800")),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420")),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("YV12")),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB_16),
  GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB_15)
};
|
120 |
|
/* Internal format ids.  Each value is the index of the matching entry in
 * gst_video_scale_format_caps above; keep both lists in the same order. */
enum
{
  GST_VIDEO_SCALE_RGBx = 0,
  GST_VIDEO_SCALE_xRGB,
  GST_VIDEO_SCALE_BGRx,
  GST_VIDEO_SCALE_xBGR,
  GST_VIDEO_SCALE_RGBA,
  GST_VIDEO_SCALE_ARGB,
  GST_VIDEO_SCALE_BGRA,
  GST_VIDEO_SCALE_ABGR,
  GST_VIDEO_SCALE_RGB,
  GST_VIDEO_SCALE_BGR,
  GST_VIDEO_SCALE_AYUV,
  GST_VIDEO_SCALE_YUY2,
  GST_VIDEO_SCALE_YVYU,
  GST_VIDEO_SCALE_UYVY,
  GST_VIDEO_SCALE_Y,
  GST_VIDEO_SCALE_I420,
  GST_VIDEO_SCALE_YV12,
  GST_VIDEO_SCALE_RGB565,
  GST_VIDEO_SCALE_RGB555
};
|
143 |
|
144 #define GST_TYPE_VIDEO_SCALE_METHOD (gst_video_scale_method_get_type()) |
|
145 static GType |
|
146 gst_video_scale_method_get_type (void) |
|
147 { |
|
148 static GType video_scale_method_type = 0; |
|
149 static const GEnumValue video_scale_methods[] = { |
|
150 {GST_VIDEO_SCALE_NEAREST, "Nearest Neighbour", "nearest-neighbour"}, |
|
151 {GST_VIDEO_SCALE_BILINEAR, "Bilinear", "bilinear"}, |
|
152 {GST_VIDEO_SCALE_4TAP, "4-tap", "4-tap"}, |
|
153 {0, NULL, NULL}, |
|
154 }; |
|
155 |
|
156 if (!video_scale_method_type) { |
|
157 video_scale_method_type = |
|
158 g_enum_register_static ("GstVideoScaleMethod", video_scale_methods); |
|
159 } |
|
160 return video_scale_method_type; |
|
161 } |
|
162 |
|
163 static GstCaps * |
|
164 gst_video_scale_get_capslist (void) |
|
165 { |
|
166 static GstCaps *caps; |
|
167 |
|
168 if (caps == NULL) { |
|
169 int i; |
|
170 |
|
171 caps = gst_caps_new_empty (); |
|
172 for (i = 0; i < G_N_ELEMENTS (gst_video_scale_format_caps); i++) |
|
173 gst_caps_append (caps, |
|
174 gst_caps_make_writable |
|
175 (gst_static_caps_get (&gst_video_scale_format_caps[i]))); |
|
176 } |
|
177 |
|
178 return caps; |
|
179 } |
|
180 |
|
181 static GstPadTemplate * |
|
182 gst_video_scale_src_template_factory (void) |
|
183 { |
|
184 return gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, |
|
185 gst_caps_ref (gst_video_scale_get_capslist ())); |
|
186 } |
|
187 |
|
188 static GstPadTemplate * |
|
189 gst_video_scale_sink_template_factory (void) |
|
190 { |
|
191 return gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, |
|
192 gst_caps_ref (gst_video_scale_get_capslist ())); |
|
193 } |
|
194 |
|
195 |
|
/* GObject/GType plumbing */
static void gst_video_scale_base_init (gpointer g_class);
static void gst_video_scale_class_init (GstVideoScaleClass * klass);
static void gst_video_scale_init (GstVideoScale * videoscale);
static void gst_video_scale_finalize (GstVideoScale * videoscale);
static gboolean gst_video_scale_src_event (GstBaseTransform * trans,
    GstEvent * event);

/* base transform vmethods */
static GstCaps *gst_video_scale_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps);
static gboolean gst_video_scale_set_caps (GstBaseTransform * trans,
    GstCaps * in, GstCaps * out);
static gboolean gst_video_scale_get_unit_size (GstBaseTransform * trans,
    GstCaps * caps, guint * size);
static GstFlowReturn gst_video_scale_transform (GstBaseTransform * trans,
    GstBuffer * in, GstBuffer * out);
static void gst_video_scale_fixate_caps (GstBaseTransform * base,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);

/* property accessors */
static void gst_video_scale_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_video_scale_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

/* parent class pointer, filled in by class_init; used to chain up */
static GstElementClass *parent_class = NULL;
#ifdef __SYMBIAN32__
EXPORT_C
#endif
|
224 |
|
225 |
|
226 |
|
/* Register (once) and return the GstVideoScale GType, derived from
 * GstBaseTransform.  Standard pre-G_DEFINE_TYPE boilerplate. */
GType
gst_video_scale_get_type (void)
{
  static GType video_scale_type = 0;

  if (!video_scale_type) {
    static const GTypeInfo video_scale_info = {
      sizeof (GstVideoScaleClass),
      gst_video_scale_base_init,
      NULL,
      (GClassInitFunc) gst_video_scale_class_init,
      NULL,
      NULL,
      sizeof (GstVideoScale),
      0,
      (GInstanceInitFunc) gst_video_scale_init,
    };

    video_scale_type =
        g_type_register_static (GST_TYPE_BASE_TRANSFORM, "GstVideoScale",
        &video_scale_info, 0);
  }
  return video_scale_type;
}
|
251 |
|
/* base_init: install the element details and the sink/src pad templates
 * on the element class. */
static void
gst_video_scale_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_set_details (element_class, &video_scale_details);

  gst_element_class_add_pad_template (element_class,
      gst_video_scale_sink_template_factory ());
  gst_element_class_add_pad_template (element_class,
      gst_video_scale_src_template_factory ());
}
|
264 |
|
/* class_init: hook up GObject property handling, the "method" property,
 * and all GstBaseTransform virtual methods. */
static void
gst_video_scale_class_init (GstVideoScaleClass * klass)
{
  GObjectClass *gobject_class;
  GstBaseTransformClass *trans_class;

  gobject_class = (GObjectClass *) klass;
  trans_class = (GstBaseTransformClass *) klass;

  gobject_class->finalize = (GObjectFinalizeFunc) gst_video_scale_finalize;
  gobject_class->set_property = gst_video_scale_set_property;
  gobject_class->get_property = gst_video_scale_get_property;

  /* "method" selects the scaling algorithm (nearest/bilinear/4-tap) */
  g_object_class_install_property (gobject_class, PROP_METHOD,
      g_param_spec_enum ("method", "method", "method",
          GST_TYPE_VIDEO_SCALE_METHOD, DEFAULT_PROP_METHOD, G_PARAM_READWRITE));

  trans_class->transform_caps =
      GST_DEBUG_FUNCPTR (gst_video_scale_transform_caps);
  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_scale_set_caps);
  trans_class->get_unit_size =
      GST_DEBUG_FUNCPTR (gst_video_scale_get_unit_size);
  trans_class->transform = GST_DEBUG_FUNCPTR (gst_video_scale_transform);
  trans_class->fixate_caps = GST_DEBUG_FUNCPTR (gst_video_scale_fixate_caps);
  trans_class->src_event = GST_DEBUG_FUNCPTR (gst_video_scale_src_event);

  /* when in/out caps are identical, basetransform forwards buffers as-is */
  trans_class->passthrough_on_same_caps = TRUE;

  parent_class = g_type_class_peek_parent (klass);
}
|
295 |
|
296 static void |
|
297 gst_video_scale_init (GstVideoScale * videoscale) |
|
298 { |
|
299 gst_base_transform_set_qos_enabled (GST_BASE_TRANSFORM (videoscale), TRUE); |
|
300 videoscale->tmp_buf = NULL; |
|
301 videoscale->method = DEFAULT_PROP_METHOD; |
|
302 } |
|
303 |
|
304 static void |
|
305 gst_video_scale_finalize (GstVideoScale * videoscale) |
|
306 { |
|
307 if (videoscale->tmp_buf) |
|
308 g_free (videoscale->tmp_buf); |
|
309 |
|
310 G_OBJECT_CLASS (parent_class)->finalize (G_OBJECT (videoscale)); |
|
311 } |
|
312 |
|
/* set_property: store the "method" property under the object lock so the
 * streaming thread (transform/transform_caps) sees a consistent value. */
static void
gst_video_scale_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoScale *vscale = GST_VIDEO_SCALE (object);

  switch (prop_id) {
    case PROP_METHOD:
      GST_OBJECT_LOCK (vscale);
      vscale->method = g_value_get_enum (value);
      GST_OBJECT_UNLOCK (vscale);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
|
330 |
|
/* get_property: read the "method" property under the object lock. */
static void
gst_video_scale_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstVideoScale *vscale = GST_VIDEO_SCALE (object);

  switch (prop_id) {
    case PROP_METHOD:
      GST_OBJECT_LOCK (vscale);
      g_value_set_enum (value, vscale->method);
      GST_OBJECT_UNLOCK (vscale);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
|
348 |
|
/* transform_caps: given fixed-or-not caps on one pad, return what the
 * other pad could produce: same format, but any width/height, and (if a
 * pixel-aspect-ratio is present) the input PAR first plus a full PAR
 * range as a fallback.  Returns empty caps when the selected scaling
 * method cannot handle the format (4-tap is I420/YV12 only). */
static GstCaps *
gst_video_scale_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstVideoScale *videoscale;
  GstCaps *ret;
  GstStructure *structure;
  const GValue *par;
  gint method;

  /* this function is always called with a simple caps */
  g_return_val_if_fail (GST_CAPS_IS_SIMPLE (caps), NULL);

  videoscale = GST_VIDEO_SCALE (trans);

  /* snapshot the method under the lock; it can change at any time */
  GST_OBJECT_LOCK (videoscale);
  method = videoscale->method;
  GST_OBJECT_UNLOCK (videoscale);

  structure = gst_caps_get_structure (caps, 0);

  /* check compatibility of format and method before we copy the input caps */
  if (method == GST_VIDEO_SCALE_4TAP) {
    guint32 fourcc;

    if (!gst_structure_has_name (structure, "video/x-raw-yuv"))
      goto method_not_implemented_for_format;
    if (!gst_structure_get_fourcc (structure, "format", &fourcc))
      goto method_not_implemented_for_format;
    if (fourcc != GST_MAKE_FOURCC ('I', '4', '2', '0') &&
        fourcc != GST_MAKE_FOURCC ('Y', 'V', '1', '2'))
      goto method_not_implemented_for_format;
  }

  ret = gst_caps_copy (caps);
  structure = gst_caps_get_structure (ret, 0);

  /* NOTE(review): this advertises [1, MAX] although the file's
   * GST_VIDEO_SIZE_RANGE says sizes of 1 are not handled — confirm
   * whether the lower bound here should be 2 as well. */
  gst_structure_set (structure,
      "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
      "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);

  /* if pixel aspect ratio, make a range of it */
  if ((par = gst_structure_get_value (structure, "pixel-aspect-ratio"))) {
    GstCaps *copy;
    GstStructure *cstruct;

    /* copy input PAR first, this is the preferred PAR */
    gst_structure_set_value (structure, "pixel-aspect-ratio", par);

    /* then make a copy with a fraction range as a second choice */
    copy = gst_caps_copy (ret);
    cstruct = gst_caps_get_structure (copy, 0);
    gst_structure_set (cstruct,
        "pixel-aspect-ratio", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);

    /* and append */
    gst_caps_append (ret, copy);
  }

  GST_DEBUG_OBJECT (trans, "returning caps: %" GST_PTR_FORMAT, ret);

  return ret;

  /* ERRORS */
method_not_implemented_for_format:
  {
    GST_DEBUG_OBJECT (trans, "method %d not implemented for format %"
        GST_PTR_FORMAT ", returning empty caps", method, caps);
    return gst_caps_new_empty ();
  }
}
|
419 |
|
420 static int |
|
421 gst_video_scale_get_format (GstCaps * caps) |
|
422 { |
|
423 int i; |
|
424 GstCaps *icaps, *scaps; |
|
425 |
|
426 for (i = 0; i < G_N_ELEMENTS (gst_video_scale_format_caps); i++) { |
|
427 scaps = gst_static_caps_get (&gst_video_scale_format_caps[i]); |
|
428 icaps = gst_caps_intersect (caps, scaps); |
|
429 if (!gst_caps_is_empty (icaps)) { |
|
430 gst_caps_unref (icaps); |
|
431 return i; |
|
432 } |
|
433 gst_caps_unref (icaps); |
|
434 } |
|
435 |
|
436 return -1; |
|
437 } |
|
438 |
|
/* calculate the size of a buffer */
/* Fill img->width/height/stride for the given format and dimensions, and
 * store the total buffer size in *size.  Strides follow the project's
 * convention of rounding row sizes up to 4 bytes.  Posts an element error
 * and returns FALSE for an unknown format. */
static gboolean
gst_video_scale_prepare_size (GstVideoScale * videoscale, gint format,
    VSImage * img, gint width, gint height, guint * size)
{
  gboolean res = TRUE;

  img->width = width;
  img->height = height;

  switch (format) {
      /* 4 bytes per pixel, packed */
    case GST_VIDEO_SCALE_RGBx:
    case GST_VIDEO_SCALE_xRGB:
    case GST_VIDEO_SCALE_BGRx:
    case GST_VIDEO_SCALE_xBGR:
    case GST_VIDEO_SCALE_RGBA:
    case GST_VIDEO_SCALE_ARGB:
    case GST_VIDEO_SCALE_BGRA:
    case GST_VIDEO_SCALE_ABGR:
    case GST_VIDEO_SCALE_AYUV:
      img->stride = img->width * 4;
      *size = img->stride * img->height;
      break;
      /* 3 bytes per pixel, rows padded to 4 bytes */
    case GST_VIDEO_SCALE_RGB:
    case GST_VIDEO_SCALE_BGR:
      img->stride = GST_ROUND_UP_4 (img->width * 3);
      *size = img->stride * img->height;
      break;
      /* 2 bytes per pixel, packed YUV, rows padded to 4 bytes */
    case GST_VIDEO_SCALE_YUY2:
    case GST_VIDEO_SCALE_YVYU:
    case GST_VIDEO_SCALE_UYVY:
      img->stride = GST_ROUND_UP_4 (img->width * 2);
      *size = img->stride * img->height;
      break;
      /* 1 byte per pixel greyscale */
    case GST_VIDEO_SCALE_Y:
      img->stride = GST_ROUND_UP_4 (img->width);
      *size = img->stride * img->height;
      break;
      /* planar 4:2:0: a full-size luma plane plus two half-size chroma
       * planes; img describes only the luma plane here */
    case GST_VIDEO_SCALE_I420:
    case GST_VIDEO_SCALE_YV12:
    {
      gulong img_u_stride, img_u_height;

      img->stride = GST_ROUND_UP_4 (img->width);

      img_u_height = GST_ROUND_UP_2 (img->height) / 2;
      img_u_stride = GST_ROUND_UP_4 (img->stride / 2);

      *size = img->stride * GST_ROUND_UP_2 (img->height) +
          2 * img_u_stride * img_u_height;
      break;
    }
      /* 16/15-bit RGB: 2 bytes per pixel, rows padded to 4 bytes */
    case GST_VIDEO_SCALE_RGB565:
      img->stride = GST_ROUND_UP_4 (img->width * 2);
      *size = img->stride * img->height;
      break;
    case GST_VIDEO_SCALE_RGB555:
      img->stride = GST_ROUND_UP_4 (img->width * 2);
      *size = img->stride * img->height;
      break;
    default:
      goto unknown_format;
  }

  return res;

  /* ERRORS */
unknown_format:
  {
    GST_ELEMENT_ERROR (videoscale, STREAM, NOT_IMPLEMENTED, (NULL),
        ("Unsupported format %d", videoscale->format));
    return FALSE;
  }
}
|
513 |
|
514 static gboolean |
|
515 parse_caps (GstCaps * caps, gint * format, gint * width, gint * height) |
|
516 { |
|
517 gboolean ret; |
|
518 GstStructure *structure; |
|
519 |
|
520 structure = gst_caps_get_structure (caps, 0); |
|
521 ret = gst_structure_get_int (structure, "width", width); |
|
522 ret &= gst_structure_get_int (structure, "height", height); |
|
523 |
|
524 if (format) |
|
525 *format = gst_video_scale_get_format (caps); |
|
526 |
|
527 return ret; |
|
528 } |
|
529 |
|
530 static gboolean |
|
531 gst_video_scale_set_caps (GstBaseTransform * trans, GstCaps * in, GstCaps * out) |
|
532 { |
|
533 GstVideoScale *videoscale; |
|
534 gboolean ret; |
|
535 |
|
536 videoscale = GST_VIDEO_SCALE (trans); |
|
537 |
|
538 ret = parse_caps (in, &videoscale->format, &videoscale->from_width, |
|
539 &videoscale->from_height); |
|
540 ret &= parse_caps (out, NULL, &videoscale->to_width, &videoscale->to_height); |
|
541 if (!ret) |
|
542 goto done; |
|
543 |
|
544 if (!(ret = gst_video_scale_prepare_size (videoscale, videoscale->format, |
|
545 &videoscale->src, videoscale->from_width, videoscale->from_height, |
|
546 &videoscale->src_size))) |
|
547 /* prepare size has posted an error when it returns FALSE */ |
|
548 goto done; |
|
549 |
|
550 if (!(ret = gst_video_scale_prepare_size (videoscale, videoscale->format, |
|
551 &videoscale->dest, videoscale->to_width, videoscale->to_height, |
|
552 &videoscale->dest_size))) |
|
553 /* prepare size has posted an error when it returns FALSE */ |
|
554 goto done; |
|
555 |
|
556 if (videoscale->tmp_buf) |
|
557 g_free (videoscale->tmp_buf); |
|
558 |
|
559 videoscale->tmp_buf = g_malloc (videoscale->dest.stride * 4); |
|
560 |
|
561 /* FIXME: par */ |
|
562 GST_DEBUG_OBJECT (videoscale, "from=%dx%d, size %d -> to=%dx%d, size %d", |
|
563 videoscale->from_width, videoscale->from_height, videoscale->src_size, |
|
564 videoscale->to_width, videoscale->to_height, videoscale->dest_size); |
|
565 |
|
566 done: |
|
567 return ret; |
|
568 } |
|
569 |
|
570 static gboolean |
|
571 gst_video_scale_get_unit_size (GstBaseTransform * trans, GstCaps * caps, |
|
572 guint * size) |
|
573 { |
|
574 GstVideoScale *videoscale; |
|
575 gint format, width, height; |
|
576 VSImage img; |
|
577 |
|
578 g_assert (size); |
|
579 |
|
580 videoscale = GST_VIDEO_SCALE (trans); |
|
581 |
|
582 if (!parse_caps (caps, &format, &width, &height)) |
|
583 return FALSE; |
|
584 |
|
585 if (!gst_video_scale_prepare_size (videoscale, format, &img, width, height, |
|
586 size)) |
|
587 return FALSE; |
|
588 |
|
589 return TRUE; |
|
590 } |
|
591 |
|
/* fixate_caps: pick concrete width/height (and PAR) for othercaps based on
 * the fixed caps of the opposite pad.  When both sides carry a
 * pixel-aspect-ratio, dimensions are chosen to preserve the display aspect
 * ratio; otherwise width/height are simply fixated towards the input's. */
static void
gst_video_scale_fixate_caps (GstBaseTransform * base, GstPadDirection direction,
    GstCaps * caps, GstCaps * othercaps)
{
  GstStructure *ins, *outs;
  const GValue *from_par, *to_par;

  g_return_if_fail (gst_caps_is_fixed (caps));

  GST_DEBUG_OBJECT (base, "trying to fixate othercaps %" GST_PTR_FORMAT
      " based on caps %" GST_PTR_FORMAT, othercaps, caps);

  ins = gst_caps_get_structure (caps, 0);
  outs = gst_caps_get_structure (othercaps, 0);

  from_par = gst_structure_get_value (ins, "pixel-aspect-ratio");
  to_par = gst_structure_get_value (outs, "pixel-aspect-ratio");

  /* we have both PAR but they might not be fixated */
  if (from_par && to_par) {
    gint from_w, from_h, from_par_n, from_par_d, to_par_n, to_par_d;
    gint count = 0, w = 0, h = 0;
    guint num, den;

    /* from_par should be fixed */
    g_return_if_fail (gst_value_is_fixed (from_par));

    from_par_n = gst_value_get_fraction_numerator (from_par);
    from_par_d = gst_value_get_fraction_denominator (from_par);

    /* fixate the out PAR */
    if (!gst_value_is_fixed (to_par)) {
      GST_DEBUG_OBJECT (base, "fixating to_par to %dx%d", from_par_n,
          from_par_d);
      gst_structure_fixate_field_nearest_fraction (outs, "pixel-aspect-ratio",
          from_par_n, from_par_d);
    }

    /* NOTE(review): to_par is not re-fetched from outs after the fixate
     * call above; verify the GValue pointer still refers to a fixed
     * fraction at this point. */
    to_par_n = gst_value_get_fraction_numerator (to_par);
    to_par_d = gst_value_get_fraction_denominator (to_par);

    /* if both width and height are already fixed, we can't do anything
     * about it anymore */
    if (gst_structure_get_int (outs, "width", &w))
      ++count;
    if (gst_structure_get_int (outs, "height", &h))
      ++count;
    if (count == 2) {
      GST_DEBUG_OBJECT (base, "dimensions already set to %dx%d, not fixating",
          w, h);
      return;
    }

    gst_structure_get_int (ins, "width", &from_w);
    gst_structure_get_int (ins, "height", &from_h);

    /* compute the output display ratio num/den from input size and PARs */
    if (!gst_video_calculate_display_ratio (&num, &den, from_w, from_h,
            from_par_n, from_par_d, to_par_n, to_par_d)) {
      GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
          ("Error calculating the output scaled size - integer overflow"));
      return;
    }

    GST_DEBUG_OBJECT (base,
        "scaling input with %dx%d and PAR %d/%d to output PAR %d/%d",
        from_w, from_h, from_par_n, from_par_d, to_par_n, to_par_d);
    GST_DEBUG_OBJECT (base, "resulting output should respect ratio of %d/%d",
        num, den);

    /* now find a width x height that respects this display ratio.
     * prefer those that have one of w/h the same as the incoming video
     * using wd / hd = num / den */

    /* if one of the output width or height is fixed, we work from there */
    if (h) {
      GST_DEBUG_OBJECT (base, "height is fixed,scaling width");
      w = (guint) gst_util_uint64_scale_int (h, num, den);
    } else if (w) {
      GST_DEBUG_OBJECT (base, "width is fixed, scaling height");
      h = (guint) gst_util_uint64_scale_int (w, den, num);
    } else {
      /* none of width or height is fixed, figure out both of them based only on
       * the input width and height */
      /* check hd / den is an integer scale factor, and scale wd with the PAR */
      if (from_h % den == 0) {
        GST_DEBUG_OBJECT (base, "keeping video height");
        h = from_h;
        w = (guint) gst_util_uint64_scale_int (h, num, den);
      } else if (from_w % num == 0) {
        GST_DEBUG_OBJECT (base, "keeping video width");
        w = from_w;
        h = (guint) gst_util_uint64_scale_int (w, den, num);
      } else {
        GST_DEBUG_OBJECT (base, "approximating but keeping video height");
        h = from_h;
        w = (guint) gst_util_uint64_scale_int (h, num, den);
      }
    }
    GST_DEBUG_OBJECT (base, "scaling to %dx%d", w, h);

    /* now fixate */
    gst_structure_fixate_field_nearest_int (outs, "width", w);
    gst_structure_fixate_field_nearest_int (outs, "height", h);
  } else {
    /* no PAR on one of the sides: just fixate output width/height
     * towards the input dimensions */
    gint width, height;

    if (gst_structure_get_int (ins, "width", &width)) {
      if (gst_structure_has_field (outs, "width")) {
        gst_structure_fixate_field_nearest_int (outs, "width", width);
      }
    }
    if (gst_structure_get_int (ins, "height", &height)) {
      if (gst_structure_has_field (outs, "height")) {
        gst_structure_fixate_field_nearest_int (outs, "height", height);
      }
    }
  }

  GST_DEBUG_OBJECT (base, "fixated othercaps to %" GST_PTR_FORMAT, othercaps);
}
|
712 |
|
/* Point img (and, for planar formats, img_u/img_v) at the plane data
 * inside buf.  img must already carry the width/height/stride computed by
 * gst_video_scale_prepare_size; for packed formats the chroma images are
 * left untouched. */
static gboolean
gst_video_scale_prepare_image (gint format, GstBuffer * buf,
    VSImage * img, VSImage * img_u, VSImage * img_v)
{
  gboolean res = TRUE;

  img->pixels = GST_BUFFER_DATA (buf);

  switch (format) {
    case GST_VIDEO_SCALE_I420:
    case GST_VIDEO_SCALE_YV12:
      /* chroma planes follow the (height-rounded) luma plane; both are
       * half-size with rows padded to 4 bytes.
       * NOTE(review): for YV12 the plane after luma is actually V, not U;
       * since both chroma planes have identical geometry the scaling
       * result is unaffected by the naming — confirm no caller relies on
       * img_u really being U. */
      img_u->pixels = img->pixels + GST_ROUND_UP_2 (img->height) * img->stride;
      img_u->height = GST_ROUND_UP_2 (img->height) / 2;
      img_u->width = GST_ROUND_UP_2 (img->width) / 2;
      img_u->stride = GST_ROUND_UP_4 (img_u->width);
      memcpy (img_v, img_u, sizeof (*img_v));
      img_v->pixels = img_u->pixels + img_u->height * img_u->stride;
      break;
    default:
      break;
  }
  return res;
}
|
736 |
|
/* transform: scale one input frame into the (pre-allocated) output buffer,
 * dispatching on the scaling method and the negotiated pixel format to the
 * matching vs_image_scale_* kernel.  Planar 4:2:0 formats scale the three
 * planes independently.  Posts an element error and returns GST_FLOW_ERROR
 * for unsupported method/format combinations. */
static GstFlowReturn
gst_video_scale_transform (GstBaseTransform * trans, GstBuffer * in,
    GstBuffer * out)
{
  GstVideoScale *videoscale;
  GstFlowReturn ret = GST_FLOW_OK;
  VSImage *dest;
  VSImage *src;
  VSImage dest_u;
  VSImage dest_v;
  VSImage src_u;
  VSImage src_v;
  gint method;

  videoscale = GST_VIDEO_SCALE (trans);

  /* snapshot the method so a concurrent property change can't tear it */
  GST_OBJECT_LOCK (videoscale);
  method = videoscale->method;
  GST_OBJECT_UNLOCK (videoscale);

  src = &videoscale->src;
  dest = &videoscale->dest;

  /* attach the buffers' plane pointers to the precomputed geometry */
  gst_video_scale_prepare_image (videoscale->format, in, src, &src_u, &src_v);
  gst_video_scale_prepare_image (videoscale->format, out, dest, &dest_u,
      &dest_v);

  switch (method) {
    case GST_VIDEO_SCALE_NEAREST:
      switch (videoscale->format) {
        case GST_VIDEO_SCALE_RGBx:
        case GST_VIDEO_SCALE_xRGB:
        case GST_VIDEO_SCALE_BGRx:
        case GST_VIDEO_SCALE_xBGR:
        case GST_VIDEO_SCALE_RGBA:
        case GST_VIDEO_SCALE_ARGB:
        case GST_VIDEO_SCALE_BGRA:
        case GST_VIDEO_SCALE_ABGR:
        case GST_VIDEO_SCALE_AYUV:
          vs_image_scale_nearest_RGBA (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_RGB:
        case GST_VIDEO_SCALE_BGR:
          vs_image_scale_nearest_RGB (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_YUY2:
        case GST_VIDEO_SCALE_YVYU:
          vs_image_scale_nearest_YUYV (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_UYVY:
          vs_image_scale_nearest_UYVY (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_Y:
          vs_image_scale_nearest_Y (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_I420:
        case GST_VIDEO_SCALE_YV12:
          /* planar: scale luma and both chroma planes separately */
          vs_image_scale_nearest_Y (dest, src, videoscale->tmp_buf);
          vs_image_scale_nearest_Y (&dest_u, &src_u, videoscale->tmp_buf);
          vs_image_scale_nearest_Y (&dest_v, &src_v, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_RGB565:
          vs_image_scale_nearest_RGB565 (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_RGB555:
          vs_image_scale_nearest_RGB555 (dest, src, videoscale->tmp_buf);
          break;
        default:
          goto unsupported;
      }
      break;
    case GST_VIDEO_SCALE_BILINEAR:
      switch (videoscale->format) {
        case GST_VIDEO_SCALE_RGBx:
        case GST_VIDEO_SCALE_xRGB:
        case GST_VIDEO_SCALE_BGRx:
        case GST_VIDEO_SCALE_xBGR:
        case GST_VIDEO_SCALE_RGBA:
        case GST_VIDEO_SCALE_ARGB:
        case GST_VIDEO_SCALE_BGRA:
        case GST_VIDEO_SCALE_ABGR:
        case GST_VIDEO_SCALE_AYUV:
          vs_image_scale_linear_RGBA (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_RGB:
        case GST_VIDEO_SCALE_BGR:
          vs_image_scale_linear_RGB (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_YUY2:
        case GST_VIDEO_SCALE_YVYU:
          vs_image_scale_linear_YUYV (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_UYVY:
          vs_image_scale_linear_UYVY (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_Y:
          vs_image_scale_linear_Y (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_I420:
        case GST_VIDEO_SCALE_YV12:
          /* planar: scale luma and both chroma planes separately */
          vs_image_scale_linear_Y (dest, src, videoscale->tmp_buf);
          vs_image_scale_linear_Y (&dest_u, &src_u, videoscale->tmp_buf);
          vs_image_scale_linear_Y (&dest_v, &src_v, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_RGB565:
          vs_image_scale_linear_RGB565 (dest, src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_RGB555:
          vs_image_scale_linear_RGB555 (dest, src, videoscale->tmp_buf);
          break;
        default:
          goto unsupported;
      }
      break;
    case GST_VIDEO_SCALE_4TAP:
      switch (videoscale->format) {
        case GST_VIDEO_SCALE_I420:
        case GST_VIDEO_SCALE_YV12:
          vs_image_scale_4tap_Y (dest, src, videoscale->tmp_buf);
          vs_image_scale_4tap_Y (&dest_u, &src_u, videoscale->tmp_buf);
          vs_image_scale_4tap_Y (&dest_v, &src_v, videoscale->tmp_buf);
          break;
        default:
          /* FIXME: update gst_video_scale_transform_caps once RGB and/or
           * other YUV formats work too */
          goto unsupported;
      }
      break;
    default:
      goto unknown_mode;
  }

  GST_LOG_OBJECT (videoscale, "pushing buffer of %d bytes",
      GST_BUFFER_SIZE (out));

  return ret;

  /* ERRORS */
unsupported:
  {
    GST_ELEMENT_ERROR (videoscale, STREAM, NOT_IMPLEMENTED, (NULL),
        ("Unsupported format %d for scaling method %d",
            videoscale->format, method));
    return GST_FLOW_ERROR;
  }
unknown_mode:
  {
    GST_ELEMENT_ERROR (videoscale, STREAM, NOT_IMPLEMENTED, (NULL),
        ("Unknown scaling method %d", videoscale->method));
    return GST_FLOW_ERROR;
  }
}
|
889 |
|
/* src_event: rescale the pointer coordinates of upstream-bound navigation
 * events from output to input geometry, then forward the event via the
 * parent class.  All other events pass through untouched. */
static gboolean
gst_video_scale_src_event (GstBaseTransform * trans, GstEvent * event)
{
  GstVideoScale *videoscale;
  gboolean ret;
  double a;
  GstStructure *structure;

  videoscale = GST_VIDEO_SCALE (trans);

  GST_DEBUG_OBJECT (videoscale, "handling %s event",
      GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NAVIGATION:
      /* the event may be shared; make it writable before patching it */
      event =
          GST_EVENT (gst_mini_object_make_writable (GST_MINI_OBJECT (event)));

      structure = (GstStructure *) gst_event_get_structure (event);
      if (gst_structure_get_double (structure, "pointer_x", &a)) {
        gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE,
            a * videoscale->from_width / videoscale->to_width, NULL);
      }
      if (gst_structure_get_double (structure, "pointer_y", &a)) {
        gst_structure_set (structure, "pointer_y", G_TYPE_DOUBLE,
            a * videoscale->from_height / videoscale->to_height, NULL);
      }
      break;
    case GST_EVENT_QOS:
      break;
    default:
      break;
  }

  /* chain up so basetransform can do its own event handling */
  ret = GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);

  return ret;
}
|
928 |
|
/* plugin entry point: initialize liboil (except on Symbian), register the
 * videoscale element, set up the debug category and the 4-tap filter
 * tables. */
static gboolean
plugin_init (GstPlugin * plugin)
{
#ifndef __SYMBIAN32__
  oil_init ();
#endif
  if (!gst_element_register (plugin, "videoscale", GST_RANK_NONE,
          GST_TYPE_VIDEO_SCALE))
    return FALSE;

  GST_DEBUG_CATEGORY_INIT (video_scale_debug, "videoscale", 0,
      "videoscale element");

  /* precompute the 4-tap filter coefficient tables */
  vs_4tap_init ();

  return TRUE;
}
|
946 |
|
/* plugin descriptor boilerplate */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "videoscale",
    "Resizes video", plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME,
    GST_PACKAGE_ORIGIN);

/* Symbian DLL export: hand out the plugin descriptor defined by
 * GST_PLUGIN_DEFINE above. */
#ifdef __SYMBIAN32__
EXPORT_C
#endif
GstPluginDesc* _GST_PLUGIN_DESC()
{
  return &gst_plugin_desc;
}