/*
 * Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
 * All rights reserved.
 * This component and the accompanying materials are made available
 * under the terms of "Eclipse Public License v1.0"
 * which accompanies this distribution, and is available
 * at the URL "http://www.eclipse.org/legal/epl-v10.html".
 *
 * Initial Contributors:
 * Nokia Corporation - initial contribution.
 *
 * Contributors:
 *
 * Description:
 *
 */

#include <gst/gst.h>
#include <gst/video/video.h>
#include "xaadaptationgst.h"
#include "xamediaplayeradaptctx.h"
#include "xamediarecorderadaptctx.h"
#include "xacameraadaptctx.h"
#include "xavideopostprosessingitfadaptation.h"
#include "xastaticcameracapsadaptation.h"

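/* GStreamer adaptation of the OpenMAX AL video post processing interface
 * (XAVideoPostProcessingItf). The same implementation serves the media player,
 * media recorder and camera objects; bCtx->baseObj.ctxId is checked before any
 * pipeline elements are looked up or modified. */
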
/*
 * XAresult XAVideoPostProcessingItfAdapt_IsArbitraryRotationSupported(XAAdaptationGstCtx *bCtx,
 *                                                                     XAboolean *pSupported)
 */
XAresult XAVideoPostProcessingItfAdapt_IsArbitraryRotationSupported(XAAdaptationGstCtx *bCtx,
                                                                    XAboolean *pSupported)
{
    XAresult ret = XA_RESULT_SUCCESS;
    DEBUG_API("->XAVideoPostProcessingItfAdapt_IsArbitraryRotationSupported");

    if( !bCtx || (bCtx->baseObj.ctxId != XAMediaPlayerAdaptation && bCtx->baseObj.ctxId != XAMediaRecorderAdaptation
                  && bCtx->baseObj.ctxId != XACameraAdaptation) || !pSupported )
    {
        DEBUG_ERR("XA_RESULT_PARAMETER_INVALID");
        DEBUG_API("<-XAVideoPostProcessingItfAdapt_IsArbitraryRotationSupported");
        return XA_RESULT_PARAMETER_INVALID;
    }

    /* This feature is not supported, return boolean false */
    *pSupported = XA_BOOLEAN_FALSE;

    DEBUG_API("<-XAVideoPostProcessingItfAdapt_IsArbitraryRotationSupported");
    return ret;
}

/*
 * XAresult XAVideoPostProcessingItfAdapt_Commit(XAAdaptationGstCtx *bCtx)
 */
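/* The is* flags select which of the settings are applied on this commit; only
 * the corresponding pipeline properties are touched. rotation is given in
 * millidegrees, mirror as an XA_VIDEOMIRROR_* value and pSrcRect/pDestRect as
 * OpenMAX AL rectangles. backgroundColor and renderingHints are currently not
 * used (the background colour handling near the end is commented out). */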
XAresult XAVideoPostProcessingItfAdapt_Commit(XAAdaptationGstCtx *bCtx,
                                              XAmillidegree rotation,
                                              XAuint32 scaleOptions,
                                              XAuint32 backgroundColor,
                                              XAuint32 renderingHints,
                                              const XARectangle *pSrcRect,
                                              const XARectangle *pDestRect,
                                              XAuint32 mirror,
                                              XAboolean isMirror,
                                              XAboolean isRotate,
                                              XAboolean isDestRect,
                                              XAboolean isSrcRect,
                                              XAboolean isScaleOptions
                                              )
{
    XAresult ret = XA_RESULT_SUCCESS;
    XAmillidegree tempRotation = 0;
    XAmillidegree newRotation = 0;
    XAuint32 tempMirror = XA_VIDEOMIRROR_NONE;
    XAuint32 newMirror = XA_VIDEOMIRROR_NONE;
    XADataSource *dataSrc = NULL;
    GstElement *cropElement = NULL;
    GstElement *rotateElement = NULL;
    GstElement *mirrorElement = NULL;
    GstElement *boxElement = NULL;
    /*
    GstElement *balanceElement = NULL;
    GstElement *gammaElement = NULL;
    */
    GstElement *sink = NULL;
    GstElement *col1 = NULL;
    GstElement *scale = NULL;
    /*
    gdouble alphaValue = 1;
    gint videoBackgroundColor = 0;
    */
    gint cropscaleRight = 0, cropscaleBottom = 0,
         cropscaleLeft = 0,
         cropscaleTop = 0,
         videoscaleHeight = 0;
    GstStateChangeReturn gstRet = GST_STATE_CHANGE_SUCCESS;
    GstState gstOrigState = GST_STATE_PLAYING;
    GstState gstTmpState = GST_STATE_PLAYING;

    DEBUG_API("->XAVideoPostProcessingItfAdapt_Commit");

    if( !bCtx || (bCtx->baseObj.ctxId != XAMediaPlayerAdaptation && bCtx->baseObj.ctxId != XAMediaRecorderAdaptation
                  && bCtx->baseObj.ctxId != XACameraAdaptation) )
    {
        DEBUG_ERR("XA_RESULT_PARAMETER_INVALID");
        DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit");
        return XA_RESULT_PARAMETER_INVALID;
    }

    if( bCtx->baseObj.ctxId == XAMediaPlayerAdaptation )
    {
        XAMediaPlayerAdaptationCtx* ctx = (XAMediaPlayerAdaptationCtx*) bCtx;

        /* Get video pp elements */
        GstPad *pad = NULL;
        GstCaps *caps = NULL;
        col1 = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_colsp1");
        cropElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_crop");
        rotateElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_rotate");
        mirrorElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_mirror");
        boxElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_box");
        /*
        balanceElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_balance");
        gammaElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_gamma");
        */
        sink = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "videosink");
        pad = gst_element_get_static_pad(GST_ELEMENT(sink), "sink");
        caps = gst_caps_new_simple("video/x-raw-yuv",
                                   "width", G_TYPE_INT, 0,
                                   "height", G_TYPE_INT, 0,
                                   NULL);
        gst_pad_set_caps(pad, caps);

        /* get current mirror state and rotate value */
        tempMirror = ctx->curMirror;
        tempRotation = ctx->curRotation;
        dataSrc = ctx->xaSource;
    }

    if( bCtx->baseObj.ctxId == XAMediaRecorderAdaptation )
    {
        XAMediaRecorderAdaptationCtx* ctx = (XAMediaRecorderAdaptationCtx*) bCtx;

        /* Get video pp elements */
        GstPad *pad = NULL;
        GstCaps *caps = NULL;
        scale = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_scale2");
        col1 = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_colsp1");
        cropElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_crop");
        rotateElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_rotate");
        mirrorElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_mirror");
        boxElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_box");
        /* balanceElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_balance");
        gammaElement = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_gamma");*/
        sink = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "datasink");
        pad = gst_element_get_static_pad(GST_ELEMENT(sink), "sink");
        caps = gst_caps_new_simple("video/x-raw-yuv",
                                   "width", G_TYPE_INT, 0,
                                   "height", G_TYPE_INT, 0,
                                   NULL);
        gst_pad_set_caps(pad, caps);

        /* get current mirror state and rotate value */
        tempMirror = ctx->curMirror;
        tempRotation = ctx->curRotation;
    }

    if( bCtx->baseObj.ctxId == XACameraAdaptation )
    {
        XACameraAdaptationCtx* ctx = (XACameraAdaptationCtx*) bCtx;

        GstElement *videoPP = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "videopp_camera");
        if( !videoPP )
        {
            DEBUG_ERR("Could not receive videopp from camerabin!");
        }
        else
        {
            /* Get camera pp elements */
            GstPad *pad = NULL;
            GstCaps *caps = NULL;
            rotateElement = gst_bin_get_by_name( GST_BIN(videoPP), "pp_rotate");
            col1 = gst_bin_get_by_name( GST_BIN(videoPP), "pp_colsp1");
            cropElement = gst_bin_get_by_name( GST_BIN(videoPP), "pp_crop");
            mirrorElement = gst_bin_get_by_name( GST_BIN(videoPP), "pp_mirror");
            boxElement = gst_bin_get_by_name( GST_BIN(videoPP), "pp_box");
            /* balanceElement = gst_bin_get_by_name( GST_BIN(videoPP), "pp_balance");
            gammaElement = gst_bin_get_by_name( GST_BIN(videoPP), "pp_gamma");*/
            sink = gst_bin_get_by_name( GST_BIN(ctx->baseObj.bin), "pp_crop");
            pad = gst_element_get_static_pad(GST_ELEMENT(sink), "sink");
            caps = gst_caps_new_simple("video/x-raw-yuv",
                                       "width", G_TYPE_INT, 0,
                                       "height", G_TYPE_INT, 0,
                                       NULL);
            gst_pad_set_caps(pad, caps);

            /* get current mirror state and rotate value */
            tempMirror = ctx->curMirror;
            tempRotation = ctx->curRotation;
        }
    }

    /* Cropping */
    if( isSrcRect && pSrcRect && cropElement && col1 )
    {
        gint cropRight = 0;
        gint cropBottom = 0;
        gint cropLeft = 0;
        gint cropTop = 0;
        GstPad *videoPad = NULL;
        gint videoWidth = 0;
        gint videoHeight = 0;
        DEBUG_INFO("Start cropping!");

        DEBUG_INFO_A1("pSrcRect->left:%d", (int)pSrcRect->left);
        DEBUG_INFO_A1("pSrcRect->top:%d", (int)pSrcRect->top);
        DEBUG_INFO_A1("pSrcRect->width:%d", (int)pSrcRect->width);
        DEBUG_INFO_A1("pSrcRect->height:%d", (int)pSrcRect->height);

        if( bCtx->baseObj.ctxId == XACameraAdaptation )
        {
            GstCaps *caps = NULL;
            XACameraAdaptationCtx* ctx = (XACameraAdaptationCtx*) bCtx;
            gint capsCount = 0;
            gint iterator = 0;
            GstStructure *capsStruct = NULL;

            g_object_get( G_OBJECT(ctx->baseObj.bin), "filter-caps", &caps, NULL );

            if( !caps )
            {
                DEBUG_ERR("Cannot receive caps (filter-caps) from camerabin!");
                DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit - XA_RESULT_INTERNAL_ERROR");
                return XA_RESULT_INTERNAL_ERROR;
            }

            capsCount = gst_caps_get_size( caps );

            for( iterator = 0; iterator < capsCount; iterator++ )
            {
                capsStruct = gst_caps_get_structure( caps, iterator );
                if( capsStruct )
                {
                    if( !gst_structure_get_int( capsStruct, "width", &videoWidth ) )
                    {
                        DEBUG_ERR("Could not get width from filter-caps");
                        videoWidth = TEST_VIDEO_WIDTH;
                    }
                    if( !gst_structure_get_int( capsStruct, "height", &videoHeight ) )
                    {
                        DEBUG_ERR("Could not get height from filter-caps");
                        videoHeight = TEST_VIDEO_HEIGHT;
                    }
                    DEBUG_INFO_A2("videoWidth:%d, videoHeight:%d", videoWidth, videoHeight);
                }
            }
        }
        else
        {
            GstCaps* negcapas = NULL;
            videoPad = gst_element_get_pad( col1, "src" );
            negcapas = gst_pad_get_negotiated_caps( GST_PAD(videoPad) );
            if ( negcapas )
            {
                if( !gst_video_get_size( videoPad, &videoWidth, &videoHeight ) )
                {
                    DEBUG_ERR("WARN: Cannot receive video size, using defaults");
                    videoWidth = TEST_VIDEO_WIDTH;
                    videoHeight = TEST_VIDEO_HEIGHT;
                }
            }
            else
            {
                videoWidth = TEST_VIDEO_WIDTH;
                videoHeight = TEST_VIDEO_HEIGHT;
            }
        }

        DEBUG_INFO_A2("Received video frame info, videoWidth:%d, videoHeight:%d", videoWidth, videoHeight);
        cropLeft = (gint)pSrcRect->left;
        cropTop = (gint)pSrcRect->top;
        cropRight = videoWidth - ((gint)pSrcRect->left + (gint)pSrcRect->width);
        cropBottom = videoHeight - ((gint)pSrcRect->top + (gint)pSrcRect->height);
        DEBUG_INFO_A4("Crop values - cropLeft:%d ,cropTop:%d,cropRight:%d,cropBottom:%d", cropLeft, cropTop, cropRight, cropBottom);

        if( cropBottom >= 0 && cropLeft >= 0 && cropRight >= 0 && cropTop >= 0 )
        {
            g_object_set(G_OBJECT(cropElement), "bottom", cropBottom, NULL);
            g_object_set(G_OBJECT(cropElement), "left", cropLeft, NULL);
            g_object_set(G_OBJECT(cropElement), "right", cropRight, NULL);
            g_object_set(G_OBJECT(cropElement), "top", cropTop, NULL);
        }
        else
        {
            if( cropLeft > videoWidth || cropLeft < 0 ||
                cropRight > videoWidth || cropRight < 0 ||
                cropBottom > videoHeight || cropBottom < 0 ||
                cropTop > videoHeight || cropTop < 0 )
            {
                DEBUG_INFO("Cropped params out of original frame.");
            }
        }
    }

    if( rotateElement && isRotate )
    {
        DEBUG_INFO("Start rotating!");

        /* calculate rotation */
        newRotation = tempRotation + rotation;

        if( newRotation > ROTATION_RATIO || newRotation < ROTATION_RATIO_NEG )
        {
            newRotation = newRotation % ROTATION_RATIO;
        }
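        /* Example (assuming ROTATION_RATIO is a full turn, i.e. 360000
         * millidegrees): a stored rotation of 270000 plus a requested 180000
         * gives 450000, which wraps to 90000 and selects the clockwise flip
         * below. */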

        /* Set rotation */
        switch( newRotation )
        {
            case 0:
            {
                /* no rotation */
                DEBUG_INFO("Set rotation FLIP_NONE");
                g_object_set(G_OBJECT(rotateElement), "method", FLIP_NONE, NULL);
                break;
            }
            case 90000:
            case -270000:
            {
                /* rotation 90 degree */
                DEBUG_INFO("Set rotation 90 degrees");
                g_object_set(G_OBJECT(rotateElement), "method", FLIP_CLOCKWISE, NULL);
                break;
            }
            case 180000:
            case -180000:
            {
                /* rotation 180 degree */
                DEBUG_INFO("Set rotation 180 degrees");
                g_object_set(G_OBJECT(rotateElement), "method", FLIP_ROTATE_180, NULL);
                break;
            }
            case 270000:
            case -90000:
            {
                /* rotation 270 degree */
                DEBUG_INFO("Set rotation 270 degrees");
                g_object_set(G_OBJECT(rotateElement), "method", FLIP_COUNTERCLOCKWISE, NULL);
                break;
            }
            case 360000:
            case -360000:
            {
                /* rotation 360 degree */
                DEBUG_INFO("Set rotation 360 degrees");
                g_object_set(G_OBJECT(rotateElement), "method", FLIP_NONE, NULL);
                break;
            }
            default:
            {
                /* no rotation */
                DEBUG_INFO("Set rotation default (FLIP_NONE) degree");
                g_object_set(G_OBJECT(rotateElement), "method", FLIP_NONE, NULL);
                break;
            }
        }

        /* Store current rotate value */
        if( bCtx->baseObj.ctxId == XAMediaPlayerAdaptation )
        {
            XAMediaPlayerAdaptationCtx* ctx = (XAMediaPlayerAdaptationCtx*) bCtx;
            ctx->curRotation = newRotation;
        }
        if( bCtx->baseObj.ctxId == XAMediaRecorderAdaptation )
        {
            XAMediaRecorderAdaptationCtx* ctx = (XAMediaRecorderAdaptationCtx*) bCtx;
            ctx->curRotation = newRotation;
        }
        if( bCtx->baseObj.ctxId == XACameraAdaptation )
        {
            XACameraAdaptationCtx* ctx = (XACameraAdaptationCtx*) bCtx;
            ctx->curRotation = newRotation;
        }
    }

    if( mirrorElement && isMirror )
    {
        /* solve new mirror state */
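        /* The requested mirror is combined with the current state: requesting the
         * same axis twice cancels it, requesting the other axis yields
         * XA_VIDEOMIRROR_BOTH, requesting BOTH toggles each axis, and requesting
         * XA_VIDEOMIRROR_NONE leaves the current state unchanged. */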
        switch( mirror )
        {
            case XA_VIDEOMIRROR_NONE:
            {
                newMirror = tempMirror;
                break;
            }
            case XA_VIDEOMIRROR_VERTICAL:
            {
                if( tempMirror == XA_VIDEOMIRROR_VERTICAL )
                {
                    newMirror = XA_VIDEOMIRROR_NONE;
                }
                else if( tempMirror == XA_VIDEOMIRROR_HORIZONTAL )
                {
                    newMirror = XA_VIDEOMIRROR_BOTH;
                }
                else if( tempMirror == XA_VIDEOMIRROR_BOTH )
                {
                    newMirror = XA_VIDEOMIRROR_HORIZONTAL;
                }
                else
                {
                    newMirror = XA_VIDEOMIRROR_VERTICAL;
                }
                break;
            }
            case XA_VIDEOMIRROR_HORIZONTAL:
            {
                if( tempMirror == XA_VIDEOMIRROR_VERTICAL )
                {
                    newMirror = XA_VIDEOMIRROR_BOTH;
                }
                else if( tempMirror == XA_VIDEOMIRROR_HORIZONTAL )
                {
                    newMirror = XA_VIDEOMIRROR_NONE;
                }
                else if( tempMirror == XA_VIDEOMIRROR_BOTH )
                {
                    newMirror = XA_VIDEOMIRROR_VERTICAL;
                }
                else
                {
                    newMirror = XA_VIDEOMIRROR_HORIZONTAL;
                }
                break;
            }
            case XA_VIDEOMIRROR_BOTH:
            {
                if( tempMirror == XA_VIDEOMIRROR_VERTICAL )
                {
                    newMirror = XA_VIDEOMIRROR_HORIZONTAL;
                }
                else if( tempMirror == XA_VIDEOMIRROR_HORIZONTAL )
                {
                    newMirror = XA_VIDEOMIRROR_VERTICAL;
                }
                else if( tempMirror == XA_VIDEOMIRROR_BOTH )
                {
                    newMirror = XA_VIDEOMIRROR_NONE;
                }
                else
                {
                    newMirror = XA_VIDEOMIRROR_BOTH;
                }
                break;
            }
            default:
                break;
        }

        DEBUG_INFO("Start mirroring!");
        /* Set mirror */
        switch( newMirror )
        {
            case XA_VIDEOMIRROR_NONE:
            {
                /* none */
                DEBUG_INFO("Set mirror none");
                g_object_set(G_OBJECT(mirrorElement), "method", FLIP_NONE, NULL);
                break;
            }
            case XA_VIDEOMIRROR_VERTICAL:
            {
                /* vertical mirror */
                DEBUG_INFO("Set mirror vertical");
                g_object_set(G_OBJECT(mirrorElement), "method", FLIP_VERTICAL, NULL);
                break;
            }
            case XA_VIDEOMIRROR_HORIZONTAL:
            {
                /* horizontal mirror */
                DEBUG_INFO("Set mirror horizontal");
                g_object_set(G_OBJECT(mirrorElement), "method", FLIP_HORIZONTAL, NULL);
                break;
            }
            case XA_VIDEOMIRROR_BOTH:
            {
                /* both mirror */
                DEBUG_INFO("Set mirror vertical and horizontal");
                g_object_set(G_OBJECT(mirrorElement), "method", FLIP_ROTATE_180, NULL);
                break;
            }
            default:
            {
                /* Default no mirroring */
                g_object_set(G_OBJECT(mirrorElement), "method", FLIP_NONE, NULL);
                break;
            }
        }

        /* Store current mirror state */
        if( bCtx->baseObj.ctxId == XAMediaPlayerAdaptation )
        {
            XAMediaPlayerAdaptationCtx* ctx = (XAMediaPlayerAdaptationCtx*) bCtx;
            ctx->curMirror = newMirror;
        }
        if( bCtx->baseObj.ctxId == XAMediaRecorderAdaptation )
        {
            XAMediaRecorderAdaptationCtx* ctx = (XAMediaRecorderAdaptationCtx*) bCtx;
            ctx->curMirror = newMirror;
        }
        if( bCtx->baseObj.ctxId == XACameraAdaptation )
        {
            XACameraAdaptationCtx* ctx = (XACameraAdaptationCtx*) bCtx;
            ctx->curMirror = newMirror;
        }
    }

    /* Set scale */
    if ( isScaleOptions || isDestRect || isSrcRect )
    {
        switch( scaleOptions )
        {
            case XA_VIDEOSCALE_STRETCH:
            {
                DEBUG_INFO("XA_VIDEOSCALE_STRETCH");
                /* The source and destination rectangle's width and height params are used to calculate
                 * the scaling factors independently. Aspect ratio is ignored. */
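                /* Implemented by forcing fixed-size caps: for the player/camera the
                 * destination size is set on the video sink pad with
                 * force-aspect-ratio disabled, for the recorder it is set on the src
                 * pad of the pp_scale2 element. */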
                if (pDestRect)
                {
                    if (bCtx->baseObj.ctxId != XAMediaRecorderAdaptation)
                    {
                        GstPad *pad = NULL;
                        GstCaps* simplecaps = NULL;
                        g_object_set ( G_OBJECT(sink), "force-aspect-ratio", FALSE, NULL);
                        simplecaps = gst_caps_new_simple("video/x-raw-rgb",
                                                         "width", G_TYPE_INT, pDestRect->width,
                                                         "height", G_TYPE_INT, pDestRect->height,
                                                         "framerate", GST_TYPE_FRACTION, 0, 1,
                                                         NULL);
                        DEBUG_API_A1("caps: %s", gst_caps_to_string(simplecaps));
                        pad = gst_element_get_static_pad(GST_ELEMENT(sink), "sink");
                        if (!gst_pad_set_caps(pad, simplecaps))
                        {
                            DEBUG_INFO("Stubbed at this point");
                            DEBUG_INFO("Cannot set destrect size during XA_VIDEOSCALE_STRETCH!");
                            DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit - XA_VIDEOSCALE_STRETCH");
                            return XA_RESULT_SUCCESS;
                        }
                        DEBUG_API_A1("ret: %lu", ret);
                    }
                    else
                    {
                        GstCaps* simplecaps = NULL;
                        GstPad *pad = NULL;
                        if ( !scale )
                        {
                            DEBUG_ERR("Could not get scaling element from pipeline!");
                            DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit - XA_RESULT_INTERNAL_ERROR");
                            return XA_RESULT_INTERNAL_ERROR;
                        }
                        simplecaps = gst_caps_new_simple("video/x-raw-yuv",
                                                         "width", G_TYPE_INT, pDestRect->width,
                                                         "height", G_TYPE_INT, pDestRect->height,
                                                         "framerate", GST_TYPE_FRACTION, 0, 1,
                                                         NULL);
                        DEBUG_API_A1("caps: %s", gst_caps_to_string(simplecaps));
                        pad = gst_element_get_static_pad(GST_ELEMENT(scale), "src");
                        if (!gst_pad_set_caps(pad, simplecaps))
                        {
                            DEBUG_INFO("Stubbed at this point");
                            DEBUG_INFO("Cannot set destrect size during XA_VIDEOSCALE_STRETCH!");
                            DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit - XA_VIDEOSCALE_STRETCH");
                            return XA_RESULT_SUCCESS;
                        }
                    }
                }
                DEBUG_INFO("XA_VIDEOSCALE_STRETCH Done");

                break;
            }
            case XA_VIDEOSCALE_FIT:
            {
                DEBUG_INFO("XA_VIDEOSCALE_FIT");
                /* The minimum scale factor between the destination rectangle's width
                 * over the source rectangle's width and the destination rectangle's
                 * height over the source rectangle's height is used. Aspect ratio is
                 * maintained. Frame is centered. */
                if (pDestRect)
                {
                    if (bCtx->baseObj.ctxId != XAMediaRecorderAdaptation)
                    {
                        GstPad *pad = NULL;
                        GstCaps* simplecaps = NULL;
                        g_object_set ( G_OBJECT(sink), "force-aspect-ratio", TRUE, NULL);
                        simplecaps = gst_caps_new_simple("video/x-raw-rgb",
                                                         "width", G_TYPE_INT, pDestRect->width,
                                                         "height", G_TYPE_INT, pDestRect->height,
                                                         "framerate", GST_TYPE_FRACTION, 0, 1,
                                                         NULL);
                        DEBUG_API_A1("caps: %s", gst_caps_to_string(simplecaps));
                        pad = gst_element_get_static_pad(GST_ELEMENT(sink), "sink");
                        if (!gst_pad_set_caps(pad, simplecaps))
                        {
                            DEBUG_INFO("Stubbed at this point");
                            DEBUG_INFO("Cannot set destrect size during XA_VIDEOSCALE_FIT!");
                            DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit - XA_VIDEOSCALE_FIT");
                            return XA_RESULT_SUCCESS;
                        }
                    }
                    else
                    {
                        GstPad *videoScalePad = NULL;
                        GstCaps *negcaps = NULL;
                        gint videoScalePadHeight = 0, videoScalePadWidth = 0;
                        gfloat scaleFactorWidth = 0;
                        gfloat scaleFactorHeight = 0;
                        gfloat scaleFactor = 0;
                        videoScalePad = gst_element_get_pad( col1, "src" );
                        negcaps = gst_pad_get_negotiated_caps( GST_PAD(videoScalePad) );
                        if ( negcaps )
                        {
                            if( !gst_video_get_size( videoScalePad, &videoScalePadWidth, &videoScalePadHeight ) )
                            {
                                DEBUG_ERR("Cannot receive current cropscalevideo size!");
                                DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit - XA_RESULT_INTERNAL_ERROR");
                                return XA_RESULT_INTERNAL_ERROR;
                            }
                        }
                        else
                        {
                            DEBUG_ERR("No negotiated caps in col1:src!");
                            DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit");
                            return XA_RESULT_SUCCESS;
                        }
                        if (pSrcRect->width != 0 && pSrcRect->height != 0)
                        {
                            scaleFactorWidth = (gfloat)videoScalePadWidth / (gfloat)pSrcRect->width;
                            scaleFactorHeight = (gfloat)videoScalePadHeight / (gfloat)pSrcRect->height;
                            if (scaleFactorWidth < scaleFactorHeight)
                            {
                                scaleFactor = scaleFactorWidth;
                            }
                            else
                            {
                                scaleFactor = scaleFactorHeight;
                            }
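                            /* Illustrative numbers: a 640x480 negotiated frame with a
                             * 320x180 source rectangle gives factors 2.0 and ~2.67; the
                             * smaller one (2.0) is kept, and the leftover height becomes
                             * equal negative box values, i.e. borders, below. */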
                            cropscaleBottom = (gint)(pSrcRect->height*scaleFactor - videoScalePadHeight)/2;
                            cropscaleLeft = (gint)(pSrcRect->width*scaleFactor - videoScalePadWidth)/2;
                            if (cropscaleLeft > 0)
                            {
                                cropscaleLeft *= -1;
                            }
                            cropscaleRight = cropscaleLeft;
                            if (cropscaleBottom > 0)
                            {
                                cropscaleBottom *= -1;
                            }
                            cropscaleTop = cropscaleBottom;
                            g_object_set (G_OBJECT (boxElement), "bottom", cropscaleBottom, NULL);
                            g_object_set (G_OBJECT (boxElement), "right", cropscaleRight, NULL);
                            g_object_set (G_OBJECT (boxElement), "left", cropscaleLeft, NULL);
                            g_object_set (G_OBJECT (boxElement), "top", cropscaleTop, NULL);
                        }
                    }
                }
                DEBUG_INFO("XA_VIDEOSCALE_FIT done");

                break;
            }
            case XA_VIDEOSCALE_CROP:
            {
                DEBUG_INFO("XA_VIDEOSCALE_CROP");
                /* The maximum scale factor between the destination rectangle's width over the source
                 * rectangle's width and destination rectangle's height over the source rectangle's
                 * height is used. Aspect ratio is maintained. Frame is centered. */
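                /* The centering itself is done through the pp_box border values
                 * computed below, assuming the element behaves like GStreamer's
                 * videobox: positive values crop pixels off an edge, negative values
                 * add padding, and the src/dest differences are halved so the frame
                 * stays centered. */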
                if( pDestRect && pSrcRect )
                {
                    GstPad *videoScalePad = NULL;
                    GstCaps *negcaps = NULL;
                    gint videoScalePadHeight = 0, videoScalePadWidth = 0;
                    videoScalePad = gst_element_get_pad( col1, "src" );
                    negcaps = gst_pad_get_negotiated_caps( GST_PAD(videoScalePad) );
                    if ( negcaps )
                    {
                        if( !gst_video_get_size( videoScalePad, &videoScalePadWidth, &videoScalePadHeight ) )
                        {
                            DEBUG_ERR("Cannot receive current cropscalevideo size!");
                            DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit - XA_RESULT_INTERNAL_ERROR");
                            return XA_RESULT_INTERNAL_ERROR;
                        }
                    }
                    else
                    {
                        DEBUG_ERR("No negotiated caps in col1:src!");
                        DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit");
                        return XA_RESULT_SUCCESS;
                    }

                    DEBUG_INFO_A2( "Received video scale frame info, videoScalePadWidth:%d, "
                                   "videoScalePadHeight:%d", videoScalePadWidth, videoScalePadHeight);

                    if( pSrcRect->height > 0 && pSrcRect->width > 0 )
                    {
                        if( pSrcRect->height > pDestRect->height )
                        {
                            videoscaleHeight = pSrcRect->height;
                            if( pDestRect->top == 0 )
                            {
                                cropscaleTop = ((videoscaleHeight - pDestRect->height)/2);
                                cropscaleBottom = ((videoscaleHeight - pDestRect->height)/2);
                            }
                            else
                            {
                                cropscaleTop = (pDestRect->top/2);
                                cropscaleBottom = (pDestRect->top/2);
                            }
                        }
                        else if( pDestRect->height > pSrcRect->height )
                        {
                            videoscaleHeight = pDestRect->height;
                            if( pDestRect->top == 0 )
                            {
                                cropscaleTop = -((videoscaleHeight - pSrcRect->height)/2);
                                cropscaleBottom = -((videoscaleHeight - pSrcRect->height)/2);
                            }
                            else
                            {
                                cropscaleTop = -(pDestRect->top/2);
                                cropscaleBottom = -(pDestRect->top/2);
                            }
                        }
                        else if( pDestRect->height == pSrcRect->height )
                        {
                        }
                        else
                        {
                        }
                        if( pSrcRect->width > pDestRect->width )
                        {
                            if( pDestRect->left == 0 )
                            {
                                cropscaleLeft = ((gint)(pSrcRect->width - pDestRect->width)/2);
                                cropscaleRight = ((gint)(pSrcRect->width - pDestRect->width)/2);
                            }
                            else
                            {
                                cropscaleLeft = (pDestRect->left/2);
                                cropscaleRight = (pDestRect->left/2);
                            }
                        }
                        else if( pDestRect->width > pSrcRect->width )
                        {
                            if( pDestRect->left == 0 )
                            {
                                cropscaleLeft = -((gint)(pDestRect->width - pSrcRect->width)/2);
                                cropscaleRight = -((gint)(pDestRect->width - pSrcRect->width)/2);
                            }
                            else
                            {
                                cropscaleLeft = -(pDestRect->left/2);
                                cropscaleRight = -(pDestRect->left/2);
                            }
                        }
                        else if( pDestRect->width == pSrcRect->width )
                        {
                        }
                        else
                        {
                        }
                    }
                    else
                    {
                        DEBUG_ERR("Invalid rectangle values in source rectangles");
                        DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit, Exit with invalid source rectangle values");
                        return XA_RESULT_PARAMETER_INVALID;
                    }
                    if( pDestRect->width != pSrcRect->width && pDestRect->height != pSrcRect->height )
                    {
                        DEBUG_INFO_A4("Crop values - cropscaleLeft:%d "
                                      ",cropscaleTop:%d,"
                                      "cropscaleRight:%d,"
                                      "cropscaleBottom:%d",
                                      cropscaleLeft,
                                      cropscaleTop,
                                      cropscaleRight,
                                      cropscaleBottom);
                        g_object_set (G_OBJECT (boxElement), "bottom", cropscaleBottom, NULL);
                        g_object_set (G_OBJECT (boxElement), "right", cropscaleRight, NULL);
                        g_object_set (G_OBJECT (boxElement), "left", cropscaleLeft, NULL);
                        g_object_set (G_OBJECT (boxElement), "top", cropscaleTop, NULL);
                    }
                }
                break;
            }
            default:
                DEBUG_INFO("no scale options!");
                break;
        }
    }

    /* TODO: The code below does nothing useful; it only sets variables that are
     * never used, so it has been commented out. */
    /* if(pDestRect && boxElement)
    {
        is background color black
        if((backgroundColor >> 8) & BLACK_BG_COLOR_MASK)
        {
            videoBackgroundColor = 0;
        }
        is background color green
        else if((backgroundColor >> 8) & GREEN_BG_COLOR_MASK)
        {
            videoBackgroundColor = 1;
        }
        is background color blue
        else if((backgroundColor >> 8) & BLUE_BG_COLOR_MASK)
        {
            videoBackgroundColor = 2;
        }
        else
        {
            by default black
            videoBackgroundColor = 0;
        }

        check alpha value. Gst support values 0 to 1 and XA 0 to 255
        {
            XAuint32 tempColor = 0;
            tempColor = backgroundColor & ALPHA_VALUE_MASK;

            alphaValue = (gdouble)(tempColor/ALPHA_VALUE_MASK);
        }
    }*/

    if( dataSrc )
    {
        XAMediaType mediaType = XA_MEDIATYPE_UNKNOWN;
        ret = XACommon_CheckDataSource(dataSrc, &mediaType);
        if( ret == XA_RESULT_SUCCESS && mediaType == XA_MEDIATYPE_IMAGE )
        {
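            /* For still-image sources, bounce the bin through GST_STATE_READY and
             * back to its previous state so that the updated post-processing
             * settings are picked up when the bin returns to its original state. */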
            gstOrigState = GST_STATE(bCtx->bin);

            DEBUG_INFO_A1("Sending change state request to state %d", GST_STATE_READY);
            gstRet = gst_element_set_state( GST_ELEMENT(bCtx->bin), GST_STATE_READY);
            gstTmpState = GST_STATE(bCtx->bin);

            if( gstRet == GST_STATE_CHANGE_SUCCESS && gstTmpState == GST_STATE_READY )
            {
                DEBUG_INFO_A1("Sending change state request to state %d", gstOrigState);
                gstRet = gst_element_set_state( GST_ELEMENT(bCtx->bin), gstOrigState);
            }
        }
    }

    DEBUG_API("<-XAVideoPostProcessingItfAdapt_Commit");
    return ret;
}