khronosfws/openmax_al/src/gst_adaptation/xamediaplayeradaptctx.c
changeset 33 5e8b14bae8c3
parent 28 ebf79c79991a
child 36 73253677b50a
     1 /*
       
     2 * Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
       
     3 * All rights reserved.
       
     4 * This component and the accompanying materials are made available
       
     5 * under the terms of "Eclipse Public License v1.0"
       
     6 * which accompanies this distribution, and is available
       
     7 * at the URL "http://www.eclipse.org/legal/epl-v10.html".
       
     8 *
       
     9 * Initial Contributors:
       
    10 * Nokia Corporation - initial contribution.
       
    11 *
       
    12 * Contributors:
       
    13 *
       
    14 * Description: Media Player adaptation context for the GStreamer back-end
       
    15 *
       
    16 */
       
    17 
       
    18 #include <assert.h>
       
    19 #include <stdlib.h>
       
    20 #include <gst/gst.h>
       
    21 #include <gst/app/gstappsrc.h>
       
    22 #include "xamediaplayeradaptctx.h"
       
    23 #include "xamediaplayeradaptctxmmf.h"
       
    24 #include "xaadaptationgst.h"
       
    25 #include "xaobjectitf.h"
       
    26 #include "xacameradevice.h"
       
    27 #include "xaoutputmix.h"
       
    28 #include "xametadataadaptation.h"
       
    29 /*#include "xangavideosink.h"*/
       
    30 #include "xacameraadaptctx.h"
       
    31 #include "xaoutputmixadaptctx.h"
       
    32 
       
    33 /* forward declarations */
       
    34 XAresult XAMediaPlayerAdapt_CreatePipeline( XAMediaPlayerAdaptationCtx* ctx );
       
    35 
       
    36 void* ngaVideoSinkPtr = NULL;
       
    37 
       
    38 
       
    39 extern XAboolean cameraRealized;
       
    40 extern XACameraAdaptationCtx_* cameraCtx;
       
    41 
       
    42 /*
       
    43  * static void XAMediaPlayerAdapt_NewPadCb (GstElement *element, GstPad *pad,  gpointer data)
       
    44  * Listen to codec bin dynamic pads
       
    45  */
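        /* Note: this handler is attached to the codec bin's "pad-added" signal in
         * XAMediaPlayerAdapt_CreatePipeline() when the static link to the audio/video
         * post-processing bins cannot be made immediately (delayedlink case). */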
       
    46 static void XAMediaPlayerAdapt_NewPadCb (GstElement *element, GstPad *pad,  gpointer data)
       
    47 {
       
    48   XAMediaPlayerAdaptationCtx* mCtx = (XAMediaPlayerAdaptationCtx*)data;
       
    49   gchar *name = gst_pad_get_name (pad);
       
    50   DEBUG_API_A1("->XAMediaPlayerAdapt_NewPadCb: A new pad %s was created", name);
       
    51   /*try first newly created pad to video pipe*/
       
    52   if( mCtx->videoppbin && !(gst_pad_is_linked(gst_element_get_static_pad(mCtx->videoppbin, "videopp_sink"))) )
       
    53   {
       
    54       if(gst_element_link_pads (mCtx->codecbin, name, mCtx->videoppbin, "videopp_sink"))
       
    55       {
       
    56           DEBUG_INFO_A1("Pads linked, codecbin:%s to videopp:sink", name);
       
    57           g_free (name);
       
    58           DEBUG_API("<-XAMediaPlayerAdapt_NewPadCb");
       
    59           return;
       
    60       }
       
    61   }
       
    62   /*..and then to audio pipe*/
       
    63   if( mCtx->audioppbin && !(gst_pad_is_linked(gst_element_get_static_pad(mCtx->audioppbin, "sink"))) )
       
    64   {
       
    65       if(gst_element_link_pads (mCtx->codecbin, name, mCtx->audioppbin, "sink"))
       
    66       {
       
    67           DEBUG_INFO_A1("Pads linked, codecbin:%s to audiopp:sink", name);
       
    68           g_free (name);
       
    69           DEBUG_API("<-XAMediaPlayerAdapt_NewPadCb");
       
    70           return;
       
    71       }
       
    72   }
       
    73 
       
    74   g_free (name);
       
    75   DEBUG_INFO("Warning: Could not find anything to link to new pad.");
       
    76   DEBUG_API("<-XAMediaPlayerAdapt_NewPadCb");
       
    77 }
       
    78 
       
    79 /*
       
    80  * void  push_data_for_prerolling (GstElement * pipeline, GstBuffer *buffer, XAMediaPlayerAdaptationCtx* ctx)
       
    81  * Called when "push-buffer" signal is emitted
       
    82  */
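        /* Note: "push-buffer" is an action signal of appsrc, so ctx->source is expected
         * to be a GstAppSrc here; gst_app_src_push_buffer() takes ownership of the
         * buffer that is handed in. */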
       
    83 void  push_data_for_prerolling (GstElement * pipeline, GstBuffer *buffer, XAMediaPlayerAdaptationCtx* ctx)
       
    84 {
       
    85 	DEBUG_API("->push_data_for_prerolling");
       
    86 	gst_app_src_push_buffer( GST_APP_SRC(ctx->source), GST_BUFFER(buffer) );
       
    87 	/*GstPad* prerollPad = NULL;
       
    88 	prerollPad = gst_element_get_static_pad(GST_ELEMENT(ctx->source),"src");
       
    89 	gst_pad_push (prerollPad, buffer);
       
    90 	gst_element_send_event(GST_ELEMENT(ctx->source),gst_event_new_flush_start());
       
    91 	gst_element_send_event(GST_ELEMENT(ctx->source),gst_event_new_flush_stop());*/
       
    92 	DEBUG_API("<-push_data_for_prerolling");
       
    93 }
       
    94 
       
    95 
       
    96 /*
       
    97  * gboolean XAMediaPlayerAdapt_GstBusCb( GstBus *bus, GstMessage *message, gpointer data )
       
    98  * MediaPlayer Gst-bus message handler (Callback)
       
    99  */
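        /* Note: this handler is registered in XAMediaPlayerAdapt_PostInit() for selected
         * message types (eos, error, warning, state-changed, segment-done, async-done,
         * element) via the bus signal watch. Messages whose source is not the top-level
         * bin are handled in the else branch near the end of this function. */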
       
   100 gboolean XAMediaPlayerAdapt_GstBusCb( GstBus *bus, GstMessage *message, gpointer data )
       
   101 {
       
   102     XAAdaptationGstCtx* bCtx = (XAAdaptationGstCtx*)data;
       
   103     /* only listen to bin messages */
       
   104     if(GST_MESSAGE_SRC(message)==(GstObject*)(bCtx->bin))
       
   105     {
       
   106         XAMediaPlayerAdaptationCtx* mCtx = (XAMediaPlayerAdaptationCtx*)data;
       
   107         DEBUG_API_A2("->XAMediaPlayerAdapt_GstBusCb:\"%s\" from object \"%s\"",
       
   108                         GST_MESSAGE_TYPE_NAME(message), GST_OBJECT_NAME(GST_MESSAGE_SRC(message)));
       
   109 
       
   110         switch( GST_MESSAGE_TYPE(message))
       
   111         {
       
   112             case GST_MESSAGE_EOS:
       
   113             {
       
   114                 if( mCtx && mCtx->loopingenabled && mCtx->loopend == GST_CLOCK_TIME_NONE)
       
   115                 {
       
   116                     DEBUG_INFO_A2("Restart loop from %"GST_TIME_FORMAT" to %"GST_TIME_FORMAT,
       
   117                                   GST_TIME_ARGS(mCtx->loopstart), GST_TIME_ARGS(mCtx->loopend));
       
   118                     gst_element_seek( bCtx->bin, mCtx->playrate, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE),
       
   119                                       GST_SEEK_TYPE_SET, mCtx->loopstart,
       
   120                                       GST_SEEK_TYPE_NONE, (gint64)GST_CLOCK_TIME_NONE );
       
   121                     gst_element_get_state(bCtx->bin,NULL,NULL,XA_ADAPT_ASYNC_TIMEOUT_SHORT_NSEC);
       
   122                     mCtx->lastpos = mCtx->loopstart;
       
   123                     if( mCtx && mCtx->trackpositionenabled )
       
   124                     {
       
   125                         XAmillisecond posMsec = GST_TIME_AS_MSECONDS(mCtx->lastpos);/*Warning ok due to used API specification*/
       
   126                         XAAdaptEvent event = {XA_PLAYITFEVENTS, XA_ADAPT_POSITION_UPDATE_EVT, 1, NULL};
       
   127                         event.data = &posMsec;
       
   128                         XAAdaptationBase_SendAdaptEvents(&bCtx->baseObj, &event );
       
   129                     }
       
   130                 }
       
   131                 else
       
   132                 {
       
   133                 /* stop position tracking */
       
   134                 if(mCtx->runpositiontimer > 0)
       
   135                 {
       
   136                     g_source_remove(mCtx->runpositiontimer);
       
   137                     mCtx->runpositiontimer=0;
       
   138                 }
       
   139 
       
   140                 /* complete any ongoing client async operations */
       
   141                 XAAdaptationGst_CompleteAsyncWait(bCtx);
       
   142 
       
   143                 /* send needed events */
       
   144                 {
       
   145 					XAMediaType mediatype;
       
   146 /*
       
   147 					if( mCtx->baseObj.pipeSrcThrCtx.pipe )
       
   148 					{
       
   149 						XACommon_CheckDataSource((XADataSource*)mCtx->xaAudioSink, &mediatype);
       
   150 					}
       
   151 					else
       
   152 */
       
   153 					{
       
   154 						XACommon_CheckDataSource(mCtx->xaSource, &mediatype);
       
   155 					}
       
   156 					if(mediatype!=XA_MEDIATYPE_IMAGE)
       
   157 					{
       
   158 						XAAdaptEvent event = { XA_PLAYITFEVENTS, XA_PLAYEVENT_HEADATEND, 0, NULL };
       
   159 						XAAdaptationBase_SendAdaptEvents(&bCtx->baseObj, &event );
       
   160 					}
       
   161                 }
       
   162                 if(mCtx->positionCb)
       
   163                 {
       
   164                     mCtx->positionCb(bCtx);
       
   165                 }
       
   166                 bCtx->binWantedState = GST_STATE_PAUSED;
       
   167                 }
       
   168                 break;
       
   169             }
       
   170 
       
   171             case GST_MESSAGE_STATE_CHANGED:
       
   172             {
       
   173                 GstState oldstate, newstate, pendingstate, gsttargetstate;
       
   174                 gst_message_parse_state_changed(message, &oldstate, &newstate, &pendingstate);
       
   175                 gsttargetstate = GST_STATE_TARGET(bCtx->bin);
       
   176                 DEBUG_INFO_A4("old %s -> new %s ( pending %s, gsttarget %s )",
       
   177                                gst_element_state_get_name(oldstate),
       
   178                                gst_element_state_get_name(newstate),
       
   179                                gst_element_state_get_name(pendingstate),
       
   180                                gst_element_state_get_name(gsttargetstate) );
       
   181                 if(gsttargetstate!=bCtx->binWantedState)
       
   182                 {
       
   183                     DEBUG_ERR_A1("WARNING: Gst target is not wanted target [%s]!!!",
       
   184                                     gst_element_state_get_name(bCtx->binWantedState));
       
   185                 }
       
   186                 /* print out some more info */
       
   187                 if( pendingstate == GST_STATE_VOID_PENDING )
       
   188                 {
       
   189                     if( newstate != bCtx->binWantedState )
       
   190                     {
       
   191                         DEBUG_INFO_A2("Gst in intermediate state transition (curr %s, target %s)",
       
   192                                         gst_element_state_get_name(newstate),
       
   193                                         gst_element_state_get_name(bCtx->binWantedState));
       
    194                        break; /* intermediate state transition: skip the event handling below to avoid sending an extra event */
       
   195                     }
       
   196                     else
       
   197                     {
       
   198                         DEBUG_INFO_A1("Gst in wanted target state (%s)",
       
   199                                         gst_element_state_get_name(newstate));
       
   200                     }
       
   201                 }
       
   202                 if( oldstate!=GST_STATE_PLAYING && newstate==GST_STATE_PLAYING )
       
   203                 {
       
   204                     /* send needed events */
       
   205                     XAAdaptEvent event = {XA_PLAYITFEVENTS, XA_PLAYEVENT_HEADMOVING, 0, NULL };
       
   206                     XAAdaptationBase_SendAdaptEvents(&bCtx->baseObj, &event );
       
   207                     /* enable position tracking if needed */
       
   208                     XAMediaPlayerAdapt_UpdatePositionCbTimer(mCtx);
       
   209                 }
       
   210                 if ( oldstate== GST_STATE_READY && newstate==GST_STATE_PAUSED)
       
   211                 {
       
   212 /*                    if ( mCtx->baseObj.pipeSinkThrCtx.dataHandle )
       
   213                     {
       
   214                         mCtx->baseObj.pipeSrcThrCtx.state = CPStateInitialized;
       
   215                     }*/
       
   216                 }
       
   217                 break;
       
   218             }
       
   219 
       
   220             case GST_MESSAGE_ASYNC_DONE:
       
   221             {
       
   222                 /* some async sequence ended */
       
   223                 XAAdaptationGst_CompleteAsyncWait(bCtx);
       
   224                 break;
       
   225             }
       
   226 
       
   227             case GST_MESSAGE_ERROR:
       
   228             {
       
   229                 GError* error;
       
   230                 gchar* debug;
       
   231                 gst_message_parse_error(message, &error, &debug);
       
    232                 DEBUG_ERR_A1("Gst reports error \"%s\"", debug);
                        g_error_free(error);
                        g_free(debug);
       
   233                 /* stop waiting any ongoing async operations */
       
   234                 XAAdaptationGst_CompleteAsyncWait(bCtx);
       
   235                 break;
       
   236             }
       
   237             case GST_MESSAGE_BUFFERING:
       
   238             {
       
   239                 gint percent;
       
   240                 gst_message_parse_buffering(message, &percent);
       
   241                 DEBUG_INFO_A1("Gst message buffering %d", percent);
       
   242                 mCtx->buffering = percent;
       
   243                 {
       
   244                     XAAdaptEvent event = {XA_PREFETCHITFEVENTS, XA_ADAPT_BUFFERING, 1, NULL };
       
   245                     event.data = &mCtx->buffering;
       
    246                     XAAdaptationBase_SendAdaptEvents(&bCtx->baseObj, &event );
       
   247                 }
       
   248                 break;
       
   249             }
       
   250             case GST_MESSAGE_ELEMENT:
       
   251             {
       
   252                 DEBUG_INFO("GST_MESSAGE_ELEMENT");
       
   253                 if ((gst_structure_has_name(message->structure, "graphics-surface-created")) || 
       
   254                     (gst_structure_has_name(message->structure, "graphics-surface-updated")))
       
   255                 {
       
    256                      DEBUG_INFO("graphics-surface-created/updated message received");
       
   257                 }
       
   258                 break;
       
   259             }
       
   260             default:
       
   261                 break;
       
   262         }
       
   263     }
       
   264     else //if (GST_MESSAGE_SRC(message)==(GstObject*)(bCtx->videosink))
       
   265     {
       
   266         switch( GST_MESSAGE_TYPE(message))
       
   267         {
       
   268             case GST_MESSAGE_ELEMENT:
       
   269             {
       
   270                 DEBUG_INFO("GST_MESSAGE_ELEMENT");
       
   271                 if ((gst_structure_has_name(message->structure, "graphics-surface-created")) || 
       
   272                     (gst_structure_has_name(message->structure, "graphics-surface-updated")))
       
   273                 {
       
   274                      guint32 surfaceid0, surfaceid1, surfaceid2, surfaceid3;
       
   275                      gint crop_rect_tl_x, crop_rect_tl_y, crop_rect_br_x, crop_rect_br_y;
       
   276                      gint aspect_ratio_num, aspect_ratio_denom;
       
   277                      
       
   278                      GstObject *sink= GST_MESSAGE_SRC(message);
       
   279      
       
   280                      g_object_get(sink, "surfaceid0", &surfaceid0, NULL);
       
   281                      g_object_get(sink, "surfaceid1", &surfaceid1,NULL);
       
   282                      g_object_get(sink, "surfaceid2", &surfaceid2,NULL);
       
   283                      g_object_get(sink, "surfaceid3", &surfaceid3,NULL);
       
   284                      g_object_get(sink, "croprect_tl_x", &crop_rect_tl_x, NULL);
       
   285                      g_object_get(sink, "croprect_tl_y", &crop_rect_tl_y, NULL);
       
   286                      g_object_get(sink, "croprect_br_x", &crop_rect_br_x, NULL);
       
   287                      g_object_get(sink, "croprect_br_y", &crop_rect_br_y, NULL);                
       
   288                      g_object_get(sink, "aspectratio_num", &aspect_ratio_num, NULL);
       
   289                      g_object_get(sink, "aspectratio_denom", &aspect_ratio_denom, NULL);                     
       
   290 /*
       
   291                      surface_created(ngaVideoSinkPtr, surfaceid0,surfaceid1,surfaceid2,surfaceid3,crop_rect_tl_x,
       
   292                      									crop_rect_tl_y,crop_rect_br_x,crop_rect_br_y,aspect_ratio_num,aspect_ratio_denom);                     
       
   293 */
       
   294                 }
       
   295                 break;
       
   296             }
       
   297             default:
       
   298                 break;
       
   299         }   
       
   300     }
       
   301     DEBUG_API("<-XAMediaPlayerAdapt_GstBusCb");    
       
   302     return TRUE;
       
   303 }
       
   304 
       
   305 /*
       
    306  * XAAdaptationBaseCtx* XAMediaPlayerAdapt_Create()

    307  * Allocates memory for the Media Player Adaptation Context and performs 1st phase initialization

    308  * @param XADataSource *pDataSrc - pointer to OMX-AL data source

    309  * @param XADataSource *pBankSrc - pointer to instrument bank structure in Mobile DLS; if NULL, the default is used

    310  * @param XADataSink *pAudioSnk - pointer to OMX-AL audio sink definition

    311  * @param XADataSink *pImageVideoSnk - pointer to OMX-AL image and video sink definition
         * @param XADataSink *pVibra - pointer to OMX-AL vibra I/O device sink definition
         * @param XADataSink *pLEDArray - pointer to OMX-AL LED array I/O device sink definition

    312  * @returns XAAdaptationBaseCtx* - pointer to the created context, NULL if an error occurs
       
   313  */
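        /* Note: creation is two-phase; this function only allocates and initializes the
         * context, the GStreamer pipeline itself is built later in
         * XAMediaPlayerAdapt_PostInit() / XAMediaPlayerAdapt_CreatePipeline(). */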
       
   314 XAAdaptationBaseCtx* XAMediaPlayerAdapt_Create(XADataSource *pDataSrc, XADataSource *pBankSrc,
       
   315                                                       XADataSink *pAudioSnk, XADataSink *pImageVideoSnk,
       
   316                                                       XADataSink *pVibra, XADataSink *pLEDArray)
       
   317 {
       
   318     XAMediaPlayerAdaptationCtx *pSelf = NULL;
       
   319     XAuint32 locType = 0;
       
   320     XADataLocator_IODevice *ioDevice;
       
   321     
       
   322     DEBUG_API("->XAMediaPlayerAdapt_Create");
       
   323     
       
   324     //Create NGA Video Sink class
       
   325 /*    if(pImageVideoSnk)
       
   326     {
       
   327        ngaVideoSinkPtr = nga_video_sink_init();
       
   328     }*/
       
   329 
       
   330     pSelf = calloc(1, sizeof(XAMediaPlayerAdaptationCtx));
       
   331     if ( pSelf)
       
   332     {
       
   333         if( XAAdaptationBase_Init(&(pSelf->baseObj.baseObj),XAMediaPlayerAdaptation)
       
   334             != XA_RESULT_SUCCESS )
       
   335         {
       
   336             DEBUG_ERR("Failed to init base context!!!");
       
   337             free(pSelf);
       
   338             pSelf = NULL;
       
   339             return NULL;
       
   340         }
       
   341         else
       
   342         {
       
   343             pSelf->xaSource = pDataSrc;
       
   344             pSelf->xaBankSrc = pBankSrc;
       
   345             pSelf->xaAudioSink = pAudioSnk;
       
   346             pSelf->xaVideoSink = pImageVideoSnk;
       
   347             pSelf->xaLEDArray = pLEDArray;
       
   348             pSelf->xaVibra = pVibra;
       
   349             pSelf->loopstart = 0;
       
   350             pSelf->loopend = (gint64)GST_CLOCK_TIME_NONE;
       
   351             pSelf->playrate = 1.0;
       
   352             pSelf->rateprops = (XA_RATEPROP_SMOOTHVIDEO | XA_RATEPROP_SILENTAUDIO);
       
   353             pSelf->curMirror = XA_VIDEOMIRROR_NONE;
       
   354             pSelf->curRotation = 0;
       
   355             pSelf->isobjsrc = XA_BOOLEAN_FALSE;
       
   356             pSelf->cameraSinkSynced = XA_BOOLEAN_FALSE;
       
   357 /*            if(pImageVideoSnk && ngaVideoSinkPtr)
       
   358             {
       
   359                setup_native_display(ngaVideoSinkPtr, pImageVideoSnk);
       
   360             }*/
       
   361         }
       
   362 
       
   363         if ( pDataSrc )
       
   364 		{
       
   365 			locType = *((XAuint32*)(pDataSrc->pLocator));
       
   366 			if ( locType == XA_DATALOCATOR_IODEVICE  )
       
   367 			{
       
   368 				ioDevice = (XADataLocator_IODevice*)(pDataSrc->pLocator);
       
   369 				if ( ioDevice->deviceType == XA_IODEVICE_CAMERA && !cameraRealized )
       
   370 				{
       
   371 					DEBUG_ERR("Preconditions violated - Camera object not realized");
       
   372 					XAAdaptationBase_Free(&pSelf->baseObj.baseObj);
       
   373 					free(pSelf);
       
   374 					pSelf = NULL;
       
   375 				}
       
   376 			}
       
   377 		}
       
   378     }
       
   379 
       
   380     DEBUG_API("<-XAMediaPlayerAdapt_Create");
       
    381     return pSelf ? (XAAdaptationBaseCtx*)&(pSelf->baseObj) : NULL;
       
   382 }
       
   383 
       
   384 
       
   385 
       
   386 /*
       
   387  * XAresult XAMediaPlayerAdapt_PostInit()
       
   388  * 2nd phase initialization of Media Player Adaptation Context
       
    389  * @param XAAdaptationGstCtx* bCtx - pointer to Media Player adaptation context (GStreamer base)
       
   390  * @return XAresult - Success value
       
   391  */
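        /* Rough sequence: init the base context, create the "media_player" pipeline bin,
         * attach the bus callback, build the element graph with
         * XAMediaPlayerAdapt_CreatePipeline(), then pre-roll the bin to PAUSED. If the
         * pre-roll does not complete, the unlinked audio or video branch is removed and
         * the pre-roll is retried once. */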
       
   392 XAresult XAMediaPlayerAdapt_PostInit( XAAdaptationGstCtx* bCtx )
       
   393 {
       
   394     XAresult ret = XA_RESULT_SUCCESS;
       
   395     GstStateChangeReturn gret;
       
   396 
       
   397 	XAMediaPlayerAdaptationCtx* ctx = NULL;
       
   398 	GstElement *videotest=NULL;
       
   399 
       
   400     DEBUG_API("->XAMediaPlayerAdapt_PostInit");
       
   401     if(bCtx == NULL || bCtx->baseObj.ctxId != XAMediaPlayerAdaptation )
       
   402     {
       
   403         DEBUG_ERR("Invalid parameter!!");
       
   404         DEBUG_API("<-XAMediaPlayerAdapt_PostInit");
       
   405         return XA_RESULT_PARAMETER_INVALID;
       
   406     }
       
   407     ctx = (XAMediaPlayerAdaptationCtx*)bCtx;
       
   408     assert(ctx);
       
   409     ret = XAAdaptationBase_PostInit( &bCtx->baseObj );
       
   410     if( ret!=XA_RESULT_SUCCESS )
       
   411     {
       
   412         DEBUG_ERR("Base context postinit failed!!");
       
   413         return ret;
       
   414     }
       
   415 
       
   416     /* top level bin for media player */
       
   417     ctx->baseObj.bin = gst_pipeline_new("media_player");
       
   418     /* Create Gst bus listener. */
       
   419     ret = XAAdaptationGst_InitGstListener(bCtx);
       
   420     if( ret!=XA_RESULT_SUCCESS )
       
   421     {
       
   422         DEBUG_ERR("Bus listener creation failed!!");
       
   423         return ret;
       
   424     }
       
   425     /* Add Media Player specific handler */
       
   426     if(ctx->baseObj.bus)
       
   427     {
       
   428         ctx->baseObj.busCb = XAMediaPlayerAdapt_GstBusCb;
       
   429         gst_bus_add_signal_watch( ctx->baseObj.bus );
       
   430         gst_bus_enable_sync_message_emission( ctx->baseObj.bus );
       
   431         g_signal_connect(ctx->baseObj.bus, "message::eos", G_CALLBACK(bCtx->busCb), ctx );
       
   432         g_signal_connect(ctx->baseObj.bus, "message::error", G_CALLBACK(bCtx->busCb), ctx );
       
   433         g_signal_connect(ctx->baseObj.bus, "message::warning", G_CALLBACK(bCtx->busCb), ctx );
       
   434         g_signal_connect(ctx->baseObj.bus, "message::state-changed", G_CALLBACK(bCtx->busCb), ctx );
       
   435         g_signal_connect(ctx->baseObj.bus, "message::segment-done", G_CALLBACK(bCtx->busCb), ctx );
       
   436         g_signal_connect(ctx->baseObj.bus, "message::async-done", G_CALLBACK(bCtx->busCb), ctx );
       
   437         g_signal_connect(ctx->baseObj.bus, "message::element", G_CALLBACK(bCtx->busCb), ctx );
       
   438     }
       
   439     else
       
   440     {
       
   441         DEBUG_ERR("Failed to create message bus");
       
   442         return XA_RESULT_INTERNAL_ERROR;
       
   443     }
       
   444 
       
   445     XAMetadataAdapt_PreInit(bCtx);
       
   446 
       
   447     /* create pipeline */
       
   448     ret = XAMediaPlayerAdapt_CreatePipeline(ctx);
       
   449     if ( ret != XA_RESULT_SUCCESS )
       
   450     {
       
   451         DEBUG_ERR("Failed to create Media Player pipeline");
       
   452         return ret;
       
   453     }
       
   454 
       
   455 #ifdef XA_IMPL_MEASURE_GST_DELAY
       
   456     ctx->baseObj.startTime = clock();
       
   457 #endif /* XA_IMPL_MEASURE_GST_DELAY */
       
   458     /* roll up bin */
       
   459     ctx->baseObj.binWantedState = GST_STATE_PAUSED;
       
   460 
       
   461     XAAdaptationGst_PrepareAsyncWait(bCtx);
       
   462     gret = gst_element_set_state( GST_ELEMENT(ctx->baseObj.bin), bCtx->binWantedState);
       
   463     if( gret == GST_STATE_CHANGE_ASYNC )
       
   464     {
       
   465         DEBUG_INFO("Wait for preroll");
       
   466         XAAdaptationGst_StartAsyncWait(bCtx);
       
   467         DEBUG_INFO("Preroll ready");
       
   468     }
       
   469     else if( gret == GST_STATE_CHANGE_FAILURE )
       
   470     {
       
   471         DEBUG_ERR("Preroll FAILED");
       
   472         ret = XA_RESULT_INTERNAL_ERROR;
       
   473     }
       
   474 
       
   475     bCtx->waitingasyncop = XA_BOOLEAN_FALSE;
       
   476 
       
   477     gret = gst_element_get_state( GST_ELEMENT(bCtx->bin), NULL, NULL, XA_ADAPT_ASYNC_TIMEOUT_SHORT_NSEC);
       
   478     if(GST_STATE(bCtx->bin)<GST_STATE_PAUSED)
       
   479     {
       
   480         DEBUG_INFO("Warning! Preroll not ready");
       
   481         if( ctx->audioppbin && !(gst_pad_is_linked(gst_element_get_static_pad(ctx->audioppbin, "sink"))) )
       
   482         {/*could not find suitable pad for audiopipeline - remove it*/
       
   483             DEBUG_INFO("Warning! No suitable decodebin pad for audio pipeline!");
       
   484             gst_element_set_state( GST_ELEMENT(ctx->audioppbin), GST_STATE_NULL);
       
   485             gst_bin_remove(GST_BIN(bCtx->bin), ctx->audioppbin);
       
   486             gst_element_set_state( GST_ELEMENT(ctx->audiosink), GST_STATE_NULL);
       
   487             gst_bin_remove(GST_BIN(bCtx->bin), ctx->audiosink);
       
   488         }
       
   489         else if( ctx->videoppbin && !(gst_pad_is_linked(gst_element_get_static_pad(ctx->videoppbin, "videopp_sink"))) )
       
   490         {/*could not find suitable pad for videopipeline - remove it*/
       
   491             DEBUG_INFO("Warning! No suitable decodebin pad for video pipeline!");
       
   492             gst_element_set_state( GST_ELEMENT(ctx->videoppbin), GST_STATE_NULL);
       
   493             gst_bin_remove(GST_BIN(bCtx->bin), ctx->videoppbin);
       
   494             gst_element_set_state( GST_ELEMENT(ctx->videosink), GST_STATE_NULL);
       
   495             gst_bin_remove(GST_BIN(bCtx->bin), ctx->videosink);
       
   496         }
       
   497 		gst_element_set_state( GST_ELEMENT(bCtx->bin), bCtx->binWantedState);
       
   498 		gst_element_get_state( GST_ELEMENT(bCtx->bin), NULL, NULL, XA_ADAPT_ASYNC_TIMEOUT_SHORT_NSEC);
       
   499 		if(GST_STATE(bCtx->bin)==GST_STATE_PAUSED)
       
   500 		{
       
   501 			DEBUG_INFO("Retry preroll successful!")
       
   502 			ret = XA_RESULT_SUCCESS;
       
   503 		}
       
   504     }
       
   505     else
       
   506     {
       
   507         DEBUG_INFO("Preroll ready");
       
   508     }
       
   509 
       
   510 #ifdef XA_IMPL_MEASURE_GST_DELAY
       
   511     bCtx->endTime = clock();
       
   512     double diff = bCtx->endTime - bCtx->startTime ;
       
   513     diff = diff / CLOCKS_PER_SEC;
       
   514     DEBUG_API_A1( "Starting up bin took %.4lf secs",diff);
       
   515 #endif /* XA_IMPL_MEASURE_GST_DELAY */
       
   516     videotest = gst_bin_get_by_name(GST_BIN(bCtx->bin), "videotest");
       
   517     if ( videotest && !ctx->isobjsrc )
       
   518     {
       
   519     	gst_element_set_state( GST_ELEMENT(videotest),GST_STATE_PLAYING);
       
   520     }
       
   521 
       
   522     XAMetadataAdapt_PostInit(bCtx);
       
   523 
       
   524     if ( videotest )
       
   525     {
       
   526         gst_object_unref(videotest);
       
   527     }
       
   528 
       
   529     DEBUG_API("<-XAMediaPlayerAdapt_PostInit");
       
   530     return ret;
       
   531 }
       
   532 
       
   533 /*
       
   534  * void XAMediaPlayerAdapt_Destroy( XAMediaPlayerAdaptationCtx* ctx )
       
   535  * Destroys Media Player Adaptation Context
       
   536  * @param ctx - Media Player Adaptation context to be destroyed
       
   537  */
       
   538 void XAMediaPlayerAdapt_Destroy( XAAdaptationGstCtx* bCtx )
       
   539 {
       
   540     XAMediaPlayerAdaptationCtx* ctx = NULL;
       
   541 
       
   542     DEBUG_API("->XAMediaPlayerAdapt_Destroy");
       
   543     if(bCtx == NULL || bCtx->baseObj.ctxId != XAMediaPlayerAdaptation )
       
   544     {
       
   545         DEBUG_ERR("Invalid parameter!!");
       
   546         DEBUG_API("<-XAMediaPlayerAdapt_Destroy");
       
   547         return;
       
   548     }
       
   549     ctx = (XAMediaPlayerAdaptationCtx*)bCtx;
       
   550 
       
   551     if( ctx->isobjsrc )
       
    552     {   /* external source: unlink it now, the shared source element itself is not removed here */
       
   553         /*gst_object_unparent( GST_OBJECT(ctx->source) );*/
       
   554         gst_element_unlink( ctx->source, ctx->codecbin );
       
   555         /*gst_bin_remove( GST_BIN(bCtx->bin), ctx->source );*/
       
   556         GST_OBJECT_FLAG_SET(GST_OBJECT(ctx->source),GST_OBJECT_FLOATING);
       
   557     }
       
   558 #if 0
       
   559     if ( ctx->xaSource )
       
   560     {
       
   561     	XAuint32 locType = *(XAuint32*)(ctx->xaSource->pLocator);
       
   562     	switch (locType )
       
   563     	{
       
   564 		case XA_DATALOCATOR_IODEVICE:
       
   565 		{
       
   566 			XADataLocator_IODevice* ioDevice = (XADataLocator_IODevice*)(ctx->xaSource->pLocator);
       
   567 			if ( ioDevice->deviceType == XA_IODEVICE_RADIO )
       
   568 			{
       
   569 				gst_object_unparent( GST_OBJECT(ctx->source) );
       
   570 				gst_element_unlink( ctx->source, ctx->codecbin );
       
   571 				gst_bin_remove( GST_BIN(bCtx->bin), ctx->source );
       
   572 				GST_OBJECT_FLAG_SET(GST_OBJECT(ctx->source),GST_OBJECT_FLOATING);
       
   573 			}
       
   574 		}
       
   575 		default:
       
   576 			break;
       
   577     	}
       
   578     }
       
   579 #endif
       
   580 
       
   581     if( ctx->isobjasink && ctx->xaAudioSink && ctx->xaAudioSink->pLocator )
       
   582     {
       
   583         XAuint32 locType = *(XAuint32*)(ctx->xaAudioSink->pLocator);
       
   584         switch ( locType )
       
   585         {
       
   586             case XA_DATALOCATOR_OUTPUTMIX:
       
   587             {
       
   588                 XADataLocator_OutputMix* omix = (XADataLocator_OutputMix*)(ctx->xaAudioSink->pLocator);
       
   589                 XAOMixImpl* omixDevice = (XAOMixImpl*)(*omix->outputMix);
       
   590                 if(omixDevice)
       
   591                 {
       
   592                     XAOutputMixAdapt_DisconnectObject((XAAdaptationGstCtx*)omixDevice->adaptationCtx, bCtx);
       
   593                 }
       
   594                 break;
       
   595             }
       
   596             default:
       
   597                 /* Vibra and LED need no handling */
       
   598                 break;
       
   599         }
       
   600 
       
   601     }
       
   602 
       
   603     if( bCtx->bus )
       
   604     {
       
   605         gst_bus_remove_signal_watch( bCtx->bus );
       
   606         gst_bus_disable_sync_message_emission ( bCtx->bus );
       
   607     }
       
   608     XAAdaptationGst_CancelAsyncWait(bCtx);
       
   609 
       
   610     if( ctx->runpositiontimer )
       
   611     {
       
   612         g_source_remove(ctx->runpositiontimer);
       
   613     }
       
   614     XAMetadataAdapt_FreeVars(ctx->metadatavars);
       
   615     XAAdaptationBase_Free( &bCtx->baseObj );
       
   616     free(ctx);
       
   617     ctx = NULL;
       
   618 
       
   619     DEBUG_API("<-XAMediaPlayerAdapt_Destroy");
       
   620 }
       
   621 
       
   622 
       
   623 /*
       
    624  * XAresult XAMediaPlayerAdapt_CreatePipeline( XAMediaPlayerAdaptationCtx* ctx )
       
   625  */
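        /* Pipeline layout built here (branches depend on the media type):
         *   source -> codecbin (decodebin / identity / audioparse / videoparse)
         *   codecbin -> videoppbin -> videosink   (an input-selector is also created
         *                                          for black-screen switching)
         *   codecbin -> audioppbin -> audiosink
         * Links into decodebin may be deferred until its pads appear; in that case
         * XAMediaPlayerAdapt_NewPadCb() completes them. */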
       
   626 XAresult XAMediaPlayerAdapt_CreatePipeline( XAMediaPlayerAdaptationCtx* ctx )
       
   627 {
       
   628     XAresult ret = XA_RESULT_SUCCESS;
       
   629     XAboolean delayedlink = XA_BOOLEAN_FALSE;
       
   630     XAboolean isPCM = XA_BOOLEAN_FALSE;
       
   631     XAboolean isRawImage = XA_BOOLEAN_FALSE;
       
   632     XAMediaType mediatype;
       
   633     XAuint32 locType = 0;
       
   634     GstCaps* encSrcCaps = NULL;
       
   635     DEBUG_API("->XAMediaPlayerAdapt_CreatePipeline");
       
   636 
       
   637     /* create and add data source */
       
   638     XACommon_CheckDataSource(ctx->xaSource, &mediatype);
       
   639     ctx->source = XAAdaptationGst_CreateGstSource( ctx->xaSource, "datasrc", &(ctx->isobjsrc), &isPCM, &isRawImage);
       
   640     if( !(ctx->source) )
       
   641     {
       
   642         DEBUG_ERR("Could not create data source!!!");
       
   643         return XA_RESULT_INTERNAL_ERROR;
       
   644     }
       
   645     else
       
   646     {
       
   647         if(mediatype != XA_MEDIATYPE_AUDIO)
       
   648         {
       
   649           //temporary work around for video
       
   650     	  encSrcCaps = gst_caps_new_simple ("video/h263-2000",
       
   651                 "framerate", GST_TYPE_FRACTION, 25, 1,
       
   652                 "pixel-aspect-ratio", GST_TYPE_FRACTION, 16, 9,
       
   653                 "width", G_TYPE_INT, 176,
       
   654                 "height", G_TYPE_INT, 144,
       
   655                 NULL);
       
   656           g_object_set(G_OBJECT(ctx->source), "caps", encSrcCaps, NULL);
       
   657         }
       
   658 
       
   659         //boolRetVal = gst_bin_add(GST_BIN(pipeline), appsrc);
       
   660     	
       
   661     	
       
   662     }
       
   663 
       
   664     if ( !ctx->isobjsrc )
       
   665     { /* Add other than camera source to media player bin */
       
   666     	DEBUG_INFO("No camera source");
       
   667     	gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->source);
       
   668     }
       
   669     else
       
   670     {
       
   671        GstCaps* encSrcCaps;
       
   672        encSrcCaps = gst_caps_new_simple("video/x-raw-yuv",
       
   673                    "format", GST_TYPE_FOURCC,GST_MAKE_FOURCC('I','4','2','0'),
       
   674                    "framerate", GST_TYPE_FRACTION, 30, 1,
       
   675                    NULL);
       
   676        DEBUG_INFO_A1("new camera encoding filter: %s",gst_caps_to_string(encSrcCaps));
       
   677        g_object_set( G_OBJECT(ctx->source), "filter-caps",encSrcCaps,NULL);
       
   678        gst_caps_unref(encSrcCaps);
       
   679     }
       
   680 
       
   681     /* create and add codec bin */
       
   682     if( !(ctx->isobjsrc || isPCM) )
       
   683     {
       
   684     	DEBUG_INFO("Create decodebin");
       
   685         if(mediatype == XA_MEDIATYPE_AUDIO)
       
   686         {
       
   687            ctx->codecbin = gst_element_factory_make( "decodebin" , "mpcodecbin" );
       
   688         }
       
   689         else
       
   690         {
       
   691            ctx->codecbin = gst_element_factory_make( "identity" , "mpcodecbin" );
       
   692         }
       
   693     }
       
   694     else if(ctx->isobjsrc )
       
   695     { /* object sources produce framed raw data, decodebin only causes trouble */ //shyward
       
   696     	DEBUG_INFO("Create identity")
       
   697         ctx->codecbin = gst_element_factory_make( "identity" , "mpcodecbin" );
       
   698     }
       
   699     else if(isPCM)
       
   700     { /* decodebin does not know how to handle PCM files */
       
   701     	DEBUG_INFO("Create audioparse")
       
   702         ctx->codecbin = gst_element_factory_make( "audioparse" , "mpcodecbin" );
       
   703     }
       
   704     else if ( isRawImage)
       
   705     { /* decodebin does not know how to handle raw unframed video data */
       
   706     	DEBUG_INFO("Create videoparse")
       
   707         ctx->codecbin = gst_element_factory_make( "videoparse", "mpcodecbin" );
       
   708     }
       
   709 
       
   710     if( ctx->codecbin )
       
   711     {
       
   712         gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->codecbin);
       
   713         if ( !ctx->isobjsrc )
       
   714         {
       
   715             if(mediatype == XA_MEDIATYPE_AUDIO)
       
   716             {
       
   717                if( !gst_element_link(ctx->source, ctx->codecbin) )
       
   718                {
       
   719                     DEBUG_ERR("Could not link source to decodebin!!");
       
   720                     return XA_RESULT_INTERNAL_ERROR;
       
   721                }
       
   722             }
       
   723             else
       
   724             {
       
   725 			   if( !gst_element_link_filtered(ctx->source, ctx->codecbin, encSrcCaps ) )
       
   726 			   {
       
   727 			    	DEBUG_ERR("Could not link source to decodebin!!");
       
   728 				    return XA_RESULT_INTERNAL_ERROR;
       
   729 			   }
       
   730             }
       
   731         }
       
   732         else
       
   733         { /* Link camera source by using ghost-pads, because elements are in different bins */
       
   734 
       
   735         	GstPad *cameraBinGhostPad=NULL;
       
   736         	GstPad* ghost=NULL;
       
   737         	GstElement *camTee=NULL;
       
   738         	GstStateChangeReturn gret;
       
   739         	GstPad *mpGhostSink=NULL;
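        	/* Steps: put the external camera source to READY, request a new src pad
        	 * from its "CamTee" tee and ghost it as "MPObjSrc" (blocked for now),
        	 * ghost the codec bin sink pad on this bin as "MPObjSink", then link the
        	 * two ghost pads together. */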
       
   740 
       
   741         	/* Set external camera source to ready for pipeline manipulation */
       
    742         	DEBUG_INFO("Set ext-source READY for pipeline manipulation");

    743 			gret = gst_element_set_state( GST_ELEMENT(ctx->source), GST_STATE_READY);

    744 			if(gret == GST_STATE_CHANGE_SUCCESS)

    745 			{

    746 				gret = gst_element_get_state( GST_ELEMENT(ctx->source), NULL,NULL,XA_ADAPT_ASYNC_TIMEOUT_SHORT_NSEC);

    747 			}
       
   748 
       
   749 			/* Add new ghost-pad to external camera source */
       
   750         	camTee = gst_bin_get_by_name( GST_BIN(ctx->source), "CamTee");
       
   751         	if ( !camTee )
       
   752         	{
       
   753         		DEBUG_ERR("Could not get tee-element from camera");
       
   754         	}
       
   755         	cameraBinGhostPad = gst_element_get_request_pad( camTee, "src%d" );
       
   756         	if ( !cameraBinGhostPad )
       
   757         	{
       
   758         		DEBUG_ERR("Could not get new src-pad from CamTee element");
       
   759         	}
       
   760 			gst_element_add_pad(ctx->source, gst_ghost_pad_new("MPObjSrc",cameraBinGhostPad));
       
   761 			ghost = gst_element_get_static_pad( GST_ELEMENT(ctx->source), "MPObjSrc" );
       
   762 			DEBUG_INFO_A2("Setting element:%s pad:%s to blocking.",
       
   763 							gst_element_get_name(ctx->baseObj.bin),
       
   764 							gst_pad_get_name(ghost));
       
   765 			/* Set newly created pad to blocking */
       
   766 			gst_pad_set_blocked_async(ghost, TRUE, XAAdaptationGst_PadBlockCb, NULL);
       
   767 
       
   768 
       
   769 			/* Create new ghost-pad to media player pipeline where external camera is connected */
       
   770         	mpGhostSink = gst_element_get_static_pad( GST_ELEMENT(ctx->codecbin), "sink");
       
   771 			gst_element_add_pad(ctx->baseObj.bin, gst_ghost_pad_new("MPObjSink",mpGhostSink));
       
   772 
       
   773 			if ( !gst_element_link_pads( GST_ELEMENT(ctx->source), "MPObjSrc",
       
   774 										GST_ELEMENT(ctx->baseObj.bin), "MPObjSink") )
       
   775 			{
       
   776 				DEBUG_ERR("Could not link camera:MPObjSrc to videofilter:MPObjSink");
       
   777 				return XA_RESULT_INTERNAL_ERROR;
       
   778 			}
       
   779 
       
   780 			if ( cameraBinGhostPad )
       
   781 			{
       
   782 				gst_object_unref( cameraBinGhostPad );
       
   783 			}
       
   784 			if ( ghost )
       
   785 			{
       
   786 				gst_object_unref( ghost );
       
   787 			}
       
   788 			if ( mpGhostSink )
       
   789 			{
       
   790 				gst_object_unref( mpGhostSink );
       
   791 			}
       
   792 			if ( camTee )
       
   793 			{
       
   794 				gst_object_unref( camTee );
       
   795 			}
       
   796         }
       
   797     }
       
   798     else
       
   799     {
       
   800         DEBUG_ERR("Could not create decoder bin!!!");
       
   801         return XA_RESULT_INTERNAL_ERROR;
       
   802     }
       
   803 
       
   804     /* create and add video stream pipeline */
       
   805     if(!ctx->xaLEDArray && !ctx->xaVibra && mediatype!=XA_MEDIATYPE_AUDIO) /*no video for these*/
       
   806     {
       
   807         /* create video processing pipeline */
       
   808         ctx->videoppbin = XAAdaptationGst_CreateVideoPP( );
       
   809         if( ctx->videoppbin )
       
   810         {
       
   811             gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->videoppbin);
       
    812             /* shyward: filtered link used here (see the encSrcCaps workaround above) */
       
   813             // boolRetVal = gst_element_link_filtered(appsrc, videosink, caps);
       
   814 			      //if(!gst_element_link(ctx->codecbin, ctx->videoppbin))
       
   815             if(!gst_element_link_filtered(ctx->codecbin, ctx->videoppbin,encSrcCaps))
       
   816             {
       
   817                 /* probably dynamic pads in codecbin */
       
   818                 DEBUG_INFO("Could not link codec to videopp, trying delayed link");
       
   819                 delayedlink = XA_BOOLEAN_TRUE;
       
   820             }
       
   821             ctx->videoScrSrcPad = gst_element_get_static_pad(ctx->videoppbin, "videopp_src");
       
   822         }
       
   823         else
       
   824         {
       
   825             DEBUG_ERR("Could not create video pp bin!!!!");
       
   826             return XA_RESULT_INTERNAL_ERROR;
       
   827         }
       
   828         //shyward
       
   829         /* Black screen pipeline not needed under Symbian. May need to revist for acceptance testing
       
   830         ctx->videoppBScrbin = XAAdaptationBase_CreateVideoPPBlackScr( );
       
   831         if( ctx->videoppBScrbin )
       
   832         {
       
   833             gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->videoppBScrbin);
       
   834             ctx->blackScrSrcPad = gst_element_get_static_pad(ctx->videoppBScrbin, "videoppBSrc_src");
       
   835         }
       
   836         else
       
   837         {
       
   838             DEBUG_ERR("Could not create video pp bin for black screen!!!!");
       
   839             return XA_RESULT_INTERNAL_ERROR;
       
   840         }
       
   841         */
       
   842         ctx->inputSelector = XAAdaptationGst_CreateInputSelector( );
       
   843         if( ctx->inputSelector )
       
   844         {
       
   845             gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->inputSelector);
       
   846             ctx->videoScrSinkPad = gst_element_get_request_pad(ctx->inputSelector, "sink%d");
       
   847             ctx->blackScrSinkPad = gst_element_get_request_pad(ctx->inputSelector, "sink%d");
       
   848             gst_pad_link(ctx->blackScrSrcPad, ctx->blackScrSinkPad);
       
   849             gst_pad_link(ctx->videoScrSrcPad, ctx->videoScrSinkPad);
       
   850         }
       
   851 
       
   852         //shyward - We have no video filter at this time
       
   853         /*
       
   854         ctx->filter = gst_element_factory_make("ffmpegcolorspace", "videofilter");
       
   855         gst_bin_add( GST_BIN(ctx->baseObj.bin), ctx->filter);
       
   856         if ( !gst_element_link( ctx->inputSelector, ctx->filter ) )
       
   857         {
       
   858 		   DEBUG_ERR("Could not link ctx->filter <-> ctx->inputSelector");
       
   859 		   return XA_RESULT_INTERNAL_ERROR;
       
   860 	   }
       
   861 	   */
       
   862         /* create video pipe sink */
       
   863         ctx->videosink = XAAdaptationGst_CreateGstSink( ctx->xaVideoSink, "videosink", &(ctx->isobjvsink) );
       
   864         /* NOTE: no valid object sinks for video output available */
       
   865         if( ctx->videosink )
       
   866         {
       
   867             gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->videosink);
       
   868 
       
   869             //shyward
       
   870             //if(!gst_element_link(ctx->filter, ctx->videosink))
       
   871             if(!gst_element_link_filtered(ctx->videoppbin, ctx->videosink,encSrcCaps))
       
   872             {
       
   873                 DEBUG_ERR("Could not link videopp to videosink!!");
       
   874                 return XA_RESULT_INTERNAL_ERROR;
       
   875             }
       
   876             else
       
   877             {
       
   878             	gst_caps_unref(encSrcCaps);
       
   879             }
       
   880         }
       
   881         else
       
   882         {
       
   883             DEBUG_ERR("Could not create video sink!!!!");
       
   884             return XA_RESULT_INTERNAL_ERROR;
       
   885         }
       
   886     }
       
   887     else
       
   888     {
       
   889         DEBUG_INFO("Media does not contain video!");
       
   890     }
       
   891 
       
   892     /* create and add audio stream pipeline */
       
   893 
       
   894     if(!ctx->xaLEDArray && !ctx->xaVibra && mediatype!=XA_MEDIATYPE_IMAGE) /*no audio for these*/
       
   895     {
       
   896     /* create audio post processing pipeline */
       
   897     ctx->audioppbin = XAAdaptationGst_CreateAudioPP( );
       
   898     if( ctx->audioppbin )
       
   899     {
       
   900         gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->audioppbin);
       
   901         if(!gst_element_link(ctx->codecbin, ctx->audioppbin))
       
   902         {
       
   903             DEBUG_INFO("Could not link codec to audiopp, trying delayed link");
       
   904             delayedlink = XA_BOOLEAN_TRUE;
       
   905         }
       
   906     }
       
   907     else
       
   908     {
       
   909         DEBUG_ERR("Could not create audio pp bin!!!!");
       
   910         return XA_RESULT_INTERNAL_ERROR;
       
   911     }
       
   912     /* create audio pipe sink */
       
   913     ctx->audiosink = XAAdaptationGst_CreateGstSink( ctx->xaAudioSink, "audiosink", &(ctx->isobjasink) );
       
   914     if( ctx->audiosink )
       
   915     {
       
   916         if( ctx->isobjasink && ctx->xaAudioSink && ctx->xaAudioSink->pLocator )
       
   917         {
       
   918             locType = *(XAuint32*)(ctx->xaAudioSink->pLocator);
       
   919             switch ( locType )
       
   920             {
       
   921                 case XA_DATALOCATOR_OUTPUTMIX:
       
   922                 {
       
   923                     XADataLocator_OutputMix* omix = (XADataLocator_OutputMix*)(ctx->xaAudioSink->pLocator);
       
   924                     XAOMixImpl* omixDevice = (XAOMixImpl*)(*omix->outputMix);
       
   925                     if(omixDevice)
       
   926                     {
       
   927                         XAOutputMixAdapt_ConnectObject((XAAdaptationGstCtx*)omixDevice->adaptationCtx, &(ctx->baseObj), ctx->audiosink);
       
   928                     }
       
   929                     break;
       
   930                 }
       
   931                 default:
       
   932                     /* Vibra and LED need no handling */
       
   933                     break;
       
   934             }
       
   935         }
       
   936         gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->audiosink);
       
   937         if(!gst_element_link(ctx->audioppbin, ctx->audiosink))
       
   938         {
       
   939             DEBUG_ERR("Could not link audiopp to audiosink!!");
       
   940             return XA_RESULT_INTERNAL_ERROR;
       
   941         }
       
   942     }
       
   943     else
       
   944     {
       
   945         DEBUG_ERR("Could not create audio sink!!!!");
       
   946         return XA_RESULT_INTERNAL_ERROR;
       
    947     }
       
   948     }
       
   949     else
       
   950     {
       
   951         DEBUG_INFO("Media does not contain audio!");
       
   952     }
       
   953 
       
   954     if(delayedlink)
       
   955     {
       
   956         /* listen for dynamically created pads */
       
   957         g_signal_connect (ctx->codecbin, "pad-added", G_CALLBACK (XAMediaPlayerAdapt_NewPadCb), ctx);
       
   958     }
       
   959     locType = *((XAuint32*)(ctx->xaSource->pLocator));
       
   960 
       
   961     DEBUG_API("<-XAMediaPlayerAdapt_CreatePipeline");
       
   962     return ret;
       
   963 }
       
   964 
       
   965 /*
       
   966  * gboolean XAMediaPlayerAdapt_PositionUpdate(gpointer ctx)
       
    967  * Periodic timer callback.

    968  * If position tracking is enabled, the periodic timer calls this method every XA_ADAPT_PU_INTERVAL msec.

    969  * @return FALSE to stop the periodic calls
       
   970  */
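        /* Note: the return value feeds the g_timeout_add() source created in
         * XAMediaPlayerAdapt_UpdatePositionCbTimer(); returning mCtx->runpositiontimer
         * (non-zero while the timer is armed) keeps the periodic callback alive,
         * 0 stops it. */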
       
   971 gboolean XAMediaPlayerAdapt_PositionUpdate(gpointer ctx)
       
   972 {
       
   973     XAAdaptationGstCtx *bCtx = (XAAdaptationGstCtx*) ctx;
       
   974     XAMediaPlayerAdaptationCtx* mCtx = (XAMediaPlayerAdaptationCtx*) ctx;
       
   975     gint64 position;
       
   976     XAmillisecond posMsec;
       
   977     GstFormat format = GST_FORMAT_TIME;
       
   978     XAAdaptEvent event = {XA_PLAYITFEVENTS, XA_ADAPT_POSITION_UPDATE_EVT, 1, NULL};
       
   979 
       
   980     DEBUG_API("->XAMediaPlayerAdapt_PositionUpdate");
       
   981     if ( !gst_element_query_position( GST_ELEMENT(bCtx->bin), &format, &position ) )
       
   982     {
       
   983         DEBUG_ERR("Gst: Failed to get position");
       
   984         return( mCtx->runpositiontimer );
       
   985     }
       
   986     DEBUG_INFO_A1("Current position %"GST_TIME_FORMAT, GST_TIME_ARGS(position));
       
   987     if( mCtx && mCtx->trackpositionenabled )
       
   988     {
       
   989         posMsec = GST_TIME_AS_MSECONDS(position);/*Warning ok due to used API specification*/
       
    990         DEBUG_INFO_A1("mCtx->trackpositionenabled sending update, position:%u ", posMsec);
       
   991         /* send needed events */
       
   992         event.data=&posMsec;
       
   993         XAAdaptationBase_SendAdaptEvents(&bCtx->baseObj, &event );
       
   994     }
       
   995     if( mCtx && mCtx->loopingenabled)
       
   996     {
       
    997     	DEBUG_INFO_A2("mCtx->loopingenabled, current position:%" G_GINT64_FORMAT ", loopend:%" G_GINT64_FORMAT " ", position, mCtx->loopend);
       
   998         if( (position >= mCtx->loopend) &&
       
   999             (mCtx->lastpos < mCtx->loopend) )
       
  1000         {
       
  1001             DEBUG_INFO_A2("Restart loop from %"GST_TIME_FORMAT" to %"GST_TIME_FORMAT,
       
  1002                           GST_TIME_ARGS(mCtx->loopstart), GST_TIME_ARGS(mCtx->loopend));
       
  1003             gst_element_seek( bCtx->bin, mCtx->playrate, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE),
       
  1004                               GST_SEEK_TYPE_SET, mCtx->loopstart,
       
  1005                               GST_SEEK_TYPE_NONE, (gint64)GST_CLOCK_TIME_NONE );
       
  1006             mCtx->lastpos = mCtx->loopstart;
       
  1007             if( mCtx && mCtx->trackpositionenabled )
       
  1008             {
       
   1009                 posMsec = GST_TIME_AS_MSECONDS(mCtx->lastpos);/*Warning ok due to used API specification*/

   1010                 DEBUG_INFO_A1("mCtx->trackpositionenabled sending looping update, position:%u ", posMsec);
       
  1011                 /* send needed events */
       
  1012                 event.data=&posMsec;
       
  1013                 XAAdaptationBase_SendAdaptEvents(&bCtx->baseObj, &event );
       
  1014             }
       
  1015         }
       
  1016         else
       
  1017         {
       
  1018             mCtx->lastpos = position;
       
  1019         }
       
  1020     }
       
  1021     DEBUG_API("<-XAMediaPlayerAdapt_PositionUpdate");
       
  1022     /* return false to stop timer */
       
  1023     return( mCtx->runpositiontimer );
       
  1024 }
       
  1025 
       
  1026 /*
       
  1027  * XAresult XAMediaPlayerAdapt_UpdatePositionCbTimer
       
  1028  * Enable/disable periodic position tracking callback timer
       
  1029  */
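        /* Note: the timer is started here only if the bin is already PLAYING; otherwise
         * it is armed from the state-changed handler in XAMediaPlayerAdapt_GstBusCb()
         * once the transition to PLAYING is observed. */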
       
  1030 XAresult XAMediaPlayerAdapt_UpdatePositionCbTimer(XAMediaPlayerAdaptationCtx* mCtx)
       
  1031 {
       
  1032     DEBUG_API_A2("->XAMediaPlayerAdapt_UpdatePositionCbTimer: trackposition %u, tracklooping %u",
       
  1033                 mCtx->trackpositionenabled, mCtx->loopingenabled);
       
  1034 
       
  1035     if(mCtx->runpositiontimer==0 && (mCtx->trackpositionenabled || mCtx->loopingenabled))
       
  1036     {
       
  1037         DEBUG_INFO("Start position tracking timer");
       
  1038         mCtx->positionCb = &XAMediaPlayerAdapt_PositionUpdate;
       
  1039         /* if play is already on, create a timer to track position of playback */
       
  1040         if( GST_STATE(mCtx->baseObj.bin) == GST_STATE_PLAYING )
       
  1041         {
       
  1042             mCtx->runpositiontimer = g_timeout_add(XA_ADAPT_PU_INTERVAL, mCtx->positionCb, mCtx);
       
  1043         }
       
  1044     }
       
  1045     else if (mCtx->runpositiontimer!=0 && !(mCtx->trackpositionenabled || mCtx->loopingenabled))
       
  1046     {
       
  1047         DEBUG_INFO("Stop position tracking timer");
       
  1048         mCtx->trackpositionenabled = XA_BOOLEAN_FALSE;
       
  1049         if(mCtx->runpositiontimer > 0)
       
  1050         {
       
  1051             g_source_remove(mCtx->runpositiontimer);
       
  1052             mCtx->runpositiontimer=0;
       
  1053         }
       
  1054     }
       
  1055     DEBUG_API("<-XAMediaPlayerAdapt_UpdatePositionCbTimer");
       
  1056     return XA_RESULT_SUCCESS;
       
  1057 }
       
  1058 
       
  1059 /*
       
  1060  * XAresult XAMediaPlayerAdapt_InitContentPipeSrc(ctx)
       
  1061  * CP code: can be moved to context base
       
  1062  */
       
  1063 /*XAresult XAMediaPlayerAdapt_InitContentPipeSrc(XAMediaPlayerAdaptationCtx* ctx)
       
  1064 {
       
  1065     XAresult ret = XA_RESULT_SUCCESS;
       
  1066     CPresult res;
       
  1067     GstStateChangeReturn gstRet = GST_STATE_CHANGE_SUCCESS;
       
  1068     DEBUG_API("->XAMediaPlayerAdapt_InitContentPipeSrc");
       
  1069     ctx->baseObj.pipeSrcThrCtx.appSrc = GST_APP_SRC(ctx->source);
       
  1070     ctx->baseObj.pipeSrcThrCtx.pipe = (XADataLocator_ContentPipe*)(ctx->xaSource->pLocator);
       
  1071 
       
  1072      Create thread for content pipe source
       
  1073     ret = XAImpl_CreateThreadHandle( &(ctx->baseObj.pipeSrcThr) );
       
  1074     if ( ret != XA_RESULT_SUCCESS )
       
  1075     {
       
  1076         DEBUG_ERR("Could not create thread for content pipe source!");
       
  1077         DEBUG_API("<-XAMediaPlayerAdapt_InitContentPipeSrc");
       
  1078         return ret;
       
  1079     }
       
  1080 
       
  1081      Create semaphore for content pipe source 
       
  1082     ret = XAImpl_CreateSemaphore( &(ctx->baseObj.pipeSrcThrCtx.stateSem));
       
  1083     if ( ret != XA_RESULT_SUCCESS )
       
  1084     {
       
  1085         DEBUG_ERR("Could not create semaphore for content pipe source!");
       
  1086         DEBUG_API("<-XAMediaPlayerAdapt_InitContentPipeSrc");
       
  1087         return ret;
       
  1088     }
       
  1089 
       
  1090      Open content pipe 
       
  1091     res = ctx->baseObj.pipeSrcThrCtx.pipe->pContentPipe->Open(&(ctx->baseObj.pipeSrcThrCtx.dataHandle),
       
  1092                                                 (CPstring)(ctx->baseObj.pipeSrcThrCtx.pipe->URI),
       
  1093                                                 CP_AccessRead );
       
  1094     if ( res ==  EXIT_FAILURE )
       
  1095     {
       
  1096         DEBUG_ERR("Could not open Content Pipe!")
       
  1097         return XA_RESULT_INTERNAL_ERROR;
       
  1098     }
       
  1099 
       
  1100     res = ctx->baseObj.pipeSrcThrCtx.pipe->pContentPipe->RegisterCallback( &(ctx->baseObj.pipeSrcThrCtx.dataHandle), &XAAdaptationBase_ContentPipeSrcCb);
       
  1101     if ( res == EXIT_FAILURE )
       
  1102     {
       
  1103         DEBUG_ERR("Could not register content pipe callbacks!")
       
  1104         return XA_RESULT_INTERNAL_ERROR;
       
  1105     }
       
  1106 
       
  1107     gstRet = gst_element_set_state( GST_ELEMENT(ctx->source), GST_STATE_PAUSED);
       
  1108     gst_element_sync_state_with_parent( GST_ELEMENT( ctx->source));
       
  1109 
       
  1110     XAImpl_StartThread( &(ctx->baseObj.pipeSrcThr), NULL, &XAAdaptationBase_ContentPipeScrThrFunc, &(ctx->baseObj.pipeSrcThrCtx) );
       
  1111 
       
  1112     DEBUG_API("<-XAMediaPlayerAdapt_InitContentPipeSrc");
       
  1113     return ret;
       
  1114 }*/