khronosfws/openmax_al/src/adaptation/xamediaplayeradaptctx.c
changeset 16 43d09473c595
parent 14 80975da52420
child 22 128eb6a32b84
     1 /*
       
     2 * Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
       
     3 * All rights reserved.
       
     4 * This component and the accompanying materials are made available
       
     5 * under the terms of "Eclipse Public License v1.0"
       
     6 * which accompanies this distribution, and is available
       
     7 * at the URL "http://www.eclipse.org/legal/epl-v10.html".
       
     8 *
       
     9 * Initial Contributors:
       
    10 * Nokia Corporation - initial contribution.
       
    11 *
       
    12 * Contributors:
       
    13 *
       
     14 * Description: GStreamer-based media player adaptation for the OpenMAX AL implementation
       
    15 *
       
    16 */
       
    17 
       
    18 #include <assert.h>
       
    19 #include <stdlib.h>
       
    20 #include <gstappsrc.h>
       
    21 #include <gst.h>
       
    22 #include "XAMediaPlayerAdaptCtx.h"
       
    23 #include "XAMediaPlayerAdaptCtxMMF.h"
       
    24 #include "XAAdaptation.h"
       
    25 #include "XAObjectItf.h"
       
    26 #include "XACameraDevice.h"
       
    27 #include "XAOutputMix.h"
       
    28 #include "XAMetadataAdaptation.h"
       
    29 #include "XANGAVideoSink.h"
       
    30 
       
    31 static void need_data_for_prerolling (GstElement * pipeline, guint size, XAMediaPlayerAdaptationCtx* ctx );
       
    32 static void  push_data_for_prerolling (GstElement * pipeline, GstBuffer *buffer, XAMediaPlayerAdaptationCtx* ctx);
       
    33 static void  enough_data_for_prerolling (GstElement * pipeline, XAMediaPlayerAdaptationCtx* ctx);
       
    34 /* forward declarations */
       
    35 XAresult XAMediaPlayerAdapt_CreatePipeline( XAMediaPlayerAdaptationCtx* ctx );
       
    36 
       
    37 void* ngaVideoSinkPtr = NULL;
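/* Handle to the NGA video sink wrapper: created in XAMediaPlayerAdapt_Create() when an
   image/video sink is supplied, and used from the bus callback to forward
   graphics-surface messages to the sink. */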
       
    38 
       
    39 
       
    40 extern XAboolean cameraRealized;
       
    41 extern XACameraAdaptationCtx_* cameraCtx;
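/* Shared with the camera adaptation: a camera IO device is accepted as a data source
   only once the camera object has been realized. */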
       
    42 
       
    43 /*
       
    44  * static void XAMediaPlayerAdapt_NewPadCb (GstElement *element, GstPad *pad,  gpointer data)
       
    45  * Listen to codec bin dynamic pads
       
    46  */
       
    47 static void XAMediaPlayerAdapt_NewPadCb (GstElement *element, GstPad *pad,  gpointer data)
       
    48 {
       
    49   XAMediaPlayerAdaptationCtx* mCtx = (XAMediaPlayerAdaptationCtx*)data;
       
    50   gchar *name = gst_pad_get_name (pad);
       
    51   DEBUG_API_A1("->XAMediaPlayerAdapt_NewPadCb: A new pad %s was created", name);
       
     52   /* try to link the newly created pad to the video pipe first */
       
    53   if( mCtx->videoppbin && !(gst_pad_is_linked(gst_element_get_static_pad(mCtx->videoppbin, "videopp_sink"))) )
       
    54   {
       
    55       if(gst_element_link_pads (mCtx->codecbin, name, mCtx->videoppbin, "videopp_sink"))
       
    56       {
       
    57           DEBUG_INFO_A1("Pads linked, codecbin:%s to videopp:sink", name);
       
    58           g_free (name);
       
    59           DEBUG_API("<-XAMediaPlayerAdapt_NewPadCb");
       
    60           return;
       
    61       }
       
    62   }
       
     63   /* ...and then to the audio pipe */
       
    64   if( mCtx->audioppbin && !(gst_pad_is_linked(gst_element_get_static_pad(mCtx->audioppbin, "sink"))) )
       
    65   {
       
    66       if(gst_element_link_pads (mCtx->codecbin, name, mCtx->audioppbin, "sink"))
       
    67       {
       
    68           DEBUG_INFO_A1("Pads linked, codecbin:%s to audiopp:sink", name);
       
    69           g_free (name);
       
    70           DEBUG_API("<-XAMediaPlayerAdapt_NewPadCb");
       
    71           return;
       
    72       }
       
    73   }
       
    74 
       
    75   g_free (name);
       
    76   DEBUG_INFO("Warning: Could not find anything to link to new pad.");
       
    77   DEBUG_API("<-XAMediaPlayerAdapt_NewPadCb");
       
    78 }
       
    79 
       
    80 /*
       
    81  * void  push_data_for_prerolling (GstElement * pipeline, GstBuffer *buffer, XAMediaPlayerAdaptationCtx* ctx)
       
    82  * Called when "push-buffer" signal is emitted
       
    83  */
       
    84 void  push_data_for_prerolling (GstElement * pipeline, GstBuffer *buffer, XAMediaPlayerAdaptationCtx* ctx)
       
    85 {
       
    86 	DEBUG_API("->push_data_for_prerolling");
       
    87 	gst_app_src_push_buffer( GST_APP_SRC(ctx->source), GST_BUFFER(buffer) );
       
    88 	/*GstPad* prerollPad = NULL;
       
    89 	prerollPad = gst_element_get_static_pad(GST_ELEMENT(ctx->source),"src");
       
    90 	gst_pad_push (prerollPad, buffer);
       
    91 	gst_element_send_event(GST_ELEMENT(ctx->source),gst_event_new_flush_start());
       
    92 	gst_element_send_event(GST_ELEMENT(ctx->source),gst_event_new_flush_stop());*/
       
    93 	DEBUG_API("<-push_data_for_prerolling");
       
    94 }
       
    95 
       
    96 /*
       
    97  * void  enough_data_for_prerolling (GstElement * pipeline, XAMediaPlayerAdaptationCtx* ctx)
       
    98  * Called when appsrc has enough data
       
    99  */
       
   100 void  enough_data_for_prerolling (GstElement * pipeline, XAMediaPlayerAdaptationCtx* ctx)
       
   101 {
       
   102 	DEBUG_API("->enough_data_for_prerolling");
       
    103 	/* No functionality yet */
       
   104 	DEBUG_API("<-enough_data_for_prerolling");
       
   105 }
       
   106 
       
   107 /*
       
   108  * void need_data_for_prerolling (GstElement * pipeline, guint size, XAMediaPlayerAdaptationCtx* ctx )
       
   109  * Called when the appsrc needs more data during prerolling
       
   110  */
       
   111 void need_data_for_prerolling (GstElement * pipeline, guint size, XAMediaPlayerAdaptationCtx* ctx )
       
   112 {
       
   113     CPresult cpRet;
       
   114     XAuint32 requestedBytes = size;
       
    115     guint readBytes = 0;
       
   116     CP_CHECKBYTESRESULTTYPE eResult;
       
   117 
       
   118     DEBUG_API("->need_data_for_prerolling");
       
   119 
       
    120     /* Start prerolling content pipe data */
       
   121 	ctx->baseObj.pipeSrcThrCtx.state = CPStatePrerolling;
       
   122 
       
   123 	do
       
   124 	{
       
   125 		gpointer cpBuffer = NULL;
       
   126 		GstBuffer  *buffer = NULL;
       
   127 
       
   128 		cpRet = ctx->baseObj.pipeSrcThrCtx.pipe->pContentPipe->CheckAvailableBytes(&(ctx->baseObj.pipeSrcThrCtx.dataHandle), requestedBytes, &eResult);
       
   129 		if ( cpRet != EXIT_SUCCESS )
       
   130 		{
       
   131 			DEBUG_API("ERROR");
       
   132 			ctx->baseObj.pipeSrcThrCtx.state = CPStateError;
       
   133 		}
       
   134 
       
   135 		if ( eResult == CP_CheckBytesOk )
       
   136 		{
       
   137 			cpBuffer = g_malloc0(requestedBytes );
       
    138 			readBytes += requestedBytes;
       
   139 			cpRet = ctx->baseObj.pipeSrcThrCtx.pipe->pContentPipe->Read( &(ctx->baseObj.pipeSrcThrCtx.dataHandle), (CPbyte*)cpBuffer, requestedBytes );
       
   140 			if ( cpRet != EXIT_SUCCESS )
       
   141 			{
       
   142 				DEBUG_ERR("Could not read data from content pipe!");
       
   143 				ctx->baseObj.pipeSrcThrCtx.state = CPStateError;
       
   144 			}
       
   145 			else
       
   146 			{
       
    147 				DEBUG_INFO_A1("Read %u bytes", requestedBytes );
       
   148 				/* Create gstBuffer, GStreamer frees data  */
       
   149 				buffer = gst_app_buffer_new( (void*)cpBuffer, requestedBytes, g_free, cpBuffer );
       
   150 				if ( !buffer )
       
   151 				{
       
   152 					DEBUG_ERR("Could not allocate buffer for content pipe source!");
       
   153 					ctx->baseObj.pipeSrcThrCtx.state = CPStateError;
       
   154 				}
       
   155 			}
       
   156 
       
   157 			if ( cpRet == EXIT_SUCCESS  )
       
   158 			{
       
   159 				if( buffer )
       
   160 				{
       
   161 					DEBUG_INFO("Pushing preroll buffer");
       
   162 					/*CP code: causes some delay for appsrc but we use push-buffer signal at this point
       
   163                     GstFlowReturn ret;
       
   164 					g_signal_emit_by_name (ctx->source, "push-buffer", GST_BUFFER(buffer), &ret);
       
   165 					if( ret != GST_FLOW_OK )
       
   166 					{
       
   167 						DEBUG_ERR("Some problem during preroll");
       
   168 						DEBUG_API("<-need_data_for_prerolling");
       
   169 					}*/
       
   170 					gst_element_send_event(GST_ELEMENT(ctx->source),gst_event_new_flush_start());
       
   171 					gst_app_src_push_buffer( GST_APP_SRC(ctx->source), GST_BUFFER(buffer) );
       
   172 					gst_element_send_event(GST_ELEMENT(ctx->source),gst_event_new_flush_stop());
       
   173 					break;
       
   174 				}
       
   175 			}
       
   176 		}
       
   177 		else if( eResult == CP_CheckBytesAtEndOfStream )
       
   178 		{
       
   179 			ctx->baseObj.pipeSrcThrCtx.state = CPStateEOS;
       
   180 			break;
       
   181 		}
       
   182 		else if( eResult == CP_CheckBytesInsufficientBytes )
       
   183 		{
       
   184 			ctx->baseObj.pipeSrcThrCtx.state = CPStateWaitForData;
       
   185 		}
       
   186 		else if( eResult == CP_CheckBytesVendorStartUnused )
       
   187 		{
       
    188 			/* This result is returned when the client has started caching from the beginning again */
       
   189 			DEBUG_API( "CP_CheckBytesVendorStartUnused from implementation");
       
   190 		}
       
   191 	}while ( ctx->baseObj.pipeSrcThrCtx.state == CPStatePrerolling || ctx->baseObj.pipeSrcThrCtx.state == CPStateWaitForData );
       
   192 
       
   193 	DEBUG_API("<-need_data_for_prerolling");
       
   194 }
       
   195 
       
   196 /*
       
   197  * gboolean XAMediaPlayerAdapt_GstBusCb( GstBus *bus, GstMessage *message, gpointer data )
       
   198  * MediaPlayer Gst-bus message handler (Callback)
       
   199  */
       
   200 gboolean XAMediaPlayerAdapt_GstBusCb( GstBus *bus, GstMessage *message, gpointer data )
       
   201 {
       
   202     XAAdaptationBaseCtx* bCtx = (XAAdaptationBaseCtx*)data;
       
   203     /* only listen to bin messages */
       
   204     if(GST_MESSAGE_SRC(message)==(GstObject*)(bCtx->bin))
       
   205     {
       
   206         XAMediaPlayerAdaptationCtx* mCtx = (XAMediaPlayerAdaptationCtx*)data;
       
   207         DEBUG_API_A2("->XAMediaPlayerAdapt_GstBusCb:\"%s\" from object \"%s\"",
       
   208                         GST_MESSAGE_TYPE_NAME(message), GST_OBJECT_NAME(GST_MESSAGE_SRC(message)));
       
   209 
       
   210         switch( GST_MESSAGE_TYPE(message))
       
   211         {
       
   212             case GST_MESSAGE_EOS:
       
   213             {
       
   214                 if( mCtx && mCtx->loopingenabled && mCtx->loopend == GST_CLOCK_TIME_NONE)
       
   215                 {
       
   216                     DEBUG_INFO_A2("Restart loop from %"GST_TIME_FORMAT" to %"GST_TIME_FORMAT,
       
   217                                   GST_TIME_ARGS(mCtx->loopstart), GST_TIME_ARGS(mCtx->loopend));
       
   218                     gst_element_seek( bCtx->bin, mCtx->playrate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE,
       
   219                                       GST_SEEK_TYPE_SET, mCtx->loopstart,
       
   220                                       GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE );
       
   221                     gst_element_get_state(bCtx->bin,NULL,NULL,XA_ADAPT_ASYNC_TIMEOUT_SHORT_NSEC);
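                            /* The bounded gst_element_get_state() call above waits for the
                               flushing seek to complete before the cached position is updated. */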
       
   222                     mCtx->lastpos = mCtx->loopstart;
       
   223                     if( mCtx && mCtx->trackpositionenabled )
       
   224                     {
       
   225                         XAmillisecond posMsec = GST_TIME_AS_MSECONDS(mCtx->lastpos);/*Warning ok due to used API specification*/
       
   226                         XAAdaptEvent event = {XA_PLAYITFEVENTS, XA_ADAPT_POSITION_UPDATE_EVT, 1, NULL};
       
   227                         event.data = &posMsec;
       
   228                         XAAdaptationBase_SendAdaptEvents(bCtx, &event );
       
   229                     }
       
   230                 }
       
   231                 else
       
   232                 {
       
   233                 /* stop position tracking */
       
   234                 if(mCtx->runpositiontimer > 0)
       
   235                 {
       
   236                     g_source_remove(mCtx->runpositiontimer);
       
   237                     mCtx->runpositiontimer=0;
       
   238                 }
       
   239 
       
   240                 /* complete any ongoing client async operations */
       
   241                 XAAdaptationBase_CompleteAsyncWait(bCtx);
       
   242 
       
   243                 /* send needed events */
       
   244                 {
       
   245 					XAMediaType mediatype;
       
   246 					if( mCtx->baseObj.pipeSrcThrCtx.pipe )
       
   247 					{
       
   248 						XACommon_CheckDataSource((XADataSource*)mCtx->xaAudioSink, &mediatype);
       
   249 					}
       
   250 					else
       
   251 					{
       
   252 						XACommon_CheckDataSource(mCtx->xaSource, &mediatype);
       
   253 					}
       
   254 					if(mediatype!=XA_MEDIATYPE_IMAGE)
       
   255 					{
       
   256 						XAAdaptEvent event = { XA_PLAYITFEVENTS, XA_PLAYEVENT_HEADATEND, 0, NULL };
       
   257 						XAAdaptationBase_SendAdaptEvents(bCtx, &event );
       
   258 					}
       
   259                 }
       
   260                 if(mCtx->positionCb)
       
   261                 {
       
   262                     mCtx->positionCb(bCtx);
       
   263                 }
       
   264                 bCtx->binWantedState = GST_STATE_PAUSED;
       
   265                 }
       
   266                 break;
       
   267             }
       
   268 
       
   269             case GST_MESSAGE_STATE_CHANGED:
       
   270             {
       
   271                 GstState oldstate, newstate, pendingstate, gsttargetstate;
       
   272                 gst_message_parse_state_changed(message, &oldstate, &newstate, &pendingstate);
       
   273                 gsttargetstate = GST_STATE_TARGET(bCtx->bin);
       
   274                 DEBUG_INFO_A4("old %s -> new %s ( pending %s, gsttarget %s )",
       
   275                                gst_element_state_get_name(oldstate),
       
   276                                gst_element_state_get_name(newstate),
       
   277                                gst_element_state_get_name(pendingstate),
       
   278                                gst_element_state_get_name(gsttargetstate) );
       
   279                 if(gsttargetstate!=bCtx->binWantedState)
       
   280                 {
       
   281                     DEBUG_ERR_A1("WARNING: Gst target is not wanted target [%s]!!!",
       
   282                                     gst_element_state_get_name(bCtx->binWantedState));
       
   283                 }
       
   284                 /* print out some more info */
       
   285                 if( pendingstate == GST_STATE_VOID_PENDING )
       
   286                 {
       
   287                     if( newstate != bCtx->binWantedState )
       
   288                     {
       
   289                         DEBUG_INFO_A2("Gst in intermediate state transition (curr %s, target %s)",
       
   290                                         gst_element_state_get_name(newstate),
       
   291                                         gst_element_state_get_name(bCtx->binWantedState));
       
    292                        break; /* avoid sending an extra event below while still in an intermediate state transition */
       
   293                     }
       
   294                     else
       
   295                     {
       
   296                         DEBUG_INFO_A1("Gst in wanted target state (%s)",
       
   297                                         gst_element_state_get_name(newstate));
       
   298                     }
       
   299                 }
       
   300                 if( oldstate!=GST_STATE_PLAYING && newstate==GST_STATE_PLAYING )
       
   301                 {
       
   302                     /* send needed events */
       
   303                     XAAdaptEvent event = {XA_PLAYITFEVENTS, XA_PLAYEVENT_HEADMOVING, 0, NULL };
       
   304                     XAAdaptationBase_SendAdaptEvents(bCtx, &event );
       
   305                     /* enable position tracking if needed */
       
   306                     XAMediaPlayerAdapt_UpdatePositionCbTimer(mCtx);
       
   307                 }
       
   308                 if ( oldstate== GST_STATE_READY && newstate==GST_STATE_PAUSED)
       
   309                 {
       
   310                     if ( mCtx->baseObj.pipeSinkThrCtx.dataHandle )
       
   311                     {
       
   312                         mCtx->baseObj.pipeSrcThrCtx.state = CPStateInitialized;
       
   313                     }
       
   314                 }
       
   315                 break;
       
   316             }
       
   317 
       
   318             case GST_MESSAGE_ASYNC_DONE:
       
   319             {
       
   320                 /* some async sequence ended */
       
   321                 XAAdaptationBase_CompleteAsyncWait(bCtx);
       
   322                 break;
       
   323             }
       
   324 
       
   325             case GST_MESSAGE_ERROR:
       
   326             {
       
   327                 GError* error;
       
   328                 gchar* debug;
       
   329                 gst_message_parse_error(message, &error, &debug);
       
    330                 DEBUG_ERR_A1("Gst reports error \"%s\"", debug);
                        g_error_free(error);  /* release the parsed GError */
                        g_free(debug);        /* release the debug string */
       
   331                 /* stop waiting any ongoing async operations */
       
   332                 XAAdaptationBase_CompleteAsyncWait(bCtx);
       
   333                 break;
       
   334             }
       
   335             case GST_MESSAGE_BUFFERING:
       
   336             {
       
   337                 gint percent;
       
   338                 gst_message_parse_buffering(message, &percent);
       
   339                 DEBUG_INFO_A1("Gst message buffering %d", percent);
       
   340                 mCtx->buffering = percent;
       
   341                 {
       
   342                     XAAdaptEvent event = {XA_PREFETCHITFEVENTS, XA_ADAPT_BUFFERING, 1, NULL };
       
   343                     event.data = &mCtx->buffering;
       
    344                     XAAdaptationBase_SendAdaptEvents(bCtx, &event );
       
   345                 }
       
   346                 break;
       
   347             }
       
   348             case GST_MESSAGE_ELEMENT:
       
   349             {
       
   350                 DEBUG_INFO("GST_MESSAGE_ELEMENT");
       
   351                 if ((gst_structure_has_name(message->structure, "graphics-surface-created")) || 
       
   352                     (gst_structure_has_name(message->structure, "graphics-surface-updated")))
       
   353                 {
       
    354                      DEBUG_INFO("graphics-surface message received");
       
   355                 }
       
   356                 break;
       
   357             }
       
   358             default:
       
   359                 break;
       
   360         }
       
   361     }
       
    362     else /* message from an element other than the bin, e.g. the video sink */
       
   363     {
       
   364         switch( GST_MESSAGE_TYPE(message))
       
   365         {
       
   366             case GST_MESSAGE_ELEMENT:
       
   367             {
       
   368                 DEBUG_INFO("GST_MESSAGE_ELEMENT");
       
   369                 if ((gst_structure_has_name(message->structure, "graphics-surface-created")) || 
       
   370                     (gst_structure_has_name(message->structure, "graphics-surface-updated")))
       
   371                 {
       
   372                      guint32 surfaceid0, surfaceid1, surfaceid2, surfaceid3;
       
   373                      gint crop_rect_tl_x, crop_rect_tl_y, crop_rect_br_x, crop_rect_br_y;
       
   374                      gint aspect_ratio_num, aspect_ratio_denom;
       
   375                      
       
   376                      GstObject *sink= GST_MESSAGE_SRC(message);
       
   377      
       
   378                      g_object_get(sink, "surfaceid0", &surfaceid0, NULL);
       
   379                      g_object_get(sink, "surfaceid1", &surfaceid1,NULL);
       
   380                      g_object_get(sink, "surfaceid2", &surfaceid2,NULL);
       
   381                      g_object_get(sink, "surfaceid3", &surfaceid3,NULL);
       
   382                      g_object_get(sink, "croprect_tl_x", &crop_rect_tl_x, NULL);
       
   383                      g_object_get(sink, "croprect_tl_y", &crop_rect_tl_y, NULL);
       
   384                      g_object_get(sink, "croprect_br_x", &crop_rect_br_x, NULL);
       
   385                      g_object_get(sink, "croprect_br_y", &crop_rect_br_y, NULL);                
       
   386                      g_object_get(sink, "aspectratio_num", &aspect_ratio_num, NULL);
       
   387                      g_object_get(sink, "aspectratio_denom", &aspect_ratio_denom, NULL);                     
       
   388                      surface_created(ngaVideoSinkPtr, surfaceid0,surfaceid1,surfaceid2,surfaceid3,crop_rect_tl_x,
       
   389                      									crop_rect_tl_y,crop_rect_br_x,crop_rect_br_y,aspect_ratio_num,aspect_ratio_denom);                     
       
   390                 }
       
   391                 break;
       
   392             }
       
   393             default:
       
   394                 break;
       
   395         }   
       
   396     }
       
   397     DEBUG_API("<-XAMediaPlayerAdapt_GstBusCb");    
       
   398     return TRUE;
       
   399 }
       
   400 
       
   401 /*
       
   402  * XAMediaPlayerAdaptationCtx* XAMediaPlayerAdapt_Create()
       
   403  * Allocates memory for Media Player Adaptation Context and makes 1st phase initialization
       
   404  * @param XADataSource *pDataSrc - pointer to OMX-AL data source
       
   405  * @param XADataSource *pBankSrc - pointer to instrument bank structure in Mobile DLS, if NULL default will be used.
       
   406  * @param XADataSink *pAudioSnk - pointer to OMX-AL audio sink definition
       
   407  * @param XADataSink *pImageVideoSnk - pointer to OMX-AL image and video sink definition
       
   408  * @returns XAMediaPlayerAdaptationCtx* - Pointer to created context, NULL if error occurs.
       
   409  */
       
   410 XAAdaptationBaseCtx* XAMediaPlayerAdapt_Create(XADataSource *pDataSrc, XADataSource *pBankSrc,
       
   411                                                       XADataSink *pAudioSnk, XADataSink *pImageVideoSnk,
       
   412                                                       XADataSink *pVibra, XADataSink *pLEDArray)
       
   413 {
       
   414     XAMediaPlayerAdaptationCtx *pSelf = NULL;
       
   415     XAuint32 locType = 0;
       
   416     XADataLocator_IODevice *ioDevice;
       
   417     XAresult res = XA_RESULT_INTERNAL_ERROR;
       
   418     DEBUG_API("->XAMediaPlayerAdapt_Create");
       
   419     
       
   420     //Create NGA Video Sink class
       
   421     if(pImageVideoSnk)
       
   422     {
       
   423        ngaVideoSinkPtr = nga_video_sink_init();
       
   424     }
       
   425 
       
   426     pSelf = calloc(1, sizeof(XAMediaPlayerAdaptationCtx));
       
   427     if ( pSelf)
       
   428     {
       
   429         if( XAAdaptationBase_Init(&(pSelf->baseObj),XAMediaPlayerAdaptation)
       
   430             != XA_RESULT_SUCCESS )
       
   431         {
       
   432             DEBUG_ERR("Failed to init base context!!!");
       
   433             free(pSelf);
       
   434             pSelf = NULL;
       
   435         }
       
   436         else
       
   437         {
       
   438             pSelf->xaSource = pDataSrc;
       
   439             pSelf->xaBankSrc = pBankSrc;
       
   440             pSelf->xaAudioSink = pAudioSnk;
       
   441             pSelf->xaVideoSink = pImageVideoSnk;
       
   442             pSelf->xaLEDArray = pLEDArray;
       
   443             pSelf->xaVibra = pVibra;
       
   444             pSelf->loopstart = 0;
       
   445             pSelf->loopend = GST_CLOCK_TIME_NONE;
       
   446             pSelf->playrate = 1.0;
       
   447             pSelf->baseObj.pipeSrcThrCtx.state = CPStateNull;
       
   448             pSelf->baseObj.pipeSinkThrCtx.dataHandle = NULL;
       
   449             pSelf->rateprops = (XA_RATEPROP_SMOOTHVIDEO | XA_RATEPROP_SILENTAUDIO);
       
   450             pSelf->curMirror = XA_VIDEOMIRROR_NONE;
       
   451             pSelf->curRotation = 0;
       
   452             pSelf->isobjsrc = XA_BOOLEAN_FALSE;
       
   453             pSelf->cameraSinkSynced = XA_BOOLEAN_FALSE;
       
   454             /*pSelf->waitData = XA_BOOLEAN_FALSE;*/
       
   455             if(ngaVideoSinkPtr)
       
   456             {
       
   457                setup_native_display(ngaVideoSinkPtr, pImageVideoSnk);
       
   458             }
       
   459         }
       
   460 
       
    461         if ( pSelf && pDataSrc )
       
   462 		{
       
   463 			locType = *((XAuint32*)(pDataSrc->pLocator));
       
   464 			if ( locType == XA_DATALOCATOR_IODEVICE  )
       
   465 			{
       
   466 				ioDevice = (XADataLocator_IODevice*)(pDataSrc->pLocator);
       
   467 				if ( ioDevice->deviceType == XA_IODEVICE_CAMERA && !cameraRealized )
       
   468 				{
       
   469 					DEBUG_ERR("Preconditions violated - Camera object not realized");
       
   470 					XAAdaptationBase_Free(&pSelf->baseObj);
       
   471 					free(pSelf);
       
   472 					pSelf = NULL;
       
   473 				}
       
   474 			}
       
   475 		}
       
   476     }
       
   477 
       
   478     DEBUG_API("<-XAMediaPlayerAdapt_Create");
       
   479     return (XAAdaptationBaseCtx*)pSelf;
       
   480 }
       
   481 
       
   482 
       
   483 
       
   484 /*
       
   485  * XAresult XAMediaPlayerAdapt_PostInit()
       
   486  * 2nd phase initialization of Media Player Adaptation Context
       
   487  * @param XAMediaPlayerAdaptationCtx* ctx - pointer to Media Player adaptation context
       
   488  * @return XAresult - Success value
       
   489  */
       
   490 XAresult XAMediaPlayerAdapt_PostInit( XAAdaptationBaseCtx* bCtx )
       
   491 {
       
   492     XAresult ret = XA_RESULT_SUCCESS;
       
   493     GstStateChangeReturn gret;
       
   494 	XAuint32 locType = 0;
       
   495 	XAMediaPlayerAdaptationCtx* ctx = NULL;
       
   496 	GstElement *videotest=NULL;
       
   497 
       
   498     DEBUG_API("->XAMediaPlayerAdapt_PostInit");
       
   499     if(bCtx == NULL || bCtx->ctxId != XAMediaPlayerAdaptation )
       
   500     {
       
   501         DEBUG_ERR("Invalid parameter!!");
       
   502         DEBUG_API("<-XAMediaPlayerAdapt_PostInit");
       
   503         return XA_RESULT_PARAMETER_INVALID;
       
   504     }
       
   505     ctx = (XAMediaPlayerAdaptationCtx*)bCtx;
       
   506     assert(ctx);
       
   507     ret = XAAdaptationBase_PostInit( bCtx );
       
   508     if( ret!=XA_RESULT_SUCCESS )
       
   509     {
       
   510         DEBUG_ERR("Base context postinit failed!!");
       
   511         return ret;
       
   512     }
       
   513 
       
   514     /* top level bin for media player */
       
   515     ctx->baseObj.bin = gst_pipeline_new("media_player");
       
   516     /* Create Gst bus listener. */
       
   517     ret = XAAdaptationBase_InitGstListener(bCtx);
       
   518     if( ret!=XA_RESULT_SUCCESS )
       
   519     {
       
   520         DEBUG_ERR("Bus listener creation failed!!");
       
   521         return ret;
       
   522     }
       
   523     /* Add Media Player specific handler */
       
   524     if(ctx->baseObj.bus)
       
   525     {
       
   526         ctx->baseObj.busCb = XAMediaPlayerAdapt_GstBusCb;
       
   527         gst_bus_add_signal_watch( ctx->baseObj.bus );
       
   528         gst_bus_enable_sync_message_emission( ctx->baseObj.bus );
       
   529         g_signal_connect(ctx->baseObj.bus, "message::eos", G_CALLBACK(bCtx->busCb), ctx );
       
   530         g_signal_connect(ctx->baseObj.bus, "message::error", G_CALLBACK(bCtx->busCb), ctx );
       
   531         g_signal_connect(ctx->baseObj.bus, "message::warning", G_CALLBACK(bCtx->busCb), ctx );
       
   532         g_signal_connect(ctx->baseObj.bus, "message::state-changed", G_CALLBACK(bCtx->busCb), ctx );
       
   533         g_signal_connect(ctx->baseObj.bus, "message::segment-done", G_CALLBACK(bCtx->busCb), ctx );
       
   534         g_signal_connect(ctx->baseObj.bus, "message::async-done", G_CALLBACK(bCtx->busCb), ctx );
       
   535         g_signal_connect(ctx->baseObj.bus, "message::element", G_CALLBACK(bCtx->busCb), ctx );
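                /* All of the above signals are routed to the same handler;
                   XAMediaPlayerAdapt_GstBusCb() filters on message source and type. */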
       
   536     }
       
   537     else
       
   538     {
       
   539         DEBUG_ERR("Failed to create message bus");
       
   540         return XA_RESULT_INTERNAL_ERROR;
       
   541     }
       
   542 
       
   543     XAMetadataAdapt_PreInit(bCtx);
       
   544 
       
   545     /* create pipeline */
       
   546     ret = XAMediaPlayerAdapt_CreatePipeline(ctx);
       
   547     if ( ret != XA_RESULT_SUCCESS )
       
   548     {
       
   549         DEBUG_ERR("Failed to create Media Player pipeline");
       
   550         return ret;
       
   551     }
       
   552 
       
   553     /* Init content pipe if used*/
       
   554 	locType = *((XAuint32*)(ctx->xaSource->pLocator));
       
   555 	if ( locType == XA_DATALOCATOR_CONTENTPIPE)
       
   556 	{
       
   557 		XAMediaPlayerAdapt_InitContentPipeSrc(ctx);
       
   558 	}
       
   559 
       
   560 
       
   561 #ifdef XA_IMPL_MEASURE_GST_DELAY
       
   562     ctx->baseObj.startTime = clock();
       
   563 #endif /* XA_IMPL_MEASURE_GST_DELAY */
       
   564     /* roll up bin */
       
   565     ctx->baseObj.binWantedState = GST_STATE_PAUSED;
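            /* Driving the bin to PAUSED prerolls the pipeline, so the sinks receive
               the first buffers before playback is started. */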
       
   566 
       
   567     XAAdaptationBase_PrepareAsyncWait(bCtx);
       
   568     gret = gst_element_set_state( GST_ELEMENT(ctx->baseObj.bin), bCtx->binWantedState);
       
   569     if( gret == GST_STATE_CHANGE_ASYNC )
       
   570     {
       
   571         DEBUG_INFO("Wait for preroll");
       
   572         XAAdaptationBase_StartAsyncWait(bCtx);
       
   573         DEBUG_INFO("Preroll ready");
       
   574     }
       
   575     else if( gret == GST_STATE_CHANGE_FAILURE )
       
   576     {
       
   577         DEBUG_ERR("Preroll FAILED");
       
   578         ret = XA_RESULT_INTERNAL_ERROR;
       
   579     }
       
   580 
       
   581     bCtx->waitingasyncop = XA_BOOLEAN_FALSE;
       
   582 
       
   583     gret = gst_element_get_state( GST_ELEMENT(bCtx->bin), NULL, NULL, XA_ADAPT_ASYNC_TIMEOUT_SHORT_NSEC);
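            /* Bounded wait: if the bin is still below PAUSED, the code below prunes the
               unlinked audio or video branch and retries the preroll. */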
       
   584     if(GST_STATE(bCtx->bin)<GST_STATE_PAUSED)
       
   585     {
       
   586         DEBUG_INFO("Warning! Preroll not ready");
       
   587         if( ctx->audioppbin && !(gst_pad_is_linked(gst_element_get_static_pad(ctx->audioppbin, "sink"))) )
       
    588         {/* could not find a suitable pad for the audio pipeline - remove it */
       
   589             DEBUG_INFO("Warning! No suitable decodebin pad for audio pipeline!");
       
   590             gst_element_set_state( GST_ELEMENT(ctx->audioppbin), GST_STATE_NULL);
       
   591             gst_bin_remove(GST_BIN(bCtx->bin), ctx->audioppbin);
       
   592             gst_element_set_state( GST_ELEMENT(ctx->audiosink), GST_STATE_NULL);
       
   593             gst_bin_remove(GST_BIN(bCtx->bin), ctx->audiosink);
       
   594         }
       
   595         else if( ctx->videoppbin && !(gst_pad_is_linked(gst_element_get_static_pad(ctx->videoppbin, "videopp_sink"))) )
       
    596         {/* could not find a suitable pad for the video pipeline - remove it */
       
   597             DEBUG_INFO("Warning! No suitable decodebin pad for video pipeline!");
       
   598             gst_element_set_state( GST_ELEMENT(ctx->videoppbin), GST_STATE_NULL);
       
   599             gst_bin_remove(GST_BIN(bCtx->bin), ctx->videoppbin);
       
   600             gst_element_set_state( GST_ELEMENT(ctx->videosink), GST_STATE_NULL);
       
   601             gst_bin_remove(GST_BIN(bCtx->bin), ctx->videosink);
       
   602         }
       
   603 		gst_element_set_state( GST_ELEMENT(bCtx->bin), bCtx->binWantedState);
       
   604 		gst_element_get_state( GST_ELEMENT(bCtx->bin), NULL, NULL, XA_ADAPT_ASYNC_TIMEOUT_SHORT_NSEC);
       
   605 		if(GST_STATE(bCtx->bin)==GST_STATE_PAUSED)
       
   606 		{
       
   607 			DEBUG_INFO("Retry preroll successful!")
       
   608 			ret = XA_RESULT_SUCCESS;
       
   609 		}
       
   610     }
       
   611     else
       
   612     {
       
   613         DEBUG_INFO("Preroll ready");
       
   614     }
       
   615     /*
       
   616     CP code: Removed because appsrc does not call any "need-data" signal after that.
       
   617     locType = *((XAuint32*)(ctx->xaSource->pLocator));
       
   618 	if( locType == XA_DATALOCATOR_CONTENTPIPE)
       
   619 	{
       
   620 		gboolean emit = gst_app_src_get_emit_signals( GST_APP_SRC( ctx->source));
       
   621 		if( emit )
       
   622 		{
       
   623 			gst_app_src_set_emit_signals( GST_APP_SRC( ctx->source), FALSE );
       
   624 		}
       
   625 	}*/
       
   626 #ifdef XA_IMPL_MEASURE_GST_DELAY
       
   627     bCtx->endTime = clock();
       
   628     double diff = bCtx->endTime - bCtx->startTime ;
       
   629     diff = diff / CLOCKS_PER_SEC;
       
   630     DEBUG_API_A1( "Starting up bin took %.4lf secs",diff);
       
   631 #endif /* XA_IMPL_MEASURE_GST_DELAY */
       
   632     videotest = gst_bin_get_by_name(GST_BIN(bCtx->bin), "videotest");
       
   633     if ( videotest && !ctx->isobjsrc )
       
   634     {
       
   635     	gst_element_set_state( GST_ELEMENT(videotest),GST_STATE_PLAYING);
       
   636     }
       
   637 
       
   638     XAMetadataAdapt_PostInit(bCtx);
       
   639 
       
   640     if ( videotest )
       
   641     {
       
   642         gst_object_unref(videotest);
       
   643     }
       
   644 
       
   645     DEBUG_API("<-XAMediaPlayerAdapt_PostInit");
       
   646     return ret;
       
   647 }
       
   648 
       
   649 /*
       
   650  * void XAMediaPlayerAdapt_Destroy( XAMediaPlayerAdaptationCtx* ctx )
       
   651  * Destroys Media Player Adaptation Context
       
   652  * @param ctx - Media Player Adaptation context to be destroyed
       
   653  */
       
   654 void XAMediaPlayerAdapt_Destroy( XAAdaptationBaseCtx* bCtx )
       
   655 {
       
   656     XAMediaPlayerAdaptationCtx* ctx = NULL;
       
   657 
       
   658     XAresult res = XA_RESULT_SUCCESS;
       
   659     DEBUG_API("->XAMediaPlayerAdapt_Destroy");
       
   660     if(bCtx == NULL || bCtx->ctxId != XAMediaPlayerAdaptation )
       
   661     {
       
   662         DEBUG_ERR("Invalid parameter!!");
       
   663         DEBUG_API("<-XAMediaPlayerAdapt_Destroy");
       
   664         return;
       
   665     }
       
   666     ctx = (XAMediaPlayerAdaptationCtx*)bCtx;
       
   667 
       
   668     if( ctx->isobjsrc )
       
   669     {   /* external source, unlink and remove now */
       
   670         /*gst_object_unparent( GST_OBJECT(ctx->source) );*/
       
   671         gst_element_unlink( ctx->source, ctx->codecbin );
       
   672         /*gst_bin_remove( GST_BIN(bCtx->bin), ctx->source );*/
       
   673         GST_OBJECT_FLAG_SET(GST_OBJECT(ctx->source),GST_OBJECT_FLOATING);
       
   674     }
       
   675 #if 0
       
   676     if ( ctx->xaSource )
       
   677     {
       
   678     	XAuint32 locType = *(XAuint32*)(ctx->xaSource->pLocator);
       
   679     	switch (locType )
       
   680     	{
       
   681 		case XA_DATALOCATOR_IODEVICE:
       
   682 		{
       
   683 			XADataLocator_IODevice* ioDevice = (XADataLocator_IODevice*)(ctx->xaSource->pLocator);
       
   684 			if ( ioDevice->deviceType == XA_IODEVICE_RADIO )
       
   685 			{
       
   686 				gst_object_unparent( GST_OBJECT(ctx->source) );
       
   687 				gst_element_unlink( ctx->source, ctx->codecbin );
       
   688 				gst_bin_remove( GST_BIN(bCtx->bin), ctx->source );
       
   689 				GST_OBJECT_FLAG_SET(GST_OBJECT(ctx->source),GST_OBJECT_FLOATING);
       
   690 			}
       
   691 		}
       
   692 		default:
       
   693 			break;
       
   694     	}
       
   695     }
       
   696 #endif
       
   697 
       
   698     if( ctx->baseObj.pipeSrcThrCtx.dataHandle )
       
   699     {
       
   700         XAuint32 locType = *(XAuint32*)(ctx->xaAudioSink->pLocator);
       
   701 		switch ( locType )
       
   702 		{
       
   703 		case XA_DATALOCATOR_CONTENTPIPE:
       
   704 			{
       
   705 				res  = ctx->baseObj.pipeSrcThrCtx.pipe->pContentPipe->Close(&(ctx->baseObj.pipeSrcThrCtx.dataHandle));
       
   706 				if( res != XA_RESULT_SUCCESS)
       
   707 				{
       
   708 					DEBUG_ERR("Cannot close contentpipe content");
       
   709 				}
       
   710 				break;
       
   711 			}
       
   712 			default:
       
   713 				break;
       
   714 		}
       
   715     }
       
   716 
       
   717     if( ctx->isobjasink && ctx->xaAudioSink && ctx->xaAudioSink->pLocator )
       
   718     {
       
   719         XAuint32 locType = *(XAuint32*)(ctx->xaAudioSink->pLocator);
       
   720         switch ( locType )
       
   721         {
       
   722             case XA_DATALOCATOR_OUTPUTMIX:
       
   723             {
       
   724                 XADataLocator_OutputMix* omix = (XADataLocator_OutputMix*)(ctx->xaAudioSink->pLocator);
       
   725                 XAOMixImpl* omixDevice = (XAOMixImpl*)(*omix->outputMix);
       
   726                 if(omixDevice)
       
   727                 {
       
   728                     XAOutputMixAdapt_DisconnectObject(omixDevice->adaptationCtx, bCtx);
       
   729                 }
       
   730                 break;
       
   731             }
       
   732             default:
       
   733                 /* Vibra and LED need no handling */
       
   734                 break;
       
   735         }
       
   736 
       
   737     }
       
   738 
       
   739     if( bCtx->bus )
       
   740     {
       
   741         gst_bus_remove_signal_watch( bCtx->bus );
       
   742         gst_bus_disable_sync_message_emission ( bCtx->bus );
       
   743     }
       
   744     XAAdaptationBase_CancelAsyncWait(bCtx);
       
   745 
       
   746     if( ctx->runpositiontimer )
       
   747     {
       
   748         g_source_remove(ctx->runpositiontimer);
       
   749     }
       
   750     XAMetadataAdapt_FreeVars(ctx->metadatavars);
       
   751     XAAdaptationBase_Free( bCtx );
       
   752     free(ctx);
       
   753     ctx = NULL;
       
   754 
       
   755     DEBUG_API("<-XAMediaPlayerAdapt_Destroy");
       
   756 }
       
   757 
       
   758 
       
   759 /*
       
   760  * void XAMediaPlayerAdapt_CreatePipeline( XAMediaPlayerAdaptationCtx* ctx );
       
   761  */
       
   762 XAresult XAMediaPlayerAdapt_CreatePipeline( XAMediaPlayerAdaptationCtx* ctx )
       
   763 {
       
   764     XAresult ret = XA_RESULT_SUCCESS;
       
   765     XAboolean delayedlink = XA_BOOLEAN_FALSE;
       
   766     XAboolean isPCM = XA_BOOLEAN_FALSE;
       
   767     XAboolean isRawImage = XA_BOOLEAN_FALSE;
       
   768     XAMediaType mediatype;
       
   769     XAuint32 locType = 0;
       
   770     GstCaps* encSrcCaps;
       
   771     DEBUG_API("->XAMediaPlayerAdapt_CreatePipeline");
       
   772 
       
   773     /* create and add data source */
       
   774     XACommon_CheckDataSource(ctx->xaSource, &mediatype);
       
   775     ctx->source = XAAdaptationBase_CreateGstSource( ctx->xaSource, "datasrc", &(ctx->isobjsrc), &isPCM, &isRawImage);
       
   776     if( !(ctx->source) )
       
   777     {
       
   778         DEBUG_ERR("Could not create data source!!!");
       
   779         return XA_RESULT_INTERNAL_ERROR;
       
   780     }
       
   781     else
       
   782     {
       
   783         if(mediatype != XA_MEDIATYPE_AUDIO)
       
   784         {
       
    785           //temporary workaround for video
       
   786     	  encSrcCaps = gst_caps_new_simple ("video/h263-2000",
       
   787                 "framerate", GST_TYPE_FRACTION, 25, 1,
       
   788                 "pixel-aspect-ratio", GST_TYPE_FRACTION, 16, 9,
       
   789                 "width", G_TYPE_INT, 176,
       
   790                 "height", G_TYPE_INT, 144,
       
   791                 NULL);
       
   792           g_object_set(G_OBJECT(ctx->source), "caps", encSrcCaps, NULL);
       
   793         }
       
   794 
       
   795         //boolRetVal = gst_bin_add(GST_BIN(pipeline), appsrc);
       
   796     	
       
   797     	
       
   798     }
       
   799 
       
   800     if ( !ctx->isobjsrc )
       
   801     { /* Add other than camera source to media player bin */
       
   802     	DEBUG_INFO("No camera source");
       
   803     	gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->source);
       
   804     }
       
   805     else
       
   806     {
       
   807        GstCaps* encSrcCaps;
       
   808        encSrcCaps = gst_caps_new_simple("video/x-raw-yuv",
       
   809                    "format", GST_TYPE_FOURCC,GST_MAKE_FOURCC('I','4','2','0'),
       
   810                    "framerate", GST_TYPE_FRACTION, 30, 1,
       
   811                    NULL);
       
   812        DEBUG_INFO_A1("new camera encoding filter: %s",gst_caps_to_string(encSrcCaps));
       
   813        g_object_set( G_OBJECT(ctx->source), "filter-caps",encSrcCaps,NULL);
       
   814        gst_caps_unref(encSrcCaps);
       
   815     }
       
   816 
       
   817     /* create and add codec bin */
       
   818     if( !(ctx->isobjsrc || isPCM) )
       
   819     {
       
   820     	DEBUG_INFO("Create decodebin");
       
   821         if(mediatype == XA_MEDIATYPE_AUDIO)
       
   822         {
       
   823            ctx->codecbin = gst_element_factory_make( "decodebin" , "mpcodecbin" );
       
   824         }
       
   825         else
       
   826         {
       
   827            ctx->codecbin = gst_element_factory_make( "identity" , "mpcodecbin" );
       
   828         }
       
   829     }
       
   830     else if(ctx->isobjsrc )
       
   831     { /* object sources produce framed raw data, decodebin only causes trouble */ //shyward
       
   832     	DEBUG_INFO("Create identity")
       
   833         ctx->codecbin = gst_element_factory_make( "identity" , "mpcodecbin" );
       
   834     }
       
   835     else if(isPCM)
       
   836     { /* decodebin does not know how to handle PCM files */
       
   837     	DEBUG_INFO("Create audioparse")
       
   838         ctx->codecbin = gst_element_factory_make( "audioparse" , "mpcodecbin" );
       
   839     }
       
   840     else if ( isRawImage)
       
   841     { /* decodebin does not know how to handle raw unframed video data */
       
   842     	DEBUG_INFO("Create videoparse")
       
   843         ctx->codecbin = gst_element_factory_make( "videoparse", "mpcodecbin" );
       
   844     }
       
   845 
       
   846     if( ctx->codecbin )
       
   847     {
       
   848         gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->codecbin);
       
   849         if ( !ctx->isobjsrc )
       
   850         {
       
   851             if(mediatype == XA_MEDIATYPE_AUDIO)
       
   852             {
       
   853                if( !gst_element_link(ctx->source, ctx->codecbin) )
       
   854                {
       
   855                     DEBUG_ERR("Could not link source to decodebin!!");
       
   856                     return XA_RESULT_INTERNAL_ERROR;
       
   857                }
       
   858             }
       
   859             else
       
   860             {
       
   861 			   if( !gst_element_link_filtered(ctx->source, ctx->codecbin, encSrcCaps ) )
       
   862 			   {
       
   863 			    	DEBUG_ERR("Could not link source to decodebin!!");
       
   864 				    return XA_RESULT_INTERNAL_ERROR;
       
   865 			   }
       
   866             }
       
   867         }
       
   868         else
       
   869         { /* Link camera source by using ghost-pads, because elements are in different bins */
       
   870 
       
   871         	GstPad *cameraBinGhostPad=NULL;
       
   872         	GstPad* ghost=NULL;
       
   873         	GstElement *camTee=NULL;
       
   874         	GstStateChangeReturn gret;
       
   875         	GstPad *mpGhostSink=NULL;
       
   876 
       
   877         	/* Set external camera source to ready for pipeline manipulation */
       
   878         	DEBUG_INFO("Set ext-source PAUSED for pipeline manipulation");
       
   879 			gret = gst_element_set_state( GST_ELEMENT(ctx->source), GST_STATE_READY);
       
   880 			gret = gst_element_get_state( GST_ELEMENT(ctx->source), NULL,NULL,XA_ADAPT_ASYNC_TIMEOUT_SHORT_NSEC);
       
   881 
       
   882 			/* Add new ghost-pad to external camera source */
       
   883         	camTee = gst_bin_get_by_name( GST_BIN(ctx->source), "CamTee");
       
   884         	if ( !camTee )
       
   885         	{
       
   886         		DEBUG_ERR("Could not get tee-element from camera");
       
   887         	}
       
   888         	cameraBinGhostPad = gst_element_get_request_pad( camTee, "src%d" );
       
   889         	if ( !cameraBinGhostPad )
       
   890         	{
       
   891         		DEBUG_ERR("Could not get new src-pad from CamTee element");
       
   892         	}
       
   893 			gst_element_add_pad(ctx->source, gst_ghost_pad_new("MPObjSrc",cameraBinGhostPad));
       
   894 			ghost = gst_element_get_static_pad( GST_ELEMENT(ctx->source), "MPObjSrc" );
       
   895 			DEBUG_INFO_A2("Setting element:%s pad:%s to blocking.",
       
   896 							gst_element_get_name(ctx->baseObj.bin),
       
   897 							gst_pad_get_name(ghost));
       
   898 			/* Set newly created pad to blocking */
       
   899 			gst_pad_set_blocked_async(ghost, TRUE, XAAdaptationBase_PadBlockCb, NULL);
       
   900 
       
   901 
       
   902 			/* Create new ghost-pad to media player pipeline where external camera is connected */
       
   903         	mpGhostSink = gst_element_get_static_pad( GST_ELEMENT(ctx->codecbin), "sink");
       
   904 			gst_element_add_pad(ctx->baseObj.bin, gst_ghost_pad_new("MPObjSink",mpGhostSink));
       
   905 
       
   906 			if ( !gst_element_link_pads( GST_ELEMENT(ctx->source), "MPObjSrc",
       
   907 										GST_ELEMENT(ctx->baseObj.bin), "MPObjSink") )
       
   908 			{
       
   909 				DEBUG_ERR("Could not link camera:MPObjSrc to videofilter:MPObjSink");
       
   910 				return XA_RESULT_INTERNAL_ERROR;
       
   911 			}
       
   912 
       
   913 			if ( cameraBinGhostPad )
       
   914 			{
       
   915 				gst_object_unref( cameraBinGhostPad );
       
   916 			}
       
   917 			if ( ghost )
       
   918 			{
       
   919 				gst_object_unref( ghost );
       
   920 			}
       
   921 			if ( mpGhostSink )
       
   922 			{
       
   923 				gst_object_unref( mpGhostSink );
       
   924 			}
       
   925 			if ( camTee )
       
   926 			{
       
   927 				gst_object_unref( camTee );
       
   928 			}
       
   929         }
       
   930     }
       
   931     else
       
   932     {
       
   933         DEBUG_ERR("Could not create decoder bin!!!");
       
   934         return XA_RESULT_INTERNAL_ERROR;
       
   935     }
       
   936 
       
   937     /* create and add video stream pipeline */
       
   938     if(!ctx->xaLEDArray && !ctx->xaVibra && mediatype!=XA_MEDIATYPE_AUDIO) /*no video for these*/
       
   939     {
       
   940         /* create video processing pipeline */
       
   941         ctx->videoppbin = XAAdaptationBase_CreateVideoPP( );
       
   942         if( ctx->videoppbin )
       
   943         {
       
   944             gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->videoppbin);
       
   945             //shyward ---link filtered???
       
   946             // boolRetVal = gst_element_link_filtered(appsrc, videosink, caps);
       
   947 			      //if(!gst_element_link(ctx->codecbin, ctx->videoppbin))
       
   948             if(!gst_element_link_filtered(ctx->codecbin, ctx->videoppbin,encSrcCaps))
       
   949             {
       
   950                 /* probably dynamic pads in codecbin */
       
   951                 DEBUG_INFO("Could not link codec to videopp, trying delayed link");
       
   952                 delayedlink = XA_BOOLEAN_TRUE;
       
   953             }
       
   954             ctx->videoScrSrcPad = gst_element_get_static_pad(ctx->videoppbin, "videopp_src");
       
   955         }
       
   956         else
       
   957         {
       
   958             DEBUG_ERR("Could not create video pp bin!!!!");
       
   959             return XA_RESULT_INTERNAL_ERROR;
       
   960         }
       
   961         //shyward
       
    962         /* Black screen pipeline not needed under Symbian. May need to revisit for acceptance testing
       
   963         ctx->videoppBScrbin = XAAdaptationBase_CreateVideoPPBlackScr( );
       
   964         if( ctx->videoppBScrbin )
       
   965         {
       
   966             gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->videoppBScrbin);
       
   967             ctx->blackScrSrcPad = gst_element_get_static_pad(ctx->videoppBScrbin, "videoppBSrc_src");
       
   968         }
       
   969         else
       
   970         {
       
   971             DEBUG_ERR("Could not create video pp bin for black screen!!!!");
       
   972             return XA_RESULT_INTERNAL_ERROR;
       
   973         }
       
   974         */
       
   975         ctx->inputSelector = XAAdaptationBase_CreateInputSelector( );
       
   976         if( ctx->inputSelector )
       
   977         {
       
   978             gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->inputSelector);
       
   979             ctx->videoScrSinkPad = gst_element_get_request_pad(ctx->inputSelector, "sink%d");
       
   980             ctx->blackScrSinkPad = gst_element_get_request_pad(ctx->inputSelector, "sink%d");
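                    /* Two selector inputs are requested: one for the black-screen branch and one
                       for the decoded video branch. Note the black-screen bin above is currently
                       commented out, so blackScrSrcPad is only valid when that code is enabled. */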
       
   981             gst_pad_link(ctx->blackScrSrcPad, ctx->blackScrSinkPad);
       
   982             gst_pad_link(ctx->videoScrSrcPad, ctx->videoScrSinkPad);
       
   983         }
       
   984 
       
   985         //shyward - We have no video filter at this time
       
   986         /*
       
   987         ctx->filter = gst_element_factory_make("ffmpegcolorspace", "videofilter");
       
   988         gst_bin_add( GST_BIN(ctx->baseObj.bin), ctx->filter);
       
   989         if ( !gst_element_link( ctx->inputSelector, ctx->filter ) )
       
   990         {
       
   991 		   DEBUG_ERR("Could not link ctx->filter <-> ctx->inputSelector");
       
   992 		   return XA_RESULT_INTERNAL_ERROR;
       
   993 	   }
       
   994 	   */
       
   995         /* create video pipe sink */
       
   996         ctx->videosink = XAAdaptationBase_CreateGstSink( ctx->xaVideoSink, "videosink", &(ctx->isobjvsink) );
       
   997         /* NOTE: no valid object sinks for video output available */
       
   998         if( ctx->videosink )
       
   999         {
       
  1000             gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->videosink);
       
  1001 
       
  1002             //shyward
       
  1003             //if(!gst_element_link(ctx->filter, ctx->videosink))
       
  1004             if(!gst_element_link_filtered(ctx->videoppbin, ctx->videosink,encSrcCaps))
       
  1005             {
       
  1006                 DEBUG_ERR("Could not link videopp to videosink!!");
       
  1007                 return XA_RESULT_INTERNAL_ERROR;
       
  1008             }
       
  1009             else
       
  1010             {
       
  1011             	gst_caps_unref(encSrcCaps);
       
  1012             }
       
  1013         }
       
  1014         else
       
  1015         {
       
  1016             DEBUG_ERR("Could not create video sink!!!!");
       
  1017             return XA_RESULT_INTERNAL_ERROR;
       
  1018         }
       
  1019     }
       
  1020     else
       
  1021     {
       
  1022         DEBUG_INFO("Media does not contain video!");
       
  1023     }
       
  1024 
       
  1025     /* create and add audio stream pipeline */
       
  1026 
       
  1027     if(!ctx->xaLEDArray && !ctx->xaVibra && mediatype!=XA_MEDIATYPE_IMAGE) /*no audio for these*/
       
  1028     {
       
  1029     /* create audio post processing pipeline */
       
  1030     ctx->audioppbin = XAAdaptationBase_CreateAudioPP( );
       
  1031     if( ctx->audioppbin )
       
  1032     {
       
  1033         gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->audioppbin);
       
  1034         if(!gst_element_link(ctx->codecbin, ctx->audioppbin))
       
  1035         {
       
  1036             DEBUG_INFO("Could not link codec to audiopp, trying delayed link");
       
  1037             delayedlink = XA_BOOLEAN_TRUE;
       
  1038         }
       
  1039     }
       
  1040     else
       
  1041     {
       
  1042         DEBUG_ERR("Could not create audio pp bin!!!!");
       
  1043         return XA_RESULT_INTERNAL_ERROR;
       
  1044     }
       
  1045     /* create audio pipe sink */
       
  1046     ctx->audiosink = XAAdaptationBase_CreateGstSink( ctx->xaAudioSink, "audiosink", &(ctx->isobjasink) );
       
  1047     if( ctx->audiosink )
       
  1048     {
       
  1049         if( ctx->isobjasink && ctx->xaAudioSink && ctx->xaAudioSink->pLocator )
       
  1050         {
       
  1051             locType = *(XAuint32*)(ctx->xaAudioSink->pLocator);
       
  1052             switch ( locType )
       
  1053             {
       
  1054                 case XA_DATALOCATOR_OUTPUTMIX:
       
  1055                 {
       
  1056                     XADataLocator_OutputMix* omix = (XADataLocator_OutputMix*)(ctx->xaAudioSink->pLocator);
       
  1057                     XAOMixImpl* omixDevice = (XAOMixImpl*)(*omix->outputMix);
       
  1058                     if(omixDevice)
       
  1059                     {
       
  1060                         XAOutputMixAdapt_ConnectObject(omixDevice->adaptationCtx, &(ctx->baseObj), ctx->audiosink);
       
  1061                     }
       
  1062                     break;
       
  1063                 }
       
  1064                 default:
       
  1065                     /* Vibra and LED need no handling */
       
  1066                     break;
       
  1067             }
       
  1068         }
       
  1069         gst_bin_add(GST_BIN(ctx->baseObj.bin), ctx->audiosink);
       
  1070         if(!gst_element_link(ctx->audioppbin, ctx->audiosink))
       
  1071         {
       
  1072             DEBUG_ERR("Could not link audiopp to audiosink!!");
       
  1073             return XA_RESULT_INTERNAL_ERROR;
       
  1074         }
       
  1075     }
       
  1076     else
       
  1077     {
       
  1078         DEBUG_ERR("Could not create audio sink!!!!");
       
  1079         return XA_RESULT_INTERNAL_ERROR;
       
  1080         }
       
  1081     }
       
  1082     else
       
  1083     {
       
  1084         DEBUG_INFO("Media does not contain audio!");
       
  1085     }
       
  1086 
       
  1087     if(delayedlink)
  1088     {
  1089         /* listen for dynamically created pads */
  1090         g_signal_connect (ctx->codecbin, "pad-added", G_CALLBACK (XAMediaPlayerAdapt_NewPadCb), ctx);
  1091     }
  1092     locType = *((XAuint32*)(ctx->xaSource->pLocator));
  1093     if( locType == XA_DATALOCATOR_CONTENTPIPE)
  1094     {
  1095 
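               /* For a content-pipe locator the source element is a GstAppSrc (see
                * XAMediaPlayerAdapt_InitContentPipeSrc); appsrc emits "need-data" and
                * "enough-data" to start and stop the feeder, and the handlers below use
                * them to pace prerolling. */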
       
  1096         g_signal_connect (ctx->source, "need-data", G_CALLBACK (need_data_for_prerolling), ctx);
  1097         g_signal_connect (ctx->source, "enough-data", G_CALLBACK (enough_data_for_prerolling), ctx);
  1098         g_signal_connect (ctx->source, "push-buffer", G_CALLBACK (push_data_for_prerolling), ctx);
  1099     }
  1100     DEBUG_API("<-XAMediaPlayerAdapt_CreatePipeline");
  1101     return ret;
  1102 }
  1103 
  1104 /*
  1105  * gboolean XAMediaPlayerAdapt_PositionUpdate(gpointer ctx)
  1106  * Periodic timer callback.
  1107  * If position tracking is enabled, a periodic timer calls this method every XA_ADAPT_PU_INTERVAL msec.
  1108  * @return false to stop periodic calls
  1109  */
  1110 gboolean XAMediaPlayerAdapt_PositionUpdate(gpointer ctx)
  1111 {
  1112     XAAdaptationBaseCtx *bCtx = (XAAdaptationBaseCtx*) ctx;
  1113     XAMediaPlayerAdaptationCtx* mCtx = (XAMediaPlayerAdaptationCtx*) ctx;
  1114     gint64 position;
  1115     XAmillisecond posMsec;
  1116     GstFormat format = GST_FORMAT_TIME;
  1117     XAAdaptEvent event = {XA_PLAYITFEVENTS, XA_ADAPT_POSITION_UPDATE_EVT, 1, NULL};
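           /* The same XAAdaptEvent is reused for both the periodic update and the
            * loop-restart update below; event.data is pointed at posMsec before each
            * XAAdaptationBase_SendAdaptEvents() call. */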
       
  1118 
  1119     DEBUG_API("->XAMediaPlayerAdapt_PositionUpdate");
  1120     if ( !gst_element_query_position( GST_ELEMENT(bCtx->bin), &format, &position ) )
  1121     {
  1122         DEBUG_ERR("Gst: Failed to get position");
  1123         return( mCtx->runpositiontimer );
  1124     }
  1125     DEBUG_INFO_A1("Current position %"GST_TIME_FORMAT, GST_TIME_ARGS(position));
  1126     if( mCtx && mCtx->trackpositionenabled )
  1127     {
  1128         posMsec = GST_TIME_AS_MSECONDS(position);/*Warning ok due to used API specification*/
  1129         DEBUG_INFO_A1("mCtx->trackpositionenabled sending update, position:%u ", posMsec);
  1130         /* send needed events */
  1131         event.data=&posMsec;
  1132         XAAdaptationBase_SendAdaptEvents(bCtx, &event );
  1133     }
  1134     if( mCtx && mCtx->loopingenabled)
  1135     {
  1136         DEBUG_INFO_A2("mCtx->loopingenabled, current position:%"GST_TIME_FORMAT", loopend:%"GST_TIME_FORMAT, GST_TIME_ARGS(position), GST_TIME_ARGS(mCtx->loopend));
  1137         if( (position >= mCtx->loopend) &&
  1138             (mCtx->lastpos < mCtx->loopend) )
  1139         {
  1140             DEBUG_INFO_A2("Restart loop from %"GST_TIME_FORMAT" to %"GST_TIME_FORMAT,
  1141                           GST_TIME_ARGS(mCtx->loopstart), GST_TIME_ARGS(mCtx->loopend));
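                   /* Jump back to loopstart: GST_SEEK_FLAG_FLUSH discards queued data so
                    * the jump takes effect immediately, and GST_SEEK_TYPE_NONE leaves the
                    * stop position untouched; the loop end is enforced by this callback on
                    * later ticks. */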
       
  1142             gst_element_seek( bCtx->bin, mCtx->playrate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE,
  1143                               GST_SEEK_TYPE_SET, mCtx->loopstart,
  1144                               GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE );
  1145             mCtx->lastpos = mCtx->loopstart;
  1146             if( mCtx && mCtx->trackpositionenabled )
  1147             {
  1148                 posMsec = GST_TIME_AS_MSECONDS(mCtx->lastpos);/*Warning ok due to used API specification*/
  1149                 DEBUG_INFO_A1("mCtx->trackpositionenabled sending looping update, position:%u ", posMsec);
  1150                 /* send needed events */
  1151                 event.data=&posMsec;
  1152                 XAAdaptationBase_SendAdaptEvents(bCtx, &event );
  1153             }
  1154         }
  1155         else
  1156         {
  1157             mCtx->lastpos = position;
  1158         }
  1159     }
  1160     DEBUG_API("<-XAMediaPlayerAdapt_PositionUpdate");
  1161     /* return false to stop timer */
  1162     return( mCtx->runpositiontimer );
  1163 }
  1164 
  1165 /*
  1166  * XAresult XAMediaPlayerAdapt_UpdatePositionCbTimer
  1167  * Enable/disable periodic position tracking callback timer
  1168  */
  1169 XAresult XAMediaPlayerAdapt_UpdatePositionCbTimer(XAMediaPlayerAdaptationCtx* mCtx)
  1170 {
  1171     DEBUG_API_A2("->XAMediaPlayerAdapt_UpdatePositionCbTimer: trackposition %u, tracklooping %u",
  1172                 mCtx->trackpositionenabled, mCtx->loopingenabled);
  1173 
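           /* runpositiontimer doubles as the GLib timeout source id: g_timeout_add()
            * returns a non-zero id, so the field also acts as the "timer running" flag,
            * and g_source_remove() plus the reset to 0 below stops the periodic callback. */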
       
  1174     if(mCtx->runpositiontimer==0 && (mCtx->trackpositionenabled || mCtx->loopingenabled))
  1175     {
  1176         DEBUG_INFO("Start position tracking timer");
  1177         mCtx->positionCb = &XAMediaPlayerAdapt_PositionUpdate;
  1178         /* if play is already on, create a timer to track position of playback */
  1179         if( GST_STATE(mCtx->baseObj.bin) == GST_STATE_PLAYING )
  1180         {
  1181             mCtx->runpositiontimer = g_timeout_add(XA_ADAPT_PU_INTERVAL, mCtx->positionCb, mCtx);
  1182         }
  1183     }
  1184     else if (mCtx->runpositiontimer!=0 && !(mCtx->trackpositionenabled || mCtx->loopingenabled))
  1185     {
  1186         DEBUG_INFO("Stop position tracking timer");
  1187         mCtx->trackpositionenabled = XA_BOOLEAN_FALSE;
  1188         if(mCtx->runpositiontimer > 0)
  1189         {
  1190             g_source_remove(mCtx->runpositiontimer);
  1191             mCtx->runpositiontimer=0;
  1192         }
  1193     }
  1194     DEBUG_API("<-XAMediaPlayerAdapt_UpdatePositionCbTimer");
  1195     return XA_RESULT_SUCCESS;
  1196 }
  1197 
  1198 /*
  1199  * XAresult XAMediaPlayerAdapt_InitContentPipeSrc(ctx)
  1200  * Content pipe source setup; could be moved to the adaptation context base.
  1201  */
  1202 XAresult XAMediaPlayerAdapt_InitContentPipeSrc(XAMediaPlayerAdaptationCtx* ctx)
  1203 {
  1204     XAresult ret = XA_RESULT_SUCCESS;
  1205     CPresult res;
  1206     /*GstStateChangeReturn gstRet = GST_STATE_CHANGE_SUCCESS;*/
  1207     DEBUG_API("->XAMediaPlayerAdapt_InitContentPipeSrc");
  1208     ctx->baseObj.pipeSrcThrCtx.appSrc = GST_APP_SRC(ctx->source);
  1209     ctx->baseObj.pipeSrcThrCtx.pipe = (XADataLocator_ContentPipe*)(ctx->xaSource->pLocator);
  1210 
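           /* The content pipe is read on a dedicated thread (started below with
            * XAAdaptationBase_ContentPipeScrThrFunc) that feeds the appsrc element;
            * the state semaphore apparently gates that thread on source/pipeline
            * state changes. */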
       
  1211     /* Create thread for content pipe source */
  1212     ret = XAImpl_CreateThreadHandle( &(ctx->baseObj.pipeSrcThr) );
  1213     if ( ret != XA_RESULT_SUCCESS )
  1214     {
  1215         DEBUG_ERR("Could not create thread for content pipe source!");
  1216         DEBUG_API("<-XAMediaPlayerAdapt_InitContentPipeSrc");
  1217         return ret;
  1218     }
  1219 
  1220     /* Create semaphore for content pipe source */
  1221     ret = XAImpl_CreateSemaphore( &(ctx->baseObj.pipeSrcThrCtx.stateSem));
  1222     if ( ret != XA_RESULT_SUCCESS )
  1223     {
  1224         DEBUG_ERR("Could not create semaphore for content pipe source!");
  1225         DEBUG_API("<-XAMediaPlayerAdapt_InitContentPipeSrc");
  1226         return ret;
  1227     }
  1228 
  1229     /* Open content pipe */
  1230     res = ctx->baseObj.pipeSrcThrCtx.pipe->pContentPipe->Open(&(ctx->baseObj.pipeSrcThrCtx.dataHandle),
  1231                                                 (CPstring)(ctx->baseObj.pipeSrcThrCtx.pipe->URI),
  1232                                                 CP_AccessRead );
  1233     if ( res == EXIT_FAILURE )
  1234     {
  1235         DEBUG_ERR("Could not open Content Pipe!");
  1236         return XA_RESULT_INTERNAL_ERROR;
  1237     }
  1238 
  1239     res = ctx->baseObj.pipeSrcThrCtx.pipe->pContentPipe->RegisterCallback( &(ctx->baseObj.pipeSrcThrCtx.dataHandle), &XAAdaptationBase_ContentPipeSrcCb);
  1240     if ( res == EXIT_FAILURE )
  1241     {
  1242         DEBUG_ERR("Could not register content pipe callbacks!");
  1243         return XA_RESULT_INTERNAL_ERROR;
  1244     }
  1245 
  1246     /*gstRet = gst_element_set_state( GST_ELEMENT(ctx->source), GST_STATE_PAUSED);
  1247     gst_element_sync_state_with_parent( GST_ELEMENT( ctx->source));*/
  1248 
  1249     XAImpl_StartThread( &(ctx->baseObj.pipeSrcThr), NULL, &XAAdaptationBase_ContentPipeScrThrFunc, &(ctx->baseObj.pipeSrcThrCtx) );
  1250 
  1251     DEBUG_API("<-XAMediaPlayerAdapt_InitContentPipeSrc");
  1252     return ret;
  1253 }