diff -r 71e347f905f2 -r 4a7fac7dd34a gst_plugins_base/gst/playback/gstplaysink.c --- a/gst_plugins_base/gst/playback/gstplaysink.c Fri Mar 19 09:35:09 2010 +0200 +++ b/gst_plugins_base/gst/playback/gstplaysink.c Fri Apr 16 15:15:52 2010 +0300 @@ -29,32 +29,33 @@ #include "gstplaysink.h" -#ifdef __SYMBIAN32__ -#include -#endif GST_DEBUG_CATEGORY_STATIC (gst_play_sink_debug); #define GST_CAT_DEFAULT gst_play_sink_debug #define VOLUME_MAX_DOUBLE 10.0 +#define GST_PLAY_CHAIN(c) ((GstPlayChain *)(c)) + /* holds the common data fields for the audio and video pipelines. We keep them * in a structure to more easily have all the info available. */ typedef struct { GstPlaySink *playsink; - GstPad *sinkpad; GstElement *bin; gboolean added; gboolean activated; + gboolean raw; } GstPlayChain; typedef struct { GstPlayChain chain; + GstPad *sinkpad; GstElement *queue; GstElement *conv; GstElement *resample; GstElement *volume; /* element with the volume property */ + gboolean sink_volume; /* if the volume was provided by the sink */ GstElement *mute; /* element with the mute property */ GstElement *sink; } GstPlayAudioChain; @@ -62,6 +63,7 @@ typedef struct { GstPlayChain chain; + GstPad *sinkpad; GstElement *queue; GstElement *conv; GstElement *scale; @@ -72,6 +74,7 @@ typedef struct { GstPlayChain chain; + GstPad *sinkpad; GstElement *queue; GstElement *conv; GstElement *resample; @@ -83,6 +86,33 @@ * chain */ } GstPlayVisChain; +typedef struct +{ + GstPlayChain chain; + GstPad *sinkpad; + GstElement *conv; + GstElement *overlay; + GstPad *videosinkpad; + GstPad *textsinkpad; + GstPad *srcpad; /* outgoing srcpad, used to connect to the next + * chain */ + GstElement *sink; /* custom sink to receive subtitle buffers */ +} GstPlayTextChain; + +typedef struct +{ + GstPlayChain chain; + GstPad *sinkpad; + GstElement *queue; + GstElement *conv; + GstElement *overlay; + GstPad *videosinkpad; + GstPad *subpsinkpad; + GstPad *srcpad; /* outgoing srcpad, used to connect to the next + * chain */ + GstElement *sink; /* custom sink to receive subpicture buffers */ +} GstPlaySubpChain; + #define GST_PLAY_SINK_GET_LOCK(playsink) (((GstPlaySink *)playsink)->lock) #define GST_PLAY_SINK_LOCK(playsink) g_mutex_lock (GST_PLAY_SINK_GET_LOCK (playsink)) #define GST_PLAY_SINK_UNLOCK(playsink) g_mutex_unlock (GST_PLAY_SINK_GET_LOCK (playsink)) @@ -93,35 +123,47 @@ GMutex *lock; + gboolean async_pending; + gboolean need_async_start; + GstPlayFlags flags; - GstPlayChain *audiochain; - GstPlayChain *videochain; - GstPlayChain *vischain; + /* chains */ + GstPlayAudioChain *audiochain; + GstPlayVideoChain *videochain; + GstPlayVisChain *vischain; + GstPlayTextChain *textchain; + GstPlaySubpChain *subpchain; + /* audio */ GstPad *audio_pad; gboolean audio_pad_raw; + /* audio tee */ GstElement *audio_tee; GstPad *audio_tee_sink; GstPad *audio_tee_asrc; GstPad *audio_tee_vissrc; - + /* video */ GstPad *video_pad; gboolean video_pad_raw; - + /* text */ GstPad *text_pad; + /* subpictures */ + GstPad *subp_pad; /* properties */ GstElement *audio_sink; GstElement *video_sink; GstElement *visualisation; + GstElement *text_sink; + GstElement *subp_sink; gfloat volume; gboolean mute; gchar *font_desc; /* font description */ guint connection_speed; /* connection speed in bits/sec (0 = unknown) */ - - /* internal elements */ - GstElement *textoverlay_element; + gint count; + gboolean volume_changed; /* volume/mute changed while no audiochain */ + gboolean mute_changed; /* ... 
has been reated yet */ }; struct _GstPlaySinkClass @@ -134,12 +176,6 @@ enum { PROP_0, - PROP_AUDIO_SINK, - PROP_VIDEO_SINK, - PROP_VIS_PLUGIN, - PROP_VOLUME, - PROP_FRAME, - PROP_FONT_DESC, PROP_LAST }; @@ -154,17 +190,12 @@ static void gst_play_sink_dispose (GObject * object); static void gst_play_sink_finalize (GObject * object); -static void gst_play_sink_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * spec); -static void gst_play_sink_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * spec); - static gboolean gst_play_sink_send_event (GstElement * element, GstEvent * event); static GstStateChangeReturn gst_play_sink_change_state (GstElement * element, GstStateChange transition); -static GstElementClass *parent_class; +static void gst_play_sink_handle_message (GstBin * bin, GstMessage * message); /* static guint gst_play_sink_signals[LAST_SIGNAL] = { 0 }; */ @@ -173,36 +204,8 @@ "Generic/Bin/Player", "Autoplug and play media from an uri", "Wim Taymans "); -#ifdef __SYMBIAN32__ -EXPORT_C -#endif - -GType -gst_play_sink_get_type (void) -{ - static GType gst_play_sink_type = 0; - - if (!gst_play_sink_type) { - static const GTypeInfo gst_play_sink_info = { - sizeof (GstPlaySinkClass), - NULL, - NULL, - (GClassInitFunc) gst_play_sink_class_init, - NULL, - NULL, - sizeof (GstPlaySink), - 0, - (GInstanceInitFunc) gst_play_sink_init, - NULL - }; - - gst_play_sink_type = g_type_register_static (GST_TYPE_BIN, - "GstPlaySink", &gst_play_sink_info, 0); - } - - return gst_play_sink_type; -} +G_DEFINE_TYPE (GstPlaySink, gst_play_sink, GST_TYPE_BIN); static void gst_play_sink_class_init (GstPlaySinkClass * klass) @@ -215,45 +218,18 @@ gstelement_klass = (GstElementClass *) klass; gstbin_klass = (GstBinClass *) klass; - parent_class = g_type_class_peek_parent (klass); - - gobject_klass->set_property = gst_play_sink_set_property; - gobject_klass->get_property = gst_play_sink_get_property; - gobject_klass->dispose = GST_DEBUG_FUNCPTR (gst_play_sink_dispose); gobject_klass->finalize = GST_DEBUG_FUNCPTR (gst_play_sink_finalize); - g_object_class_install_property (gobject_klass, PROP_VIDEO_SINK, - g_param_spec_object ("video-sink", "Video Sink", - "the video output element to use (NULL = default sink)", - GST_TYPE_ELEMENT, G_PARAM_READWRITE)); - g_object_class_install_property (gobject_klass, PROP_AUDIO_SINK, - g_param_spec_object ("audio-sink", "Audio Sink", - "the audio output element to use (NULL = default sink)", - GST_TYPE_ELEMENT, G_PARAM_READWRITE)); - g_object_class_install_property (gobject_klass, PROP_VIS_PLUGIN, - g_param_spec_object ("vis-plugin", "Vis plugin", - "the visualization element to use (NULL = none)", - GST_TYPE_ELEMENT, G_PARAM_READWRITE)); - g_object_class_install_property (gobject_klass, PROP_VOLUME, - g_param_spec_double ("volume", "volume", "volume", - 0.0, VOLUME_MAX_DOUBLE, 1.0, G_PARAM_READWRITE)); - g_object_class_install_property (gobject_klass, PROP_FRAME, - gst_param_spec_mini_object ("frame", "Frame", - "The last video frame (NULL = no video available)", - GST_TYPE_BUFFER, G_PARAM_READABLE)); - g_object_class_install_property (gobject_klass, PROP_FONT_DESC, - g_param_spec_string ("subtitle-font-desc", - "Subtitle font description", - "Pango font description of font " - "to be used for subtitle rendering", NULL, G_PARAM_WRITABLE)); - gst_element_class_set_details (gstelement_klass, &gst_play_sink_details); gstelement_klass->change_state = GST_DEBUG_FUNCPTR (gst_play_sink_change_state); 
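  /* Illustrative sketch, not part of this patch: with the "audio-sink",
   * "video-sink", "vis-plugin" and "volume" properties removed above, callers
   * are expected to use the explicit GstPlaySink API that this patch adds
   * further down, instead of g_object_set(). Assuming a playsink instance and
   * a caller-created asink element:
   *
   *   gst_play_sink_set_sink (playsink, GST_PLAY_SINK_TYPE_AUDIO, asink);
   *   gst_play_sink_set_vis_plugin (playsink, NULL);   with NULL, goom is used
   *   gst_play_sink_set_volume (playsink, 0.5);
   *   gst_play_sink_set_mute (playsink, FALSE);
   */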
gstelement_klass->send_event = GST_DEBUG_FUNCPTR (gst_play_sink_send_event); + gstbin_klass->handle_message = + GST_DEBUG_FUNCPTR (gst_play_sink_handle_message); + GST_DEBUG_CATEGORY_INIT (gst_play_sink_debug, "playsink", 0, "play bin"); } @@ -264,12 +240,24 @@ playsink->video_sink = NULL; playsink->audio_sink = NULL; playsink->visualisation = NULL; - playsink->textoverlay_element = NULL; + playsink->text_sink = NULL; playsink->volume = 1.0; playsink->font_desc = NULL; playsink->flags = GST_PLAY_FLAG_SOFT_VOLUME; playsink->lock = g_mutex_new (); + playsink->need_async_start = TRUE; + GST_OBJECT_FLAG_SET (playsink, GST_ELEMENT_IS_SINK); +} + +static void +free_chain (GstPlayChain * chain) +{ + if (chain) { + if (chain->bin) + gst_object_unref (chain->bin); + g_free (chain); + } } static void @@ -294,14 +282,44 @@ gst_object_unref (playsink->visualisation); playsink->visualisation = NULL; } - if (playsink->textoverlay_element != NULL) { - gst_object_unref (playsink->textoverlay_element); - playsink->textoverlay_element = NULL; + if (playsink->text_sink != NULL) { + gst_element_set_state (playsink->text_sink, GST_STATE_NULL); + gst_object_unref (playsink->text_sink); + playsink->text_sink = NULL; } + + free_chain ((GstPlayChain *) playsink->videochain); + playsink->videochain = NULL; + free_chain ((GstPlayChain *) playsink->audiochain); + playsink->audiochain = NULL; + free_chain ((GstPlayChain *) playsink->vischain); + playsink->vischain = NULL; + free_chain ((GstPlayChain *) playsink->textchain); + playsink->textchain = NULL; + + if (playsink->audio_tee_sink) { + gst_object_unref (playsink->audio_tee_sink); + playsink->audio_tee_sink = NULL; + } + + if (playsink->audio_tee_vissrc) { + gst_element_release_request_pad (playsink->audio_tee, + playsink->audio_tee_vissrc); + gst_object_unref (playsink->audio_tee_vissrc); + playsink->audio_tee_vissrc = NULL; + } + + if (playsink->audio_tee_asrc) { + gst_element_release_request_pad (playsink->audio_tee, + playsink->audio_tee_asrc); + gst_object_unref (playsink->audio_tee_asrc); + playsink->audio_tee_asrc = NULL; + } + g_free (playsink->font_desc); playsink->font_desc = NULL; - G_OBJECT_CLASS (parent_class)->dispose (object); + G_OBJECT_CLASS (gst_play_sink_parent_class)->dispose (object); } static void @@ -313,45 +331,108 @@ g_mutex_free (playsink->lock); - G_OBJECT_CLASS (parent_class)->finalize (object); + G_OBJECT_CLASS (gst_play_sink_parent_class)->finalize (object); } #ifdef __SYMBIAN32__ EXPORT_C #endif - void -gst_play_sink_set_video_sink (GstPlaySink * playsink, GstElement * sink) +gst_play_sink_set_sink (GstPlaySink * playsink, GstPlaySinkType type, + GstElement * sink) { - GST_OBJECT_LOCK (playsink); - if (playsink->video_sink) - gst_object_unref (playsink->video_sink); + GstElement **elem = NULL, *old = NULL; + + GST_LOG ("Setting sink %" GST_PTR_FORMAT " as sink type %d", sink, type); - if (sink) { - gst_object_ref (sink); - gst_object_sink (sink); + GST_PLAY_SINK_LOCK (playsink); + switch (type) { + case GST_PLAY_SINK_TYPE_AUDIO: + case GST_PLAY_SINK_TYPE_AUDIO_RAW: + elem = &playsink->audio_sink; + break; + case GST_PLAY_SINK_TYPE_VIDEO: + case GST_PLAY_SINK_TYPE_VIDEO_RAW: + elem = &playsink->video_sink; + break; + case GST_PLAY_SINK_TYPE_TEXT: + elem = &playsink->text_sink; + break; + case GST_PLAY_SINK_TYPE_SUBPIC: + elem = &playsink->subp_sink; + break; + default: + break; } - playsink->video_sink = sink; - GST_OBJECT_UNLOCK (playsink); + if (elem) { + old = *elem; + if (sink) + gst_object_ref (sink); + *elem = sink; + } + 
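  /* Illustrative sketch, not part of this patch: swapping the video sink at
   * run time with the function above, assuming newsink was created by the
   * caller:
   *
   *   GstElement *newsink = gst_element_factory_make ("ximagesink", NULL);
   *   gst_play_sink_set_sink (playsink, GST_PLAY_SINK_TYPE_VIDEO, newsink);
   *
   * The new sink is ref'ed and stored; the previously configured sink, if any,
   * is unreffed right below, outside of the playsink lock. The stored sink is
   * only picked up the next time the chains are built in
   * gst_play_sink_reconfigure(). */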
GST_PLAY_SINK_UNLOCK (playsink); + + if (old) + gst_object_unref (old); } + #ifdef __SYMBIAN32__ EXPORT_C #endif - -void -gst_play_sink_set_audio_sink (GstPlaySink * playsink, GstElement * sink) +GstElement * +gst_play_sink_get_sink (GstPlaySink * playsink, GstPlaySinkType type) { - GST_OBJECT_LOCK (playsink); - if (playsink->audio_sink) - gst_object_unref (playsink->audio_sink); + GstElement *result = NULL; + GstElement *elem = NULL, *chainp = NULL; - if (sink) { - gst_object_ref (sink); - gst_object_sink (sink); + GST_PLAY_SINK_LOCK (playsink); + switch (type) { + case GST_PLAY_SINK_TYPE_AUDIO: + { + GstPlayAudioChain *chain; + if ((chain = (GstPlayAudioChain *) playsink->audiochain)) + chainp = chain->sink; + elem = playsink->audio_sink; + break; + } + case GST_PLAY_SINK_TYPE_VIDEO: + { + GstPlayVideoChain *chain; + if ((chain = (GstPlayVideoChain *) playsink->videochain)) + chainp = chain->sink; + elem = playsink->video_sink; + break; + } + case GST_PLAY_SINK_TYPE_TEXT: + { + GstPlayTextChain *chain; + if ((chain = (GstPlayTextChain *) playsink->textchain)) + chainp = chain->sink; + elem = playsink->text_sink; + break; + } + case GST_PLAY_SINK_TYPE_SUBPIC: + { + GstPlaySubpChain *chain; + if ((chain = (GstPlaySubpChain *) playsink->subpchain)) + chainp = chain->sink; + elem = playsink->subp_sink; + break; + } + default: + break; } - playsink->audio_sink = sink; - GST_OBJECT_UNLOCK (playsink); + if (chainp) { + /* we have an active chain with a sink, get the sink */ + result = gst_object_ref (chainp); + } + /* nothing found, return last configured sink */ + if (result == NULL && elem) + result = gst_object_ref (elem); + GST_PLAY_SINK_UNLOCK (playsink); + + return result; } static void @@ -390,13 +471,13 @@ gst_bin_remove (GST_BIN_CAST (chain->chain.bin), chain->vis); /* add new plugin and set state to playing */ - chain->vis = gst_object_ref (playsink->visualisation); + chain->vis = playsink->visualisation; gst_bin_add (GST_BIN_CAST (chain->chain.bin), chain->vis); gst_element_set_state (chain->vis, GST_STATE_PLAYING); /* get pads */ - chain->vissinkpad = gst_element_get_pad (chain->vis, "sink"); - chain->vissrcpad = gst_element_get_pad (chain->vis, "src"); + chain->vissinkpad = gst_element_get_static_pad (chain->vis, "sink"); + chain->vissrcpad = gst_element_get_static_pad (chain->vis, "src"); /* link pads */ gst_pad_link (chain->blockpad, chain->vissinkpad); @@ -419,11 +500,22 @@ { GstPlayVisChain *chain; + /* setting NULL means creating the default vis plugin */ + if (vis == NULL) + vis = gst_element_factory_make ("goom", "vis"); + + /* simply return if we don't have a vis plugin here */ + if (vis == NULL) + return; + GST_PLAY_SINK_LOCK (playsink); /* first store the new vis */ if (playsink->visualisation) gst_object_unref (playsink->visualisation); - playsink->visualisation = gst_object_ref (vis); + /* take ownership */ + gst_object_ref (vis); + gst_object_sink (vis); + playsink->visualisation = vis; /* now try to change the plugin in the running vis chain, if we have no chain, * we don't bother, any future vis chain will be created with the new vis @@ -433,7 +525,10 @@ /* block the pad, the next time the callback is called we can change the * visualisation. It's possible that this never happens or that the pad was - * already blocked. */ + * already blocked. If the callback never happens, we don't have new data so + * we don't need the new vis plugin. If the pad was already blocked, the + * function returns FALSE but the previous pad block will do the right thing + * anyway. 
*/ GST_DEBUG_OBJECT (playsink, "blocking vis pad"); gst_pad_set_blocked_async (chain->blockpad, TRUE, gst_play_sink_vis_blocked, playsink); @@ -447,6 +542,30 @@ #endif +GstElement * +gst_play_sink_get_vis_plugin (GstPlaySink * playsink) +{ + GstElement *result = NULL; + GstPlayVisChain *chain; + + GST_PLAY_SINK_LOCK (playsink); + if ((chain = (GstPlayVisChain *) playsink->vischain)) { + /* we have an active chain, get the sink */ + if (chain->vis) + result = gst_object_ref (chain->vis); + } + /* nothing found, return last configured sink */ + if (result == NULL && playsink->visualisation) + result = gst_object_ref (playsink->visualisation); + GST_PLAY_SINK_UNLOCK (playsink); + + return result; +} +#ifdef __SYMBIAN32__ +EXPORT_C +#endif + + void gst_play_sink_set_volume (GstPlaySink * playsink, gdouble volume) { @@ -456,7 +575,15 @@ playsink->volume = volume; chain = (GstPlayAudioChain *) playsink->audiochain; if (chain && chain->volume) { - g_object_set (chain->volume, "volume", volume, NULL); + GST_LOG_OBJECT (playsink, "elements: volume=%" GST_PTR_FORMAT ", mute=%" + GST_PTR_FORMAT "; new volume=%.03f, mute=%d", chain->volume, + chain->mute, volume, playsink->mute); + /* if there is a mute element or we are not muted, set the volume */ + if (chain->mute || !playsink->mute) + g_object_set (chain->volume, "volume", volume, NULL); + } else { + GST_LOG_OBJECT (playsink, "no volume element"); + playsink->volume_changed = TRUE; } GST_PLAY_SINK_UNLOCK (playsink); } @@ -473,11 +600,12 @@ GST_PLAY_SINK_LOCK (playsink); chain = (GstPlayAudioChain *) playsink->audiochain; + result = playsink->volume; if (chain && chain->volume) { - g_object_get (chain->volume, "volume", &result, NULL); - playsink->volume = result; - } else { - result = playsink->volume; + if (chain->mute || !playsink->mute) { + g_object_get (chain->volume, "volume", &result, NULL); + playsink->volume = result; + } } GST_PLAY_SINK_UNLOCK (playsink); @@ -496,8 +624,18 @@ GST_PLAY_SINK_LOCK (playsink); playsink->mute = mute; chain = (GstPlayAudioChain *) playsink->audiochain; - if (chain && chain->mute) { - g_object_set (chain->mute, "mute", mute, NULL); + if (chain) { + if (chain->mute) { + g_object_set (chain->mute, "mute", mute, NULL); + } else if (chain->volume) { + if (mute) + g_object_set (chain->volume, "volume", (gdouble) 0.0, NULL); + else + g_object_set (chain->volume, "volume", (gdouble) playsink->volume, + NULL); + } + } else { + playsink->mute_changed = TRUE; } GST_PLAY_SINK_UNLOCK (playsink); } @@ -526,80 +664,6 @@ } static void -gst_play_sink_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec) -{ - GstPlaySink *playsink; - - playsink = GST_PLAY_SINK (object); - - switch (prop_id) { - case PROP_VIDEO_SINK: - gst_play_sink_set_video_sink (playsink, g_value_get_object (value)); - break; - case PROP_AUDIO_SINK: - gst_play_sink_set_audio_sink (playsink, g_value_get_object (value)); - break; - case PROP_VIS_PLUGIN: - gst_play_sink_set_vis_plugin (playsink, g_value_get_object (value)); - break; - case PROP_VOLUME: - gst_play_sink_set_volume (playsink, g_value_get_double (value)); - break; - case PROP_FONT_DESC: - GST_OBJECT_LOCK (playsink); - g_free (playsink->font_desc); - playsink->font_desc = g_strdup (g_value_get_string (value)); - if (playsink->textoverlay_element) { - g_object_set (G_OBJECT (playsink->textoverlay_element), - "font-desc", g_value_get_string (value), NULL); - } - GST_OBJECT_UNLOCK (playsink); - break; - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, 
pspec); - break; - } -} - -static void -gst_play_sink_get_property (GObject * object, guint prop_id, GValue * value, - GParamSpec * pspec) -{ - GstPlaySink *playsink; - - playsink = GST_PLAY_SINK (object); - - switch (prop_id) { - case PROP_VIDEO_SINK: - GST_OBJECT_LOCK (playsink); - g_value_set_object (value, playsink->video_sink); - GST_OBJECT_UNLOCK (playsink); - break; - case PROP_AUDIO_SINK: - GST_OBJECT_LOCK (playsink); - g_value_set_object (value, playsink->audio_sink); - GST_OBJECT_UNLOCK (playsink); - break; - case PROP_VIS_PLUGIN: - GST_OBJECT_LOCK (playsink); - g_value_set_object (value, playsink->visualisation); - GST_OBJECT_UNLOCK (playsink); - break; - case PROP_VOLUME: - g_value_set_double (value, gst_play_sink_get_volume (playsink)); - break; - case PROP_FRAME: - { - break; - } - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; - } -} - -static void post_missing_element_message (GstPlaySink * playsink, const gchar * name) { GstMessage *msg; @@ -608,15 +672,6 @@ gst_element_post_message (GST_ELEMENT_CAST (playsink), msg); } -static void -free_chain (GstPlayChain * chain) -{ - if (chain->bin) - gst_object_unref (chain->bin); - gst_object_unref (chain->playsink); - g_free (chain); -} - static gboolean add_chain (GstPlayChain * chain, gboolean add) { @@ -636,11 +691,17 @@ static gboolean activate_chain (GstPlayChain * chain, gboolean activate) { + GstState state; + if (chain->activated == activate) return TRUE; + GST_OBJECT_LOCK (chain->playsink); + state = GST_STATE_TARGET (chain->playsink); + GST_OBJECT_UNLOCK (chain->playsink); + if (activate) - gst_element_set_state (chain->bin, GST_STATE_PAUSED); + gst_element_set_state (chain->bin, state); else gst_element_set_state (chain->bin, GST_STATE_NULL); @@ -687,6 +748,106 @@ return result; } +static gint +find_property_sink (GstElement * element, const gchar * name) +{ + gint res; + gboolean is_sink; + + GST_OBJECT_LOCK (element); + is_sink = GST_OBJECT_FLAG_IS_SET (element, GST_ELEMENT_IS_SINK); + GST_OBJECT_UNLOCK (element); + + if (is_sink && + g_object_class_find_property (G_OBJECT_GET_CLASS (element), name)) { + res = 0; + GST_DEBUG_OBJECT (element, "found %s property on sink", name); + } else { + GST_DEBUG_OBJECT (element, "did not find %s property", name); + res = 1; + gst_object_unref (element); + } + return res; +} + +/* find a sink in the hierarchy with a property named @name. This function does + * not increase the refcount of the returned object and thus remains valid as + * long as the bin is valid. 
*/ +static GstElement * +gst_play_sink_find_property_sinks (GstPlaySink * playsink, GstElement * obj, + const gchar * name) +{ + GstElement *result = NULL; + GstIterator *it; + + if (g_object_class_find_property (G_OBJECT_GET_CLASS (obj), name)) { + result = obj; + } else if (GST_IS_BIN (obj)) { + it = gst_bin_iterate_recurse (GST_BIN_CAST (obj)); + result = gst_iterator_find_custom (it, + (GCompareFunc) find_property_sink, (gpointer) name); + gst_iterator_free (it); + /* we don't need the extra ref */ + if (result) + gst_object_unref (result); + } + return result; +} + +static void +do_async_start (GstPlaySink * playsink) +{ + GstMessage *message; + + if (!playsink->need_async_start) + return; + + playsink->async_pending = TRUE; + + GST_INFO_OBJECT (playsink, "Sending async_start message"); + message = gst_message_new_async_start (GST_OBJECT_CAST (playsink), FALSE); + GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (GST_BIN_CAST + (playsink), message); +} + +static void +do_async_done (GstPlaySink * playsink) +{ + GstMessage *message; + + if (playsink->async_pending) { + GST_INFO_OBJECT (playsink, "Sending async_done message"); + message = gst_message_new_async_done (GST_OBJECT_CAST (playsink)); + GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (GST_BIN_CAST + (playsink), message); + + playsink->async_pending = FALSE; + } + + playsink->need_async_start = FALSE; +} + +/* try to change the state of an element. This function returns the element when + * the state change could be performed. When this function returns NULL an error + * occured and the element is unreffed if @unref is TRUE. */ +static GstElement * +try_element (GstPlaySink * playsink, GstElement * element, gboolean unref) +{ + GstStateChangeReturn ret; + + if (element) { + ret = gst_element_set_state (element, GST_STATE_READY); + if (ret == GST_STATE_CHANGE_FAILURE) { + GST_DEBUG_OBJECT (playsink, "failed state change.."); + gst_element_set_state (element, GST_STATE_NULL); + if (unref) + gst_object_unref (element); + element = NULL; + } + } + return element; +} + /* make the element (bin) that contains the elements needed to perform * video display. 
* @@ -700,37 +861,51 @@ * +------------------------------------------------------------+ * */ -static GstPlayChain * -gen_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async) +static GstPlayVideoChain * +gen_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async, + gboolean queue) { GstPlayVideoChain *chain; GstBin *bin; GstPad *pad; + GstElement *head, *prev, *elem; chain = g_new0 (GstPlayVideoChain, 1); - chain->chain.playsink = gst_object_ref (playsink); + chain->chain.playsink = playsink; + chain->chain.raw = raw; + + GST_DEBUG_OBJECT (playsink, "making video chain %p", chain); if (playsink->video_sink) { - chain->sink = playsink->video_sink; - } else { - chain->sink = gst_element_factory_make ("autovideosink", "videosink"); - if (chain->sink == NULL) { - chain->sink = gst_element_factory_make ("xvimagesink", "videosink"); - } - if (chain->sink == NULL) - goto no_sinks; + GST_DEBUG_OBJECT (playsink, "trying configured videosink"); + chain->sink = try_element (playsink, playsink->video_sink, FALSE); + } + if (chain->sink == NULL) { + GST_DEBUG_OBJECT (playsink, "trying autovideosink"); + elem = gst_element_factory_make ("autovideosink", "videosink"); + chain->sink = try_element (playsink, elem, TRUE); } + /* FIXME: if DEFAULT_VIDEOSINK != "autovideosink" try this now */ + if (chain->sink == NULL) { + GST_DEBUG_OBJECT (playsink, "trying xvimagesink"); + elem = gst_element_factory_make ("xvimagesink", "videosink"); + chain->sink = try_element (playsink, elem, TRUE); + } + if (chain->sink == NULL) + goto no_sinks; /* if we can disable async behaviour of the sink, we can avoid adding a - * queue for the audio chain. We can't use the deep property here because the - * sink might change it's internal sink element later. */ - if (g_object_class_find_property (G_OBJECT_GET_CLASS (chain->sink), "async")) { - GST_DEBUG_OBJECT (playsink, "setting async property to %d on video sink", - async); - g_object_set (chain->sink, "async", async, NULL); + * queue for the audio chain. */ + elem = gst_play_sink_find_property_sinks (playsink, chain->sink, "async"); + if (elem) { + GST_DEBUG_OBJECT (playsink, "setting async property to %d on element %s", + async, GST_ELEMENT_NAME (elem)); + g_object_set (elem, "async", async, NULL); chain->async = async; - } else + } else { + GST_DEBUG_OBJECT (playsink, "no async property on the sink"); chain->async = TRUE; + } /* create a bin to hold objects, as we create them we add them to this bin so * that when something goes wrong we only need to unref the bin */ @@ -740,74 +915,86 @@ gst_object_sink (bin); gst_bin_add (bin, chain->sink); - if (raw) { - chain->conv = gst_element_factory_make ("ffmpegcolorspace", "vconv"); - if (chain->conv == NULL) - goto no_colorspace; - gst_bin_add (bin, chain->conv); - - chain->scale = gst_element_factory_make ("videoscale", "vscale"); - if (chain->scale == NULL) - goto no_videoscale; - gst_bin_add (bin, chain->scale); + if (queue) { + /* decouple decoder from sink, this improves playback quite a lot since the + * decoder can continue while the sink blocks for synchronisation. We don't + * need a lot of buffers as this consumes a lot of memory and we don't want + * too little because else we would be context switching too quickly. 
*/ + chain->queue = gst_element_factory_make ("queue", "vqueue"); + g_object_set (G_OBJECT (chain->queue), "max-size-buffers", 3, + "max-size-bytes", 0, "max-size-time", (gint64) 0, NULL); + gst_bin_add (bin, chain->queue); + head = prev = chain->queue; + } else { + head = chain->sink; + prev = NULL; } - /* decouple decoder from sink, this improves playback quite a lot since the - * decoder can continue while the sink blocks for synchronisation. We don't - * need a lot of buffers as this consumes a lot of memory and we don't want - * too little because else we would be context switching too quickly. */ - chain->queue = gst_element_factory_make ("queue", "vqueue"); - g_object_set (G_OBJECT (chain->queue), "max-size-buffers", 3, - "max-size-bytes", 0, "max-size-time", (gint64) 0, NULL); - gst_bin_add (bin, chain->queue); + if (raw && !(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO)) { + GST_DEBUG_OBJECT (playsink, "creating ffmpegcolorspace"); + chain->conv = gst_element_factory_make ("ffmpegcolorspace", "vconv"); + if (chain->conv == NULL) { + post_missing_element_message (playsink, "ffmpegcolorspace"); + GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, + (_("Missing element '%s' - check your GStreamer installation."), + "ffmpegcolorspace"), ("video rendering might fail")); + } else { + gst_bin_add (bin, chain->conv); + if (prev) { + if (!gst_element_link_pads (prev, "src", chain->conv, "sink")) + goto link_failed; + } else { + head = chain->conv; + } + prev = chain->conv; + } - if (raw) { - gst_element_link_pads (chain->queue, "src", chain->conv, "sink"); - gst_element_link_pads (chain->conv, "src", chain->scale, "sink"); - /* be more careful with the pad from the custom sink element, it might not - * be named 'sink' */ - if (!gst_element_link_pads (chain->scale, "src", chain->sink, NULL)) - goto link_failed; - - pad = gst_element_get_pad (chain->queue, "sink"); - } else { - if (!gst_element_link_pads (chain->queue, "src", chain->sink, NULL)) - goto link_failed; - pad = gst_element_get_pad (chain->queue, "sink"); + GST_DEBUG_OBJECT (playsink, "creating videoscale"); + chain->scale = gst_element_factory_make ("videoscale", "vscale"); + if (chain->scale == NULL) { + post_missing_element_message (playsink, "videoscale"); + GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, + (_("Missing element '%s' - check your GStreamer installation."), + "videoscale"), ("possibly a liboil version mismatch?")); + } else { + gst_bin_add (bin, chain->scale); + if (prev) { + if (!gst_element_link_pads (prev, "src", chain->scale, "sink")) + goto link_failed; + } else { + head = chain->scale; + } + prev = chain->scale; + } } - chain->chain.sinkpad = gst_ghost_pad_new ("sink", pad); + if (prev) { + GST_DEBUG_OBJECT (playsink, "linking to sink"); + if (!gst_element_link_pads (prev, "src", chain->sink, NULL)) + goto link_failed; + } + + pad = gst_element_get_static_pad (head, "sink"); + chain->sinkpad = gst_ghost_pad_new ("sink", pad); gst_object_unref (pad); - gst_element_add_pad (chain->chain.bin, chain->chain.sinkpad); - return (GstPlayChain *) chain; + gst_element_add_pad (chain->chain.bin, chain->sinkpad); + + return chain; /* ERRORS */ no_sinks: { - post_missing_element_message (playsink, "autovideosink"); - GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN, - (_("Both autovideosink and xvimagesink elements are missing.")), - (NULL)); - free_chain ((GstPlayChain *) chain); - return NULL; - } -no_colorspace: - { - post_missing_element_message (playsink, "ffmpegcolorspace"); - GST_ELEMENT_ERROR (playsink, 
CORE, MISSING_PLUGIN, - (_("Missing element '%s' - check your GStreamer installation."), - "ffmpegcolorspace"), (NULL)); - free_chain ((GstPlayChain *) chain); - return NULL; - } - -no_videoscale: - { - post_missing_element_message (playsink, "videoscale"); - GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN, - (_("Missing element '%s' - check your GStreamer installation."), - "videoscale"), ("possibly a liboil version mismatch?")); + if (!elem) { + post_missing_element_message (playsink, "autovideosink"); + GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN, + (_("Both autovideosink and xvimagesink elements are missing.")), + (NULL)); + } else { + GST_ELEMENT_ERROR (playsink, CORE, STATE_CHANGE, + (_("Both autovideosink and xvimagesink elements are not working.")), + (NULL)); + } free_chain ((GstPlayChain *) chain); return NULL; } @@ -820,87 +1007,330 @@ } } -#if 0 +static gboolean +setup_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async, + gboolean queue) +{ + GstElement *elem; + GstPlayVideoChain *chain; + GstStateChangeReturn ret; + + chain = playsink->videochain; + + /* if the chain was active we don't do anything */ + if (GST_PLAY_CHAIN (chain)->activated == TRUE) + return TRUE; + + if (chain->chain.raw != raw) + return FALSE; + + /* try to set the sink element to READY again */ + ret = gst_element_set_state (chain->sink, GST_STATE_READY); + if (ret == GST_STATE_CHANGE_FAILURE) + return FALSE; + + /* if we can disable async behaviour of the sink, we can avoid adding a + * queue for the audio chain. */ + elem = gst_play_sink_find_property_sinks (playsink, chain->sink, "async"); + if (elem) { + GST_DEBUG_OBJECT (playsink, "setting async property to %d on element %s", + async, GST_ELEMENT_NAME (elem)); + g_object_set (elem, "async", async, NULL); + chain->async = async; + } else { + GST_DEBUG_OBJECT (playsink, "no async property on the sink"); + chain->async = TRUE; + } + return TRUE; +} + /* make an element for playback of video with subtitles embedded. * - * +--------------------------------------------------+ - * | tbin +-------------+ | - * | +-----+ | textoverlay | +------+ | - * | | csp | +--video_sink | | vbin | | - * video_sink-sink src+ +-text_sink src-sink | | - * | +-----+ | +-------------+ +------+ | - * text_sink-------------+ | - * +--------------------------------------------------+ - * - * If there is no subtitle renderer this function will simply return the - * videosink without the text_sink pad. 
+ * +----------------------------------------------+ + * | tbin +-------------+ | + * | +-----+ | textoverlay | | + * | | csp | +--video_sink | | + * sink-------sink src+ +-text_sink src--+ | + * | +-----+ | +-------------+ +-- src + * text_sink-------------+ | + * +----------------------------------------------+ */ -static GstElement * -gen_text_element (GstPlaySink * playsink) +static GstPlayTextChain * +gen_text_chain (GstPlaySink * playsink) { - GstElement *element, *csp, *overlay, *vbin; - GstPad *pad; + GstPlayTextChain *chain; + GstBin *bin; + GstElement *elem; + GstPad *videosinkpad, *textsinkpad, *srcpad; + + chain = g_new0 (GstPlayTextChain, 1); + chain->chain.playsink = playsink; + + GST_DEBUG_OBJECT (playsink, "making text chain %p", chain); + + chain->chain.bin = gst_bin_new ("tbin"); + bin = GST_BIN_CAST (chain->chain.bin); + gst_object_ref (bin); + gst_object_sink (bin); + + videosinkpad = textsinkpad = srcpad = NULL; + + /* first try to hook the text pad to the custom sink */ + if (playsink->text_sink) { + GST_DEBUG_OBJECT (playsink, "trying configured textsink"); + chain->sink = try_element (playsink, playsink->text_sink, FALSE); + if (chain->sink) { + elem = gst_play_sink_find_property_sinks (playsink, chain->sink, "async"); + if (elem) { + /* make sure the sparse subtitles don't participate in the preroll */ + g_object_set (elem, "async", FALSE, NULL); + /* we have a custom sink, this will be our textsinkpad */ + textsinkpad = gst_element_get_static_pad (chain->sink, "sink"); + if (textsinkpad) { + /* we're all fine now and we can add the sink to the chain */ + GST_DEBUG_OBJECT (playsink, "adding custom text sink"); + gst_bin_add (bin, chain->sink); + } else { + GST_WARNING_OBJECT (playsink, + "can't find a sink pad on custom text sink"); + gst_object_unref (chain->sink); + chain->sink = NULL; + } + /* try to set sync to true but it's no biggie when we can't */ + if ((elem = + gst_play_sink_find_property_sinks (playsink, chain->sink, + "sync"))) + g_object_set (elem, "sync", TRUE, NULL); + } else { + GST_WARNING_OBJECT (playsink, + "can't find async property in custom text sink"); + } + } + if (textsinkpad == NULL) { + GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, + (_("Custom text sink element is not usable.")), + ("fallback to default textoverlay")); + } + } - /* Create the video rendering bin, error is posted when this fails. 
*/ - vbin = gen_video_element (playsink); - if (!vbin) - return NULL; + if (textsinkpad == NULL) { + if (!(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO)) { + /* no custom sink, try to setup the colorspace and textoverlay elements */ + chain->conv = gst_element_factory_make ("ffmpegcolorspace", "tconv"); + if (chain->conv == NULL) { + /* not really needed, it might work without colorspace */ + post_missing_element_message (playsink, "ffmpegcolorspace"); + GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, + (_("Missing element '%s' - check your GStreamer installation."), + "ffmpegcolorspace"), ("subtitle rendering might fail")); + } else { + gst_bin_add (bin, chain->conv); + videosinkpad = gst_element_get_static_pad (chain->conv, "sink"); + } + } - /* Text overlay */ - overlay = gst_element_factory_make ("textoverlay", "overlay"); + chain->overlay = gst_element_factory_make ("textoverlay", "overlay"); + if (chain->overlay == NULL) { + post_missing_element_message (playsink, "textoverlay"); + GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, + (_("Missing element '%s' - check your GStreamer installation."), + "textoverlay"), ("subtitle rendering disabled")); + } else { + gst_bin_add (bin, chain->overlay); + + /* Set some parameters */ + g_object_set (G_OBJECT (chain->overlay), + "halign", "center", "valign", "bottom", NULL); + if (playsink->font_desc) { + g_object_set (G_OBJECT (chain->overlay), "font-desc", + playsink->font_desc, NULL); + } + g_object_set (G_OBJECT (chain->overlay), "wait-text", FALSE, NULL); - /* If no overlay return the video bin without subtitle support. */ - if (!overlay) - goto no_overlay; + textsinkpad = gst_element_get_static_pad (chain->overlay, "text_sink"); + + srcpad = gst_element_get_static_pad (chain->overlay, "src"); + + if (videosinkpad) { + /* if we had a videosinkpad, we had a converter and we can link it, we + * know that this will work */ + gst_element_link_pads (chain->conv, "src", chain->overlay, + "video_sink"); + } else { + /* no videopad, expose our own video pad then */ + videosinkpad = + gst_element_get_static_pad (chain->overlay, "video_sink"); + } + } + } - /* Create our bin */ - element = gst_bin_new ("textbin"); - - /* Set some parameters */ - g_object_set (G_OBJECT (overlay), - "halign", "center", "valign", "bottom", NULL); - if (playsink->font_desc) { - g_object_set (G_OBJECT (overlay), "font-desc", playsink->font_desc, NULL); + if (videosinkpad == NULL) { + /* if we still don't have a videosink, we don't have a converter nor an + * overlay. the only thing we can do is insert an identity and ghost the src + * and sink pads. 
*/ + chain->conv = gst_element_factory_make ("identity", "tidentity"); + g_object_set (chain->conv, "signal-handoffs", FALSE, NULL); + g_object_set (chain->conv, "silent", TRUE, NULL); + gst_bin_add (bin, chain->conv); + srcpad = gst_element_get_static_pad (chain->conv, "src"); + videosinkpad = gst_element_get_static_pad (chain->conv, "sink"); + } else { + /* we have a videosink but maybe not a srcpad because there was no + * overlay */ + if (srcpad == NULL) { + /* ghost the source pad of the converter then */ + srcpad = gst_element_get_static_pad (chain->conv, "src"); + } } - /* Take a ref */ - playsink->textoverlay_element = GST_ELEMENT_CAST (gst_object_ref (overlay)); + /* expose the ghostpads */ + if (videosinkpad) { + chain->videosinkpad = gst_ghost_pad_new ("sink", videosinkpad); + gst_object_unref (videosinkpad); + gst_element_add_pad (chain->chain.bin, chain->videosinkpad); + } + if (textsinkpad) { + chain->textsinkpad = gst_ghost_pad_new ("text_sink", textsinkpad); + gst_object_unref (textsinkpad); + gst_element_add_pad (chain->chain.bin, chain->textsinkpad); + } + if (srcpad) { + chain->srcpad = gst_ghost_pad_new ("src", srcpad); + gst_object_unref (srcpad); + gst_element_add_pad (chain->chain.bin, chain->srcpad); + } - /* we know this will succeed, as the video bin already created one before */ - csp = gst_element_factory_make ("ffmpegcolorspace", "subtitlecsp"); - - /* Add our elements */ - gst_bin_add_many (GST_BIN_CAST (element), csp, overlay, vbin, NULL); + return chain; +} - /* Link */ - gst_element_link_pads (csp, "src", overlay, "video_sink"); - gst_element_link_pads (overlay, "src", vbin, "sink"); +/* make an element for playback of video with subpictures embedded. + * + * +--------------------------------------------------------+ + * | pbin +-------------+ | + * | +-------+ +-----+ | dvdspu | | + * | | queue | | csp | +---video | | + * sink----sink src--sink src+ +-subpicture src--+ | + * | +-------+ +-----+ | +-------------+ +-- src + * subpicture----------------------+ | + * +--------------------------------------------------------+ + */ +static GstPlaySubpChain * +gen_subp_chain (GstPlaySink * playsink) +{ + GstPlaySubpChain *chain; + GstBin *bin; + GstElement *elem, *head; + GstPad *videosinkpad, *subpsinkpad, *srcpad; - /* Add ghost pads on the subtitle bin */ - pad = gst_element_get_pad (overlay, "text_sink"); - gst_element_add_pad (element, gst_ghost_pad_new ("text_sink", pad)); - gst_object_unref (pad); + chain = g_new0 (GstPlaySubpChain, 1); + chain->chain.playsink = playsink; + + GST_DEBUG_OBJECT (playsink, "making subpicture chain %p", chain); + + chain->chain.bin = gst_bin_new ("pbin"); + bin = GST_BIN_CAST (chain->chain.bin); + gst_object_ref (bin); + gst_object_sink (bin); + + videosinkpad = subpsinkpad = srcpad = NULL; - pad = gst_element_get_pad (csp, "sink"); - gst_element_add_pad (element, gst_ghost_pad_new ("sink", pad)); - gst_object_unref (pad); + /* first try to hook the text pad to the custom sink */ + if (playsink->subp_sink) { + GST_DEBUG_OBJECT (playsink, "trying configured subpsink"); + chain->sink = try_element (playsink, playsink->text_sink, FALSE); + if (chain->sink) { + elem = gst_play_sink_find_property_sinks (playsink, chain->sink, "async"); + if (elem) { + /* make sure the sparse subtitles don't participate in the preroll */ + g_object_set (elem, "async", FALSE, NULL); + /* we have a custom sink, this will be our subpsinkpad */ + subpsinkpad = gst_element_get_static_pad (chain->sink, "sink"); + if (subpsinkpad) { + /* we're all fine now 
and we can add the sink to the chain */ + GST_DEBUG_OBJECT (playsink, "adding custom text sink"); + gst_bin_add (bin, chain->sink); + } else { + GST_WARNING_OBJECT (playsink, + "can't find a sink pad on custom text sink"); + gst_object_unref (chain->sink); + chain->sink = NULL; + } + /* try to set sync to true but it's no biggie when we can't */ + if ((elem = + gst_play_sink_find_property_sinks (playsink, chain->sink, + "sync"))) + g_object_set (elem, "sync", TRUE, NULL); + } else { + GST_WARNING_OBJECT (playsink, + "can't find async property in custom text sink"); + } + } + if (subpsinkpad == NULL) { + GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, + (_("Custom text sink element is not usable.")), + ("fallback to default dvdspu overlay")); + } + } - /* Set state to READY */ - gst_element_set_state (element, GST_STATE_READY); + /* make a little queue */ + chain->queue = gst_element_factory_make ("queue", "vqueue"); + g_object_set (G_OBJECT (chain->queue), "max-size-buffers", 3, + "max-size-bytes", 0, "max-size-time", (gint64) 0, NULL); + gst_bin_add (bin, chain->queue); + head = chain->queue; - return element; + /* video goes into the queue */ + videosinkpad = gst_element_get_static_pad (chain->queue, "sink"); - /* ERRORS */ -no_overlay: - { - post_missing_element_message (playsink, "textoverlay"); - GST_WARNING_OBJECT (playsink, - "No overlay (pango) element, subtitles disabled"); - return vbin; + if (subpsinkpad == NULL) { + if (!(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO)) { + /* no custom sink, try to setup the colorspace and textoverlay elements */ + chain->conv = gst_element_factory_make ("ffmpegcolorspace", "tconv"); + if (chain->conv == NULL) { + /* not really needed, it might work without colorspace */ + post_missing_element_message (playsink, "ffmpegcolorspace"); + GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, + (_("Missing element '%s' - check your GStreamer installation."), + "ffmpegcolorspace"), ("subpicture rendering might fail")); + } else { + gst_bin_add (bin, chain->conv); + gst_element_link_pads (head, "src", chain->conv, "sink"); + head = chain->conv; + } + } + + chain->overlay = gst_element_factory_make ("dvdspu", "spuoverlay"); + if (chain->overlay == NULL) { + post_missing_element_message (playsink, "dvdspu"); + GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, + (_("Missing element '%s' - check your GStreamer installation."), + "dvdspu"), ("subpicture rendering disabled")); + } else { + gst_bin_add (bin, chain->overlay); + /* Set some parameters */ + subpsinkpad = gst_element_get_static_pad (chain->overlay, "subpicture"); + /* link to the next element */ + gst_element_link_pads (head, "src", chain->overlay, "video"); + head = chain->overlay; + } } + srcpad = gst_element_get_static_pad (head, "src"); + chain->srcpad = gst_ghost_pad_new ("src", srcpad); + gst_object_unref (srcpad); + gst_element_add_pad (chain->chain.bin, chain->srcpad); + + /* expose the ghostpads */ + chain->videosinkpad = gst_ghost_pad_new ("sink", videosinkpad); + gst_object_unref (videosinkpad); + gst_element_add_pad (chain->chain.bin, chain->videosinkpad); + + if (subpsinkpad) { + chain->subpsinkpad = gst_ghost_pad_new ("subpicture", subpsinkpad); + gst_object_unref (subpsinkpad); + gst_element_add_pad (chain->chain.bin, chain->subpsinkpad); + } + return chain; } -#endif - /* make the chain that contains the elements needed to perform * audio playback. 
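/* Illustrative sketch, not part of this patch: gen_video_chain() above and
 * gen_audio_chain() below share the same incremental linking pattern, where
 * each optional element is only linked in when it could be created and head
 * tracks the element whose sink pad is eventually ghosted. A condensed,
 * hypothetical helper showing that pattern (names are illustrative only):
 */
#if 0
static gboolean
append_element (GstBin * bin, GstElement * elem, GstElement ** head,
    GstElement ** prev)
{
  if (elem == NULL)
    return TRUE;                /* optional element missing, keep chain as is */

  gst_bin_add (bin, elem);
  if (*prev != NULL) {
    if (!gst_element_link_pads (*prev, "src", elem, "sink"))
      return FALSE;             /* caller jumps to its link_failed handler */
  } else {
    *head = elem;               /* first element, its sink pad gets ghosted */
  }
  *prev = elem;
  return TRUE;
}
#endif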
@@ -917,27 +1347,40 @@ * sink-+ | * +-------------------------------------------------------------+ */ -static GstPlayChain * +static GstPlayAudioChain * gen_audio_chain (GstPlaySink * playsink, gboolean raw, gboolean queue) { GstPlayAudioChain *chain; GstBin *bin; - gboolean res; + gboolean have_volume; GstPad *pad; + GstElement *head, *prev, *elem; chain = g_new0 (GstPlayAudioChain, 1); - chain->chain.playsink = gst_object_ref (playsink); + chain->chain.playsink = playsink; + chain->chain.raw = raw; + + GST_DEBUG_OBJECT (playsink, "making audio chain %p", chain); if (playsink->audio_sink) { - chain->sink = playsink->audio_sink; - } else { - chain->sink = gst_element_factory_make ("autoaudiosink", "audiosink"); - if (chain->sink == NULL) { - chain->sink = gst_element_factory_make ("alsasink", "audiosink"); - } - if (chain->sink == NULL) - goto no_sinks; + GST_DEBUG_OBJECT (playsink, "trying configured audiosink %" GST_PTR_FORMAT, + playsink->audio_sink); + chain->sink = try_element (playsink, playsink->audio_sink, FALSE); + } + if (chain->sink == NULL) { + GST_DEBUG_OBJECT (playsink, "trying autoaudiosink"); + elem = gst_element_factory_make ("autoaudiosink", "audiosink"); + chain->sink = try_element (playsink, elem, TRUE); } + /* FIXME: if DEFAULT_AUDIOSINK != "autoaudiosink" try this now */ + if (chain->sink == NULL) { + GST_DEBUG_OBJECT (playsink, "trying alsasink"); + elem = gst_element_factory_make ("alsasink", "audiosink"); + chain->sink = try_element (playsink, elem, TRUE); + } + if (chain->sink == NULL) + goto no_sinks; + chain->chain.bin = gst_bin_new ("abin"); bin = GST_BIN_CAST (chain->chain.bin); gst_object_ref (bin); @@ -950,97 +1393,151 @@ GST_DEBUG_OBJECT (playsink, "adding audio queue"); chain->queue = gst_element_factory_make ("queue", "aqueue"); gst_bin_add (bin, chain->queue); + prev = head = chain->queue; + } else { + head = chain->sink; + prev = NULL; + } + + /* check if the sink, or something within the sink, has the volume property. + * If it does we don't need to add a volume element. */ + elem = gst_play_sink_find_property_sinks (playsink, chain->sink, "volume"); + if (elem) { + chain->volume = elem; + + GST_DEBUG_OBJECT (playsink, "the sink has a volume property"); + have_volume = TRUE; + chain->sink_volume = TRUE; + /* if the sink also has a mute property we can use this as well. We'll only + * use the mute property if there is a volume property. We can simulate the + * mute with the volume otherwise. 
*/ + chain->mute = + gst_play_sink_find_property_sinks (playsink, chain->sink, "mute"); + if (chain->mute) { + GST_DEBUG_OBJECT (playsink, "the sink has a mute property"); + } + /* use the sink to control the volume and mute */ + if (playsink->volume_changed) { + g_object_set (G_OBJECT (chain->volume), "volume", playsink->volume, NULL); + } + if (playsink->mute_changed) { + if (chain->mute) { + g_object_set (chain->mute, "mute", playsink->mute, NULL); + } else { + if (playsink->mute) + g_object_set (chain->volume, "volume", (gdouble) 0.0, NULL); + } + } + } else { + /* no volume, we need to add a volume element when we can */ + GST_DEBUG_OBJECT (playsink, "the sink has no volume property"); + have_volume = FALSE; + chain->sink_volume = FALSE; } - if (raw) { + if (raw && !(playsink->flags & GST_PLAY_FLAG_NATIVE_AUDIO)) { + GST_DEBUG_OBJECT (playsink, "creating audioconvert"); chain->conv = gst_element_factory_make ("audioconvert", "aconv"); - if (chain->conv == NULL) - goto no_audioconvert; - gst_bin_add (bin, chain->conv); - - chain->resample = gst_element_factory_make ("audioresample", "aresample"); - if (chain->resample == NULL) - goto no_audioresample; - gst_bin_add (bin, chain->resample); - - res = gst_element_link_pads (chain->conv, "src", chain->resample, "sink"); - - /* FIXME check if the sink has the volume property */ - - if (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME) { - chain->volume = gst_element_factory_make ("volume", "volume"); - if (chain->volume == NULL) - goto no_volume; - - /* volume also has the mute property */ - chain->mute = gst_object_ref (chain->volume); + if (chain->conv == NULL) { + post_missing_element_message (playsink, "audioconvert"); + GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, + (_("Missing element '%s' - check your GStreamer installation."), + "audioconvert"), ("possibly a liboil version mismatch?")); + } else { + gst_bin_add (bin, chain->conv); + if (prev) { + if (!gst_element_link_pads (prev, "src", chain->conv, "sink")) + goto link_failed; + } else { + head = chain->conv; + } + prev = chain->conv; + } - /* configure with the latest volume */ - g_object_set (G_OBJECT (chain->volume), "volume", playsink->volume, NULL); - gst_bin_add (bin, chain->volume); - - res &= - gst_element_link_pads (chain->resample, "src", chain->volume, "sink"); - res &= gst_element_link_pads (chain->volume, "src", chain->sink, NULL); + GST_DEBUG_OBJECT (playsink, "creating audioresample"); + chain->resample = gst_element_factory_make ("audioresample", "aresample"); + if (chain->resample == NULL) { + post_missing_element_message (playsink, "audioresample"); + GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, + (_("Missing element '%s' - check your GStreamer installation."), + "audioresample"), ("possibly a liboil version mismatch?")); } else { - res &= gst_element_link_pads (chain->resample, "src", chain->sink, NULL); + gst_bin_add (bin, chain->resample); + if (prev) { + if (!gst_element_link_pads (prev, "src", chain->resample, "sink")) + goto link_failed; + } else { + head = chain->resample; + } + prev = chain->resample; } - if (!res) - goto link_failed; - if (queue) { - res = gst_element_link_pads (chain->queue, "src", chain->conv, "sink"); - pad = gst_element_get_pad (chain->queue, "sink"); - } else { - pad = gst_element_get_pad (chain->conv, "sink"); - } - } else { - if (queue) { - res = gst_element_link_pads (chain->queue, "src", chain->sink, "sink"); - pad = gst_element_get_pad (chain->queue, "sink"); - } else { - pad = gst_element_get_pad (chain->sink, 
"sink"); + if (!have_volume && playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME) { + GST_DEBUG_OBJECT (playsink, "creating volume"); + chain->volume = gst_element_factory_make ("volume", "volume"); + if (chain->volume == NULL) { + post_missing_element_message (playsink, "volume"); + GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, + (_("Missing element '%s' - check your GStreamer installation."), + "volume"), ("possibly a liboil version mismatch?")); + } else { + have_volume = TRUE; + + /* volume also has the mute property */ + chain->mute = chain->volume; + + /* configure with the latest volume and mute */ + g_object_set (G_OBJECT (chain->volume), "volume", playsink->volume, + NULL); + g_object_set (G_OBJECT (chain->mute), "mute", playsink->mute, NULL); + gst_bin_add (bin, chain->volume); + + if (prev) { + if (!gst_element_link_pads (prev, "src", chain->volume, "sink")) + goto link_failed; + } else { + head = chain->volume; + } + prev = chain->volume; + } } } - chain->chain.sinkpad = gst_ghost_pad_new ("sink", pad); + + if (prev) { + /* we only have to link to the previous element if we have something in + * front of the sink */ + GST_DEBUG_OBJECT (playsink, "linking to sink"); + if (!gst_element_link_pads (prev, "src", chain->sink, NULL)) + goto link_failed; + } + + /* post a warning if we have no way to configure the volume */ + if (!have_volume) { + GST_ELEMENT_WARNING (playsink, STREAM, NOT_IMPLEMENTED, + (_("No volume control found")), ("Volume/mute is not available")); + } + + /* and ghost the sinkpad of the headmost element */ + GST_DEBUG_OBJECT (playsink, "ghosting sink pad"); + pad = gst_element_get_static_pad (head, "sink"); + chain->sinkpad = gst_ghost_pad_new ("sink", pad); gst_object_unref (pad); - gst_element_add_pad (chain->chain.bin, chain->chain.sinkpad); + gst_element_add_pad (chain->chain.bin, chain->sinkpad); - return (GstPlayChain *) chain; + return chain; /* ERRORS */ no_sinks: { - post_missing_element_message (playsink, "autoaudiosink"); - GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN, - (_("Both autoaudiosink and alsasink elements are missing.")), (NULL)); - free_chain ((GstPlayChain *) chain); - return NULL; - } -no_audioconvert: - { - post_missing_element_message (playsink, "audioconvert"); - GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN, - (_("Missing element '%s' - check your GStreamer installation."), - "audioconvert"), ("possibly a liboil version mismatch?")); - free_chain ((GstPlayChain *) chain); - return NULL; - } -no_audioresample: - { - post_missing_element_message (playsink, "audioresample"); - GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN, - (_("Missing element '%s' - check your GStreamer installation."), - "audioresample"), ("possibly a liboil version mismatch?")); - free_chain ((GstPlayChain *) chain); - return NULL; - } -no_volume: - { - post_missing_element_message (playsink, "volume"); - GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN, - (_("Missing element '%s' - check your GStreamer installation."), - "volume"), ("possibly a liboil version mismatch?")); + if (!elem) { + post_missing_element_message (playsink, "autoaudiosink"); + GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN, + (_("Both autoaudiosink and alsasink elements are missing.")), (NULL)); + } else { + GST_ELEMENT_ERROR (playsink, CORE, STATE_CHANGE, + (_("Both autoaudiosink and alsasink elements are not working.")), + (NULL)); + } free_chain ((GstPlayChain *) chain); return NULL; } @@ -1053,6 +1550,70 @@ } } +static gboolean +setup_audio_chain (GstPlaySink * playsink, 
gboolean raw, gboolean queue) +{ + GstElement *elem; + GstPlayAudioChain *chain; + GstStateChangeReturn ret; + + chain = playsink->audiochain; + + /* if the chain was active we don't do anything */ + if (GST_PLAY_CHAIN (chain)->activated == TRUE) + return TRUE; + + if (chain->chain.raw != raw) + return FALSE; + + /* try to set the sink element to READY again */ + ret = gst_element_set_state (chain->sink, GST_STATE_READY); + if (ret == GST_STATE_CHANGE_FAILURE) + return FALSE; + + /* check if the sink, or something within the sink, has the volume property. + * If it does we don't need to add a volume element. */ + elem = gst_play_sink_find_property_sinks (playsink, chain->sink, "volume"); + if (elem) { + chain->volume = elem; + + if (playsink->volume_changed) { + GST_DEBUG_OBJECT (playsink, "the sink has a volume property, setting %f", + playsink->volume); + /* use the sink to control the volume */ + g_object_set (G_OBJECT (chain->volume), "volume", playsink->volume, NULL); + } + /* if the sink also has a mute property we can use this as well. We'll only + * use the mute property if there is a volume property. We can simulate the + * mute with the volume otherwise. */ + chain->mute = + gst_play_sink_find_property_sinks (playsink, chain->sink, "mute"); + if (chain->mute) { + GST_DEBUG_OBJECT (playsink, "the sink has a mute property"); + } + } else { + /* no volume, we need to add a volume element when we can */ + GST_DEBUG_OBJECT (playsink, "the sink has no volume property"); + if (!raw) { + GST_LOG_OBJECT (playsink, "non-raw format, can't do soft volume control"); + chain->volume = NULL; + chain->mute = NULL; + } else { + /* both last and current chain are raw audio, there should be a volume + * element already, unless the sink changed from one with a volume + * property to one that hasn't got a volume property, in which case we + * re-generate the chain */ + if (chain->volume == NULL) { + GST_DEBUG_OBJECT (playsink, "no existing volume element to re-use"); + return FALSE; + } + + GST_DEBUG_OBJECT (playsink, "reusing existing volume element"); + } + } + return TRUE; +} + /* * +-------------------------------------------------------------------+ * | visbin | @@ -1064,16 +1625,19 @@ * +-------------------------------------------------------------------+ * */ -static GstPlayChain * +static GstPlayVisChain * gen_vis_chain (GstPlaySink * playsink) { GstPlayVisChain *chain; GstBin *bin; gboolean res; GstPad *pad; + GstElement *elem; chain = g_new0 (GstPlayVisChain, 1); - chain->chain.playsink = gst_object_ref (playsink); + chain->chain.playsink = playsink; + + GST_DEBUG_OBJECT (playsink, "making vis chain %p", chain); chain->chain.bin = gst_bin_new ("visbin"); bin = GST_BIN_CAST (chain->chain.bin); @@ -1097,15 +1661,20 @@ /* this pad will be used for blocking the dataflow and switching the vis * plugin */ - chain->blockpad = gst_element_get_pad (chain->resample, "src"); + chain->blockpad = gst_element_get_static_pad (chain->resample, "src"); if (playsink->visualisation) { - chain->vis = gst_object_ref (playsink->visualisation); - } else { - chain->vis = gst_element_factory_make ("goom", "vis"); - if (!chain->vis) - goto no_goom; + GST_DEBUG_OBJECT (playsink, "trying configure vis"); + chain->vis = try_element (playsink, playsink->visualisation, FALSE); } + if (chain->vis == NULL) { + GST_DEBUG_OBJECT (playsink, "trying goom"); + elem = gst_element_factory_make ("goom", "vis"); + chain->vis = try_element (playsink, elem, TRUE); + } + if (chain->vis == NULL) + goto no_goom; + gst_bin_add (bin, 
chain->vis); res = gst_element_link_pads (chain->queue, "src", chain->conv, "sink"); @@ -1114,18 +1683,18 @@ if (!res) goto link_failed; - chain->vissinkpad = gst_element_get_pad (chain->vis, "sink"); - chain->vissrcpad = gst_element_get_pad (chain->vis, "src"); + chain->vissinkpad = gst_element_get_static_pad (chain->vis, "sink"); + chain->vissrcpad = gst_element_get_static_pad (chain->vis, "src"); - pad = gst_element_get_pad (chain->queue, "sink"); - chain->chain.sinkpad = gst_ghost_pad_new ("sink", pad); + pad = gst_element_get_static_pad (chain->queue, "sink"); + chain->sinkpad = gst_ghost_pad_new ("sink", pad); gst_object_unref (pad); - gst_element_add_pad (chain->chain.bin, chain->chain.sinkpad); + gst_element_add_pad (chain->chain.bin, chain->sinkpad); chain->srcpad = gst_ghost_pad_new ("src", chain->vissrcpad); gst_element_add_pad (chain->chain.bin, chain->srcpad); - return (GstPlayChain *) chain; + return chain; /* ERRORS */ no_audioconvert: @@ -1164,33 +1733,9 @@ } } -#if 0 -static gboolean -activate_vis (GstPlaySink * playsink, gboolean activate) -{ - /* need to have an audio chain */ - if (!playsink->audiochain || !playsink->vischain) - return FALSE; - - if (playsink->vischain->activated == activate) - return TRUE; - - if (activate) { - /* activation: Add the vis chain to the sink bin . Take a new srcpad from - * the tee of the audio chain and link it to the sinkpad of the vis chain. - */ - - } else { - /* deactivation: release the srcpad from the tee of the audio chain. Set the - * vis chain to NULL and remove it from the sink bin */ - - } - return TRUE; -} -#endif - /* this function is called when all the request pads are requested and when we - * have to construct the final pipeline. + * have to construct the final pipeline. Based on the flags we construct the + * final output pipelines. */ #ifdef __SYMBIAN32__ EXPORT_C @@ -1200,12 +1745,12 @@ gst_play_sink_reconfigure (GstPlaySink * playsink) { GstPlayFlags flags; - gboolean need_audio, need_video, need_vis; + gboolean need_audio, need_video, need_vis, need_text, need_subp; GST_DEBUG_OBJECT (playsink, "reconfiguring"); /* assume we need nothing */ - need_audio = need_video = need_vis = FALSE; + need_audio = need_video = need_vis = need_text = need_subp = FALSE; GST_PLAY_SINK_LOCK (playsink); GST_OBJECT_LOCK (playsink); @@ -1214,7 +1759,22 @@ GST_OBJECT_UNLOCK (playsink); /* figure out which components we need */ - if (flags & GST_PLAY_FLAG_VIDEO && playsink->video_pad) { + if (flags & GST_PLAY_FLAG_TEXT && (playsink->text_pad || playsink->subp_pad)) { + /* we have a text_pad and we need text rendering, in this case we need a + * video_pad to combine the video with the text */ + if (!playsink->video_pad) + goto subs_but_no_video; + + /* we have subtitles and we are requested to show it, we also need to show + * video in this case. 
*/ + need_video = TRUE; + need_text = (playsink->text_pad != NULL); + need_subp = (playsink->subp_pad != NULL); + + /* we can't handle both of them yet */ + if (need_text && need_subp) + goto subs_and_text; + } else if (flags & GST_PLAY_FLAG_VIDEO && playsink->video_pad) { /* we have video and we are requested to show it */ need_video = TRUE; } @@ -1222,71 +1782,233 @@ if (flags & GST_PLAY_FLAG_AUDIO) { need_audio = TRUE; } - if (flags & GST_PLAY_FLAG_VIS && !need_video) { - /* also add video when we add visualisation */ - need_video = TRUE; - need_vis = TRUE; + if (playsink->audio_pad_raw) { + /* only can do vis with raw uncompressed audio */ + if (flags & GST_PLAY_FLAG_VIS && !need_video) { + /* also add video when we add visualisation */ + need_video = TRUE; + need_vis = TRUE; + } } } + /* set up video pipeline */ if (need_video) { + gboolean raw, async, queue; + + /* we need a raw sink when we do vis or when we have a raw pad */ + raw = need_vis ? TRUE : playsink->video_pad_raw; + /* we try to set the sink async=FALSE when we need vis, this way we can + * avoid a queue in the audio chain. */ + async = !need_vis; + /* put a little queue in front of the video but only if we are not doing + * subpictures because then we will add the queue in front of the subpicture + * mixer to minimize latency. */ + queue = (need_subp == FALSE); + GST_DEBUG_OBJECT (playsink, "adding video, raw %d", playsink->video_pad_raw); - if (!playsink->videochain) { - gboolean raw, async; - /* we need a raw sink when we do vis or when we have a raw pad */ - raw = need_vis ? TRUE : playsink->video_pad_raw; - /* we try to set the sink async=FALSE when we need vis, this way we can - * avoid a queue in the audio chain. */ - async = !need_vis; + if (playsink->videochain) { + /* try to reactivate the chain */ + if (!setup_video_chain (playsink, raw, async, queue)) { + add_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE); + activate_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE); + free_chain ((GstPlayChain *) playsink->videochain); + playsink->videochain = NULL; + } + } - playsink->videochain = gen_video_chain (playsink, raw, async); + if (!playsink->videochain) { + playsink->videochain = gen_video_chain (playsink, raw, async, queue); + } + if (playsink->videochain) { + GST_DEBUG_OBJECT (playsink, "adding video chain"); + add_chain (GST_PLAY_CHAIN (playsink->videochain), TRUE); + activate_chain (GST_PLAY_CHAIN (playsink->videochain), TRUE); + /* if we are not part of vis or subtitles, set the ghostpad target */ + if (!need_vis && !need_text && playsink->text_pad == NULL) { + GST_DEBUG_OBJECT (playsink, "ghosting video sinkpad"); + gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad), + playsink->videochain->sinkpad); + } } - add_chain (playsink->videochain, TRUE); - activate_chain (playsink->videochain, TRUE); - if (!need_vis) - gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad), - playsink->videochain->sinkpad); } else { + GST_DEBUG_OBJECT (playsink, "no video needed"); if (playsink->videochain) { - add_chain (playsink->videochain, FALSE); - activate_chain (playsink->videochain, FALSE); + GST_DEBUG_OBJECT (playsink, "removing video chain"); + if (playsink->vischain) { + GstPad *srcpad; + + GST_DEBUG_OBJECT (playsink, "unlinking vis chain"); + + /* also had visualisation, release the tee srcpad before we then + * unlink the video from it */ + if (playsink->audio_tee_vissrc) { + gst_element_release_request_pad (playsink->audio_tee, + playsink->audio_tee_vissrc); + gst_object_unref 
(playsink->audio_tee_vissrc); + playsink->audio_tee_vissrc = NULL; + } + srcpad = + gst_element_get_static_pad (playsink->vischain->chain.bin, "src"); + gst_pad_unlink (srcpad, playsink->videochain->sinkpad); + } + add_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE); + activate_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE); } if (playsink->video_pad) gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad), NULL); } - if (need_audio) { - GST_DEBUG_OBJECT (playsink, "adding audio"); - if (!playsink->audiochain) { - gboolean raw, queue; + if (need_text) { + GST_DEBUG_OBJECT (playsink, "adding text"); + if (!playsink->textchain) { + GST_DEBUG_OBJECT (playsink, "creating text chain"); + playsink->textchain = gen_text_chain (playsink); + } + if (playsink->textchain) { + GST_DEBUG_OBJECT (playsink, "adding text chain"); + add_chain (GST_PLAY_CHAIN (playsink->textchain), TRUE); + gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->text_pad), + playsink->textchain->textsinkpad); + gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad), + playsink->textchain->videosinkpad); + gst_pad_link (playsink->textchain->srcpad, playsink->videochain->sinkpad); + activate_chain (GST_PLAY_CHAIN (playsink->textchain), TRUE); + if (playsink->textchain->overlay) + g_object_set (playsink->textchain->overlay, "silent", FALSE, NULL); + } + } else { + GST_DEBUG_OBJECT (playsink, "no text needed"); + /* we have no subtitles/text or we are requested to not show them */ + if (playsink->textchain) { + if (playsink->text_pad == NULL) { + /* no text pad, remove the chain entirely */ + GST_DEBUG_OBJECT (playsink, "removing text chain"); + add_chain (GST_PLAY_CHAIN (playsink->textchain), FALSE); + activate_chain (GST_PLAY_CHAIN (playsink->textchain), FALSE); + } else { + /* we have a chain and a textpad, turn the subtitles off */ + GST_DEBUG_OBJECT (playsink, "turning off the text"); + if (playsink->textchain->overlay) + g_object_set (playsink->textchain->overlay, "silent", TRUE, NULL); + } + } + if (!need_video && playsink->video_pad) + gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad), NULL); + if (playsink->text_pad) + gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->text_pad), NULL); + } - /* get a raw sink if we are asked for a raw pad */ - raw = playsink->audio_pad_raw; - if (need_vis) { - /* If we are dealing with visualisations, we need to add a queue to - * decouple the audio from the video part. 
We only have to do this when - * the video part is async=true */ - queue = ((GstPlayVideoChain *) playsink->videochain)->async; - GST_DEBUG_OBJECT (playsink, "need audio queue for vis: %d", queue); + if (need_subp && playsink->videochain) { + GST_DEBUG_OBJECT (playsink, "adding subpicture"); + if (!playsink->subpchain) { + GST_DEBUG_OBJECT (playsink, "creating subpicture chain"); + playsink->subpchain = gen_subp_chain (playsink); + } + if (playsink->subpchain) { + GST_DEBUG_OBJECT (playsink, "adding subp chain"); + add_chain (GST_PLAY_CHAIN (playsink->subpchain), TRUE); + gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->subp_pad), + playsink->subpchain->subpsinkpad); + gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad), + playsink->subpchain->videosinkpad); + gst_pad_link (playsink->subpchain->srcpad, playsink->videochain->sinkpad); + activate_chain (GST_PLAY_CHAIN (playsink->subpchain), TRUE); + } + } else { + GST_DEBUG_OBJECT (playsink, "no subpicture needed"); + /* we have no subpicture or we are requested to not show them */ + if (playsink->subpchain) { + if (playsink->subp_pad == NULL) { + /* no subpicture pad, remove the chain entirely */ + GST_DEBUG_OBJECT (playsink, "removing subp chain"); + add_chain (GST_PLAY_CHAIN (playsink->subpchain), FALSE); + activate_chain (GST_PLAY_CHAIN (playsink->subpchain), FALSE); } else { - /* no vis, we can avoid a queue */ - GST_DEBUG_OBJECT (playsink, "don't need audio queue"); - queue = FALSE; + /* we have a chain and a subpicture pad, turn the subtitles off */ + GST_DEBUG_OBJECT (playsink, "turning off the subp"); } + } + if (!need_video && playsink->video_pad) + gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad), NULL); + if (playsink->subp_pad) + gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->subp_pad), NULL); + } + if (need_audio) { + gboolean raw, queue; + + GST_DEBUG_OBJECT (playsink, "adding audio"); + + /* get a raw sink if we are asked for a raw pad */ + raw = playsink->audio_pad_raw; + if (need_vis && playsink->videochain) { + /* If we are dealing with visualisations, we need to add a queue to + * decouple the audio from the video part. 
We only have to do this when + * the video part is async=true */ + queue = ((GstPlayVideoChain *) playsink->videochain)->async; + GST_DEBUG_OBJECT (playsink, "need audio queue for vis: %d", queue); + } else { + /* no vis, we can avoid a queue */ + GST_DEBUG_OBJECT (playsink, "don't need audio queue"); + queue = FALSE; + } + + if (playsink->audiochain) { + /* try to reactivate the chain */ + if (!setup_audio_chain (playsink, raw, queue)) { + GST_DEBUG_OBJECT (playsink, "removing current audio chain"); + if (playsink->audio_tee_asrc) { + gst_element_release_request_pad (playsink->audio_tee, + playsink->audio_tee_asrc); + gst_object_unref (playsink->audio_tee_asrc); + playsink->audio_tee_asrc = NULL; + } + add_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE); + activate_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE); + playsink->audiochain->volume = NULL; + playsink->audiochain->mute = NULL; + free_chain ((GstPlayChain *) playsink->audiochain); + playsink->audiochain = NULL; + playsink->volume_changed = playsink->mute_changed = FALSE; + } + } + + if (!playsink->audiochain) { + GST_DEBUG_OBJECT (playsink, "creating new audio chain"); playsink->audiochain = gen_audio_chain (playsink, raw, queue); } - add_chain (playsink->audiochain, TRUE); - gst_pad_link (playsink->audio_tee_asrc, playsink->audiochain->sinkpad); - activate_chain (playsink->audiochain, TRUE); + + if (playsink->audiochain) { + GST_DEBUG_OBJECT (playsink, "adding audio chain"); + if (playsink->audio_tee_asrc == NULL) { + playsink->audio_tee_asrc = + gst_element_get_request_pad (playsink->audio_tee, "src%d"); + } + add_chain (GST_PLAY_CHAIN (playsink->audiochain), TRUE); + activate_chain (GST_PLAY_CHAIN (playsink->audiochain), TRUE); + gst_pad_link (playsink->audio_tee_asrc, playsink->audiochain->sinkpad); + } } else { + GST_DEBUG_OBJECT (playsink, "no audio needed"); /* we have no audio or we are requested to not play audio */ if (playsink->audiochain) { - gst_pad_unlink (playsink->audio_tee_asrc, playsink->audiochain->sinkpad); - add_chain (playsink->audiochain, FALSE); - activate_chain (playsink->audiochain, FALSE); + GST_DEBUG_OBJECT (playsink, "removing audio chain"); + /* release the audio pad */ + if (playsink->audio_tee_asrc) { + gst_element_release_request_pad (playsink->audio_tee, + playsink->audio_tee_asrc); + gst_object_unref (playsink->audio_tee_asrc); + playsink->audio_tee_asrc = NULL; + } + if (playsink->audiochain->sink_volume) { + playsink->audiochain->volume = NULL; + playsink->audiochain->mute = NULL; + } + add_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE); + activate_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE); } } @@ -1298,28 +2020,72 @@ GST_DEBUG_OBJECT (playsink, "adding visualisation"); - srcpad = - gst_element_get_pad (GST_ELEMENT_CAST (playsink->vischain->bin), "src"); - add_chain (playsink->vischain, TRUE); - gst_pad_link (playsink->audio_tee_vissrc, playsink->vischain->sinkpad); - gst_pad_link (srcpad, playsink->videochain->sinkpad); - gst_object_unref (srcpad); - activate_chain (playsink->vischain, TRUE); + if (playsink->vischain) { + GST_DEBUG_OBJECT (playsink, "setting up vis chain"); + srcpad = + gst_element_get_static_pad (playsink->vischain->chain.bin, "src"); + add_chain (GST_PLAY_CHAIN (playsink->vischain), TRUE); + activate_chain (GST_PLAY_CHAIN (playsink->vischain), TRUE); + if (playsink->audio_tee_vissrc == NULL) { + playsink->audio_tee_vissrc = + gst_element_get_request_pad (playsink->audio_tee, "src%d"); + } + gst_pad_link (playsink->audio_tee_vissrc, 
playsink->vischain->sinkpad); + gst_pad_link (srcpad, playsink->videochain->sinkpad); + gst_object_unref (srcpad); + } } else { + GST_DEBUG_OBJECT (playsink, "no vis needed"); if (playsink->vischain) { - add_chain (playsink->vischain, FALSE); - activate_chain (playsink->vischain, FALSE); + if (playsink->audio_tee_vissrc) { + gst_element_release_request_pad (playsink->audio_tee, + playsink->audio_tee_vissrc); + gst_object_unref (playsink->audio_tee_vissrc); + playsink->audio_tee_vissrc = NULL; + } + GST_DEBUG_OBJECT (playsink, "removing vis chain"); + add_chain (GST_PLAY_CHAIN (playsink->vischain), FALSE); + activate_chain (GST_PLAY_CHAIN (playsink->vischain), FALSE); } } + do_async_done (playsink); GST_PLAY_SINK_UNLOCK (playsink); return TRUE; + + /* ERRORS */ +subs_but_no_video: + { + GST_ELEMENT_ERROR (playsink, STREAM, FORMAT, + (_("Can't play a text file without video.")), + ("Have text pad but no video pad")); + GST_PLAY_SINK_UNLOCK (playsink); + return FALSE; + } +subs_and_text: + { + GST_ELEMENT_ERROR (playsink, STREAM, FORMAT, + (_("Can't play a text subtitles and subpictures.")), + ("Have text pad and subpicture pad")); + GST_PLAY_SINK_UNLOCK (playsink); + return FALSE; + } } + +/** + * gst_play_sink_set_flags: + * @playsink: a #GstPlaySink + * @flags: #GstPlayFlags + * + * Configure @flags on @playsink. The flags control the behaviour of @playsink + * when constructing the sink pipelins. + * + * Returns: TRUE if the flags could be configured. + */ #ifdef __SYMBIAN32__ EXPORT_C #endif - gboolean gst_play_sink_set_flags (GstPlaySink * playsink, GstPlayFlags flags) { @@ -1331,11 +2097,20 @@ return TRUE; } + +/** + * gst_play_sink_get_flags: + * @playsink: a #GstPlaySink + * + * Get the flags of @playsink. That flags control the behaviour of the sink when + * it constructs the sink pipelines. + * + * Returns: the currently configured #GstPlayFlags. + */ #ifdef __SYMBIAN32__ EXPORT_C #endif - GstPlayFlags gst_play_sink_get_flags (GstPlaySink * playsink) { @@ -1354,6 +2129,59 @@ #endif +void +gst_play_sink_set_font_desc (GstPlaySink * playsink, const gchar * desc) +{ + GstPlayTextChain *chain; + + GST_PLAY_SINK_LOCK (playsink); + chain = (GstPlayTextChain *) playsink->textchain; + g_free (playsink->font_desc); + playsink->font_desc = g_strdup (desc); + if (chain && chain->overlay) { + g_object_set (chain->overlay, "font-desc", desc, NULL); + } + GST_PLAY_SINK_UNLOCK (playsink); +} +#ifdef __SYMBIAN32__ +EXPORT_C +#endif + + +gchar * +gst_play_sink_get_font_desc (GstPlaySink * playsink) +{ + gchar *result = NULL; + GstPlayTextChain *chain; + + GST_PLAY_SINK_LOCK (playsink); + chain = (GstPlayTextChain *) playsink->textchain; + if (chain && chain->overlay) { + g_object_get (chain->overlay, "font-desc", &result, NULL); + playsink->font_desc = g_strdup (result); + } else { + result = g_strdup (playsink->font_desc); + } + GST_PLAY_SINK_UNLOCK (playsink); + + return result; +} + +/** + * gst_play_sink_get_last_frame: + * @playsink: a #GstPlaySink + * + * Get the last displayed frame from @playsink. This frame is in the native + * format of the sink element, the caps on the result buffer contain the format + * of the frame data. + * + * Returns: a #GstBuffer with the frame data or %NULL when no video frame is + * available. 
+ */ +#ifdef __SYMBIAN32__ +EXPORT_C +#endif + GstBuffer * gst_play_sink_get_last_frame (GstPlaySink * playsink) { @@ -1385,17 +2213,29 @@ return result; } + +/** + * gst_play_sink_request_pad + * @playsink: a #GstPlaySink + * @type: a #GstPlaySinkType + * + * Create or return a pad of @type. + * + * Returns: a #GstPad of @type or %NULL when the pad could not be created. + */ #ifdef __SYMBIAN32__ EXPORT_C #endif - GstPad * gst_play_sink_request_pad (GstPlaySink * playsink, GstPlaySinkType type) { GstPad *res = NULL; gboolean created = FALSE; gboolean raw = FALSE; + gboolean activate = TRUE; + + GST_DEBUG_OBJECT (playsink, "request pad type %d", type); GST_PLAY_SINK_LOCK (playsink); switch (type) { @@ -1403,20 +2243,19 @@ raw = TRUE; case GST_PLAY_SINK_TYPE_AUDIO: if (!playsink->audio_tee) { + GST_LOG_OBJECT (playsink, "creating tee"); /* create tee when needed. This element will feed the audio sink chain * and the vis chain. */ playsink->audio_tee = gst_element_factory_make ("tee", "audiotee"); playsink->audio_tee_sink = - gst_element_get_pad (playsink->audio_tee, "sink"); - /* get two request pads */ - playsink->audio_tee_vissrc = - gst_element_get_request_pad (playsink->audio_tee, "src%d"); - playsink->audio_tee_asrc = - gst_element_get_request_pad (playsink->audio_tee, "src%d"); + gst_element_get_static_pad (playsink->audio_tee, "sink"); gst_bin_add (GST_BIN_CAST (playsink), playsink->audio_tee); gst_element_set_state (playsink->audio_tee, GST_STATE_PAUSED); + } else { + gst_element_set_state (playsink->audio_tee, GST_STATE_PAUSED); } if (!playsink->audio_pad) { + GST_LOG_OBJECT (playsink, "ghosting tee sinkpad"); playsink->audio_pad = gst_ghost_pad_new ("audio_sink", playsink->audio_tee_sink); created = TRUE; @@ -1428,6 +2267,7 @@ raw = TRUE; case GST_PLAY_SINK_TYPE_VIDEO: if (!playsink->video_pad) { + GST_LOG_OBJECT (playsink, "ghosting videosink"); playsink->video_pad = gst_ghost_pad_new_no_target ("video_sink", GST_PAD_SINK); created = TRUE; @@ -1436,6 +2276,7 @@ res = playsink->video_pad; break; case GST_PLAY_SINK_TYPE_TEXT: + GST_LOG_OBJECT (playsink, "ghosting text"); if (!playsink->text_pad) { playsink->text_pad = gst_ghost_pad_new_no_target ("text_sink", GST_PAD_SINK); @@ -1443,6 +2284,28 @@ } res = playsink->text_pad; break; + case GST_PLAY_SINK_TYPE_FLUSHING: + { + gchar *padname; + + /* we need a unique padname for the flushing pad. 
*/ + padname = g_strdup_printf ("flushing_%d", playsink->count); + res = gst_ghost_pad_new_no_target (padname, GST_PAD_SINK); + g_free (padname); + playsink->count++; + activate = FALSE; + created = TRUE; + break; + } + case GST_PLAY_SINK_TYPE_SUBPIC: + GST_LOG_OBJECT (playsink, "ghosting subpicture pad"); + if (!playsink->subp_pad) { + playsink->subp_pad = + gst_ghost_pad_new_no_target ("subp_sink", GST_PAD_SINK); + created = TRUE; + } + res = playsink->subp_pad; + break; default: res = NULL; break; @@ -1450,8 +2313,12 @@ GST_PLAY_SINK_UNLOCK (playsink); if (created && res) { + /* we have to add the pad when it's active or we get an error when the + * element is 'running' */ gst_pad_set_active (res, TRUE); gst_element_add_pad (GST_ELEMENT_CAST (playsink), res); + if (!activate) + gst_pad_set_active (res, activate); } return res; @@ -1465,6 +2332,9 @@ gst_play_sink_release_pad (GstPlaySink * playsink, GstPad * pad) { GstPad **res = NULL; + gboolean untarget = TRUE; + + GST_DEBUG_OBJECT (playsink, "release pad %" GST_PTR_FORMAT, pad); GST_PLAY_SINK_LOCK (playsink); if (pad == playsink->video_pad) { @@ -1473,16 +2343,73 @@ res = &playsink->audio_pad; } else if (pad == playsink->text_pad) { res = &playsink->text_pad; + } else if (pad == playsink->subp_pad) { + res = &playsink->subp_pad; + } else { + /* try to release the given pad anyway, these could be the FLUSHING pads. */ + res = &pad; + untarget = FALSE; } GST_PLAY_SINK_UNLOCK (playsink); if (*res) { + GST_DEBUG_OBJECT (playsink, "deactivate pad %" GST_PTR_FORMAT, *res); gst_pad_set_active (*res, FALSE); + if (untarget) { + GST_DEBUG_OBJECT (playsink, "untargeting pad %" GST_PTR_FORMAT, *res); + gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (*res), NULL); + } + GST_DEBUG_OBJECT (playsink, "remove pad %" GST_PTR_FORMAT, *res); gst_element_remove_pad (GST_ELEMENT_CAST (playsink), *res); *res = NULL; } } +static void +gst_play_sink_handle_message (GstBin * bin, GstMessage * message) +{ + GstPlaySink *playsink; + + playsink = GST_PLAY_SINK_CAST (bin); + + switch (GST_MESSAGE_TYPE (message)) { + case GST_MESSAGE_STEP_DONE: + { + GstFormat format; + guint64 amount; + gdouble rate; + gboolean flush, intermediate, res, eos; + guint64 duration; + + GST_INFO_OBJECT (playsink, "Handling step-done message"); + gst_message_parse_step_done (message, &format, &amount, &rate, &flush, + &intermediate, &duration, &eos); + + if (format == GST_FORMAT_BUFFERS) { + /* for the buffer format, we align the other streams */ + if (playsink->audiochain) { + GstEvent *event; + + event = + gst_event_new_step (GST_FORMAT_TIME, duration, rate, flush, + intermediate); + + if (!(res = + gst_element_send_event (playsink->audiochain->chain.bin, + event))) { + GST_DEBUG_OBJECT (playsink, "Event failed when sent to audio sink"); + } + } + } + GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (bin, message); + break; + } + default: + GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (bin, message); + break; + } +} + /* Send an event to our sinks until one of them works; don't then send to the * remaining sinks (unlike GstBin) */ @@ -1491,22 +2418,22 @@ { gboolean res = TRUE; + if (playsink->videochain) { + gst_event_ref (event); + if ((res = gst_element_send_event (playsink->videochain->chain.bin, event))) { + GST_DEBUG_OBJECT (playsink, "Sent event succesfully to video sink"); + goto done; + } + GST_DEBUG_OBJECT (playsink, "Event failed when sent to video sink"); + } if (playsink->audiochain) { gst_event_ref (event); - if ((res = gst_element_send_event 
(playsink->audiochain->bin, event))) { + if ((res = gst_element_send_event (playsink->audiochain->chain.bin, event))) { GST_DEBUG_OBJECT (playsink, "Sent event succesfully to audio sink"); goto done; } GST_DEBUG_OBJECT (playsink, "Event failed when sent to audio sink"); } - if (playsink->videochain) { - gst_event_ref (event); - if ((res = gst_element_send_event (playsink->videochain->bin, event))) { - GST_DEBUG_OBJECT (playsink, "Sent event succesfully to video sink"); - goto done; - } - GST_DEBUG_OBJECT (playsink, "Event failed when sent to video sink"); - } done: gst_event_unref (event); return res; @@ -1520,14 +2447,40 @@ { gboolean res = FALSE; GstEventType event_type = GST_EVENT_TYPE (event); + GstPlaySink *playsink; + + playsink = GST_PLAY_SINK_CAST (element); switch (event_type) { case GST_EVENT_SEEK: - GST_DEBUG_OBJECT (element, "Sending seek event to a sink"); - res = gst_play_sink_send_event_to_sink (GST_PLAY_SINK (element), event); + GST_DEBUG_OBJECT (element, "Sending event to a sink"); + res = gst_play_sink_send_event_to_sink (playsink, event); break; + case GST_EVENT_STEP: + { + GstFormat format; + guint64 amount; + gdouble rate; + gboolean flush, intermediate; + + gst_event_parse_step (event, &format, &amount, &rate, &flush, + &intermediate); + + if (format == GST_FORMAT_BUFFERS) { + /* for buffers, we will try to step video frames, for other formats we + * send the step to all sinks */ + res = gst_play_sink_send_event_to_sink (playsink, event); + } else { + res = + GST_ELEMENT_CLASS (gst_play_sink_parent_class)->send_event (element, + event); + } + break; + } default: - res = parent_class->send_event (element, event); + res = + GST_ELEMENT_CLASS (gst_play_sink_parent_class)->send_event (element, + event); break; } return res; @@ -1537,41 +2490,102 @@ gst_play_sink_change_state (GstElement * element, GstStateChange transition) { GstStateChangeReturn ret; + GstStateChangeReturn bret; + GstPlaySink *playsink; playsink = GST_PLAY_SINK (element); switch (transition) { case GST_STATE_CHANGE_READY_TO_PAUSED: + /* we want to go async to PAUSED until we managed to configure and add the + * sinks */ + do_async_start (playsink); + ret = GST_STATE_CHANGE_ASYNC; + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + case GST_STATE_CHANGE_READY_TO_NULL: + if (playsink->audiochain && playsink->audiochain->sink_volume) { + /* remove our links to the mute and volume elements when they were + * provided by a sink */ + playsink->audiochain->volume = NULL; + playsink->audiochain->mute = NULL; + } + ret = GST_STATE_CHANGE_SUCCESS; break; default: + /* all other state changes return SUCCESS by default, this value can be + * overridden by the result of the children */ + ret = GST_STATE_CHANGE_SUCCESS; break; } - ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); - if (ret == GST_STATE_CHANGE_FAILURE) - return ret; + /* do the state change of the children */ + bret = + GST_ELEMENT_CLASS (gst_play_sink_parent_class)->change_state (element, + transition); + /* now look at the result of our children and adjust the return value */ + switch (bret) { + case GST_STATE_CHANGE_FAILURE: + /* failure, we stop */ + goto activate_failed; + case GST_STATE_CHANGE_NO_PREROLL: + /* some child returned NO_PREROLL. This is strange but we never know. 
We + * commit our async state change (if any) and return the NO_PREROLL */ + do_async_done (playsink); + ret = bret; + break; + case GST_STATE_CHANGE_ASYNC: + /* some child was async, return this */ + ret = bret; + break; + default: + /* return our previously configured return value */ + break; + } switch (transition) { case GST_STATE_CHANGE_READY_TO_PAUSED: break; case GST_STATE_CHANGE_PLAYING_TO_PAUSED: /* FIXME Release audio device when we implement that */ + playsink->need_async_start = TRUE; break; case GST_STATE_CHANGE_PAUSED_TO_READY: + case GST_STATE_CHANGE_READY_TO_NULL: /* remove sinks we added */ if (playsink->videochain) { - activate_chain (playsink->videochain, FALSE); - add_chain (playsink->videochain, FALSE); + activate_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE); + add_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE); } if (playsink->audiochain) { - activate_chain (playsink->audiochain, FALSE); - add_chain (playsink->audiochain, FALSE); + activate_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE); + add_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE); + } + if (playsink->vischain) { + activate_chain (GST_PLAY_CHAIN (playsink->vischain), FALSE); + add_chain (GST_PLAY_CHAIN (playsink->vischain), FALSE); } + if (playsink->textchain) { + activate_chain (GST_PLAY_CHAIN (playsink->textchain), FALSE); + add_chain (GST_PLAY_CHAIN (playsink->textchain), FALSE); + } + if (playsink->subpchain) { + activate_chain (GST_PLAY_CHAIN (playsink->subpchain), FALSE); + add_chain (GST_PLAY_CHAIN (playsink->subpchain), FALSE); + } + do_async_done (playsink); break; default: break; } + return ret; - return ret; + /* ERRORS */ +activate_failed: + { + GST_DEBUG_OBJECT (element, + "element failed to change states -- activation problem?"); + return GST_STATE_CHANGE_FAILURE; + } }
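
The hunks above replace the old property-based sink configuration with explicit per-type request pads plus flag-driven reconfiguration. As a rough usage sketch (not part of the patch): a caller inside the playback plugin, such as playbin2, might drive the reworked API roughly as below, using only the functions and enum values visible in this patch; the helper name, pad variables and the "Sans 18" font string are illustrative assumptions.

/* Illustrative only: how a caller might hook decoded streams into the
 * reworked playsink. Creation of "playsink" and of the decode chain is
 * omitted; audio_src/video_src are assumed decoded source pads. */
#include <gst/gst.h>
#include "gstplaysink.h"

static void
hook_up_playsink (GstPlaySink * playsink, GstPad * audio_src,
    GstPad * video_src)
{
  GstPad *asink, *vsink;
  GstPlayFlags flags;

  /* enable audio, video and visualisation; gst_play_sink_reconfigure ()
   * only builds the chains whose flags are set and whose pads exist */
  flags = gst_play_sink_get_flags (playsink);
  flags |= GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_VIS;
  gst_play_sink_set_flags (playsink, flags);

  /* request one sink pad per stream type */
  asink = gst_play_sink_request_pad (playsink, GST_PLAY_SINK_TYPE_AUDIO);
  vsink = gst_play_sink_request_pad (playsink, GST_PLAY_SINK_TYPE_VIDEO);

  gst_pad_link (audio_src, asink);
  gst_pad_link (video_src, vsink);

  /* subtitle rendering would additionally request GST_PLAY_SINK_TYPE_TEXT
   * and configure the overlay font through the new setter */
  gst_play_sink_set_font_desc (playsink, "Sans 18");
}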
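
The setup_audio_chain hunk only keeps a software volume element when neither the configured sink nor anything inside it exposes a "volume" GObject property; otherwise it points chain->volume (and optionally chain->mute) at the sink itself. The probing helper gst_play_sink_find_property_sinks is not defined in this section; the sketch below shows the general pattern such a probe can follow with GLib and the 0.10 bin-iterator API. The helper name and traversal details are assumptions, not the patch's actual implementation.

/* Sketch: walk a (possibly bin-shaped) sink and return the first element
 * exposing a "volume" property, or NULL. GST_ITERATOR_RESYNC handling is
 * omitted for brevity; returned element carries a reference. */
static GstElement *
find_volume_element (GstElement * sink)
{
  GstElement *result = NULL;

  if (g_object_class_find_property (G_OBJECT_GET_CLASS (sink), "volume"))
    return GST_ELEMENT_CAST (gst_object_ref (sink));

  if (GST_IS_BIN (sink)) {
    GstIterator *it = gst_bin_iterate_recurse (GST_BIN_CAST (sink));
    gpointer item;

    /* 0.10 iterator API: each element comes back with an extra ref */
    while (result == NULL && gst_iterator_next (it, &item) == GST_ITERATOR_OK) {
      GstElement *elem = GST_ELEMENT_CAST (item);

      if (g_object_class_find_property (G_OBJECT_GET_CLASS (elem), "volume"))
        result = elem;          /* keep the iterator's reference */
      else
        gst_object_unref (elem);
    }
    gst_iterator_free (it);
  }
  return result;
}

When the probe fails and the pad is not raw, the chain above simply runs without volume/mute control; for raw audio it falls back to (or reuses) a "volume" element.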
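
The new send_event and handle_message code special-cases frame stepping: a step event in GST_FORMAT_BUFFERS is routed to the video sink first, and when that sink posts STEP_DONE the audio chain is aligned with a GST_FORMAT_TIME step of the reported duration. A minimal application-side sketch of triggering such a step (the pipeline is assumed to be playsink-based and sitting in PAUSED; error handling omitted):

/* Step a paused pipeline forward by exactly one video frame. */
static gboolean
step_one_frame (GstElement * pipeline)
{
  /* amount = 1 buffer, rate = 1.0, flush = TRUE, intermediate = FALSE */
  return gst_element_send_event (pipeline,
      gst_event_new_step (GST_FORMAT_BUFFERS, 1, 1.0, TRUE, FALSE));
}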