gst_plugins_base/gst/videorate/gstvideorate.c
branch RCL_3
changeset 30:7e817e7e631c
parent 29:567bb019e3e3
    17  * Boston, MA 02111-1307, USA.
    18  */
    19 
    20 /**
    21  * SECTION:element-videorate
+   22  * @short_description: retimestamps and drops/duplicates video frames to
+   23  *  match the source pad's framerate and create a perfect stream
    22  *
+   25  * <refsect2>
+   26  * <para>
    23  * This element takes an incoming stream of timestamped video frames.
    24  * It will produce a perfect stream that matches the source pad's framerate.
    25  *
    26  * The correction is performed by dropping and duplicating frames, no fancy
    27  * algorithm is used to interpolate frames (yet).
-   28  *
+   32  * </para>
+   33  * <para>
    29  * By default the element will simply negotiate the same framerate on its
    30  * source and sink pad.
-   31  *
+   36  * </para>
+   37  * <para>
    32  * This operation is useful to link to elements that require a perfect stream.
    33  * Typical examples are formats that do not store timestamps for video frames,
    34  * but only store a framerate, like Ogg and AVI.
-   35  *
+   41  * </para>
+   42  * <para>
    36  * A conversion to a specific framerate can be forced by using filtered caps on
    37  * the source pad.
-   38  *
-   39  * The properties #GstVideoRate:in, #GstVideoRate:out, #GstVideoRate:duplicate
-   40  * and #GstVideoRate:drop can be read to obtain information about number of
-   41  * input frames, output frames, dropped frames (i.e. the number of unused input
-   42  * frames) and duplicated frames (i.e. the number of times an input frame was
-   43  * duplicated, besides being used normally).
+   45  * </para>
+   46  * <para>
+   47  * The properties "in", "out", "duplicate" and "drop" can be read to obtain
+   48  * information about number of input frames, output frames, dropped frames
+   49  * (i.e. the number of unused input frames) and duplicated frames (i.e. the
+   50  *  number of times an input frame was duplicated, besides being used normally).
    44  *
    45  * An input stream that needs no adjustments will thus never have dropped or
    46  * duplicated frames.
    47  *
-   48  * When the #GstVideoRate:silent property is set to FALSE, a GObject property
-   49  * notification will be emitted whenever one of the #GstVideoRate:duplicate or
-   50  * #GstVideoRate:drop values changes.
+   55  * When the "silent" property is set to FALSE, a GObject property notification
+   56  * will be emitted whenever one of the "duplicate" or "drop" values changes.
    51  * This can potentially cause performance degradation.
    52  * Note that property notification will happen from the streaming thread, so
    53  * applications should be prepared for this.
-   54  *
-   55  * <refsect2>
+   60  * </para>
    56  * <title>Example pipelines</title>
-   57  * |[
+   62  * <para>
+   63  * <programlisting>
    58  * gst-launch -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videorate ! video/x-raw-yuv,framerate=15/1 ! xvimagesink
-   59  * ]| Decode an Ogg/Theora file and adjust the framerate to 15 fps before playing.
+   65  * </programlisting>
+   66  * Decode an Ogg/Theora file and adjust the framerate to 15 fps before playing.
    60  * To create the test Ogg/Theora file refer to the documentation of theoraenc.
-   61  * |[
+   68  * </para>
+   69  * <para>
+   70  * <programlisting>
    62  * gst-launch -v v4lsrc ! videorate ! video/x-raw-yuv,framerate=25/2 ! theoraenc ! oggmux ! filesink location=v4l.ogg
-   63  * ]| Capture video from a V4L device, and adjust the stream to 12.5 fps before
+   72  * </programlisting>
+   73  * Capture video from a V4L device, and adjust the stream to 12.5 fps before
    64  * encoding to Ogg/Theora.
-   65  * </refsect2>
+   75  * </para>
+   76   * </refsect2>
    66  *
    67  * Last reviewed on 2006-09-02 (0.10.11)
    68  */
    69 
    70 #ifdef HAVE_CONFIG_H
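The counters and the "silent" behaviour described in the comment block above can be watched from application code. The following is a minimal, hypothetical sketch (not part of this file or changeset): it assumes an application-created videorate instance, turns "silent" off so notifications are emitted, and reads the four counters whenever "drop" or "duplicate" changes. As noted above, the callback runs in the streaming thread.

#include <gst/gst.h>

static void
counters_changed (GObject * obj, GParamSpec * pspec, gpointer user_data)
{
  guint64 in, out, dup, drop;

  /* read the counters documented above; this runs in the streaming thread */
  g_object_get (obj, "in", &in, "out", &out,
      "duplicate", &dup, "drop", &drop, NULL);
  g_print ("in %" G_GUINT64_FORMAT ", out %" G_GUINT64_FORMAT
      ", dup %" G_GUINT64_FORMAT ", drop %" G_GUINT64_FORMAT "\n",
      in, out, dup, drop);
}

static GstElement *
make_watched_videorate (void)
{
  GstElement *rate = gst_element_factory_make ("videorate", "rate");

  g_object_set (rate, "silent", FALSE, NULL);   /* enable notify emission */
  g_signal_connect (rate, "notify::duplicate",
      G_CALLBACK (counters_changed), NULL);
  g_signal_connect (rate, "notify::drop",
      G_CALLBACK (counters_changed), NULL);
  return rate;
}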
   107 
   108 static GstStaticPadTemplate gst_video_rate_src_template =
   109     GST_STATIC_PAD_TEMPLATE ("src",
   110     GST_PAD_SRC,
   111     GST_PAD_ALWAYS,
-  112     GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb; image/jpeg; image/png")
+  123     GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb")
   113     );
   114 
   115 static GstStaticPadTemplate gst_video_rate_sink_template =
   116     GST_STATIC_PAD_TEMPLATE ("sink",
   117     GST_PAD_SINK,
   118     GST_PAD_ALWAYS,
-  119     GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb; image/jpeg; image/png")
+  130     GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb")
   120     );
   121 
   122 static void gst_video_rate_swap_prev (GstVideoRate * videorate,
   123     GstBuffer * buffer, gint64 time);
   124 static gboolean gst_video_rate_event (GstPad * pad, GstEvent * event);
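The documentation above also mentions forcing a specific output framerate with filtered caps on the source pad. A minimal sketch in the 0.10 API, assuming the application already has the videorate element and its downstream peer in the same bin (the helper name is hypothetical, not from this file):

static gboolean
link_at_15_fps (GstElement * rate, GstElement * sink)
{
  GstCaps *caps;
  gboolean ok;

  /* restrict the link between videorate and its peer to 15 fps */
  caps = gst_caps_new_simple ("video/x-raw-yuv",
      "framerate", GST_TYPE_FRACTION, 15, 1, NULL);
  ok = gst_element_link_filtered (rate, sink, caps);
  gst_caps_unref (caps);
  return ok;
}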
   147   gst_element_class_add_pad_template (element_class,
   148       gst_static_pad_template_get (&gst_video_rate_sink_template));
   149   gst_element_class_add_pad_template (element_class,
   150       gst_static_pad_template_get (&gst_video_rate_src_template));
   151 }
-  152 
   153 static void
   154 gst_video_rate_class_init (GstVideoRateClass * klass)
   155 {
   156   GObjectClass *object_class = G_OBJECT_CLASS (klass);
   157   GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
   161   object_class->set_property = gst_video_rate_set_property;
   162   object_class->get_property = gst_video_rate_get_property;
   163 
   164   g_object_class_install_property (object_class, ARG_IN,
   165       g_param_spec_uint64 ("in", "In",
-  166           "Number of input frames", 0, G_MAXUINT64, 0,
-  167           G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  176           "Number of input frames", 0, G_MAXUINT64, 0, G_PARAM_READABLE));
   168   g_object_class_install_property (object_class, ARG_OUT,
-  169       g_param_spec_uint64 ("out", "Out", "Number of output frames", 0,
-  170           G_MAXUINT64, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  178       g_param_spec_uint64 ("out", "Out",
+  179           "Number of output frames", 0, G_MAXUINT64, 0, G_PARAM_READABLE));
   171   g_object_class_install_property (object_class, ARG_DUP,
   172       g_param_spec_uint64 ("duplicate", "Duplicate",
-  173           "Number of duplicated frames", 0, G_MAXUINT64, 0,
-  174           G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  182           "Number of duplicated frames", 0, G_MAXUINT64, 0, G_PARAM_READABLE));
   175   g_object_class_install_property (object_class, ARG_DROP,
-  176       g_param_spec_uint64 ("drop", "Drop", "Number of dropped frames", 0,
-  177           G_MAXUINT64, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  184       g_param_spec_uint64 ("drop", "Drop",
+  185           "Number of dropped frames", 0, G_MAXUINT64, 0, G_PARAM_READABLE));
   178   g_object_class_install_property (object_class, ARG_SILENT,
   179       g_param_spec_boolean ("silent", "silent",
-  180           "Don't emit notify for dropped and duplicated frames", DEFAULT_SILENT,
-  181           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  188           "Don't emit notify for dropped and duplicated frames",
+  189           DEFAULT_SILENT, G_PARAM_READWRITE));
   182   g_object_class_install_property (object_class, ARG_NEW_PREF,
-  183       g_param_spec_double ("new-pref", "New Pref",
-  184           "Value indicating how much to prefer new frames (unused)", 0.0, 1.0,
-  185           DEFAULT_NEW_PREF, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  191       g_param_spec_double ("new_pref", "New Pref",
+  192           "Value indicating how much to prefer new frames (unused)",
+  193           0.0, 1.0, DEFAULT_NEW_PREF, G_PARAM_READWRITE));
   186 
   187   element_class->change_state = gst_video_rate_change_state;
   188 }
   189 
   190 /* return the caps that can be used on out_pad given in_caps on in_pad */
   266   } else {
   267     videorate->from_rate_numerator = rate_numerator;
   268     videorate->from_rate_denominator = rate_denominator;
   269     otherpad = videorate->srcpad;
   270   }
-  271 
   272   /* now try to find something for the peer */
   273   opeer = gst_pad_get_peer (otherpad);
   274   if (opeer) {
   275     if (gst_pad_accept_caps (opeer, caps)) {
   276       /* the peer accepts the caps as they are */
   353 {
   354   GST_DEBUG ("resetting internal variables");
   355 
   356   videorate->in = 0;
   357   videorate->out = 0;
-  358   videorate->segment_out = 0;
   359   videorate->drop = 0;
   360   videorate->dup = 0;
   361   videorate->next_ts = GST_CLOCK_TIME_NONE;
-  362   videorate->last_ts = GST_CLOCK_TIME_NONE;
-  363   videorate->discont = TRUE;
   364   gst_video_rate_swap_prev (videorate, NULL, 0);
   365 
   366   gst_segment_init (&videorate->segment, GST_FORMAT_TIME);
   367 }
   368 
   411       (gst_buffer_ref (videorate->prevbuf));
   412 
   413   GST_BUFFER_OFFSET (outbuf) = videorate->out;
   414   GST_BUFFER_OFFSET_END (outbuf) = videorate->out + 1;
   415 
-  416   if (videorate->discont) {
-  417     GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
-  418     videorate->discont = FALSE;
-  419   } else
-  420     GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);
-  421 
   422   /* this is the timestamp we put on the buffer */
   423   push_ts = videorate->next_ts;
   424 
   425   videorate->out++;
-  426   videorate->segment_out++;
   427   if (videorate->to_rate_numerator) {
   428     /* interpolate next expected timestamp in the segment */
   429     videorate->next_ts = videorate->segment.accum + videorate->segment.start +
-  430         gst_util_uint64_scale (videorate->segment_out,
+  427         gst_util_uint64_scale (videorate->out,
   431         videorate->to_rate_denominator * GST_SECOND,
   432         videorate->to_rate_numerator);
   433     GST_BUFFER_DURATION (outbuf) = videorate->next_ts - push_ts;
   434   }
   435 
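The interpolation above maps the running output counter onto the expected timestamp of the next output frame. A small worked example with illustrative numbers that are not taken from the changeset (output rate 15/1, segment.start and segment.accum both 0):

/* worked example (illustrative): output rate 15/1, segment start/accum 0,
 * three output buffers already pushed (output counter == 3) */
static GstClockTime
example_next_output_ts (void)
{
  /* 3 * (1 * GST_SECOND) / 15 = 200000000 ns (0.2 s); the buffer pushed
   * just before it received one output period, 1/15 s (~66.7 ms) */
  return gst_util_uint64_scale (3, 1 * GST_SECOND, 15);
}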
   463     gst_buffer_unref (videorate->prevbuf);
   464   videorate->prevbuf = buffer;
   465   videorate->prev_ts = time;
   466 }
   467 
-  468 #define MAGIC_LIMIT  25
   469 static gboolean
   470 gst_video_rate_event (GstPad * pad, GstEvent * event)
   471 {
   472   GstVideoRate *videorate;
   473   gboolean ret;
   487 
   488       if (format != GST_FORMAT_TIME)
   489         goto format_error;
   490 
   491       GST_DEBUG_OBJECT (videorate, "handle NEWSEGMENT");
-  492 
-  493       /* close up the previous segment, if appropriate */
-  494       if (!update && videorate->prevbuf) {
-  495         gint count = 0;
-  496         GstFlowReturn res;
-  497 
-  498         res = GST_FLOW_OK;
-  499         /* fill up to the end of current segment,
-  500          * or only send out the stored buffer if there is no specific stop.
-  501          * regardless, prevent going loopy in strange cases */
-  502         while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
-  503             ((GST_CLOCK_TIME_IS_VALID (videorate->segment.stop) &&
-  504                     videorate->next_ts - videorate->segment.accum
-  505                     < videorate->segment.stop)
-  506                 || count < 1)) {
-  507           gst_video_rate_flush_prev (videorate);
-  508           count++;
-  509         }
-  510         if (count > 1) {
-  511           videorate->dup += count - 1;
-  512           if (!videorate->silent)
-  513             g_object_notify (G_OBJECT (videorate), "duplicate");
-  514         } else if (count == 0) {
-  515           videorate->drop++;
-  516           if (!videorate->silent)
-  517             g_object_notify (G_OBJECT (videorate), "drop");
-  518         }
-  519         /* clean up for the new one; _chain will resume from the new start */
-  520         videorate->segment_out = 0;
-  521         gst_video_rate_swap_prev (videorate, NULL, 0);
-  522         videorate->next_ts = GST_CLOCK_TIME_NONE;
-  523       }
   524 
   525       /* We just want to update the accumulated stream_time  */
   526       gst_segment_set_newsegment_full (&videorate->segment, update, rate, arate,
   527           format, start, stop, time);
   528 
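As a rough worked example of the segment-closing loop removed above (the numbers are illustrative, not from the changeset): with an output rate of 25/1 fps, segment.accum 0, segment.stop 1 s and next_ts standing at 0.8 s when a non-update newsegment arrives, gst_video_rate_flush_prev() runs five times (pushing at 0.80, 0.84, 0.88, 0.92 and 0.96 s) before next_ts reaches 1.0 s; count ends at 5, so dup is increased by 4, and MAGIC_LIMIT (25) only guards against runaway cases.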
   584 
   585           GST_DEBUG_OBJECT (videorate, "Peer latency: min %"
   586               GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
   587               GST_TIME_ARGS (min), GST_TIME_ARGS (max));
   588 
-  589           if (videorate->from_rate_numerator != 0) {
-  590             /* add latency. We don't really know since we hold on to the frames
-  591              * until we get a next frame, which can be anything. We assume
-  592              * however that this will take from_rate time. */
-  593             latency = gst_util_uint64_scale (GST_SECOND,
-  594                 videorate->from_rate_denominator,
-  595                 videorate->from_rate_numerator);
-  596           } else {
-  597             /* no input framerate, we don't know */
-  598             latency = 0;
-  599           }
+  553           /* add latency. We don't really know since we hold on to the frames
+  554            * until we get a next frame, which can be anything. We assume
+  555            * however that this will take from_rate time. */
+  556           latency = gst_util_uint64_scale (GST_SECOND,
+  557               videorate->from_rate_denominator, videorate->from_rate_numerator);
   600 
   601           GST_DEBUG_OBJECT (videorate, "Our latency: %"
   602               GST_TIME_FORMAT, GST_TIME_ARGS (latency));
   603 
   604           min += latency;
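The latency added above is simply one input frame period. A worked example with an illustrative upstream rate of 25/1 fps (not taken from the changeset):

/* worked example (illustrative): upstream rate 25/1 fps */
static GstClockTime
example_videorate_latency (void)
{
  /* one input frame period: 1 * GST_SECOND / 25 = 40000000 ns (40 ms),
   * which is then added to the peer's minimum latency when answering
   * the latency query */
  return gst_util_uint64_scale (GST_SECOND, 1, 25);
}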
   627 static GstFlowReturn
   628 gst_video_rate_chain (GstPad * pad, GstBuffer * buffer)
   629 {
   630   GstVideoRate *videorate;
   631   GstFlowReturn res = GST_FLOW_OK;
-  632   GstClockTime intime, in_ts, in_dur;
+  590   GstClockTime intime, in_ts;
   633 
   634   videorate = GST_VIDEO_RATE (GST_PAD_PARENT (pad));
   635 
   636   /* make sure the denominators are not 0 */
   637   if (videorate->from_rate_denominator == 0 ||
   638       videorate->to_rate_denominator == 0)
   639     goto not_negotiated;
   640 
   641   in_ts = GST_BUFFER_TIMESTAMP (buffer);
-  642   in_dur = GST_BUFFER_DURATION (buffer);
-  643 
-  644   if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE)) {
-  645     in_ts = videorate->last_ts;
-  646     if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE))
-  647       goto invalid_buffer;
-  648   }
-  649 
-  650   /* get the time of the next expected buffer timestamp, we use this when the
-  651    * next buffer has -1 as a timestamp */
-  652   videorate->last_ts = in_ts;
-  653   if (in_dur != GST_CLOCK_TIME_NONE)
-  654     videorate->last_ts += in_dur;
+  600 
+  601   if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE))
+  602     goto invalid_buffer;
   655 
   656   GST_DEBUG_OBJECT (videorate, "got buffer with timestamp %" GST_TIME_FORMAT,
   657       GST_TIME_ARGS (in_ts));
   658 
   659   /* the input time is the time in the segment + all previously accumulated
   665     gst_video_rate_swap_prev (videorate, buffer, intime);
   666     videorate->in++;
   667     if (!GST_CLOCK_TIME_IS_VALID (videorate->next_ts)) {
   668       /* new buffer, we expect to output a buffer that matches the first
   669        * timestamp in the segment */
-  670       videorate->next_ts = videorate->segment.start + videorate->segment.accum;
+  618       videorate->next_ts = videorate->segment.start;
   671     }
   672   } else {
   673     GstClockTime prevtime;
   674     gint count = 0;
   675     gint64 diff1, diff2;
   714           GST_TIME_FORMAT " outgoing ts %" GST_TIME_FORMAT,
   715           GST_TIME_ARGS (diff1), GST_TIME_ARGS (diff2),
   716           GST_TIME_ARGS (videorate->next_ts));
   717 
   718       /* output first one when it's the best */
-  719       if (diff1 <= diff2) {
+  667       if (diff1 < diff2) {
   720         count++;
   721 
   722         /* on error the _flush function posted a warning already */
   723         if ((res = gst_video_rate_flush_prev (videorate)) != GST_FLOW_OK) {
   724           gst_buffer_unref (buffer);
   725           goto done;
   726         }
   727       }
-  728       /* continue while the first one was the best, if they were equal avoid
-  729        * going into an infinite loop */
+  676       /* continue while the first one was the best */
   730     }
   731     while (diff1 < diff2);
   732 
   733     /* if we output the first buffer more than once, we have dups */
   734     if (count > 1) {
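The do/while loop above keeps pushing the stored previous frame for as long as it is the better match for the next expected output timestamp; that is where duplicates come from, and a stored frame that is never pushed is counted as a drop. The computation of diff1 and diff2 is outside the displayed context, so the sketch below is a simplified, hypothetical model (it assumes they are the absolute distances of the previous and current input timestamps from next_ts; push_prev() and out_dur are stand-ins, not functions or fields from this file):

/* Hypothetical, simplified model of the decision above: keep duplicating the
 * stored frame while it is the closer match for the next output slot.
 * push_prev() stands in for gst_video_rate_flush_prev(). */
static void
fill_output_slots (GstClockTime prev_ts, GstClockTime in_ts,
    GstClockTime * next_ts, GstClockTime out_dur, void (*push_prev) (void))
{
  gint64 diff1 = ABS (GST_CLOCK_DIFF (*next_ts, prev_ts));
  gint64 diff2 = ABS (GST_CLOCK_DIFF (*next_ts, in_ts));

  while (diff1 < diff2) {
    push_prev ();               /* previous frame fills this output slot */
    *next_ts += out_dur;        /* advance to the next output slot */
    diff1 = ABS (GST_CLOCK_DIFF (*next_ts, prev_ts));
    diff2 = ABS (GST_CLOCK_DIFF (*next_ts, in_ts));
  }
  /* afterwards the new frame replaces the stored one (swap_prev) */
}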
   835   GstVideoRate *videorate;
   836 
   837   videorate = GST_VIDEO_RATE (element);
   838 
   839   switch (transition) {
-  840     case GST_STATE_CHANGE_READY_TO_PAUSED:
-  841       videorate->discont = TRUE;
-  842       videorate->last_ts = -1;
-  843       break;
   844     default:
   845       break;
   846   }
   847 
   848   ret = parent_class->change_state (element, transition);