C++ GST_TIME_ARGS function code examples


This article collects typical usage examples of the C++ GST_TIME_ARGS macro. If you have been asking how GST_TIME_ARGS is used, what it does, or what real-world code using it looks like, the hand-picked examples below should help.



A total of 20 GST_TIME_ARGS code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the site recommend better C++ code examples.
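For readers unfamiliar with the macro: GST_TIME_ARGS expands a GstClockTime value (nanoseconds) into the argument list expected by the companion GST_TIME_FORMAT printf-style format fragment, which renders the time as H:MM:SS.NNNNNNNNN. A minimal standalone sketch, not taken from the examples below (the sample timestamp is arbitrary):

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  /* An arbitrary timestamp: 1 minute 30.5 seconds, expressed in nanoseconds. */
  GstClockTime ts = 90 * GST_SECOND + 500 * GST_MSECOND;

  gst_init (&argc, &argv);

  /* GST_TIME_FORMAT is a printf format fragment; GST_TIME_ARGS expands the
   * nanosecond value into the hour/minute/second/nanosecond fields it needs.
   * This prints "timestamp: 0:01:30.500000000". */
  g_print ("timestamp: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (ts));

  return 0;
}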

Example 1: gst_base_rtp_audio_payload_flush

/**
 * gst_base_rtp_audio_payload_flush:
 * @baseaudiopayload: a #GstBaseRTPPayload
 * @payload_len: length of payload
 * @timestamp: a #GstClockTime
 *
 * Create an RTP buffer and store @payload_len bytes of the adapter as the
 * payload. Set the timestamp on the new buffer to @timestamp before pushing
 * the buffer downstream.
 *
 * If @payload_len is -1, all pending bytes will be flushed. If @timestamp is
 * -1, the timestamp will be calculated automatically.
 *
 * Returns: a #GstFlowReturn
 *
 * Since: 0.10.25
 */
GstFlowReturn
gst_base_rtp_audio_payload_flush (GstBaseRTPAudioPayload * baseaudiopayload,
    guint payload_len, GstClockTime timestamp)
{
  GstBaseRTPPayload *basepayload;
  GstBaseRTPAudioPayloadPrivate *priv;
  GstBuffer *outbuf;
  guint8 *payload;
  GstFlowReturn ret;
  GstAdapter *adapter;
  guint64 distance;

  priv = baseaudiopayload->priv;
  adapter = priv->adapter;

  basepayload = GST_BASE_RTP_PAYLOAD (baseaudiopayload);

  if (payload_len == -1)
    payload_len = gst_adapter_available (adapter);

  /* nothing to do, just return */
  if (payload_len == 0)
    return GST_FLOW_OK;

  if (timestamp == -1) {
    /* calculate the timestamp */
    timestamp = gst_adapter_prev_timestamp (adapter, &distance);

    GST_LOG_OBJECT (baseaudiopayload,
        "last timestamp %" GST_TIME_FORMAT ", distance %" G_GUINT64_FORMAT,
        GST_TIME_ARGS (timestamp), distance);

    if (GST_CLOCK_TIME_IS_VALID (timestamp) && distance > 0) {
      /* convert the number of bytes since the last timestamp to time and add to
       * the last seen timestamp */
      timestamp += priv->bytes_to_time (baseaudiopayload, distance);
    }
  }

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  if (priv->buffer_list && gst_adapter_available_fast (adapter) >= payload_len) {
    GstBuffer *buffer;
    /* we can quickly take a buffer out of the adapter without having to copy
     * anything. */
    buffer = gst_adapter_take_buffer (adapter, payload_len);

    ret = gst_base_rtp_audio_payload_push_buffer (baseaudiopayload, buffer);
  } else {
    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

    /* copy payload */
    payload = gst_rtp_buffer_get_payload (outbuf);
    gst_adapter_copy (adapter, payload, 0, payload_len);
    gst_adapter_flush (adapter, payload_len);

    /* set metadata */
    gst_base_rtp_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
        timestamp);

    ret = gst_basertppayload_push (basepayload, outbuf);
  }

  return ret;
}
Developer ID: genesi; project: gst-base-plugins; lines of code: 84; source file: gstbasertpaudiopayload.c
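A hedged usage sketch (not from the original file): a payloader subclass built on these GStreamer 0.10 base classes could flush everything queued in the adapter and let the base class pick the timestamp by passing -1 for both arguments, exactly as the documentation above describes. The `payload` pointer is assumed to be the subclass's GstBaseRTPAudioPayload instance:

  /* Hypothetical caller inside a payloader subclass. */
  GstFlowReturn flow;

  /* -1 for payload_len flushes all pending bytes; -1 for timestamp lets the
   * base class derive it from the adapter, as documented above. */
  flow = gst_base_rtp_audio_payload_flush (payload, -1, -1);
  if (flow != GST_FLOW_OK)
    GST_WARNING_OBJECT (payload, "flush returned %s", gst_flow_get_name (flow));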


Example 2: gst_shape_wipe_video_sink_chain

static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  gboolean new_outbuf = FALSE;

  if (G_UNLIKELY (self->fmt == GST_VIDEO_FORMAT_UNKNOWN))
    return GST_FLOW_NOT_NEGOTIATED;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);

  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (G_OBJECT (self), timestamp);

  GST_DEBUG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %lf",
      GST_TIME_ARGS (timestamp), self->mask_position);

  g_mutex_lock (self->mask_mutex);
  if (!self->mask)
    g_cond_wait (self->mask_cond, self->mask_mutex);

  if (self->mask == NULL) {
    g_mutex_unlock (self->mask_mutex);
    gst_buffer_unref (buffer);
    return GST_FLOW_UNEXPECTED;
  } else {
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (self->mask_mutex);

  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer))) {
    gst_buffer_unref (buffer);
    gst_buffer_unref (mask);
    return GST_FLOW_OK;
  }

  /* Try to blend inplace, if it's not possible
   * get a new buffer from downstream.
   */
  if (!gst_buffer_is_writable (buffer)) {
    ret =
        gst_pad_alloc_buffer_and_set_caps (self->srcpad, GST_BUFFER_OFFSET_NONE,
        GST_BUFFER_SIZE (buffer), GST_PAD_CAPS (self->srcpad), &outbuf);
    if (G_UNLIKELY (ret != GST_FLOW_OK)) {
      gst_buffer_unref (buffer);
      gst_buffer_unref (mask);
      return ret;
    }
    gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_ALL);
    new_outbuf = TRUE;
  } else {
    outbuf = buffer;
  }

  if (self->fmt == GST_VIDEO_FORMAT_AYUV && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_ayuv_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_AYUV)
    ret = gst_shape_wipe_blend_ayuv_8 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_ARGB && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_argb_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_ARGB)
    ret = gst_shape_wipe_blend_argb_8 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_BGRA && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_bgra_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_BGRA)
    ret = gst_shape_wipe_blend_bgra_8 (self, buffer, mask, outbuf);
  else
    g_assert_not_reached ();

  gst_buffer_unref (mask);
  if (new_outbuf)
    gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    gst_buffer_unref (outbuf);
    return ret;
  }

  ret = gst_pad_push (self->srcpad, outbuf);
  return ret;
}
Developer ID: bilboed; project: gst-plugins-bad; lines of code: 87; source file: gstshapewipe.c


Example 3: gst_videoframe_audiolevel_asink_chain


//......... (part of the code omitted here) .........
      if (g_queue_get_length (&self->vtimeq) < 2) {
        vtemp = self->vsegment.position;
      } else if (self->vsegment.position == GST_CLOCK_TIME_NONE) {
        /* g_queue_get_length is surely >= 2 at this point
         * so the adapter isn't empty */
        buf =
            gst_adapter_take_buffer (self->adapter,
            gst_adapter_available (self->adapter));
        if (buf != NULL) {
          GstMessage *msg;
          msg = update_rms_from_buffer (self, buf);
          g_mutex_unlock (&self->mutex);
          gst_element_post_message (GST_ELEMENT (self), msg);
          gst_buffer_unref (buf);
          g_mutex_lock (&self->mutex);  /* we unlock again later */
        }
        break;
      }
    } else if (g_queue_get_length (&self->vtimeq) < 2) {
      continue;
    }

    vt0 = g_queue_pop_head (&self->vtimeq);
    if (vtemp == GST_CLOCK_TIME_NONE)
      vt1 = g_queue_peek_head (&self->vtimeq);
    else
      vt1 = &vtemp;

    cur_time =
        self->first_time + gst_util_uint64_scale (self->total_frames,
        GST_SECOND, rate);
    GST_DEBUG_OBJECT (self,
        "Processing: current time is %" GST_TIME_FORMAT,
        GST_TIME_ARGS (cur_time));
    GST_DEBUG_OBJECT (self, "Total frames is %i with a rate of %d",
        self->total_frames, rate);
    GST_DEBUG_OBJECT (self, "Start time is %" GST_TIME_FORMAT,
        GST_TIME_ARGS (self->first_time));
    GST_DEBUG_OBJECT (self, "Time on top is %" GST_TIME_FORMAT,
        GST_TIME_ARGS (*vt0));

    if (cur_time < *vt0) {
      guint num_frames =
          gst_util_uint64_scale (*vt0 - cur_time, rate, GST_SECOND);
      bytes = num_frames * GST_AUDIO_INFO_BPF (&self->ainfo);
      available_bytes = gst_adapter_available (self->adapter);
      if (available_bytes == 0) {
        g_queue_push_head (&self->vtimeq, vt0);
        break;
      }
      if (bytes == 0) {
        cur_time = *vt0;
      } else {
        GST_DEBUG_OBJECT (self,
            "Flushed %" G_GSIZE_FORMAT " out of %" G_GSIZE_FORMAT " bytes",
            bytes, available_bytes);
        gst_adapter_flush (self->adapter, MIN (bytes, available_bytes));
        self->total_frames += num_frames;
        if (available_bytes <= bytes) {
          g_queue_push_head (&self->vtimeq, vt0);
          break;
        }
        cur_time =
            self->first_time + gst_util_uint64_scale (self->total_frames,
            GST_SECOND, rate);
      }
Developer ID: 0p1pp1; project: gst-plugins-bad; lines of code: 67; source file: gstvideoframe-audiolevel.c


Example 4: discoverer_collect

/* Called when pipeline is pre-rolled */
static void
discoverer_collect (GstDiscoverer * dc)
{
  GST_DEBUG ("Collecting information");

  /* Stop the timeout handler if present */
  if (dc->priv->timeoutid) {
    g_source_remove (dc->priv->timeoutid);
    dc->priv->timeoutid = 0;
  }

  if (dc->priv->streams) {
    /* FIXME : Make this querying optional */
    if (TRUE) {
      GstElement *pipeline = (GstElement *) dc->priv->pipeline;
      GstFormat format = GST_FORMAT_TIME;
      gint64 dur;

      GST_DEBUG ("Attempting to query duration");

      if (gst_element_query_duration (pipeline, &format, &dur)) {
        if (format == GST_FORMAT_TIME) {
          GST_DEBUG ("Got duration %" GST_TIME_FORMAT, GST_TIME_ARGS (dur));
          dc->priv->current_info->duration = (guint64) dur;
        }
      }

      if (dc->priv->seeking_query) {
        if (gst_element_query (pipeline, dc->priv->seeking_query)) {
          gboolean seekable;

          gst_query_parse_seeking (dc->priv->seeking_query, &format,
              &seekable, NULL, NULL);
          if (format == GST_FORMAT_TIME) {
            GST_DEBUG ("Got seekable %d", seekable);
            dc->priv->current_info->seekable = seekable;
          }
        }
      }
    }

    if (dc->priv->current_topology)
      dc->priv->current_info->stream_info = parse_stream_topology (dc,
          dc->priv->current_topology, NULL);

    /*
     * Images need some special handling. They do not have a duration, have
     * caps named image/<foo> (the exception being MJPEG video which is also
     * type image/jpeg), and should consist of precisely one stream (actually
     * initially there are 2, the image and raw stream, but we squash these
     * while parsing the stream topology). At some point, if we find that these
     * conditions are not sufficient, we can count the number of decoders and
     * parsers in the chain, and if there's more than one decoder, or any
     * parser at all, we should not mark this as an image.
     */
    if (dc->priv->current_info->duration == 0 &&
        dc->priv->current_info->stream_info != NULL &&
        dc->priv->current_info->stream_info->next == NULL) {
      GstStructure *st =
          gst_caps_get_structure (dc->priv->current_info->stream_info->caps, 0);

      if (g_str_has_prefix (gst_structure_get_name (st), "image/"))
        ((GstDiscovererVideoInfo *) dc->priv->current_info->
            stream_info)->is_image = TRUE;
    }
  }

  if (dc->priv->async) {
    GST_DEBUG ("Emitting 'discoverered'");
    g_signal_emit (dc, gst_discoverer_signals[SIGNAL_DISCOVERED], 0,
        dc->priv->current_info, dc->priv->current_error);
    /* Clients get a copy of current_info since it is a boxed type */
    gst_discoverer_info_unref (dc->priv->current_info);
  }
}
Developer ID: 166MMX; project: openjdk.java.net-openjfx-8u40-rt; lines of code: 76; source file: gstdiscoverer.c
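As a hedged complement (not part of the original file), the duration collected above is also reachable through the public GstDiscoverer API and can be printed with GST_TIME_ARGS in the same way. This sketch assumes gst_init() has already been called; the URI, the 5-second timeout, and the minimal error handling are illustrative choices only:

#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>

/* Illustrative sketch: discover a URI synchronously and print its duration. */
static void
print_duration (const gchar * uri)
{
  GError *err = NULL;
  GstDiscoverer *dc = gst_discoverer_new (5 * GST_SECOND, &err);
  GstDiscovererInfo *info;

  if (dc == NULL) {
    g_clear_error (&err);
    return;
  }

  info = gst_discoverer_discover_uri (dc, uri, &err);
  if (info != NULL) {
    g_print ("duration: %" GST_TIME_FORMAT "\n",
        GST_TIME_ARGS (gst_discoverer_info_get_duration (info)));
    gst_discoverer_info_unref (info);
  }

  g_clear_error (&err);
  g_object_unref (dc);
}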


Example 5: gst_isoff_sidx_parser_add_buffer


//......... (part of the code omitted here) .........
      }
      if (parser->size == 1) {
        if (gst_byte_reader_get_remaining (&reader) < 12) {
          gst_byte_reader_set_pos (&reader, 0);
          break;
        }

        parser->size = gst_byte_reader_get_uint64_be_unchecked (&reader);
      }
      if (parser->size == 0) {
        res = GST_ISOFF_PARSER_ERROR;
        gst_byte_reader_set_pos (&reader, 0);
        break;
      }
      parser->sidx.version = gst_byte_reader_get_uint8_unchecked (&reader);
      parser->sidx.flags = gst_byte_reader_get_uint24_le_unchecked (&reader);

      parser->status = GST_ISOFF_SIDX_PARSER_HEADER;

    case GST_ISOFF_SIDX_PARSER_HEADER:
      remaining = gst_byte_reader_get_remaining (&reader);
      if (remaining < 12 + (parser->sidx.version == 0 ? 8 : 16)) {
        break;
      }

      parser->sidx.ref_id = gst_byte_reader_get_uint32_be_unchecked (&reader);
      parser->sidx.timescale =
          gst_byte_reader_get_uint32_be_unchecked (&reader);
      if (parser->sidx.version == 0) {
        parser->sidx.earliest_pts =
            gst_byte_reader_get_uint32_be_unchecked (&reader);
        parser->sidx.first_offset =
            gst_byte_reader_get_uint32_be_unchecked (&reader);
      } else {
        parser->sidx.earliest_pts =
            gst_byte_reader_get_uint64_be_unchecked (&reader);
        parser->sidx.first_offset =
            gst_byte_reader_get_uint64_be_unchecked (&reader);
      }
      /* skip 2 reserved bytes */
      gst_byte_reader_skip_unchecked (&reader, 2);
      parser->sidx.entries_count =
          gst_byte_reader_get_uint16_be_unchecked (&reader);

      GST_LOG ("Timescale: %" G_GUINT32_FORMAT, parser->sidx.timescale);
      GST_LOG ("Earliest pts: %" G_GUINT64_FORMAT, parser->sidx.earliest_pts);
      GST_LOG ("First offset: %" G_GUINT64_FORMAT, parser->sidx.first_offset);

      parser->cumulative_pts =
          gst_util_uint64_scale_int_round (parser->sidx.earliest_pts,
          GST_SECOND, parser->sidx.timescale);

      if (parser->sidx.entries_count) {
        parser->sidx.entries =
            g_malloc (sizeof (GstSidxBoxEntry) * parser->sidx.entries_count);
      }
      parser->sidx.entry_index = 0;

      parser->status = GST_ISOFF_SIDX_PARSER_DATA;

    case GST_ISOFF_SIDX_PARSER_DATA:
      while (parser->sidx.entry_index < parser->sidx.entries_count) {
        GstSidxBoxEntry *entry =
            &parser->sidx.entries[parser->sidx.entry_index];

        remaining = gst_byte_reader_get_remaining (&reader);
        if (remaining < 12)
          break;

        entry->offset = parser->cumulative_entry_size;
        entry->pts = parser->cumulative_pts;
        gst_isoff_parse_sidx_entry (entry, &reader);
        entry->duration = gst_util_uint64_scale_int_round (entry->duration,
            GST_SECOND, parser->sidx.timescale);
        parser->cumulative_entry_size += entry->size;
        parser->cumulative_pts += entry->duration;

        GST_LOG ("Sidx entry %d) offset: %" G_GUINT64_FORMAT ", pts: %"
            GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT " - size %"
            G_GUINT32_FORMAT, parser->sidx.entry_index, entry->offset,
            GST_TIME_ARGS (entry->pts), GST_TIME_ARGS (entry->duration),
            entry->size);

        parser->sidx.entry_index++;
      }

      if (parser->sidx.entry_index == parser->sidx.entries_count)
        parser->status = GST_ISOFF_SIDX_PARSER_FINISHED;
      else
        break;
    case GST_ISOFF_SIDX_PARSER_FINISHED:
      parser->sidx.entry_index = 0;
      res = GST_ISOFF_PARSER_DONE;
      break;
  }

  *consumed = gst_byte_reader_get_pos (&reader);
  gst_buffer_unmap (buffer, &info);
  return res;
}
Developer ID: Haifen; project: gst-plugins-bad; lines of code: 101; source file: gstisoff.c
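To make the timescale conversion above concrete, here is a short hedged sketch with made-up numbers: at a 90 kHz timescale, an earliest_pts of 450000 ticks converts to 5 seconds of GstClockTime, which GST_TIME_ARGS then renders as a clock string.

  /* Illustrative numbers only: 450000 ticks at a timescale of 90000 (90 kHz)
   * correspond to 450000 * GST_SECOND / 90000 = 5 seconds. */
  guint64 pts_ns = gst_util_uint64_scale_int_round (450000, GST_SECOND, 90000);

  /* Prints "earliest pts: 0:00:05.000000000". */
  g_print ("earliest pts: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (pts_ns));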


Example 6: delayed_seek_cb

/* Delayed seek callback. This gets called by the timer setup in the above function. */
static gboolean delayed_seek_cb (CustomData *data) {
  GST_DEBUG ("Doing delayed seek to %" GST_TIME_FORMAT, GST_TIME_ARGS (data->desired_position));
  execute_seek (data->desired_position, data);
  return FALSE;
}
Developer ID: 0x8BADFOOD; project: GstreamerCodeSnippets; lines of code: 6; source file: tutorial-5.c
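The comment above refers to a timer "setup in the above function", which is not shown on this page. Purely as a hedged illustration, one plausible way to arm that timer with GLib is a one-shot timeout source; the helper name is hypothetical, and CustomData with its desired_position field is assumed to be the tutorial's structure:

/* Hypothetical helper (not from the tutorial excerpt): remember the target
 * position and fire delayed_seek_cb once, 500 ms from now. The callback
 * returns FALSE, so the timeout source removes itself after running. */
static void
schedule_delayed_seek (CustomData * data, GstClockTime position)
{
  data->desired_position = position;
  g_timeout_add (500, (GSourceFunc) delayed_seek_cb, data);
}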


Example 7: gst_base_video_decoder_sink_event

static gboolean
gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoDecoder *base_video_decoder;
  gboolean res = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
      if (!base_video_decoder->packetized)
        gst_base_video_decoder_drain (base_video_decoder, TRUE);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD
          (base_video_decoder), event);
      break;

    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      double rate;
      double applied_rate;
      GstFormat format;
      gint64 start;
      gint64 stop;
      gint64 position;
      GstSegment *segment = &base_video_decoder->segment;

      gst_event_parse_new_segment_full (event, &update, &rate,
          &applied_rate, &format, &start, &stop, &position);

      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      if (!update) {
        gst_base_video_decoder_flush (base_video_decoder);
      }

      base_video_decoder->timestamp_offset = start;

      gst_segment_set_newsegment_full (segment,
          update, rate, applied_rate, format, start, stop, position);
      base_video_decoder->have_segment = TRUE;

      GST_WARNING ("new segment: format %d rate %g start %" GST_TIME_FORMAT
          " stop %" GST_TIME_FORMAT
          " position %" GST_TIME_FORMAT
          " update %d",
          format, rate,
          GST_TIME_ARGS (segment->start),
          GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time), update);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD
          (base_video_decoder), event);
      break;
    }

    case GST_EVENT_FLUSH_STOP:
      gst_base_video_decoder_flush (base_video_decoder);
      gst_segment_init (&base_video_decoder->segment, GST_FORMAT_TIME);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD
          (base_video_decoder), event);
      break;

    default:
      res = gst_pad_event_default (pad, event);
      break;
  }

done:
  gst_object_unref (base_video_decoder);
  return res;

newseg_wrong_format:
  GST_DEBUG_OBJECT (base_video_decoder, "received non TIME newsegment");
  gst_event_unref (event);
  goto done;
}
Developer ID: collects; project: gst-plugins-bad; lines of code: 82; source file: gstbasevideodecoder.c


Example 8: gst_base_video_decoder_src_event

static gboolean
gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoDecoder *base_video_decoder;
  gboolean res = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:

      /* FIXME: do seek using bitrate incase upstream doesn't handle it */
      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);

      break;

    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      GstClockTime duration;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (base_video_decoder);
      base_video_decoder->proportion = proportion;
      if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
        if (G_UNLIKELY (diff > 0)) {
          if (base_video_decoder->state.fps_n > 0)
            duration =
                gst_util_uint64_scale (GST_SECOND,
                base_video_decoder->state.fps_d,
                base_video_decoder->state.fps_n);
          else
            duration = 0;
          base_video_decoder->earliest_time = timestamp + 2 * diff + duration;
        } else {
          base_video_decoder->earliest_time = timestamp + diff;
        }
      } else {
        base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE;
      }
      GST_OBJECT_UNLOCK (base_video_decoder);

      GST_DEBUG_OBJECT (base_video_decoder,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT ", %g",
          GST_TIME_ARGS (timestamp), diff, proportion);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);
      break;
    }

    default:
      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);
      break;
  }

  gst_object_unref (base_video_decoder);
  return res;
}
Developer ID: collects; project: gst-plugins-bad; lines of code: 67; source file: gstbasevideodecoder.c


Example 9: gst_mve_demux_chain

static GstFlowReturn
gst_mve_demux_chain (GstPad * sinkpad, GstBuffer * inbuf)
{
  GstMveDemux *mve = GST_MVE_DEMUX (GST_PAD_PARENT (sinkpad));
  GstFlowReturn ret = GST_FLOW_OK;

  gst_adapter_push (mve->adapter, inbuf);

  GST_DEBUG_OBJECT (mve, "queuing buffer, needed:%d, available:%u",
      mve->needed_bytes, gst_adapter_available (mve->adapter));

  while ((gst_adapter_available (mve->adapter) >= mve->needed_bytes) &&
      (ret == GST_FLOW_OK)) {
    GstMveDemuxStream *stream = NULL;
    GstBuffer *outbuf = NULL;

    switch (mve->state) {
      case MVEDEMUX_STATE_INITIAL:
        gst_adapter_flush (mve->adapter, mve->needed_bytes);

        mve->chunk_offset += mve->needed_bytes;
        mve->needed_bytes = 4;
        mve->state = MVEDEMUX_STATE_NEXT_CHUNK;
        break;

      case MVEDEMUX_STATE_NEXT_CHUNK:{
        const guint8 *data;
        guint16 size;

        data = gst_adapter_peek (mve->adapter, mve->needed_bytes);
        size = GST_MVE_SEGMENT_SIZE (data);

        if (mve->chunk_offset >= mve->chunk_size) {
          /* new chunk, flush buffer and proceed with next segment */
          guint16 chunk_type = GST_READ_UINT16_LE (data + 2);

          gst_adapter_flush (mve->adapter, mve->needed_bytes);
          mve->chunk_size = size;
          mve->chunk_offset = 0;

          if (chunk_type > MVE_CHUNK_END) {
            GST_WARNING_OBJECT (mve,
                "skipping unknown chunk type 0x%02x of size:%u", chunk_type,
                size);
            mve->needed_bytes += size;
            mve->state = MVEDEMUX_STATE_SKIP;
          } else {
            GST_DEBUG_OBJECT (mve, "found new chunk type 0x%02x of size:%u",
                chunk_type, size);
          }
        } else if (mve->chunk_offset <= mve->chunk_size) {
          /* new segment */
          GST_DEBUG_OBJECT (mve, "found segment type 0x%02x of size:%u",
              GST_MVE_SEGMENT_TYPE (data), size);

          mve->needed_bytes += size;
          mve->state = MVEDEMUX_STATE_MOVIE;
        }
      }
        break;

      case MVEDEMUX_STATE_MOVIE:
        ret = gst_mve_parse_segment (mve, &stream, &outbuf);

        if ((ret == GST_FLOW_OK) && (outbuf != NULL)) {
          /* send buffer */
          GST_DEBUG_OBJECT (mve,
              "pushing buffer with time %" GST_TIME_FORMAT
              " (%u bytes) on pad %s",
              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
              GST_BUFFER_SIZE (outbuf), GST_PAD_NAME (stream->pad));

          ret = gst_pad_push (stream->pad, outbuf);
          stream->last_flow = ret;
        }

        if (ret == GST_FLOW_NOT_LINKED) {
          if (mve->audio_stream
              && mve->audio_stream->last_flow != GST_FLOW_NOT_LINKED)
            ret = GST_FLOW_OK;
          if (mve->video_stream
              && mve->video_stream->last_flow != GST_FLOW_NOT_LINKED)
            ret = GST_FLOW_OK;
        }

        /* update current offset */
        mve->chunk_offset += mve->needed_bytes;

        mve->state = MVEDEMUX_STATE_NEXT_CHUNK;
        mve->needed_bytes = 4;
        break;

      case MVEDEMUX_STATE_SKIP:
        mve->chunk_offset += mve->needed_bytes;
        gst_adapter_flush (mve->adapter, mve->needed_bytes);
        mve->state = MVEDEMUX_STATE_NEXT_CHUNK;
        mve->needed_bytes = 4;
        break;

      default:
//......... (part of the code omitted here) .........
Developer ID: drothlis; project: gst-plugins-bad; lines of code: 101; source file: gstmvedemux.c


Example 10: theora_parse_src_query

static gboolean
theora_parse_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
  GstTheoraParse *parse;
  gboolean res = FALSE;

  parse = GST_THEORA_PARSE (parent);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:
    {
      gint64 frame, value;
      GstFormat my_format, format;
      gint64 time;

      frame = parse->prev_frame;

      GST_LOG_OBJECT (parse,
          "query %p: we have current frame: %" G_GINT64_FORMAT, query, frame);

      /* parse format */
      gst_query_parse_position (query, &format, NULL);

      /* and convert to the final format in two steps with time as the 
       * intermediate step */
      my_format = GST_FORMAT_TIME;
      if (!(res =
              theora_parse_src_convert (parse->sinkpad, GST_FORMAT_DEFAULT,
                  frame, &my_format, &time)))
        goto error;

      /* fixme: handle segments
         time = (time - parse->segment.start) + parse->segment.time;
       */

      GST_LOG_OBJECT (parse,
          "query %p: our time: %" GST_TIME_FORMAT " (conv to %s)",
          query, GST_TIME_ARGS (time), gst_format_get_name (format));

      if (!(res =
              theora_parse_src_convert (pad, my_format, time, &format, &value)))
        goto error;

      gst_query_set_position (query, format, value);

      GST_LOG_OBJECT (parse,
          "query %p: we return %" G_GINT64_FORMAT " (format %u)", query, value,
          format);

      break;
    }
    case GST_QUERY_DURATION:
      /* forward to peer for total */
      if (!(res = gst_pad_query (GST_PAD_PEER (parse->sinkpad), query)))
        goto error;
      break;
    case GST_QUERY_CONVERT:
    {
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      if (!(res =
              theora_parse_src_convert (pad, src_fmt, src_val, &dest_fmt,
                  &dest_val)))
        goto error;

      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    default:
      res = gst_pad_query_default (pad, parent, query);
      break;
  }
done:

  return res;

  /* ERRORS */
error:
  {
    GST_DEBUG_OBJECT (parse, "query failed");
    goto done;
  }
}
Developer ID: reynaldo-samsung; project: gst-plugins-base; lines of code: 85; source file: gsttheoraparse.c


Example 11: main

gint
main (gint argc, gchar * argv[])
{
  GstElement *pipeline, *filesrc, *decodebin;
  GstStateChangeReturn res;
  GstIterator *it;
  GstBus *bus;
  GValue data = { 0, };

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new ("pipeline");

  filesrc = gst_element_factory_make ("filesrc", "filesrc");
  g_assert (filesrc);

  decodebin = gst_element_factory_make ("decodebin", "decodebin");
  g_assert (decodebin);

  gst_bin_add_many (GST_BIN (pipeline), filesrc, decodebin, NULL);
  gst_element_link (filesrc, decodebin);

  if (argc < 2) {
    g_print ("usage: %s <filenames>\n", argv[0]);
    exit (-1);
  }

  if (!g_str_has_prefix (argv[1], "file://")) {
    g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
  } else {
    g_object_set (G_OBJECT (filesrc), "location", argv[1] + 7, NULL);
  }

  /* we've got to connect fakesinks to newly decoded pads to make sure
   * buffers have actually been flowing over those pads and caps have
   * been set on them. decodebin might insert internal queues and
   * without fakesinks it's pot-luck what caps we get from the pad, because
   * it depends on whether the queues have started pushing buffers yet or not.
   * With fakesinks we make sure that the pipeline doesn't go to PAUSED state
   * before each fakesink has a buffer queued. */
  g_signal_connect (decodebin, "new-decoded-pad",
      G_CALLBACK (new_decoded_pad_cb), pipeline);

  bus = gst_element_get_bus (pipeline);

  g_print ("pause..\n");
  res = gst_element_set_state (pipeline, GST_STATE_PAUSED);
  if (res == GST_STATE_CHANGE_FAILURE) {
    show_error ("Could not go to PAUSED state", bus);
    exit (-1);
  }
  g_print ("waiting..\n");
  res = gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
  if (res != GST_STATE_CHANGE_SUCCESS) {
    show_error ("Failed to complete state change to PAUSED", bus);
    exit (-1);
  }
  g_print ("stats..\n");

  it = gst_element_iterate_src_pads (decodebin);
  while (gst_iterator_next (it, &data) == GST_ITERATOR_OK) {
    GstPad *pad = g_value_get_object (&data);
    GstCaps *caps;
    gchar *str;
    GstQuery *query;

    g_print ("stream %s:\n", GST_OBJECT_NAME (pad));

    caps = gst_pad_query_caps (pad, NULL);
    str = gst_caps_to_string (caps);
    g_print (" caps: %s\n", str);
    g_free (str);
    gst_caps_unref (caps);

    query = gst_query_new_duration (GST_FORMAT_TIME);
    if (gst_pad_query (pad, query)) {
      gint64 duration;

      gst_query_parse_duration (query, NULL, &duration);

      g_print (" duration: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (duration));
    }
    gst_query_unref (query);

    g_value_reset (&data);
  }
  g_value_unset (&data);
  gst_iterator_free (it);

  return 0;
}
Developer ID: lubing521; project: gst-embedded-builder; lines of code: 91; source file: test6.c


Example 12: gst_identity_transform_ip

static GstFlowReturn
gst_identity_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstIdentity *identity = GST_IDENTITY (trans);
  GstClockTime runtimestamp = G_GINT64_CONSTANT (0);

  if (identity->check_perfect)
    gst_identity_check_perfect (identity, buf);
  if (identity->check_imperfect_timestamp)
    gst_identity_check_imperfect_timestamp (identity, buf);
  if (identity->check_imperfect_offset)
    gst_identity_check_imperfect_offset (identity, buf);

  /* update prev values */
  identity->prev_timestamp = GST_BUFFER_TIMESTAMP (buf);
  identity->prev_duration = GST_BUFFER_DURATION (buf);
  identity->prev_offset_end = GST_BUFFER_OFFSET_END (buf);
  identity->prev_offset = GST_BUFFER_OFFSET (buf);

  if (identity->error_after >= 0) {
    identity->error_after--;
    if (identity->error_after == 0) {
      GST_ELEMENT_ERROR (identity, CORE, FAILED,
          (_("Failed after iterations as requested.")), (NULL));
      return GST_FLOW_ERROR;
    }
  }

  if (identity->drop_probability > 0.0) {
    if ((gfloat) (1.0 * rand () / (RAND_MAX)) < identity->drop_probability) {
      if (!identity->silent) {
        GST_OBJECT_LOCK (identity);
        g_free (identity->last_message);
        identity->last_message =
            g_strdup_printf
            ("dropping   ******* (%s:%s)i (%d bytes, timestamp: %"
            GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
            G_GINT64_FORMAT ", offset_end: % " G_GINT64_FORMAT
            ", flags: %d) %p", GST_DEBUG_PAD_NAME (trans->sinkpad),
            GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_BUFFER_OFFSET (buf),
            GST_BUFFER_OFFSET_END (buf), GST_BUFFER_FLAGS (buf), buf);
        GST_OBJECT_UNLOCK (identity);
        gst_identity_notify_last_message (identity);
      }
      /* return DROPPED to basetransform. */
      return GST_BASE_TRANSFORM_FLOW_DROPPED;
    }
  }

  if (identity->dump) {
    gst_util_dump_mem (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
  }

  if (!identity->silent) {
    GST_OBJECT_LOCK (identity);
    g_free (identity->last_message);
    identity->last_message =
        g_strdup_printf ("chain   ******* (%s:%s)i (%d bytes, timestamp: %"
        GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
        G_GINT64_FORMAT ", offset_end: % " G_GINT64_FORMAT ", flags: %d) %p",
        GST_DEBUG_PAD_NAME (trans->sinkpad), GST_BUFFER_SIZE (buf),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
        GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
        GST_BUFFER_FLAGS (buf), buf);
    GST_OBJECT_UNLOCK (identity);
    gst_identity_notify_last_message (identity);
  }

  if (identity->datarate > 0) {
    GstClockTime time = gst_util_uint64_scale_int (identity->offset,
        GST_SECOND, identity->datarate);

    GST_BUFFER_TIMESTAMP (buf) = time;
    GST_BUFFER_DURATION (buf) =
        GST_BUFFER_SIZE (buf) * GST_SECOND / identity->datarate;
  }

  if (identity->signal_handoffs)
    g_signal_emit (identity, gst_identity_signals[SIGNAL_HANDOFF], 0, buf);

  if (trans->segment.format == GST_FORMAT_TIME)
    runtimestamp = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buf));

  if ((identity->sync) && (trans->segment.format == GST_FORMAT_TIME)) {
    GstClock *clock;

    GST_OBJECT_LOCK (identity);
    if ((clock = GST_ELEMENT (identity)->clock)) {
      GstClockReturn cret;
      GstClockTime timestamp;

      timestamp = runtimestamp + GST_ELEMENT (identity)->base_time;

      /* save id if we need to unlock */
      /* FIXME: actually unlock this somewhere in the state changes */
      identity->clock_id = gst_clock_new_single_shot_id (clock, timestamp);
//......... (part of the code omitted here) .........
Developer ID: spunktsch; project: svtplayer; lines of code: 101; source file: gstidentity.c


Example 13: gst_base_rtp_audio_payload_handle_buffer

static GstFlowReturn
gst_base_rtp_audio_payload_handle_buffer (GstBaseRTPPayload *
    basepayload, GstBuffer * buffer)
{
  GstBaseRTPAudioPayload *payload;
  GstBaseRTPAudioPayloadPrivate *priv;
  guint payload_len;
  GstFlowReturn ret;
  guint available;
  guint min_payload_len;
  guint max_payload_len;
  guint align;
  guint size;
  gboolean discont;

  ret = GST_FLOW_OK;

  payload = GST_BASE_RTP_AUDIO_PAYLOAD_CAST (basepayload);
  priv = payload->priv;

  discont = GST_BUFFER_IS_DISCONT (buffer);
  if (discont) {
    GstClockTime timestamp;

    GST_DEBUG_OBJECT (payload, "Got DISCONT");
    /* flush everything out of the adapter, mark DISCONT */
    ret = gst_base_rtp_audio_payload_flush (payload, -1, -1);
    priv->discont = TRUE;

    timestamp = GST_BUFFER_TIMESTAMP (buffer);

    /* get the distance between the timestamp gap and produce the same gap in
     * the RTP timestamps */
    if (priv->last_timestamp != -1 && timestamp != -1) {
      /* we had a last timestamp, compare it to the new timestamp and update the
       * offset counter for RTP timestamps. The effect is that we will produce
       * output buffers containing the same RTP timestamp gap as the gap
       * between the GST timestamps. */
      if (timestamp > priv->last_timestamp) {
        GstClockTime diff;
        guint64 bytes;
        /* we're only going to apply a positive gap, otherwise we let the marker
         * bit do its thing. simply convert to bytes and add them to the current
         * offset */
        diff = timestamp - priv->last_timestamp;
        bytes = priv->time_to_bytes (payload, diff);
        priv->offset += bytes;

        GST_DEBUG_OBJECT (payload,
            "elapsed time %" GST_TIME_FORMAT ", bytes %" G_GUINT64_FORMAT
            ", new offset %" G_GUINT64_FORMAT, GST_TIME_ARGS (diff), bytes,
            priv->offset);
      }
    }
  }

  if (!gst_base_rtp_audio_payload_get_lengths (basepayload, &min_payload_len,
          &max_payload_len, &align))
    goto config_error;

  GST_DEBUG_OBJECT (payload,
      "Calculated min_payload_len %u and max_payload_len %u",
      min_payload_len, max_payload_len);

  size = GST_BUFFER_SIZE (buffer);

  /* shortcut, we don't need to use the adapter when the packet can be pushed
   * through directly. */
  available = gst_adapter_available (priv->adapter);

  GST_DEBUG_OBJECT (payload, "got buffer size %u, available %u",
      size, available);

  if (available == 0 && (size >= min_payload_len && size <= max_payload_len) &&
      (size % align == 0)) {
    /* If buffer fits on an RTP packet, let's just push it through
     * this will check against max_ptime and max_mtu */
    GST_DEBUG_OBJECT (payload, "Fast packet push");
    ret = gst_base_rtp_audio_payload_push_buffer (payload, buffer);
  } else {
    /* push the buffer in the adapter */
    gst_adapter_push (priv->adapter, buffer);
    available += size;

    GST_DEBUG_OBJECT (payload, "available now %u", available);

    /* as long as we have full frames */
    while (available >= min_payload_len) {
      /* get multiple of alignment */
      payload_len = MIN (max_payload_len, available);
      payload_len = ALIGN_DOWN (payload_len, align);

      /* and flush out the bytes from the adapter, automatically set the
       * timestamp. */
      ret = gst_base_rtp_audio_payload_flush (payload, payload_len, -1);

      available -= payload_len;
      GST_DEBUG_OBJECT (payload, "available after push %u", available);
    }
  }
//......... (part of the code omitted here) .........
Developer ID: genesi; project: gst-base-plugins; lines of code: 101; source file: gstbasertpaudiopayload.c


Example 14: gst_ogg_parse_chain

/* Reads in buffers, parses them, reframes into one-buffer-per-ogg-page, submits
 * pages to output pad.
 */
static GstFlowReturn
gst_ogg_parse_chain (GstPad * pad, GstBuffer * buffer)
{
  GstOggParse *ogg;
  GstFlowReturn result = GST_FLOW_OK;
  gint ret = -1;
  guint32 serialno;
  GstBuffer *pagebuffer;
  GstClockTime buffertimestamp = GST_BUFFER_TIMESTAMP (buffer);

  ogg = GST_OGG_PARSE (GST_OBJECT_PARENT (pad));

  GST_LOG_OBJECT (ogg, "Chain function received buffer of size %d",
      GST_BUFFER_SIZE (buffer));

  gst_ogg_parse_submit_buffer (ogg, buffer);

  while (ret != 0 && result == GST_FLOW_OK) {
    ogg_page page;

    /* We use ogg_sync_pageseek() rather than ogg_sync_pageout() so that we can
     * track how many bytes the ogg layer discarded (in the case of sync errors,
     * etc.); this allows us to accurately track the current stream offset
     */
    ret = ogg_sync_pageseek (&ogg->sync, &page);
    if (ret == 0) {
      /* need more data, that's fine... */
      break;
    } else if (ret < 0) {
      /* discontinuity; track how many bytes we skipped (-ret) */
      ogg->offset -= ret;
    } else {
      gint64 granule = ogg_page_granulepos (&page);
#ifndef GST_DISABLE_GST_DEBUG
      int bos = ogg_page_bos (&page);
#endif
      guint64 startoffset = ogg->offset;
      GstOggStream *stream;
      gboolean keyframe;

      serialno = ogg_page_serialno (&page);
      stream = gst_ogg_parse_find_stream (ogg, serialno);

      GST_LOG_OBJECT (ogg, "Timestamping outgoing buffer as %" GST_TIME_FORMAT,
          GST_TIME_ARGS (buffertimestamp));

      if (stream) {
        buffertimestamp = gst_ogg_stream_get_end_time_for_granulepos (stream,
            granule);
        if (ogg->video_stream) {
          if (stream == ogg->video_stream) {
            keyframe = gst_ogg_stream_granulepos_is_key_frame (stream, granule);
          } else {
            keyframe = FALSE;
          }
        } else {
          keyframe = TRUE;
        }
      } else {
        buffertimestamp = GST_CLOCK_TIME_NONE;
        keyframe = TRUE;
      }
      pagebuffer = gst_ogg_parse_buffer_from_page (&page, startoffset,
          buffertimestamp);

      /* We read out 'ret' bytes, so we set the next offset appropriately */
      ogg->offset += ret;

      GST_LOG_OBJECT (ogg,
          "processing ogg page (serial %08x, pageno %ld, "
          "granule pos %" G_GUINT64_FORMAT ", bos %d, offset %"
          G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT ") keyframe=%d",
          serialno, ogg_page_pageno (&page),
          granule, bos, startoffset, ogg->offset, keyframe);

      if (ogg_page_bos (&page)) {
        /* If we've seen this serialno before, this is technically an error,
         * we log this case but accept it - this one replaces the previous
         * stream with this serialno. We can do this since we're streaming, and
         * not supporting seeking...
         */
        GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno);

        if (stream != NULL) {
          GST_LOG_OBJECT (ogg, "Incorrect stream; repeats serial number %u "
              "at offset %" G_GINT64_FORMAT, serialno, ogg->offset);
        }

        if (ogg->last_page_not_bos) {
          GST_LOG_OBJECT (ogg, "Deleting all referenced streams, found a new "
              "chain starting with serial %u", serialno);
          gst_ogg_parse_delete_all_streams (ogg);
        }

        stream = gst_ogg_parse_new_stream (ogg, &page);

        ogg->last_page_not_bos = FALSE;
//......... (part of the code omitted here) .........
Developer ID: ChinnaSuhas; project: ossbuild; lines of code: 101; source file: gstoggparse.c


Example 15: gst_base_video_decoder_chain

static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (!base_video_decoder->have_segment) {
    GstEvent *event;
    GstFlowReturn ret;

    GST_WARNING
        ("Received buffer without a new-segment. Assuming timestamps start from 0.");

    gst_segment_set_newsegment_full (&base_video_decoder->segment,
        FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
    base_video_decoder->have_segment = TRUE;

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
        GST_CLOCK_TIME_NONE, 0);

    ret =
        gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
        event);
    if (!ret) {
      GST_ERROR ("new segment event ret=%d", ret);
      return GST_FLOW_ERROR;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    gst_base_video_decoder_flush (base_video_decoder);
  }

  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);
  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
  }

  if (!base_video_decoder->current_frame)
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);

  if (base_video_decoder->packetized) {
    base_video_decoder->current_frame->sink_buffer = buf;

    ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE, NULL);
  } else {

    gst_adapter_push (base_video_decoder->input_adapter, buf);

    ret = gst_base_video_decoder_drain (base_video_decoder, FALSE);
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
