C++ bytestream_put_buffer Function Code Examples


This article collects typical usage examples of the bytestream_put_buffer function, drawn from FFmpeg and related C/C++ projects. If you are wondering what bytestream_put_buffer does, how to call it, or what real-world uses look like, the selected examples below should help.



The following presents 20 code examples of the bytestream_put_buffer function, taken from open-source projects and ordered roughly by popularity.
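Before the examples, it helps to know what the function actually does. In FFmpeg's internal libavcodec/bytestream.h, bytestream_put_buffer(uint8_t **b, const uint8_t *src, unsigned int size) copies size bytes from src into the buffer pointed to by *b and then advances *b past the written bytes, so successive bytestream_put_* calls append data back to back through a single write cursor. The minimal sketch below illustrates that pattern; because bytestream.h is an FFmpeg-internal header, the sketch defines an equivalent stand-in (for illustration only) rather than including the real header.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Illustrative stand-in with the same semantics as FFmpeg's
 * bytestream_put_buffer() in libavcodec/bytestream.h: copy `size`
 * bytes from `src` into `*b` and advance the write cursor.
 * Inside FFmpeg code you would include "bytestream.h" instead. */
static void bytestream_put_buffer(uint8_t **b, const uint8_t *src,
                                  unsigned int size)
{
    memcpy(*b, src, size);
    *b += size;
}

int main(void)
{
    uint8_t out[16];
    uint8_t *p = out;                     /* write cursor into out */

    /* successive calls append back to back, exactly like the
     * header-building examples below */
    bytestream_put_buffer(&p, (const uint8_t *)"fLaC", 4);
    bytestream_put_buffer(&p, (const uint8_t *)"DATA", 4);

    printf("wrote %d bytes: %.8s\n", (int)(p - out), out);
    return 0;
}

Every example that follows uses the same idiom: take the address of a uint8_t * cursor, emit fields through the bytestream_put_* helpers, and, where needed, compute the number of bytes written as the cursor minus the buffer start.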

Example 1: ogg_build_flac_headers

static int ogg_build_flac_headers(const uint8_t *extradata, int extradata_size,
                                  OGGStreamContext *oggstream, int bitexact)
{
    const char *vendor = bitexact ? "ffmpeg" : LIBAVFORMAT_IDENT;
    uint8_t *p;
    if (extradata_size != 34)
        return -1;
    oggstream->header_len[0] = 51;
    oggstream->header[0] = av_mallocz(51); // per ogg flac specs
    p = oggstream->header[0];
    bytestream_put_byte(&p, 0x7F);
    bytestream_put_buffer(&p, "FLAC", 4);
    bytestream_put_byte(&p, 1); // major version
    bytestream_put_byte(&p, 0); // minor version
    bytestream_put_be16(&p, 1); // headers packets without this one
    bytestream_put_buffer(&p, "fLaC", 4);
    bytestream_put_byte(&p, 0x00); // streaminfo
    bytestream_put_be24(&p, 34);
    bytestream_put_buffer(&p, extradata, 34);
    oggstream->header_len[1] = 1+3+4+strlen(vendor)+4;
    oggstream->header[1] = av_mallocz(oggstream->header_len[1]);
    p = oggstream->header[1];
    bytestream_put_byte(&p, 0x84); // last metadata block and vorbis comment
    bytestream_put_be24(&p, oggstream->header_len[1] - 4);
    bytestream_put_le32(&p, strlen(vendor));
    bytestream_put_buffer(&p, vendor, strlen(vendor));
    bytestream_put_le32(&p, 0); // user comment list length
    return 0;
}
Developer: Haaaaaank, Project: avbin, Lines: 29, Source: oggenc.c


Example 2: av_packet_merge_side_data

int av_packet_merge_side_data(AVPacket *pkt){
    if(pkt->side_data_elems){
        AVBufferRef *buf;
        int i;
        uint8_t *p;
        uint64_t size= pkt->size + 8LL + AV_INPUT_BUFFER_PADDING_SIZE;
        AVPacket old= *pkt;
        for (i=0; i<old.side_data_elems; i++) {
            size += old.side_data[i].size + 5LL;
        }
        if (size > INT_MAX)
            return AVERROR(EINVAL);
        buf = av_buffer_alloc(size);
        if (!buf)
            return AVERROR(ENOMEM);
        pkt->buf = buf;
        pkt->data = p = buf->data;
        pkt->size = size - AV_INPUT_BUFFER_PADDING_SIZE;
        bytestream_put_buffer(&p, old.data, old.size);
        for (i=old.side_data_elems-1; i>=0; i--) {
            bytestream_put_buffer(&p, old.side_data[i].data, old.side_data[i].size);
            bytestream_put_be32(&p, old.side_data[i].size);
            *p++ = old.side_data[i].type | ((i==old.side_data_elems-1)*128);
        }
        bytestream_put_be64(&p, FF_MERGE_MARKER);
        av_assert0(p-pkt->data == pkt->size);
        memset(p, 0, AV_INPUT_BUFFER_PADDING_SIZE);
        av_packet_unref(&old);
        pkt->side_data_elems = 0;
        pkt->side_data = NULL;
        return 1;
    }
    return 0;
}
Developer: Bilibili, Project: FFmpeg, Lines: 34, Source: avpacket.c


Example 3: mjpega_dump_header

static int mjpega_dump_header(AVBitStreamFilterContext *bsfc, AVCodecContext *avctx, const char *args,
                              uint8_t **poutbuf, int *poutbuf_size,
                              const uint8_t *buf, int buf_size, int keyframe)
{
    uint8_t *poutbufp;
    int i;

    if (avctx->codec_id != CODEC_ID_MJPEG) {
        av_log(avctx, AV_LOG_ERROR, "mjpega bitstream filter only applies to mjpeg codec\n");
        return 0;
    }

    *poutbuf_size = 0;
    *poutbuf = av_malloc(buf_size + 44 + FF_INPUT_BUFFER_PADDING_SIZE);
    poutbufp = *poutbuf;
    bytestream_put_byte(&poutbufp, 0xff);
    bytestream_put_byte(&poutbufp, SOI);
    bytestream_put_byte(&poutbufp, 0xff);
    bytestream_put_byte(&poutbufp, APP1);
    bytestream_put_be16(&poutbufp, 42); /* size */
    bytestream_put_be32(&poutbufp, 0);
#ifdef __CW32__
    bytestream_put_buffer(&poutbufp, (const unsigned char*)"mjpg", 4);
#else
    bytestream_put_buffer(&poutbufp, "mjpg", 4);
#endif
    bytestream_put_be32(&poutbufp, buf_size + 44); /* field size */
    bytestream_put_be32(&poutbufp, buf_size + 44); /* pad field size */
    bytestream_put_be32(&poutbufp, 0);             /* next ptr */

    for (i = 0; i < buf_size - 1; i++) {
        if (buf[i] == 0xff) {
            switch (buf[i + 1]) {
            case DQT:  /* quant off */
            case DHT:  /* huff  off */
            case SOF0: /* image off */
                bytestream_put_be32(&poutbufp, i + 46);
                break;
            case SOS:
                bytestream_put_be32(&poutbufp, i + 46); /* scan off */
                bytestream_put_be32(&poutbufp, i + 46 + AV_RB16(buf + i + 2)); /* data off */
                bytestream_put_buffer(&poutbufp, buf + 2, buf_size - 2); /* skip already written SOI */
                *poutbuf_size = poutbufp - *poutbuf;
                return 1;
            case APP1:
                if (i + 8 < buf_size && AV_RL32(buf + i + 8) == ff_get_fourcc("mjpg")) {
                    av_log(avctx, AV_LOG_ERROR, "bitstream already formatted\n");
                    memcpy(*poutbuf, buf, buf_size);
                    *poutbuf_size = buf_size;
                    return 1;
                }
            }
        }
    }
    av_freep(poutbuf);
    av_log(avctx, AV_LOG_ERROR, "could not find SOS marker in bitstream\n");
    return 0;
}
Developer: AnthonyNystrom, Project: MobiVU, Lines: 58, Source: mjpega_dump_header_bsf.c


Example 4: ff_amf_write_string2

void ff_amf_write_string2(uint8_t **dst, const char *str1, const char *str2)
{
    int len1 = 0, len2 = 0;
    if (str1)
        len1 = strlen(str1);
    if (str2)
        len2 = strlen(str2);
    bytestream_put_byte(dst, AMF_DATA_TYPE_STRING);
    bytestream_put_be16(dst, len1 + len2);
    bytestream_put_buffer(dst, str1, len1);
    bytestream_put_buffer(dst, str2, len2);
}
Developer: JulianoAmaralChaves, Project: ppsspp-ffmpeg, Lines: 12, Source: rtmppkt.c


Example 5: read_packet

static int read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVCodecContext *codec = s->streams[0]->codec;
    BRSTMDemuxContext *b = s->priv_data;
    uint32_t samples, size;
    int ret;

    if (avio_feof(s->pb))
        return AVERROR_EOF;
    b->current_block++;
    if (b->current_block == b->block_count) {
        size    = b->last_block_used_bytes;
        samples = size / (8 * codec->channels) * 14;
    } else if (b->current_block < b->block_count) {
        size    = b->block_size;
        samples = b->samples_per_block;
    } else {
        return AVERROR_EOF;
    }

    if ((codec->codec_id == AV_CODEC_ID_ADPCM_THP ||
         codec->codec_id == AV_CODEC_ID_ADPCM_THP_LE) && !codec->extradata) {
        uint8_t *dst;

        if (av_new_packet(pkt, 8 + (32 + 4) * codec->channels + size) < 0)
            return AVERROR(ENOMEM);
        dst = pkt->data;
        bytestream_put_be32(&dst, size);
        bytestream_put_be32(&dst, samples);
        bytestream_put_buffer(&dst, b->table, 32 * codec->channels);
        bytestream_put_buffer(&dst, b->adpc + 4 * codec->channels *
                                    (b->current_block - 1), 4 * codec->channels);

        ret = avio_read(s->pb, dst, size);
        if (ret != size)
            av_free_packet(pkt);
        pkt->duration = samples;
    } else {
        ret = av_get_packet(s->pb, pkt, size);
    }

    pkt->stream_index = 0;

    if (ret != size)
        ret = AVERROR(EIO);

    return ret;
}
Developer: tenbullssh, Project: FFmpeg, Lines: 48, Source: brstm.c


Example 6: ogg_build_speex_headers

static int ogg_build_speex_headers(AVCodecContext *avctx,
                                   OGGStreamContext *oggstream, int bitexact)
{
    uint8_t *p;

    if (avctx->extradata_size < SPEEX_HEADER_SIZE)
        return -1;

    // first packet: Speex header
    p = av_mallocz(SPEEX_HEADER_SIZE);
    if (!p)
        return AVERROR_NOMEM;
    oggstream->header[0] = p;
    oggstream->header_len[0] = SPEEX_HEADER_SIZE;
    bytestream_put_buffer(&p, avctx->extradata, SPEEX_HEADER_SIZE);
    AV_WL32(&oggstream->header[0][68], 0);  // set extra_headers to 0

    // second packet: VorbisComment
    p = ogg_write_vorbiscomment(0, bitexact, &oggstream->header_len[1]);
    if (!p)
        return AVERROR_NOMEM;
    oggstream->header[1] = p;

    return 0;
}
Developer: BigBadOwl, Project: iPhone-Ffmpeg, Lines: 25, Source: oggenc.c


Example 7: gif_image_write_image

static int gif_image_write_image(uint8_t **bytestream,
                                 int x1, int y1, int width, int height,
                                 const uint8_t *buf, int linesize, int pix_fmt)
{
    PutBitContext p;
    uint8_t buffer[200]; /* 100 * 9 / 8 = 113 */
    int i, left, w;
    const uint8_t *ptr;
    /* image block */

    bytestream_put_byte(bytestream, 0x2c);
    bytestream_put_le16(bytestream, x1);
    bytestream_put_le16(bytestream, y1);
    bytestream_put_le16(bytestream, width);
    bytestream_put_le16(bytestream, height);
    bytestream_put_byte(bytestream, 0x00); /* flags */
    /* no local clut */

    bytestream_put_byte(bytestream, 0x08);

    left= width * height;

    init_put_bits(&p, buffer, 130);

/*
 * the thing here is the bitstream is written as little packets, with a size byte before
 * but it's still the same bitstream between packets (no flush !)
 */
    ptr = buf;
    w = width;
    while(left>0) {

        put_bits(&p, 9, 0x0100); /* clear code */

        for(i=(left<GIF_CHUNKS)?left:GIF_CHUNKS;i;i--) {
            put_bits(&p, 9, *ptr++);
            if (--w == 0) {
                w = width;
                buf += linesize;
                ptr = buf;
            }
        }

        if(left<=GIF_CHUNKS) {
            put_bits(&p, 9, 0x101); /* end of stream */
            flush_put_bits(&p);
        }
        if(pbBufPtr(&p) - p.buf > 0) {
            bytestream_put_byte(bytestream, pbBufPtr(&p) - p.buf); /* byte count of the packet */
            bytestream_put_buffer(bytestream, p.buf, pbBufPtr(&p) - p.buf); /* the actual buffer */
            p.buf_ptr = p.buf; /* dequeue the bytes off the bitstream */
        }
        left-=GIF_CHUNKS;
    }
    bytestream_put_byte(bytestream, 0x00); /* end of image block */
    bytestream_put_byte(bytestream, 0x3b);
    return 0;
}
Developer: BlackMael, Project: DirectEncode, Lines: 58, Source: gif.c


Example 8: ff_vorbiscomment_write

int ff_vorbiscomment_write(uint8_t **p, AVDictionary **m,
                           const char *vendor_string, const unsigned count)
{
    bytestream_put_le32(p, strlen(vendor_string));
    bytestream_put_buffer(p, vendor_string, strlen(vendor_string));
    if (*m) {
        AVDictionaryEntry *tag = NULL;
        bytestream_put_le32(p, count);
        while ((tag = av_dict_get(*m, "", tag, AV_DICT_IGNORE_SUFFIX))) {
            unsigned int len1 = strlen(tag->key);
            unsigned int len2 = strlen(tag->value);
            bytestream_put_le32(p, len1+1+len2);
            bytestream_put_buffer(p, tag->key, len1);
            bytestream_put_byte(p, '=');
            bytestream_put_buffer(p, tag->value, len2);
        }
    } else
        bytestream_put_le32(p, 0);
    return 0;
}
Developer: AVbin, Project: libav, Lines: 20, Source: vorbiscomment.c


Example 9: av_packet_merge_side_data

int av_packet_merge_side_data(AVPacket *pkt){
    if(pkt->side_data_elems){
        AVBufferRef *buf;
        int i;
        uint8_t *p;
        uint64_t size= pkt->size + 8LL + FF_INPUT_BUFFER_PADDING_SIZE;
        AVPacket old= *pkt;
        for (i=0; i<old.side_data_elems; i++) {
            size += old.side_data[i].size + 5LL;
        }
        if (size > INT_MAX)
            return AVERROR(EINVAL);
        buf = av_buffer_alloc(size);
        if (!buf)
            return AVERROR(ENOMEM);
        pkt->buf = buf;
        pkt->data = p = buf->data;
#if FF_API_DESTRUCT_PACKET
FF_DISABLE_DEPRECATION_WARNINGS
        pkt->destruct = dummy_destruct_packet;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
        pkt->size = size - FF_INPUT_BUFFER_PADDING_SIZE;
        bytestream_put_buffer(&p, old.data, old.size);
        for (i=old.side_data_elems-1; i>=0; i--) {
            bytestream_put_buffer(&p, old.side_data[i].data, old.side_data[i].size);
            bytestream_put_be32(&p, old.side_data[i].size);
            *p++ = old.side_data[i].type | ((i==old.side_data_elems-1)*128);
        }
        bytestream_put_be64(&p, FF_MERGE_MARKER);
        av_assert0(p-pkt->data == pkt->size);
        memset(p, 0, FF_INPUT_BUFFER_PADDING_SIZE);
        av_free_packet(&old);
        pkt->side_data_elems = 0;
        pkt->side_data = NULL;
        return 1;
    }
    return 0;
}
Developer: Johnsonwu, Project: FFmpeg, Lines: 39, Source: avpacket.c


Example 10: write_typecode

/* NOTE: Typecodes must be spooled AFTER arguments!! */
static void write_typecode(CodingSpool *s, uint8_t type)
{
    s->typeSpool |= (type & 3) << (14 - s->typeSpoolLength);
    s->typeSpoolLength += 2;
    if (s->typeSpoolLength == 16) {
        bytestream_put_le16(s->pout, s->typeSpool);
        bytestream_put_buffer(s->pout, s->argumentSpool,
                              s->args - s->argumentSpool);
        s->typeSpoolLength = 0;
        s->typeSpool = 0;
        s->args = s->argumentSpool;
    }
}
Developer: 119, Project: dropcam_for_iphone, Lines: 14, Source: roqvideoenc.c


Example 11: gif_image_write_header

/* GIF header */
static int gif_image_write_header(AVCodecContext *avctx,
                                  uint8_t **bytestream, uint32_t *palette)
{
    int i;
    unsigned int v;

    bytestream_put_buffer(bytestream, "GIF", 3);
    bytestream_put_buffer(bytestream, "89a", 3);
    bytestream_put_le16(bytestream, avctx->width);
    bytestream_put_le16(bytestream, avctx->height);

    bytestream_put_byte(bytestream, 0xf7); /* flags: global clut, 256 entries */
    bytestream_put_byte(bytestream, 0x1f); /* background color index */
    bytestream_put_byte(bytestream, 0); /* aspect ratio */

    /* the global palette */
    for(i=0;i<256;i++) {
        v = palette[i];
        bytestream_put_be24(bytestream, v);
    }

    return 0;
}
Developer: Flameeyes, Project: libav, Lines: 24, Source: gif.c


Example 12: imx_dump_header

static int imx_dump_header(AVBitStreamFilterContext *bsfc, AVCodecContext *avctx, const char *args,
                           uint8_t **poutbuf, int *poutbuf_size,
                           const uint8_t *buf, int buf_size, int keyframe)
{
    /* MXF essence element key */
    static const uint8_t imx_header[16] = { 0x06,0x0e,0x2b,0x34,0x01,0x02,0x01,0x01,0x0d,0x01,0x03,0x01,0x05,0x01,0x01,0x00 };
    uint8_t *poutbufp;

    if (avctx->codec_id != AV_CODEC_ID_MPEG2VIDEO) {
        av_log(avctx, AV_LOG_ERROR, "imx bitstream filter only applies to mpeg2video codec\n");
        return 0;
    }

    *poutbuf = av_malloc(buf_size + 20 + FF_INPUT_BUFFER_PADDING_SIZE);
    if (!*poutbuf)
        return AVERROR(ENOMEM);
    poutbufp = *poutbuf;
    bytestream_put_buffer(&poutbufp, imx_header, 16);
    bytestream_put_byte(&poutbufp, 0x83); /* KLV BER long form */
    bytestream_put_be24(&poutbufp, buf_size);
    bytestream_put_buffer(&poutbufp, buf, buf_size);
    *poutbuf_size = poutbufp - *poutbuf;
    return 1;
}
Developer: TaoheGit, Project: hmi_sdl_android, Lines: 24, Source: imx_dump_header_bsf.c


Example 13: ogg_build_flac_headers

static int ogg_build_flac_headers(AVCodecContext *avctx,
                                  OGGStreamContext *oggstream, int bitexact)
{
    enum FLACExtradataFormat format;
    uint8_t *streaminfo;
    uint8_t *p;

    if (!ff_flac_is_extradata_valid(avctx, &format, &streaminfo))
        return -1;

    // first packet: STREAMINFO
    oggstream->header_len[0] = 51;
    oggstream->header[0] = av_mallocz(51); // per ogg flac specs
    p = oggstream->header[0];
    if (!p)
        return AVERROR_NOMEM;
    bytestream_put_byte(&p, 0x7F);
    bytestream_put_buffer(&p, "FLAC", 4);
    bytestream_put_byte(&p, 1); // major version
    bytestream_put_byte(&p, 0); // minor version
    bytestream_put_be16(&p, 1); // headers packets without this one
    bytestream_put_buffer(&p, "fLaC", 4);
    bytestream_put_byte(&p, 0x00); // streaminfo
    bytestream_put_be24(&p, 34);
    bytestream_put_buffer(&p, streaminfo, FLAC_STREAMINFO_SIZE);

    // second packet: VorbisComment
    p = ogg_write_vorbiscomment(4, bitexact, &oggstream->header_len[1]);
    if (!p)
        return AVERROR_NOMEM;
    oggstream->header[1] = p;
    bytestream_put_byte(&p, 0x84); // last metadata block and vorbis comment
    bytestream_put_be24(&p, oggstream->header_len[1] - 4);

    return 0;
}
Developer: BigBadOwl, Project: iPhone-Ffmpeg, Lines: 36, Source: oggenc.c


Example 14: gif_image_write_image

static int gif_image_write_image(AVCodecContext *avctx,
                                 uint8_t **bytestream, uint8_t *end,
                                 const uint8_t *buf, int linesize)
{
    GIFContext *s = avctx->priv_data;
    int len = 0, height;
    const uint8_t *ptr;
    /* image block */

    bytestream_put_byte(bytestream, 0x2c);
    bytestream_put_le16(bytestream, 0);
    bytestream_put_le16(bytestream, 0);
    bytestream_put_le16(bytestream, avctx->width);
    bytestream_put_le16(bytestream, avctx->height);
    bytestream_put_byte(bytestream, 0x00); /* flags */
    /* no local clut */

    bytestream_put_byte(bytestream, 0x08);

    ff_lzw_encode_init(s->lzw, s->buf, avctx->width*avctx->height,
                       12, FF_LZW_GIF, put_bits);

    ptr = buf;
    for (height = avctx->height; height--;) {
        len += ff_lzw_encode(s->lzw, ptr, avctx->width);
        ptr += linesize;
    }
    len += ff_lzw_encode_flush(s->lzw, flush_put_bits);

    ptr = s->buf;
    while (len > 0) {
        int size = FFMIN(255, len);
        bytestream_put_byte(bytestream, size);
        if (end - *bytestream < size)
            return -1;
        bytestream_put_buffer(bytestream, ptr, size);
        ptr += size;
        len -= size;
    }
    bytestream_put_byte(bytestream, 0x00); /* end of image block */
    bytestream_put_byte(bytestream, 0x3b);
    return 0;
}
Developer: Flameeyes, Project: libav, Lines: 43, Source: gif.c


Example 15: ogg_write_vorbiscomment

static uint8_t *ogg_write_vorbiscomment(int offset, int bitexact,
                                        int *header_len)
{
    const char *vendor = bitexact ? "ffmpeg" : LIBAVFORMAT_IDENT;
    int size;
    uint8_t *p, *p0;

    size = offset + 4 + strlen(vendor) + 4;
    p = av_mallocz(size);
    if (!p)
        return NULL;
    p0 = p;

    p += offset;
    bytestream_put_le32(&p, strlen(vendor));
    bytestream_put_buffer(&p, vendor, strlen(vendor));
    bytestream_put_le32(&p, 0); // user comment list length

    *header_len = size;
    return p0;
}
Developer: BigBadOwl, Project: iPhone-Ffmpeg, Lines: 21, Source: oggenc.c


Example 16: adx_encode_header

static int adx_encode_header(AVCodecContext *avctx, uint8_t *buf, int bufsize)
{
    ADXContext *c = avctx->priv_data;

    bytestream_put_be16(&buf, 0x8000);              /* header signature */
    bytestream_put_be16(&buf, HEADER_SIZE - 4);     /* copyright offset */
    bytestream_put_byte(&buf, 3);                   /* encoding */
    bytestream_put_byte(&buf, BLOCK_SIZE);          /* block size */
    bytestream_put_byte(&buf, 4);                   /* sample size */
    bytestream_put_byte(&buf, avctx->channels);     /* channels */
    bytestream_put_be32(&buf, avctx->sample_rate);  /* sample rate */
    bytestream_put_be32(&buf, 0);                   /* total sample count */
    bytestream_put_be16(&buf, c->cutoff);           /* cutoff frequency */
    bytestream_put_byte(&buf, 3);                   /* version */
    bytestream_put_byte(&buf, 0);                   /* flags */
    bytestream_put_be32(&buf, 0);                   /* unknown */
    bytestream_put_be32(&buf, 0);                   /* loop enabled */
    bytestream_put_be16(&buf, 0);                   /* padding */
    bytestream_put_buffer(&buf, "(c)CRI", 6);       /* copyright signature */

    return HEADER_SIZE;
}
Developer: 0xFFeng, Project: ffmpeg, Lines: 22, Source: adxenc.c


Example 17: write_codebooks

/**
 * Write codebook chunk
 */
static void write_codebooks(RoqContext *enc, RoqTempdata *tempData)
{
    int i, j;
    uint8_t **outp= &enc->out_buf;

    if (tempData->numCB2) {
        bytestream_put_le16(outp, RoQ_QUAD_CODEBOOK);
        bytestream_put_le32(outp, tempData->numCB2*6 + tempData->numCB4*4);
        bytestream_put_byte(outp, tempData->numCB4);
        bytestream_put_byte(outp, tempData->numCB2);

        for (i=0; i<tempData->numCB2; i++) {
            bytestream_put_buffer(outp, enc->cb2x2[tempData->f2i2[i]].y, 4);
            bytestream_put_byte(outp, enc->cb2x2[tempData->f2i2[i]].u);
            bytestream_put_byte(outp, enc->cb2x2[tempData->f2i2[i]].v);
        }

        for (i=0; i<tempData->numCB4; i++)
            for (j=0; j<4; j++)
                bytestream_put_byte(outp, tempData->i2f2[enc->cb4x4[tempData->f2i4[i]].idx[j]]);

    }
}
Developer: 119, Project: dropcam_for_iphone, Lines: 26, Source: roqvideoenc.c


Example 18: iff_read_header


//......... part of the function body omitted here .........

                metadata_size  = avio_rb32(pb);
                if ((res = get_metadata(s, tag, metadata_size)) < 0) {
                    av_log(s, AV_LOG_ERROR, "cannot allocate metadata tag %s!\n", tag);
                    return res;
                }

                if (metadata_size & 1)
                    avio_skip(pb, 1);
            }
            break;
        }

        if (metadata_tag) {
            if ((res = get_metadata(s, metadata_tag, data_size)) < 0) {
                av_log(s, AV_LOG_ERROR, "cannot allocate metadata tag %s!\n", metadata_tag);
                return res;
            }
        }
        avio_skip(pb, data_size - (avio_tell(pb) - orig_pos) + (data_size & 1));
    }

    avio_seek(pb, iff->body_pos, SEEK_SET);

    switch(st->codec->codec_type) {
    case AVMEDIA_TYPE_AUDIO:
        avpriv_set_pts_info(st, 32, 1, st->codec->sample_rate);

        if (st->codec->codec_tag == ID_16SV)
            st->codec->codec_id = AV_CODEC_ID_PCM_S16BE_PLANAR;
        else if (st->codec->codec_tag == ID_MAUD) {
            if (iff->maud_bits == 8 && !iff->maud_compression) {
                st->codec->codec_id = AV_CODEC_ID_PCM_U8;
            } else if (iff->maud_bits == 16 && !iff->maud_compression) {
                st->codec->codec_id = AV_CODEC_ID_PCM_S16BE;
            } else if (iff->maud_bits ==  8 && iff->maud_compression == 2) {
                st->codec->codec_id = AV_CODEC_ID_PCM_ALAW;
            } else if (iff->maud_bits ==  8 && iff->maud_compression == 3) {
                st->codec->codec_id = AV_CODEC_ID_PCM_MULAW;
            } else {
                avpriv_request_sample(s, "compression %d and bit depth %d", iff->maud_compression, iff->maud_bits);
                return AVERROR_PATCHWELCOME;
            }
        } else if (st->codec->codec_tag != ID_DSD) {
            switch (iff->svx8_compression) {
            case COMP_NONE:
                st->codec->codec_id = AV_CODEC_ID_PCM_S8_PLANAR;
                break;
            case COMP_FIB:
                st->codec->codec_id = AV_CODEC_ID_8SVX_FIB;
                break;
            case COMP_EXP:
                st->codec->codec_id = AV_CODEC_ID_8SVX_EXP;
                break;
            default:
                av_log(s, AV_LOG_ERROR,
                       "Unknown SVX8 compression method '%d'\n", iff->svx8_compression);
                return -1;
            }
        }

        st->codec->bits_per_coded_sample = av_get_bits_per_sample(st->codec->codec_id);
        st->codec->bit_rate = st->codec->channels * st->codec->sample_rate * st->codec->bits_per_coded_sample;
        st->codec->block_align = st->codec->channels * st->codec->bits_per_coded_sample;
        break;

    case AVMEDIA_TYPE_VIDEO:
        iff->bpp          = st->codec->bits_per_coded_sample;
        if ((screenmode & 0x800 /* Hold And Modify */) && iff->bpp <= 8) {
            iff->ham      = iff->bpp > 6 ? 6 : 4;
            st->codec->bits_per_coded_sample = 24;
        }
        iff->flags        = (screenmode & 0x80 /* Extra HalfBrite */) && iff->bpp <= 8;
        iff->masking      = masking;
        iff->transparency = transparency;

        if (!st->codec->extradata) {
            st->codec->extradata_size = IFF_EXTRA_VIDEO_SIZE;
            st->codec->extradata      = av_malloc(IFF_EXTRA_VIDEO_SIZE + FF_INPUT_BUFFER_PADDING_SIZE);
            if (!st->codec->extradata)
                return AVERROR(ENOMEM);
        }
        av_assert0(st->codec->extradata_size >= IFF_EXTRA_VIDEO_SIZE);
        buf = st->codec->extradata;
        bytestream_put_be16(&buf, IFF_EXTRA_VIDEO_SIZE);
        bytestream_put_byte(&buf, iff->bitmap_compression);
        bytestream_put_byte(&buf, iff->bpp);
        bytestream_put_byte(&buf, iff->ham);
        bytestream_put_byte(&buf, iff->flags);
        bytestream_put_be16(&buf, iff->transparency);
        bytestream_put_byte(&buf, iff->masking);
        bytestream_put_buffer(&buf, iff->tvdc, sizeof(iff->tvdc));
        st->codec->codec_id = AV_CODEC_ID_IFF_ILBM;
        break;
    default:
        return -1;
    }

    return 0;
}
Developer: ConeyLiu, Project: humble-video, Lines: 101, Source: iff.c


Example 19: gif_image_write_header

/* GIF header */
static int gif_image_write_header(uint8_t **bytestream,
                                  int width, int height, int loop_count,
                                  uint32_t *palette)
{
    int i;
    unsigned int v;

    bytestream_put_buffer(bytestream, "GIF", 3);
    bytestream_put_buffer(bytestream, "89a", 3);
    bytestream_put_le16(bytestream, width);
    bytestream_put_le16(bytestream, height);

    bytestream_put_byte(bytestream, 0xf7); /* flags: global clut, 256 entries */
    bytestream_put_byte(bytestream, 0x1f); /* background color index */
    bytestream_put_byte(bytestream, 0); /* aspect ratio */

    /* the global palette */
    if (!palette) {
        bytestream_put_buffer(bytestream, (const unsigned char *)gif_clut, 216*3);
        for(i=0;i<((256-216)*3);i++)
            bytestream_put_byte(bytestream, 0);
    } else {
        for(i=0;i<256;i++) {
            v = palette[i];
            bytestream_put_be24(bytestream, v);
        }
    }

        /*        update: this is the 'NETSCAPE EXTENSION' that allows for looped animated gif
                see http://members.aol.com/royalef/gifabout.htm#net-extension

                byte   1       : 33 (hex 0x21) GIF Extension code
                byte   2       : 255 (hex 0xFF) Application Extension Label
                byte   3       : 11 (hex (0x0B) Length of Application Block
                                         (eleven bytes of data to follow)
                bytes  4 to 11 : "NETSCAPE"
                bytes 12 to 14 : "2.0"
                byte  15       : 3 (hex 0x03) Length of Data Sub-Block
                                         (three bytes of data to follow)
                byte  16       : 1 (hex 0x01)
                bytes 17 to 18 : 0 to 65535, an unsigned integer in
                                         lo-hi byte format. This indicate the
                                         number of iterations the loop should
                                         be executed.
                bytes 19       : 0 (hex 0x00) a Data Sub-block Terminator
        */

    /* application extension header */
#ifdef GIF_ADD_APP_HEADER
    if (loop_count >= 0 && loop_count <= 65535) {
        bytestream_put_byte(bytestream, 0x21);
        bytestream_put_byte(bytestream, 0xff);
        bytestream_put_byte(bytestream, 0x0b);
        bytestream_put_buffer(bytestream, "NETSCAPE2.0", 11);  // bytes 4 to 14
        bytestream_put_byte(bytestream, 0x03); // byte 15
        bytestream_put_byte(bytestream, 0x01); // byte 16
        bytestream_put_le16(bytestream, (uint16_t)loop_count);
        bytestream_put_byte(bytestream, 0x00); // byte 19
    }
#endif
    return 0;
}
Developer: DanielGit, Project: Intrisit201202, Lines: 63, Source: gif.c


Example 20: read_packet

static int read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVCodecContext *codec = s->streams[0]->codec;
    BRSTMDemuxContext *b = s->priv_data;
    uint32_t samples, size, skip = 0;
    int ret, i;

    if (avio_feof(s->pb))
        return AVERROR_EOF;
    b->current_block++;
    if (b->current_block == b->block_count) {
        size    = b->last_block_used_bytes;
        samples = b->last_block_samples;
        skip    = b->last_block_size - b->last_block_used_bytes;

        if (samples < size * 14 / 8) {
            uint32_t adjusted_size = samples / 14 * 8;
            if (samples % 14)
                adjusted_size += (samples % 14 + 1) / 2 + 1;

            skip += size - adjusted_size;
            size = adjusted_size;
        }
    } else if (b->current_block < b->block_count) {
        size    = b->block_size;
        samples = b->samples_per_block;
    } else {
        return AVERROR_EOF;
    }

    if (codec->codec_id == AV_CODEC_ID_ADPCM_THP ||
        codec->codec_id == AV_CODEC_ID_ADPCM_THP_LE) {
        uint8_t *dst;

        if (!b->adpc) {
            av_log(s, AV_LOG_ERROR, "adpcm_thp requires ADPC chunk, but none was found.\n");
            return AVERROR_INVALIDDATA;
        }
        if (!b->table) {
            b->table = av_mallocz(32 * codec->channels);
            if (!b->table)
                return AVERROR(ENOMEM);
        }

        if (size > (INT_MAX - 32 - 4) ||
            (32 + 4 + size) > (INT_MAX / codec->channels) ||
            (32 + 4 + size) * codec->channels > INT_MAX - 8)
            return AVERROR_INVALIDDATA;
        if (av_new_packet(pkt, 8 + (32 + 4 + size) * codec->channels) < 0)
            return AVERROR(ENOMEM);
        dst = pkt->data;
        if (codec->codec_id == AV_CODEC_ID_ADPCM_THP_LE) {
            bytestream_put_le32(&dst, size * codec->channels);
            bytestream_put_le32(&dst, samples);
        } else {
            bytestream_put_be32(&dst, size * codec->channels);
            bytestream_put_be32(&dst, samples);
        }
        bytestream_put_buffer(&dst, b->table, 32 * codec->channels);
        bytestream_put_buffer(&dst, b->adpc + 4 * codec->channels *
                                    (b->current_block - 1), 4 * codec->channels);

        for (i = 0; i < codec->channels; i++) {
            ret = avio_read(s->pb, dst, size);
            dst += size;
            avio_skip(s->pb, skip);
            if (ret != size) {
                av_free_packet(pkt);
                break;
            }
        }
        pkt->duration = samples;
    } else {
        size *= codec->channels;
        ret = av_get_packet(s->pb, pkt, size);
    }

    pkt->stream_index = 0;

    if (ret != size)
        ret = AVERROR(EIO);

    return ret;
}
Developer: artclarke, Project: humble-video, Lines: 84, Source: brstm.c



Note: The bytestream_put_buffer examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs; the snippets are selected from open-source projects contributed by their respective developers. Copyright remains with the original authors; consult each project's license before redistributing or reusing the code. Do not reproduce this article without permission.

