本文整理汇总了C中bytestream2_get_bytes_left函数的典型用法代码示例。如果您正苦于以下问题:C bytestream2_get_bytes_left函数的具体用法?C bytestream2_get_bytes_left怎么用?C bytestream2_get_bytes_left使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了bytestream2_get_bytes_left函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C代码示例。
示例1: read_rle_sgi
/**
 * Read a run length encoded SGI image.
 *
 * The input starts with two tables (scanline start offsets and lengths,
 * height * depth 32-bit big-endian words each), followed by RLE data.
 * @param out_buf output buffer (points one line past the top of the image;
 *                rows are written bottom-up via the negative stride walk)
 * @param s the current image state
 * @return 0 if no error, else return error code.
 */
static int read_rle_sgi(uint8_t *out_buf, SgiState *s)
{
    uint8_t *dest_row;
    /* size of one RLE table: height * depth entries of 4 bytes */
    unsigned int len = s->height * s->depth * 4;
    GetByteContext g_table = s->g; /* second reader over the offset table */
    unsigned int y, z;
    unsigned int start_offset;
    /* hoisted: bytes per interleaved output row (loop-invariant; the
     * original recomputed this on every scanline) */
    int linesize = s->width * s->depth * s->bytes_per_channel;
    int ret;

    /* both the offset and the length tables must fit in the input */
    if (len * 2 > bytestream2_get_bytes_left(&s->g)) {
        return AVERROR_INVALIDDATA;
    }

    for (z = 0; z < s->depth; z++) {
        dest_row = out_buf;
        for (y = 0; y < s->height; y++) {
            /* SGI stores the image bottom-up */
            dest_row -= s->linesize;
            start_offset = bytestream2_get_be32(&g_table);
            /* jump to the RLE data for this scanline/channel */
            bytestream2_seek(&s->g, start_offset, SEEK_SET);
            if (s->bytes_per_channel == 1)
                ret = expand_rle_row8(s, dest_row + z, linesize, s->depth);
            else
                ret = expand_rle_row16(s, (uint16_t *)dest_row + z, linesize, s->depth);
            /* a decoded row must yield exactly one pixel per column */
            if (ret != s->width)
                return AVERROR_INVALIDDATA;
        }
    }
    return 0;
}
示例2: expand_rle_row
/**
 * Expand one RLE-encoded scanline into a single channel of the output.
 * @param s           current image state; input is consumed from s->g
 * @param out_buf     first destination byte of this channel in the line
 * @param out_end     end of line in the output buffer
 * @param pixelstride distance in bytes between consecutive pixels of this channel
 * @return number of pixels written, -1 if the output line would overflow,
 *         or AVERROR_INVALIDDATA if the input runs out
 */
static int expand_rle_row(SgiState *s, uint8_t *out_buf,
                          uint8_t *out_end, int pixelstride)
{
    unsigned char *row_start = out_buf;

    for (;;) {
        unsigned char opcode, run;

        if (bytestream2_get_bytes_left(&s->g) < 1)
            return AVERROR_INVALIDDATA;
        opcode = bytestream2_get_byteu(&s->g);

        run = opcode & 0x7f;
        if (run == 0) {
            /* a zero run length terminates the row */
            return (out_buf - row_start) / pixelstride;
        }

        /* reject runs that would write past the end of the line */
        if (out_buf + pixelstride * run >= out_end)
            return -1;

        if (opcode & 0x80) {
            /* literal run: copy `run` bytes from the input */
            do {
                *out_buf = bytestream2_get_byte(&s->g);
                out_buf += pixelstride;
            } while (--run);
        } else {
            /* replicate run: repeat the next input byte `run` times */
            unsigned char value = bytestream2_get_byte(&s->g);
            do {
                *out_buf = value;
                out_buf += pixelstride;
            } while (--run);
        }
    }
}
示例3: gif_parse_next_image
/* Walk the GIF block stream until an image block is decoded, the trailer
 * is hit, or the input is exhausted. */
static int gif_parse_next_image(GifState *s, AVFrame *frame)
{
    for (;;) {
        int block_label, ret;

        if (bytestream2_get_bytes_left(&s->gb) <= 0)
            break;

        block_label = bytestream2_get_byte(&s->gb);
        av_log(s->avctx, AV_LOG_DEBUG, "code=%02x '%c'\n", block_label, block_label);

        switch (block_label) {
        case GIF_IMAGE_SEPARATOR:
            return gif_read_image(s, frame);
        case GIF_EXTENSION_INTRODUCER:
            ret = gif_read_extension(s);
            if (ret < 0)
                return ret;
            break;
        case GIF_TRAILER:
            /* explicit end-of-stream marker */
            return AVERROR_EOF;
        default:
            /* unknown block label: corrupt stream */
            return AVERROR_INVALIDDATA;
        }
    }
    return AVERROR_EOF;
}
示例4: read_rle_sgi
/**
 * Decode a run length encoded SGI image.
 * @param out_buf destination buffer (rows are written bottom-up)
 * @param s the current image state
 * @return 0 on success, else a negative error number.
 */
static int read_rle_sgi(uint8_t *out_buf, SgiState *s)
{
    GetByteContext table = s->g; /* reader over the scanline offset table */
    /* bytes in one table: height * depth 32-bit words */
    unsigned int table_len = s->height * s->depth * 4;
    unsigned int chan, row;
    uint8_t *line;

    /* both the offset and the length tables must be present */
    if (table_len * 2 > bytestream2_get_bytes_left(&s->g)) {
        return AVERROR_INVALIDDATA;
    }

    for (chan = 0; chan < s->depth; chan++) {
        line = out_buf;
        for (row = 0; row < s->height; row++) {
            unsigned int offset;

            line -= s->linesize; /* SGI stores the image bottom-up */
            offset = bytestream2_get_be32(&table);
            bytestream2_seek(&s->g, offset, SEEK_SET);
            /* expand one scanline into this channel; it must produce
             * exactly one pixel per column */
            if (expand_rle_row(s, line + chan, line + FFABS(s->linesize),
                               s->depth) != s->width) {
                return AVERROR_INVALIDDATA;
            }
        }
    }
    return 0;
}
示例5: get_cod
/**
 * Parse a COD (coding style default) marker segment for a tile or the whole
 * image, and copy the resulting parameters to every component that has not
 * already been configured by a component-specific COC marker.
 * @param s          decoder context; input is consumed from s->g
 * @param c          per-component coding-style array to fill
 * @param properties per-component flags; entries with HAD_COC set are skipped
 * @return 0 on success, a negative error code on truncated input,
 *         -1 on an unsupported progression order
 */
static int get_cod(J2kDecoderContext *s, J2kCodingStyle *c, uint8_t *properties)
{
    J2kCodingStyle tmp;
    int compno;

    /* the fixed part of the segment read below is 5 bytes */
    if (bytestream2_get_bytes_left(&s->g) < 5)
        return AVERROR(EINVAL);

    /* 15 = log2 of the default precinct dimensions */
    tmp.log2_prec_width =
    tmp.log2_prec_height = 15;

    tmp.csty = bytestream2_get_byteu(&s->g);

    if (bytestream2_get_byteu(&s->g)) { // progression level: only 0 (LRCP) handled
        av_log(s->avctx, AV_LOG_ERROR, "only LRCP progression supported\n");
        return -1;
    }

    tmp.nlayers = bytestream2_get_be16u(&s->g);
    tmp.mct = bytestream2_get_byteu(&s->g); // multiple component transformation

    /* NOTE(review): the return value of get_cox() is ignored here; if it can
     * fail on malformed input, that failure is silently dropped — confirm. */
    get_cox(s, &tmp);

    for (compno = 0; compno < s->ncomponents; compno++) {
        if (!(properties[compno] & HAD_COC))
            memcpy(c + compno, &tmp, sizeof(J2kCodingStyle));
    }
    return 0;
}
示例6: tgq_decode_mb
/**
 * Decode one macroblock.
 * @param s    decoder context
 * @param mb_y macroblock row
 * @param mb_x macroblock column
 * @return <0 on error
 */
static int tgq_decode_mb(TgqContext *s, int mb_y, int mb_x){
    int mode;
    int i;
    int8_t dc[6];

    mode = bytestream2_get_byte(&s->gb);
    if (mode>12) {
        /* mode > 12: `mode` is the byte length of bitstream-coded block
         * data; decode all six blocks from it, then IDCT into the frame.
         * The bit reader is clamped to the bytes actually left. */
        GetBitContext gb;
        init_get_bits(&gb, s->gb.buffer, FFMIN(bytestream2_get_bytes_left(&s->gb), mode) * 8);
        for(i=0; i<6; i++)
            tgq_decode_block(s, s->block[i], &gb);
        tgq_idct_put_mb(s, s->block, mb_x, mb_y);
        bytestream2_skip(&s->gb, mode);
    }else{
        /* modes 3/6/12 carry only per-block DC values, in different layouts */
        if (mode==3) {
            /* one shared DC for the first four blocks, two explicit DCs
             * after it (presumably 4 luma + 2 chroma — confirm) */
            memset(dc, bytestream2_get_byte(&s->gb), 4);
            dc[4] = bytestream2_get_byte(&s->gb);
            dc[5] = bytestream2_get_byte(&s->gb);
        }else if (mode==6) {
            /* six explicit DC bytes */
            bytestream2_get_buffer(&s->gb, dc, 6);
        }else if (mode==12) {
            /* six DC bytes, each followed by one byte that is skipped */
            for (i = 0; i < 6; i++) {
                dc[i] = bytestream2_get_byte(&s->gb);
                bytestream2_skip(&s->gb, 1);
            }
        }else{
            av_log(s->avctx, AV_LOG_ERROR, "unsupported mb mode %i\n", mode);
            return -1;
        }
        tgq_idct_put_mb_dconly(s, mb_x, mb_y, dc);
    }
    return 0;
}
示例7: read_uncompressed_sgi
/**
 * Read an uncompressed SGI image: the input holds one full plane per
 * channel, which are interleaved into output pixels bottom-up.
 * @param out_buf output buffer
 * @param s the current image state
 * @return 0 if read success, else return error code.
 */
static int read_uncompressed_sgi(unsigned char *out_buf, SgiState *s)
{
    /* size in bytes of one full plane (channel) */
    unsigned int plane_size = s->height * s->width * s->bytes_per_channel;
    GetByteContext plane[4];
    int col, row, chan;

    /* all planes must be present in the input */
    if (plane_size * s->depth > bytestream2_get_bytes_left(&s->g))
        return AVERROR_INVALIDDATA;

    /* position one reader at the start of each plane */
    for (chan = 0; chan < s->depth; chan++) {
        plane[chan] = s->g;
        bytestream2_skip(&plane[chan], chan * plane_size);
    }

    /* SGI stores rows bottom-up; interleave the planes per pixel */
    for (row = s->height - 1; row >= 0; row--) {
        uint8_t *dst = out_buf + (row * s->linesize);
        if (s->bytes_per_channel == 1) {
            for (col = 0; col < s->width; col++)
                for (chan = 0; chan < s->depth; chan++)
                    *dst++ = bytestream2_get_byteu(&plane[chan]);
        } else {
            uint16_t *dst16 = (uint16_t *)dst;
            for (col = 0; col < s->width; col++)
                for (chan = 0; chan < s->depth; chan++)
                    *dst16++ = bytestream2_get_ne16u(&plane[chan]);
        }
    }
    return 0;
}
示例8: op
/**
 * Perform decode operation
 * @param dst pointer to destination image buffer
 * @param dst_end pointer to end of destination image buffer
 * @param gb GetByteContext (optional, see below)
 * @param pixel Fill color (optional, see below)
 * @param count Pixel count
 * @param x Pointer to x-axis counter (updated on return)
 * @param width Image width
 * @param linesize Destination image buffer linesize
 * @return non-zero if destination buffer is exhausted
 *
 * a copy operation is achieved when 'gb' is set
 * a fill operation is achieved when 'gb' is null and pixel is >= 0
 * a skip operation is achieved when 'gb' is null and pixel is < 0
 */
static inline int op(uint8_t **dst, const uint8_t *dst_end,
                     GetByteContext *gb,
                     int pixel, int count,
                     int *x, int width, int linesize)
{
    int remaining = width - *x;  /* pixels left on the current row */
    while(count > 0) {
        /* process at most up to the end of the current row */
        int striplen = FFMIN(count, remaining);
        if (gb) {
            /* copy: take striplen bytes from the input */
            if (bytestream2_get_bytes_left(gb) < striplen)
                goto exhausted;
            bytestream2_get_bufferu(gb, *dst, striplen);
        } else if (pixel >= 0)
            /* fill with constant color; pixel < 0 only advances (skip) */
            memset(*dst, pixel, striplen);
        *dst += striplen;
        remaining -= striplen;
        count -= striplen;
        if (remaining <= 0) {
            /* wrap to the start of the next row */
            *dst += linesize - width;
            remaining = width;
        }
        /* NOTE(review): negative linesize appears to mean bottom-up
         * rendering, hence the mirrored bound check — confirm with callers */
        if (linesize > 0) {
            if (*dst >= dst_end) goto exhausted;
        } else {
            if (*dst <= dst_end) goto exhausted;
        }
    }
    *x = width - remaining;
    return 0;
exhausted:
    *x = width - remaining;
    return 1;
}
示例9: mm_decode_inter
/*
 * Decode an inter (delta) frame: control codes are read from the head of
 * the packet, replacement colors from the data section at data_off.
 * @param half_horiz Half horizontal resolution (0 or 1)
 * @param half_vert Half vertical resolution (0 or 1)
 * @return 0 on success, AVERROR_INVALIDDATA on a bad data offset.
 */
static int mm_decode_inter(MmContext * s, int half_horiz, int half_vert)
{
    int data_off = bytestream2_get_le16(&s->gb);
    /* BUG FIX: y was uninitialized; it is read below (`y += x`,
     * `y + half_vert`), which was undefined behavior. Start at row 0. */
    int y = 0;
    GetByteContext data_ptr;

    if (bytestream2_get_bytes_left(&s->gb) < data_off)
        return AVERROR_INVALIDDATA;
    /* second reader over the color-data section */
    bytestream2_init(&data_ptr, s->gb.buffer + data_off, bytestream2_get_bytes_left(&s->gb) - data_off);

    /* consume control codes until the reader reaches the data section */
    while (s->gb.buffer < data_ptr.buffer_start) {
        int i, j;
        int length = bytestream2_get_byte(&s->gb);
        int x = bytestream2_get_byte(&s->gb) + ((length & 0x80) << 1);
        length &= 0x7F;

        if (length==0) {
            /* length 0 is a vertical skip: x holds the row count */
            y += x;
            continue;
        }

        if (y + half_vert >= s->avctx->height)
            return 0;

        for(i=0; i<length; i++) {
            /* each data byte is a bitmask: one bit per pixel to replace */
            int replace_array = bytestream2_get_byte(&s->gb);
            for(j=0; j<8; j++) {
                int replace = (replace_array >> (7-j)) & 1;
                if (replace) {
                    int color = bytestream2_get_byte(&data_ptr);
                    s->frame.data[0][y*s->frame.linesize[0] + x] = color;
                    if (half_horiz)
                        s->frame.data[0][y*s->frame.linesize[0] + x + 1] = color;
                    if (half_vert) {
                        s->frame.data[0][(y+1)*s->frame.linesize[0] + x] = color;
                        if (half_horiz)
                            s->frame.data[0][(y+1)*s->frame.linesize[0] + x + 1] = color;
                    }
                }
                x += 1 + half_horiz;
            }
        }
        y += 1 + half_vert;
    }

    return 0;
}
示例10: decode_extradata_ps_mp4
/* There are (invalid) samples in the wild with mp4-style extradata, where the
 * parameter sets are stored unescaped (i.e. as RBSP).
 * This function catches the parameter set decoding failure and tries again
 * after escaping it */
static int decode_extradata_ps_mp4(const uint8_t *buf, int buf_size, H264ParamSets *ps,
                                   int err_recognition, void *logctx)
{
    int ret;

    ret = decode_extradata_ps(buf, buf_size, ps, 1, logctx);
    if (ret < 0 && !(err_recognition & AV_EF_EXPLODE)) {
        GetByteContext gbc;
        PutByteContext pbc;
        uint8_t *escaped_buf;
        int escaped_buf_size;

        av_log(logctx, AV_LOG_WARNING,
               "SPS decoding failure, trying again after escaping the NAL\n");

        /* Worst case the escaping below emits 3 bytes for every 2 input
         * bytes; the result must also fit the 16-bit length field written
         * via AV_WB16 further down. */
        if (buf_size / 2 >= (INT16_MAX - AV_INPUT_BUFFER_PADDING_SIZE) / 3)
            return AVERROR(ERANGE);
        escaped_buf_size = buf_size * 3 / 2 + AV_INPUT_BUFFER_PADDING_SIZE;
        escaped_buf = av_mallocz(escaped_buf_size);
        if (!escaped_buf)
            return AVERROR(ENOMEM);

        bytestream2_init(&gbc, buf, buf_size);
        bytestream2_init_writer(&pbc, escaped_buf, escaped_buf_size);

        while (bytestream2_get_bytes_left(&gbc)) {
            /* insert an emulation-prevention byte: whenever the next 24 bits
             * read as 0x000000..0x000003, emit 00 00 03 and keep the third
             * input byte for the next iteration */
            if (bytestream2_get_bytes_left(&gbc) >= 3 &&
                bytestream2_peek_be24(&gbc) <= 3) {
                bytestream2_put_be24(&pbc, 3);
                bytestream2_skip(&gbc, 2);
            } else
                bytestream2_put_byte(&pbc, bytestream2_get_byte(&gbc));
        }

        escaped_buf_size = bytestream2_tell_p(&pbc);
        /* patch the escaped payload size into the leading 16-bit length */
        AV_WB16(escaped_buf, escaped_buf_size - 2);

        ret = decode_extradata_ps(escaped_buf, escaped_buf_size, ps, 1, logctx);
        av_freep(&escaped_buf);
        if (ret < 0)
            return ret;
    }
    return 0;
}
示例11: gif_read_header1
/* Parse the GIF signature, the logical screen descriptor, and (when
 * present) the global palette. */
static int gif_read_header1(GifState *s)
{
    uint8_t sig[6];
    int flags, palette_count;
    int has_global_palette;

    /* signature (6 bytes) + screen descriptor (7 bytes) */
    if (bytestream2_get_bytes_left(&s->gb) < 13)
        return AVERROR_INVALIDDATA;

    /* the stream must open with GIF87a or GIF89a */
    bytestream2_get_buffer(&s->gb, sig, 6);
    if (memcmp(sig, gif87a_sig, 6) &&
        memcmp(sig, gif89a_sig, 6))
        return AVERROR_INVALIDDATA;

    s->transparent_color_index = -1;
    s->screen_width  = bytestream2_get_le16(&s->gb);
    s->screen_height = bytestream2_get_le16(&s->gb);
    if ((unsigned)s->screen_width  > 32767 ||
        (unsigned)s->screen_height > 32767) {
        av_log(NULL, AV_LOG_ERROR, "picture size too large\n");
        return AVERROR_INVALIDDATA;
    }

    /* packed fields byte of the screen descriptor */
    flags = bytestream2_get_byte(&s->gb);
    s->color_resolution = ((flags & 0x70) >> 4) + 1;
    has_global_palette  = (flags & 0x80);
    s->bits_per_pixel   = (flags & 0x07) + 1;
    s->background_color_index = bytestream2_get_byte(&s->gb);
    bytestream2_get_byte(&s->gb); /* ignored */

    av_dlog(s->avctx, "gif: screen_w=%d screen_h=%d bpp=%d global_palette=%d\n",
            s->screen_width, s->screen_height, s->bits_per_pixel,
            has_global_palette);

    if (has_global_palette) {
        palette_count = 1 << s->bits_per_pixel;
        if (bytestream2_get_bytes_left(&s->gb) < palette_count * 3)
            return AVERROR_INVALIDDATA;
        bytestream2_get_buffer(&s->gb, s->global_palette, palette_count * 3);
    }
    return 0;
}
示例12: get_qcx
/** get common part for QCD and QCC segments */
static int get_qcx(J2kDecoderContext *s, int n, J2kQuantStyle *q)
{
int i, x;
if (bytestream2_get_bytes_left(&s->g) < 1)
return AVERROR(EINVAL);
x = bytestream2_get_byteu(&s->g); // Sqcd
q->nguardbits = x >> 5;
q->quantsty = x & 0x1f;
if (q->quantsty == J2K_QSTY_NONE){
n -= 3;
if (bytestream2_get_bytes_left(&s->g) < n || 32*3 < n)
return AVERROR(EINVAL);
for (i = 0; i < n; i++)
q->expn[i] = bytestream2_get_byteu(&s->g) >> 3;
} else if (q->quantsty == J2K_QSTY_SI){
示例13: hq_hqa_decode_frame
static int hq_hqa_decode_frame(AVCodecContext *avctx, void *data,
int *got_frame, AVPacket *avpkt)
{
HQContext *ctx = avctx->priv_data;
AVFrame *pic = data;
uint32_t info_tag;
unsigned int data_size;
int ret;
unsigned tag;
bytestream2_init(&ctx->gbc, avpkt->data, avpkt->size);
if (bytestream2_get_bytes_left(&ctx->gbc) < 4 + 4) {
av_log(avctx, AV_LOG_ERROR, "Frame is too small (%d).\n", avpkt->size);
return AVERROR_INVALIDDATA;
}
info_tag = bytestream2_peek_le32(&ctx->gbc);
if (info_tag == MKTAG('I', 'N', 'F', 'O')) {
int info_size;
bytestream2_skip(&ctx->gbc, 4);
info_size = bytestream2_get_le32(&ctx->gbc);
if (bytestream2_get_bytes_left(&ctx->gbc) < info_size) {
av_log(avctx, AV_LOG_ERROR, "Invalid INFO size (%d).\n", info_size);
return AVERROR_INVALIDDATA;
}
ff_canopus_parse_info_tag(avctx, ctx->gbc.buffer, info_size);
bytestream2_skip(&ctx->gbc, info_size);
}
data_size = bytestream2_get_bytes_left(&ctx->gbc);
if (data_size < 4) {
av_log(avctx, AV_LOG_ERROR, "Frame is too small (%d).\n", data_size);
return AVERROR_INVALIDDATA;
}
/* HQ defines dimensions and number of slices, and thus slice traversal
* order. HQA has no size constraint and a fixed number of slices, so it
* needs a separate scheme for it. */
tag = bytestream2_get_le32(&ctx->gbc);
if ((tag & 0x00FFFFFF) == (MKTAG('U', 'V', 'C', ' ') & 0x00FFFFFF)) {
ret = hq_decode_frame(ctx, pic, tag >> 24, data_size);
} else if (tag == MKTAG('H', 'Q', 'A', '1')) {
示例14: apng_probe
/*
 * To be a valid APNG file, we mandate, in this order:
 *   PNGSIG
 *   IHDR
 *   ...
 *   acTL
 *   ...
 *   IDAT
 */
static int apng_probe(AVProbeData *p)
{
    GetByteContext gb;
    uint32_t chunk_len, chunk_tag;
    int seen = 0; /* 0 = nothing yet, 1 = IHDR seen, 2 = IHDR + acTL seen */

    bytestream2_init(&gb, p->buf, p->buf_size);

    if (bytestream2_get_be64(&gb) != PNGSIG)
        return 0;

    while (1) {
        chunk_len = bytestream2_get_be32(&gb);
        if (chunk_len > 0x7fffffff)
            return 0;
        chunk_tag = bytestream2_get_le32(&gb);

        /* we don't check IDAT size, as this is the last tag
         * we check, and it may be larger than the probe buffer */
        if (chunk_tag != MKTAG('I', 'D', 'A', 'T') &&
            chunk_len > bytestream2_get_bytes_left(&gb))
            return 0;

        if (chunk_tag == MKTAG('I', 'H', 'D', 'R')) {
            if (chunk_len != 13)
                return 0;
            if (av_image_check_size(bytestream2_get_be32(&gb),
                                    bytestream2_get_be32(&gb), 0, NULL))
                return 0;
            bytestream2_skip(&gb, 9);
            seen++;
        } else if (chunk_tag == MKTAG('a', 'c', 'T', 'L')) {
            if (seen != 1 ||
                chunk_len != 8 ||
                bytestream2_get_be32(&gb) == 0) /* 0 is not a valid value for number of frames */
                return 0;
            bytestream2_skip(&gb, 8);
            seen++;
        } else if (chunk_tag == MKTAG('I', 'D', 'A', 'T')) {
            /* IDAT is only valid after both IHDR and acTL */
            if (seen != 2)
                return 0;
            return AVPROBE_SCORE_MAX;
        } else {
            /* skip any other chunk: payload plus 4-byte CRC */
            bytestream2_skip(&gb, chunk_len + 4);
        }
    }
}
示例15: parse_section_header
/* A section header is one long word: the low three bytes hold the payload
 * size (zero means the real size follows in an extra long word) and the
 * fourth byte holds the section type. */
static int parse_section_header(GetByteContext *gbc, int *section_size,
                                enum HapSectionType *section_type)
{
    if (bytestream2_get_bytes_left(gbc) < 4)
        return AVERROR_INVALIDDATA;

    *section_size = bytestream2_get_le24(gbc);
    *section_type = bytestream2_get_byte(gbc);

    /* extended form: full 32-bit size in the next long word */
    if (!*section_size) {
        if (bytestream2_get_bytes_left(gbc) < 4)
            return AVERROR_INVALIDDATA;
        *section_size = bytestream2_get_le32(gbc);
    }

    /* the payload must fit in the remaining input */
    if (*section_size < 0 || *section_size > bytestream2_get_bytes_left(gbc))
        return AVERROR_INVALIDDATA;

    return 0;
}