This article collects typical usage examples of the show_bits function in C/C++. If you are looking for what show_bits does, how to call it, or what real-world uses look like, the hand-picked examples below should help.
A total of 15 code examples of show_bits are shown below, sorted by popularity by default.
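All of the decoder examples below rely on the same contract: show_bits(gb, n) peeks at the next n bits of the stream without moving the read position, while get_bits(gb, n) reads and consumes them (and skip_bits(gb, n) discards them). The following is a minimal, self-contained sketch of that peek/consume behaviour; it is a hypothetical stand-in written for illustration, not FFmpeg's actual GetBitContext implementation.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical minimal bit reader, MSB-first; FFmpeg's real GetBitContext
 * is considerably more elaborate, this only illustrates the semantics. */
typedef struct {
    const uint8_t *buf;
    int size_in_bits;
    int index;              /* current bit position */
} BitReader;

/* Peek at the next n bits without consuming them. */
static unsigned show_bits(const BitReader *br, int n)
{
    unsigned v = 0;
    int i;
    for (i = 0; i < n; i++) {
        int pos = br->index + i;
        v = (v << 1) | ((br->buf[pos >> 3] >> (7 - (pos & 7))) & 1);
    }
    return v;
}

/* Read and consume the next n bits. */
static unsigned get_bits(BitReader *br, int n)
{
    unsigned v = show_bits(br, n);
    br->index += n;
    return v;
}

int main(void)
{
    const uint8_t data[] = { 0xA5, 0x3C };            /* bits: 10100101 00111100 */
    BitReader br = { data, 8 * (int)sizeof(data), 0 };

    printf("peek 4 bits: 0x%X\n", show_bits(&br, 4)); /* 0xA, position unchanged */
    printf("peek 4 bits: 0x%X\n", show_bits(&br, 4)); /* still 0xA */
    printf("read 4 bits: 0x%X\n", get_bits(&br, 4));  /* 0xA, now advances */
    printf("read 4 bits: 0x%X\n", get_bits(&br, 4));  /* 0x5 */
    return 0;
}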
Example 1: ff_ccitt_unpack
int ff_ccitt_unpack(AVCodecContext *avctx,
                    const uint8_t *src, int srcsize,
                    uint8_t *dst, int height, int stride,
                    enum TiffCompr compr, int opts)
{
    int j;
    GetBitContext gb;
    int *runs, *ref = NULL, *runend;
    int ret;
    int runsize = avctx->width + 2;
    int err = 0;
    int has_eol;

    runs = av_malloc(runsize * sizeof(runs[0]));
    ref  = av_malloc(runsize * sizeof(ref[0]));
    if (!runs || !ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ref[0] = avctx->width;
    ref[1] = 0;
    ref[2] = 0;
    init_get_bits(&gb, src, srcsize * 8);
    has_eol = show_bits(&gb, 12) == 1 || show_bits(&gb, 16) == 1;

    for (j = 0; j < height; j++) {
        runend = runs + runsize;
        if (compr == TIFF_G4) {
            ret = decode_group3_2d_line(avctx, &gb, avctx->width, runs, runend, ref);
            if (ret < 0) {
                err = -1;
                goto fail;
            }
        } else {
            int g3d1 = (compr == TIFF_G3) && !(opts & 1);
            if (compr != TIFF_CCITT_RLE && has_eol && find_group3_syncmarker(&gb, srcsize * 8) < 0)
                break;
            if (compr == TIFF_CCITT_RLE || g3d1 || get_bits1(&gb))
                ret = decode_group3_1d_line(avctx, &gb, avctx->width, runs, runend);
            else
                ret = decode_group3_2d_line(avctx, &gb, avctx->width, runs, runend, ref);
            if (compr == TIFF_CCITT_RLE)
                align_get_bits(&gb);
        }
        if (ret < 0) {
            put_line(dst, stride, avctx->width, ref);
        } else {
            put_line(dst, stride, avctx->width, runs);
            FFSWAP(int*, runs, ref);
        }
        dst += stride;
    }
fail:
    av_free(runs);
    av_free(ref);
    return err;
}
Example 2: ff_h264_decode_sei
int ff_h264_decode_sei(H264Context *h){
    while (get_bits_left(&h->gb) > 16) {
        int size, type;

        type = 0;
        do{
            if (get_bits_left(&h->gb) < 8)
                return -1;
            type += show_bits(&h->gb, 8);
        }while(get_bits(&h->gb, 8) == 255);

        size = 0;
        do{
            if (get_bits_left(&h->gb) < 8)
                return -1;
            size += show_bits(&h->gb, 8);
        }while(get_bits(&h->gb, 8) == 255);

        if(h->avctx->debug & FF_DEBUG_STARTCODE)
            av_log(h->avctx, AV_LOG_DEBUG, "SEI %d len:%d\n", type, size);

        switch(type){
        case SEI_TYPE_PIC_TIMING: // Picture timing SEI
            if(decode_picture_timing(h) < 0)
                return -1;
            break;
        case SEI_TYPE_USER_DATA_ITU_T_T35:
            if(decode_user_data_itu_t_t35(h, size) < 0)
                return -1;
            break;
        case SEI_TYPE_USER_DATA_UNREGISTERED:
            if(decode_unregistered_user_data(h, size) < 0)
                return -1;
            break;
        case SEI_TYPE_RECOVERY_POINT:
            if(decode_recovery_point(h) < 0)
                return -1;
            break;
        case SEI_BUFFERING_PERIOD:
            if(decode_buffering_period(h) < 0)
                return -1;
            break;
        case SEI_TYPE_FRAME_PACKING:
            if(decode_frame_packing(h, size) < 0)
                return -1;
            break;
        default:
            skip_bits(&h->gb, 8*size);
        }

        //FIXME check bits here
        align_get_bits(&h->gb);
    }
    return 0;
}
Example 3: ff_h264_decode_sei
int ff_h264_decode_sei(H264Context *h)
{
    while (get_bits_left(&h->gb) > 16) {
        int size = 0;
        int type = 0;
        int ret  = 0;

        do
            type += show_bits(&h->gb, 8);
        while (get_bits(&h->gb, 8) == 255);

        do
            size += show_bits(&h->gb, 8);
        while (get_bits(&h->gb, 8) == 255);

        if (size > get_bits_left(&h->gb) / 8) {
            av_log(h->avctx, AV_LOG_ERROR, "SEI type %d truncated at %d\n",
                   type, get_bits_left(&h->gb));
            return AVERROR_INVALIDDATA;
        }

        switch (type) {
        case SEI_TYPE_PIC_TIMING: // Picture timing SEI
            ret = decode_picture_timing(h);
            if (ret < 0)
                return ret;
            break;
        case SEI_TYPE_USER_DATA_UNREGISTERED:
            ret = decode_unregistered_user_data(h, size);
            if (ret < 0)
                return ret;
            break;
        case SEI_TYPE_RECOVERY_POINT:
            ret = decode_recovery_point(h);
            if (ret < 0)
                return ret;
            break;
        case SEI_BUFFERING_PERIOD:
            ret = decode_buffering_period(h);
            if (ret < 0)
                return ret;
            break;
        default:
            av_log(h->avctx, AV_LOG_DEBUG, "unknown SEI type %d\n", type);
            skip_bits(&h->gb, 8 * size);
        }

        // FIXME check bits here
        align_get_bits(&h->gb);
    }
    return 0;
}
Example 4: Get_Chroma_DC_dct_diff
static int Get_Chroma_DC_dct_diff(APEG_LAYER *layer)
{
    int code, size;

    /* decode length */
    code = show_bits(layer, 5);
    if (code < 31)
    {
        /* Table B-13, dct_dc_size_chrominance, codes 00xxx ... 11110 */
        static const VLCtab DCchromtab0[32] = {
            {0, 2}, {0, 2}, {0, 2}, {0, 2}, {0, 2}, {0, 2}, {0, 2}, {0, 2},
            {1, 2}, {1, 2}, {1, 2}, {1, 2}, {1, 2}, {1, 2}, {1, 2}, {1, 2},
            {2, 2}, {2, 2}, {2, 2}, {2, 2}, {2, 2}, {2, 2}, {2, 2}, {2, 2},
            {3, 3}, {3, 3}, {3, 3}, {3, 3}, {4, 4}, {4, 4}, {5, 5}, {0, 0}
        };
        size = DCchromtab0[code].val;
        apeg_flush_bits8(layer, DCchromtab0[code].len);
        if (size == 0)
            return 0;
    }
    else
    {
        /* Table B-13, dct_dc_size_chrominance, codes 111110xxxx ... 1111111111 */
        static const VLCtab DCchromtab1[32] = {
            {6, 6}, {6, 6}, {6, 6}, {6, 6}, {6, 6}, {6, 6}, { 6, 6}, { 6, 6},
            {6, 6}, {6, 6}, {6, 6}, {6, 6}, {6, 6}, {6, 6}, { 6, 6}, { 6, 6},
            {7, 7}, {7, 7}, {7, 7}, {7, 7}, {7, 7}, {7, 7}, { 7, 7}, { 7, 7},
            {8, 8}, {8, 8}, {8, 8}, {8, 8}, {9, 9}, {9, 9}, {10,10}, {11,10}
        };
        code = show_bits(layer, 10) - 992;
        size = DCchromtab1[code].val;
        apeg_flush_bits(layer, DCchromtab1[code].len);
    }

    {
        const int dct_diff = apeg_get_bits(layer, size);
        const int f = ((dct_diff >> (size-1)) & 1) ^ 1;
        return dct_diff - ((f << size) - f);
        /* int dct_diff = apeg_get_bits(layer, size);
           if (( dct_diff & (1 << (size-1)) ) == 0)
               return dct_diff - ((1<<size) - 1);
           return dct_diff; */
    }
}
Example 5: latm_decode_frame
static int latm_decode_frame(AVCodecContext *avctx, void *out,
                             int *got_frame_ptr, AVPacket *avpkt)
{
    struct LATMContext *latmctx = avctx->priv_data;
    int muxlength, err;
    GetBitContext gb;

    if ((err = init_get_bits8(&gb, avpkt->data, avpkt->size)) < 0)
        return err;

    // check for LOAS sync word
    if (get_bits(&gb, 11) != LOAS_SYNC_WORD)
        return AVERROR_INVALIDDATA;

    muxlength = get_bits(&gb, 13) + 3;
    // not enough data, the parser should have sorted this out
    if (muxlength > avpkt->size)
        return AVERROR_INVALIDDATA;

    if ((err = read_audio_mux_element(latmctx, &gb)) < 0)
        return err;

    if (!latmctx->initialized) {
        if (!avctx->extradata) {
            *got_frame_ptr = 0;
            return avpkt->size;
        } else {
            push_output_configuration(&latmctx->aac_ctx);
            if ((err = decode_audio_specific_config(
                    &latmctx->aac_ctx, avctx, &latmctx->aac_ctx.oc[1].m4ac,
                    avctx->extradata, avctx->extradata_size*8LL, 1)) < 0) {
                pop_output_configuration(&latmctx->aac_ctx);
                return err;
            }
            latmctx->initialized = 1;
        }
    }

    if (show_bits(&gb, 12) == 0xfff) {
        av_log(latmctx->aac_ctx.avctx, AV_LOG_ERROR,
               "ADTS header detected, probably as result of configuration "
               "misparsing\n");
        return AVERROR_INVALIDDATA;
    }

    switch (latmctx->aac_ctx.oc[1].m4ac.object_type) {
    case AOT_ER_AAC_LC:
    case AOT_ER_AAC_LTP:
    case AOT_ER_AAC_LD:
    case AOT_ER_AAC_ELD:
        err = aac_decode_er_frame(avctx, out, got_frame_ptr, &gb);
        break;
    default:
        err = aac_decode_frame_int(avctx, out, got_frame_ptr, &gb, avpkt);
    }
    if (err < 0)
        return err;

    return muxlength;
}
Example 6: ff_wmv2_decode_picture_header
int ff_wmv2_decode_picture_header(MpegEncContext *s)
{
    Wmv2Context *const w = (Wmv2Context *) s;
    int code;

    if (s->picture_number == 0)
        decode_ext_header(w);

    s->pict_type = get_bits1(&s->gb) + 1;
    if (s->pict_type == AV_PICTURE_TYPE_I) {
        code = get_bits(&s->gb, 7);
        av_log(s->avctx, AV_LOG_DEBUG, "I7:%X/\n", code);
    }
    s->chroma_qscale = s->qscale = get_bits(&s->gb, 5);
    if (s->qscale <= 0)
        return AVERROR_INVALIDDATA;

    if (s->pict_type != AV_PICTURE_TYPE_I && show_bits(&s->gb, 1)) {
        GetBitContext gb = s->gb;
        int skip_type = get_bits(&gb, 2);
        int run = skip_type == SKIP_TYPE_COL ? s->mb_width : s->mb_height;

        while (run > 0) {
            int block = FFMIN(run, 25);
            if (get_bits(&gb, block) + 1 != 1 << block)
                break;
            run -= block;
        }

        if (!run)
            return FRAME_SKIPPED;
    }

    return 0;
}
Example 7: getDCsizeChr
static int getDCsizeChr(bitstream *bs)
{
    // [Ag][note] bad code
    if (show_bits(bs, 12) == 1) {
        flush_bits(bs, 12);
        return 12;
    }
    if (show_bits(bs, 11) == 1) {
        flush_bits(bs, 11);
        return 11;
    }
    if (show_bits(bs, 10) == 1) {
        flush_bits(bs, 10);
        return 10;
    }
    if (show_bits(bs, 9) == 1) {
        flush_bits(bs, 9);
        return 9;
    }
    if (show_bits(bs, 8) == 1) {
        flush_bits(bs, 8);
        return 8;
    }
    if (show_bits(bs, 7) == 1) {
        flush_bits(bs, 7);
        return 7;
    }
    if (show_bits(bs, 6) == 1) {
        flush_bits(bs, 6);
        return 6;
    }
    if (show_bits(bs, 5) == 1) {
        flush_bits(bs, 5);
        return 5;
    }
    if (show_bits(bs, 4) == 1) {
        flush_bits(bs, 4);
        return 4;
    }
    if (show_bits(bs, 3) == 1) {
        flush_bits(bs, 3);
        return 3;
    }
    return (3 - get_bits(bs, 2));
}
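The getDCsizeChr example above matches a prefix code by peeking ever-wider windows with show_bits and only flushing the bits once a code is recognized: every code it checks for is a run of zeros terminated by a single 1 bit (show_bits(bs, n) == 1 means "n-1 zeros followed by a 1"), with the three short 2-bit codes handled by the final get_bits(bs, 2) fallback. As a hypothetical illustration only, assuming the same show_bits/flush_bits/get_bits helpers as the example, the same matching logic can be written as a loop:

static int getDCsizeChr_loop(bitstream *bs)
{
    int n;
    /* sizes 3..12 are coded as n-1 zeros followed by a 1 */
    for (n = 3; n <= 12; n++) {
        if (show_bits(bs, n) == 1) {
            flush_bits(bs, n);
            return n;
        }
    }
    /* 2-bit codes: "11" -> 0, "10" -> 1, "01" -> 2 */
    return 3 - get_bits(bs, 2);
}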
Example 8: ff_h264_decode_sei
int ff_h264_decode_sei(H264Context *h)
{
    while (get_bits_left(&h->gb) > 16) {
        int size = 0;
        int type = 0;
        int ret  = 0;

        do
            type += show_bits(&h->gb, 8);
        while (get_bits(&h->gb, 8) == 255);

        do
            size += show_bits(&h->gb, 8);
        while (get_bits(&h->gb, 8) == 255);

        switch (type) {
        case SEI_TYPE_PIC_TIMING: // Picture timing SEI
            ret = decode_picture_timing(h);
            if (ret < 0)
                return ret;
            break;
        case SEI_TYPE_USER_DATA_UNREGISTERED:
            ret = decode_unregistered_user_data(h, size);
            if (ret < 0)
                return ret;
            break;
        case SEI_TYPE_RECOVERY_POINT:
            ret = decode_recovery_point(h);
            if (ret < 0)
                return ret;
            break;
        case SEI_BUFFERING_PERIOD:
            ret = decode_buffering_period(h);
            if (ret < 0)
                return ret;
            break;
        default:
            skip_bits(&h->gb, 8 * size);
        }

        // FIXME check bits here
        align_get_bits(&h->gb);
    }
    return 0;
}
Example 9: main
int main(int argc, char **argv)
{
    unsigned result = invert(atob(argv[1]), atoi(argv[2]), atoi(argv[3]));

    printf("result = \n\t");
    show_bits(result);
    putchar('\n');

    return 0;
}
Example 10: invert
unsigned invert(unsigned x, unsigned p, unsigned n)
{
    unsigned mask = ~((~0) << n) << (p+1-n);

    printf("mask = \n\t");
    show_bits(mask);
    putchar('\n');

    return x ^ mask;
}
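Examples 9 and 10 (and example 13 further down) call a different show_bits from the FFmpeg one: a small helper that prints the binary representation of an unsigned value. Its body is not shown on this page, so the following is only a plausible, hypothetical implementation, meant to be compiled together with the code above (atob in example 9 is likewise an unshown helper, presumably parsing a binary string into an unsigned integer):

#include <limits.h>
#include <stdio.h>

/* Hypothetical bit printer matching the show_bits(result) / show_bits(mask)
 * calls above; prints the value MSB-first as '0'/'1' characters. */
void show_bits(unsigned x)
{
    int i;
    for (i = (int)(sizeof(unsigned) * CHAR_BIT) - 1; i >= 0; i--)
        putchar(((x >> i) & 1) ? '1' : '0');
}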
Example 11: rv10_decode_picture_header
/* read RV 1.0 compatible frame header */
static int rv10_decode_picture_header(MpegEncContext *s)
{
    int mb_count, pb_frame, marker, mb_xy;

    marker = get_bits1(&s->gb);

    if (get_bits1(&s->gb))
        s->pict_type = AV_PICTURE_TYPE_P;
    else
        s->pict_type = AV_PICTURE_TYPE_I;

    if (!marker)
        av_log(s->avctx, AV_LOG_ERROR, "marker missing\n");

    pb_frame = get_bits1(&s->gb);

    av_dlog(s->avctx, "pict_type=%d pb_frame=%d\n", s->pict_type, pb_frame);

    if (pb_frame) {
        avpriv_request_sample(s->avctx, "pb frame");
        return AVERROR_PATCHWELCOME;
    }

    s->qscale = get_bits(&s->gb, 5);
    if (s->qscale == 0) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid qscale value: 0\n");
        return AVERROR_INVALIDDATA;
    }

    if (s->pict_type == AV_PICTURE_TYPE_I) {
        if (s->rv10_version == 3) {
            /* specific MPEG like DC coding not used */
            s->last_dc[0] = get_bits(&s->gb, 8);
            s->last_dc[1] = get_bits(&s->gb, 8);
            s->last_dc[2] = get_bits(&s->gb, 8);
            av_dlog(s->avctx, "DC:%d %d %d\n", s->last_dc[0],
                    s->last_dc[1], s->last_dc[2]);
        }
    }

    /* if multiple packets per frame are sent, the position at which
     * to display the macroblocks is coded here */
    mb_xy = s->mb_x + s->mb_y * s->mb_width;
    if (show_bits(&s->gb, 12) == 0 || (mb_xy && mb_xy < s->mb_num)) {
        s->mb_x  = get_bits(&s->gb, 6); /* mb_x */
        s->mb_y  = get_bits(&s->gb, 6); /* mb_y */
        mb_count = get_bits(&s->gb, 12);
    } else {
        s->mb_x  = 0;
        s->mb_y  = 0;
        mb_count = s->mb_width * s->mb_height;
    }
    skip_bits(&s->gb, 3); /* ignored */
    s->f_code          = 1;
    s->unrestricted_mv = 1;

    return mb_count;
}
Example 12: tgq_decode_block
static void tgq_decode_block(TgqContext *s, int16_t block[64], GetBitContext *gb)
{
    uint8_t *perm = s->scantable.permutated;
    int i, j, value;

    block[0] = get_sbits(gb, 8) * s->qtable[0];
    for (i = 1; i < 64;) {
        switch (show_bits(gb, 3)) {
        case 4:
            block[perm[i++]] = 0;
            /* fall through: code 4 emits two zero coefficients, code 0 emits one */
        case 0:
            block[perm[i++]] = 0;
            skip_bits(gb, 3);
            break;
        case 5:
        case 1:
            skip_bits(gb, 2);
            value = get_bits(gb, 6);
            for (j = 0; j < value; j++)
                block[perm[i++]] = 0;
            break;
        case 6:
            skip_bits(gb, 3);
            block[perm[i]] = -s->qtable[perm[i]];
            i++;
            break;
        case 2:
            skip_bits(gb, 3);
            block[perm[i]] = s->qtable[perm[i]];
            i++;
            break;
        case 7: // 111b
        case 3: // 011b
            skip_bits(gb, 2);
            if (show_bits(gb, 6) == 0x3F) {
                skip_bits(gb, 6);
                block[perm[i]] = get_sbits(gb, 8) * s->qtable[perm[i]];
            } else {
                block[perm[i]] = get_sbits(gb, 6) * s->qtable[perm[i]];
            }
            i++;
            break;
        }
    }
    block[0] += 128 << 4;
}
Example 13: console_echo_czech
void console_echo_czech()
{
    int c;

    while(1) {
        c = getc(stdin);
        printf("\nKey: '%3d', char: '%c'", c, c);
        show_bits(c);
    }
}
Example 14: check_stream_type
static void check_stream_type(APEG_LAYER *layer)
{
    // Assume a non-system stream
    layer->system_stream_flag = NO_SYSTEM;

    // Initialize the buffer
    _apeg_initialize_buffer(layer);

    // Transport streams (what'r those?) aren't supported
    if(show_bits(layer, 8) == 0x47)
        apeg_error_jump("Transport streams not supported");

    /* Get the first start code */
recheck:
    switch(show_bits32(layer))
    {
        case VIDEO_ELEMENTARY_STREAM:
            /* Found video, system stream */
            layer->system_stream_flag = MPEG_SYSTEM;
            /* fall-through; set flag and break */
        case SEQUENCE_HEADER_CODE:
            /* Found video */
            if(!_apeg_ignore_video)
                layer->stream.flags |= APEG_MPG_VIDEO;
            break;

        case AUDIO_ELEMENTARY_STREAM:
            /* apeg_start_audio will set APEG_MPG_AUDIO later */
            layer->system_stream_flag = MPEG_SYSTEM;
            apeg_start_code(layer);
            goto recheck;

        default:
            if(layer->system_stream_flag == NO_SYSTEM)
            {
                if(show_bits32(layer) == (('O'<<24)|('g'<<16)|('g'<<8)|('S')))
                {
                    layer->system_stream_flag = OGG_SYSTEM;
                    _apeg_initialize_buffer(layer);
                    if(alogg_open(layer) != APEG_OK)
                        apeg_error_jump("Error opening Ogg stream");
                    return;
                }
#ifndef DISABLE_MPEG_AUDIO
                if(almpa_head_backcheck(show_bits32(layer)))
                    break;
#endif
            }
            /* no positive stream identified; recheck */
            apeg_flush_bits8(layer, 8);
            goto recheck;
    }

    _apeg_initialize_buffer(layer);
}
Example 15: ff_h264_decode_sei
int ff_h264_decode_sei(H264Context *h){
    MpegEncContext * const s = &h->s;

    while(get_bits_count(&s->gb) + 16 < s->gb.size_in_bits){
        int size, type;

        type = 0;
        do{
            type += show_bits(&s->gb, 8);
        }while(get_bits(&s->gb, 8) == 255);

        size = 0;
        do{
            size += show_bits(&s->gb, 8);
        }while(get_bits(&s->gb, 8) == 255);

        switch(type){
        case SEI_TYPE_PIC_TIMING: // Picture timing SEI
            if(decode_picture_timing(h) < 0)
                return -1;
            break;
        case SEI_TYPE_USER_DATA_UNREGISTERED:
            if(decode_unregistered_user_data(h, size) < 0)
                return -1;
            break;
        case SEI_TYPE_RECOVERY_POINT:
            if(decode_recovery_point(h) < 0)
                return -1;
            break;
        case SEI_BUFFERING_PERIOD:
            if(decode_buffering_period(h) < 0)
                return -1;
            break;
        default:
            skip_bits(&s->gb, 8*size);
        }

        //FIXME check bits here
        align_get_bits(&s->gb);
    }

    return 0;
}