本文整理汇总了C++中bytestream_put_le16函数的典型用法代码示例。如果您正苦于以下问题:C++ bytestream_put_le16函数的具体用法?C++ bytestream_put_le16怎么用?C++ bytestream_put_le16使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了bytestream_put_le16函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: gif_image_write_image
/*
 * Write one GIF image block: the image descriptor followed by the pixel
 * data emitted as raw 9-bit codes (no real LZW compression: a clear code
 * precedes every chunk so the decoder's code size never grows).
 *
 * bytestream:    in/out cursor into the output buffer, advanced as written
 * x1, y1:        top-left position of the image on the logical screen
 * width, height: image size in pixels
 * buf:           source pixels, one palette index byte per pixel
 * linesize:      byte stride between source rows
 * pix_fmt:       unused in this implementation
 * Always returns 0.
 */
static int gif_image_write_image(uint8_t **bytestream,
                                 int x1, int y1, int width, int height,
                                 const uint8_t *buf, int linesize, int pix_fmt)
{
    PutBitContext p;
    uint8_t buffer[200]; /* 100 * 9 / 8 = 113 bytes max per chunk */
    int i, left, w;
    const uint8_t *ptr;

    /* image descriptor: separator byte, position, size */
    bytestream_put_byte(bytestream, 0x2c);
    bytestream_put_le16(bytestream, x1);
    bytestream_put_le16(bytestream, y1);
    bytestream_put_le16(bytestream, width);
    bytestream_put_le16(bytestream, height);
    bytestream_put_byte(bytestream, 0x00); /* flags */
    /* no local clut */

    bytestream_put_byte(bytestream, 0x08); /* minimum code size (8 -> 9-bit codes) */

    left= width * height; /* total pixels still to emit */

    init_put_bits(&p, buffer, 130);

    /*
     * the thing here is the bitstream is written as little packets, with a size byte before
     * but it's still the same bitstream between packets (no flush !)
     */
    ptr = buf;
    w = width; /* pixels remaining on the current source row */
    while(left>0) {
        put_bits(&p, 9, 0x0100); /* clear code */

        for(i=(left<GIF_CHUNKS)?left:GIF_CHUNKS;i;i--) {
            put_bits(&p, 9, *ptr++); /* each pixel as a literal 9-bit code */
            if (--w == 0) {
                /* end of source row: step to the next line via the stride */
                w = width;
                buf += linesize;
                ptr = buf;
            }
        }

        if(left<=GIF_CHUNKS) {
            put_bits(&p, 9, 0x101); /* end of stream */
            flush_put_bits(&p);
        }

        /* emit the bytes accumulated so far as one size-prefixed sub-block */
        if(pbBufPtr(&p) - p.buf > 0) {
            bytestream_put_byte(bytestream, pbBufPtr(&p) - p.buf); /* byte count of the packet */
            bytestream_put_buffer(bytestream, p.buf, pbBufPtr(&p) - p.buf); /* the actual buffer */
            p.buf_ptr = p.buf; /* dequeue the bytes off the bitstream */
        }
        left-=GIF_CHUNKS;
    }
    bytestream_put_byte(bytestream, 0x00); /* end of image block */
    bytestream_put_byte(bytestream, 0x3b); /* 0x3b: GIF trailer */
    return 0;
}
示例2: read_packet
/*
 * Read the next image of the ICO file as one packet.
 *
 * PNG-encoded entries are passed through unchanged. BMP entries are
 * stored headerless inside ICO, so a 14-byte BMP file header is
 * synthesized in front of the payload and a few DIB header fields are
 * patched afterwards (palette count, data offset, halved height).
 *
 * Returns 0 on success or a negative error code.
 */
static int read_packet(AVFormatContext *s, AVPacket *pkt)
{
    IcoDemuxContext *ico = s->priv_data;
    IcoImage *image;
    AVIOContext *pb = s->pb;
    AVStream *st = s->streams[0];
    int ret;

    if (ico->current_image >= ico->nb_images)
        return AVERROR(EIO); /* all images delivered */

    image = &ico->images[ico->current_image];

    if ((ret = avio_seek(pb, image->offset, SEEK_SET)) < 0)
        return ret;

    if (s->streams[ico->current_image]->codec->codec_id == AV_CODEC_ID_PNG) {
        if ((ret = av_get_packet(pb, pkt, image->size)) < 0)
            return ret;
    } else {
        uint8_t *buf;
        if ((ret = av_new_packet(pkt, 14 + image->size)) < 0)
            return ret;
        buf = pkt->data;

        /* add BMP header */
        bytestream_put_byte(&buf, 'B');
        bytestream_put_byte(&buf, 'M');
        bytestream_put_le32(&buf, pkt->size);
        bytestream_put_le16(&buf, 0); /* reserved */
        bytestream_put_le16(&buf, 0); /* reserved */
        bytestream_put_le32(&buf, 0); /* data offset, patched below once nb_pal is known */

        if ((ret = avio_read(pb, buf, image->size)) < 0)
            return ret;

        /* buf now points just past the synthesized 14-byte file header,
         * i.e. at the DIB header read from the file */
        st->codec->bits_per_coded_sample = AV_RL16(buf + 14);

        if (AV_RL32(buf + 32))
            image->nb_pal = AV_RL32(buf + 32);

        if (st->codec->bits_per_coded_sample <= 8 && !image->nb_pal) {
            /* a zero palette count with <= 8 bpp means a full palette */
            image->nb_pal = 1 << st->codec->bits_per_coded_sample;
            AV_WL32(buf + 32, image->nb_pal);
        }

        /* buf - 4 is the data-offset field written as 0 above */
        AV_WL32(buf - 4, 14 + 40 + image->nb_pal * 4);
        /* ICO stores the height doubled (image + transparency mask) */
        AV_WL32(buf + 8, AV_RL32(buf + 8) / 2);
    }

    pkt->stream_index = ico->current_image++;
    pkt->flags |= AV_PKT_FLAG_KEY;
    return 0;
}
示例3: bmp_encode_frame
/*
 * Encode one frame as a 24-bit uncompressed BMP file.
 *
 * Writes a BITMAPFILEHEADER plus BITMAPINFOHEADER, followed by the
 * pixel rows bottom-up, each row zero-padded to a multiple of 4 bytes.
 *
 * Returns the total number of bytes written, or -1 if buf_size is too
 * small to hold the encoded image.
 */
static int bmp_encode_frame(AVCodecContext *avctx, unsigned char *buf, int buf_size, void *data){
    BMPContext *s = avctx->priv_data;
    AVFrame *pict = data;
    AVFrame * const p= (AVFrame*)&s->picture;
    int n_bytes_image, n_bytes_per_row, n_bytes, i, n, hsize;
    uint8_t *ptr;
    unsigned char* buf0 = buf; /* remember buffer start to locate the pixel area later */

    *p = *pict;
    p->pict_type= FF_I_TYPE;
    p->key_frame= 1;

    /* 3 bytes per pixel, each row rounded up to a multiple of 4 bytes */
    n_bytes_per_row = (avctx->width*3 + 3) & ~3;
    n_bytes_image = avctx->height*n_bytes_per_row;

    // STRUCTURE.field refer to the MSVC documentation for BITMAPFILEHEADER
    // and related pages.
#define SIZE_BITMAPFILEHEADER 14
#define SIZE_BITMAPINFOHEADER 40
    hsize = SIZE_BITMAPFILEHEADER + SIZE_BITMAPINFOHEADER;
    n_bytes = n_bytes_image + hsize;
    if(n_bytes>buf_size) {
        av_log(avctx, AV_LOG_ERROR, "buf size too small (need %d, got %d)\n", n_bytes, buf_size);
        return -1;
    }
    bytestream_put_byte(&buf, 'B');                   // BITMAPFILEHEADER.bfType
    bytestream_put_byte(&buf, 'M');                   // do.
    bytestream_put_le32(&buf, n_bytes);               // BITMAPFILEHEADER.bfSize
    bytestream_put_le16(&buf, 0);                     // BITMAPFILEHEADER.bfReserved1
    bytestream_put_le16(&buf, 0);                     // BITMAPFILEHEADER.bfReserved2
    bytestream_put_le32(&buf, hsize);                 // BITMAPFILEHEADER.bfOffBits
    bytestream_put_le32(&buf, SIZE_BITMAPINFOHEADER); // BITMAPINFOHEADER.biSize
    bytestream_put_le32(&buf, avctx->width);          // BITMAPINFOHEADER.biWidth
    bytestream_put_le32(&buf, avctx->height);         // BITMAPINFOHEADER.biHeight
    bytestream_put_le16(&buf, 1);                     // BITMAPINFOHEADER.biPlanes
    bytestream_put_le16(&buf, 24);                    // BITMAPINFOHEADER.biBitCount
    bytestream_put_le32(&buf, BMP_RGB);               // BITMAPINFOHEADER.biCompression
    bytestream_put_le32(&buf, n_bytes_image);         // BITMAPINFOHEADER.biSizeImage
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biXPelsPerMeter
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biYPelsPerMeter
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biClrUsed
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biClrImportant

    // BMP files are bottom-to-top so we start from the end...
    ptr = p->data[0] + (avctx->height - 1) * p->linesize[0];
    buf = buf0 + hsize;
    for(i = 0; i < avctx->height; i++) {
        n = 3*avctx->width;
        memcpy(buf, ptr, n);
        buf += n;
        memset(buf, 0, n_bytes_per_row-n); /* zero the row padding */
        buf += n_bytes_per_row-n;
        ptr -= p->linesize[0]; // ... and go back
    }
    return n_bytes;
}
示例4: send_stream_selection_request
/** Send MMST stream selection command based on the AVStream->discard values. */
/** Send MMST stream selection command based on the AVStream->discard values. */
static int send_stream_selection_request(MMSTContext *mmst)
{
    MMSContext *mms = &mmst->mms;
    uint8_t **out = &mms->write_out_ptr;
    int idx;

    // send the streams we want back...
    start_command_packet(mmst, CS_PKT_STREAM_ID_REQUEST);

    /* number of stream records that follow */
    bytestream_put_le32(out, mms->stream_num);
    for (idx = 0; idx < mms->stream_num; idx++) {
        bytestream_put_le16(out, 0xffff);               // flags
        bytestream_put_le16(out, mms->streams[idx].id); // stream id
        bytestream_put_le16(out, 0);                    // selection
    }
    return send_command_packet(mmst);
}
示例5: start_command_packet
/** Create MMST command packet header */
/** Create MMST command packet header.
 *
 * Rewinds the write cursor to the start of the output buffer and lays
 * down the fixed command-packet preamble; the caller appends the
 * packet payload through mms->write_out_ptr afterwards.
 */
static void start_command_packet(MMSContext *mms, MMSCSPacketType packet_type)
{
    mms->write_out_ptr = mms->out_buffer; /* header is built from the buffer start */

    bytestream_put_le32(&mms->write_out_ptr, 1); // start sequence
    bytestream_put_le32(&mms->write_out_ptr, 0xb00bface); // protocol signature
    bytestream_put_le32(&mms->write_out_ptr, 0); // Length starts from after the protocol type bytes
    bytestream_put_le32(&mms->write_out_ptr, MKTAG('M','M','S',' ')); // protocol type tag
    bytestream_put_le32(&mms->write_out_ptr, 0); // NOTE(review): length field? filled elsewhere - confirm
    bytestream_put_le32(&mms->write_out_ptr, mms->outgoing_packet_seq++); // packet sequence number
    bytestream_put_le64(&mms->write_out_ptr, 0); // timestamp
    bytestream_put_le32(&mms->write_out_ptr, 0);
    bytestream_put_le16(&mms->write_out_ptr, packet_type);
    bytestream_put_le16(&mms->write_out_ptr, 3); // direction to server
}
示例6: gif_image_write_image
/*
 * Write one GIF image block: the image descriptor followed by the
 * LZW-compressed pixel data split into size-prefixed sub-blocks of at
 * most 255 bytes.
 *
 * bytestream: in/out cursor into the output buffer
 * end:        end of the writable output area, used for bounds checking
 * buf:        source pixels, one byte per pixel
 * linesize:   byte stride between source rows
 * Returns 0 on success, -1 if the output buffer would overflow.
 */
static int gif_image_write_image(AVCodecContext *avctx,
                                 uint8_t **bytestream, uint8_t *end,
                                 const uint8_t *buf, int linesize)
{
    GIFContext *s = avctx->priv_data;
    int len = 0, height;
    const uint8_t *ptr;

    /* image block: separator, position (0,0), size, flags */
    bytestream_put_byte(bytestream, 0x2c);
    bytestream_put_le16(bytestream, 0); /* left position */
    bytestream_put_le16(bytestream, 0); /* top position */
    bytestream_put_le16(bytestream, avctx->width);
    bytestream_put_le16(bytestream, avctx->height);
    bytestream_put_byte(bytestream, 0x00); /* flags */
    /* no local clut */

    bytestream_put_byte(bytestream, 0x08); /* minimum LZW code size */

    /* LZW-compress the whole frame into s->buf, one row at a time */
    ff_lzw_encode_init(s->lzw, s->buf, avctx->width*avctx->height,
                       12, FF_LZW_GIF, put_bits);
    ptr = buf;
    for (height = avctx->height; height--;) {
        len += ff_lzw_encode(s->lzw, ptr, avctx->width);
        ptr += linesize;
    }
    len += ff_lzw_encode_flush(s->lzw, flush_put_bits);

    /* copy the compressed data out as sub-blocks of up to 255 bytes */
    ptr = s->buf;
    while (len > 0) {
        int size = FFMIN(255, len);
        bytestream_put_byte(bytestream, size);
        if (end - *bytestream < size)
            return -1; /* not enough room left in the output buffer */
        bytestream_put_buffer(bytestream, ptr, size);
        ptr += size;
        len -= size;
    }
    bytestream_put_byte(bytestream, 0x00); /* end of image block */
    bytestream_put_byte(bytestream, 0x3b); /* 0x3b: GIF trailer */
    return 0;
}
示例7: write_typecode
/* NOTE: Typecodes must be spooled AFTER arguments!! */
static void write_typecode(CodingSpool *s, uint8_t type)
{
s->typeSpool |= (type & 3) << (14 - s->typeSpoolLength);
s->typeSpoolLength += 2;
if (s->typeSpoolLength == 16) {
bytestream_put_le16(s->pout, s->typeSpool);
bytestream_put_buffer(s->pout, s->argumentSpool,
s->args - s->argumentSpool);
s->typeSpoolLength = 0;
s->typeSpool = 0;
s->args = s->argumentSpool;
}
}
示例8: gif_image_write_header
/* GIF header */
static int gif_image_write_header(AVCodecContext *avctx,
uint8_t **bytestream, uint32_t *palette)
{
int i;
unsigned int v;
bytestream_put_buffer(bytestream, "GIF", 3);
bytestream_put_buffer(bytestream, "89a", 3);
bytestream_put_le16(bytestream, avctx->width);
bytestream_put_le16(bytestream, avctx->height);
bytestream_put_byte(bytestream, 0xf7); /* flags: global clut, 256 entries */
bytestream_put_byte(bytestream, 0x1f); /* background color index */
bytestream_put_byte(bytestream, 0); /* aspect ratio */
/* the global palette */
for(i=0;i<256;i++) {
v = palette[i];
bytestream_put_be24(bytestream, v);
}
return 0;
}
示例9: roq_write_video_info_chunk
/* Emit the RoQ info chunk carrying the video dimensions. */
static void roq_write_video_info_chunk(RoqContext *enc)
{
    uint8_t **out = &enc->out_buf;

    /* chunk id followed by the payload size (8 bytes) */
    bytestream_put_le16(out, RoQ_INFO);
    bytestream_put_le32(out, 8);

    /* unused chunk argument */
    bytestream_put_byte(out, 0x00);
    bytestream_put_byte(out, 0x00);

    /* payload: width then height */
    bytestream_put_le16(out, enc->width);
    bytestream_put_le16(out, enc->height);

    /* unused in Quake 3, mimics the output of the real encoder
     * (bytes 08 00 04 00, written here as two LE16 words) */
    bytestream_put_le16(out, 0x0008);
    bytestream_put_le16(out, 0x0004);
}
示例10: write_codebooks
/**
* Write codebook chunk
*/
static void write_codebooks(RoqContext *enc, RoqTempdata *tempData)
{
int i, j;
uint8_t **outp= &enc->out_buf;
if (tempData->numCB2) {
bytestream_put_le16(outp, RoQ_QUAD_CODEBOOK);
bytestream_put_le32(outp, tempData->numCB2*6 + tempData->numCB4*4);
bytestream_put_byte(outp, tempData->numCB4);
bytestream_put_byte(outp, tempData->numCB2);
for (i=0; i<tempData->numCB2; i++) {
bytestream_put_buffer(outp, enc->cb2x2[tempData->f2i2[i]].y, 4);
bytestream_put_byte(outp, enc->cb2x2[tempData->f2i2[i]].u);
bytestream_put_byte(outp, enc->cb2x2[tempData->f2i2[i]].v);
}
for (i=0; i<tempData->numCB4; i++)
for (j=0; j<4; j++)
bytestream_put_byte(outp, tempData->i2f2[enc->cb4x4[tempData->f2i4[i]].idx[j]]);
}
}
示例11: reconstruct_and_encode_image
static void reconstruct_and_encode_image(RoqContext *enc, RoqTempdata *tempData, int w, int h, int numBlocks)
{
int i, j, k;
int x, y;
int subX, subY;
int dist=0;
roq_qcell *qcell;
CelEvaluation *eval;
CodingSpool spool;
spool.typeSpool=0;
spool.typeSpoolLength=0;
spool.args = spool.argumentSpool;
spool.pout = &enc->out_buf;
if (tempData->used_option[RoQ_ID_CCC]%2)
tempData->mainChunkSize+=8; //FIXME
/* Write the video chunk header */
bytestream_put_le16(&enc->out_buf, RoQ_QUAD_VQ);
bytestream_put_le32(&enc->out_buf, tempData->mainChunkSize/8);
bytestream_put_byte(&enc->out_buf, 0x0);
bytestream_put_byte(&enc->out_buf, 0x0);
for (i=0; i<numBlocks; i++) {
eval = tempData->cel_evals + i;
x = eval->sourceX;
y = eval->sourceY;
dist += eval->eval_dist[eval->best_coding];
switch (eval->best_coding) {
case RoQ_ID_MOT:
write_typecode(&spool, RoQ_ID_MOT);
break;
case RoQ_ID_FCC:
bytestream_put_byte(&spool.args, motion_arg(eval->motion));
write_typecode(&spool, RoQ_ID_FCC);
ff_apply_motion_8x8(enc, x, y,
eval->motion.d[0], eval->motion.d[1]);
break;
case RoQ_ID_SLD:
bytestream_put_byte(&spool.args, tempData->i2f4[eval->cbEntry]);
write_typecode(&spool, RoQ_ID_SLD);
qcell = enc->cb4x4 + eval->cbEntry;
ff_apply_vector_4x4(enc, x , y , enc->cb2x2 + qcell->idx[0]);
ff_apply_vector_4x4(enc, x+4, y , enc->cb2x2 + qcell->idx[1]);
ff_apply_vector_4x4(enc, x , y+4, enc->cb2x2 + qcell->idx[2]);
ff_apply_vector_4x4(enc, x+4, y+4, enc->cb2x2 + qcell->idx[3]);
break;
case RoQ_ID_CCC:
write_typecode(&spool, RoQ_ID_CCC);
for (j=0; j<4; j++) {
subX = x + 4*(j&1);
subY = y + 2*(j&2);
switch(eval->subCels[j].best_coding) {
case RoQ_ID_MOT:
break;
case RoQ_ID_FCC:
bytestream_put_byte(&spool.args,
motion_arg(eval->subCels[j].motion));
ff_apply_motion_4x4(enc, subX, subY,
eval->subCels[j].motion.d[0],
eval->subCels[j].motion.d[1]);
break;
case RoQ_ID_SLD:
bytestream_put_byte(&spool.args,
tempData->i2f4[eval->subCels[j].cbEntry]);
qcell = enc->cb4x4 + eval->subCels[j].cbEntry;
ff_apply_vector_2x2(enc, subX , subY ,
enc->cb2x2 + qcell->idx[0]);
ff_apply_vector_2x2(enc, subX+2, subY ,
enc->cb2x2 + qcell->idx[1]);
ff_apply_vector_2x2(enc, subX , subY+2,
enc->cb2x2 + qcell->idx[2]);
ff_apply_vector_2x2(enc, subX+2, subY+2,
enc->cb2x2 + qcell->idx[3]);
break;
case RoQ_ID_CCC:
for (k=0; k<4; k++) {
int cb_idx = eval->subCels[j].subCels[k];
bytestream_put_byte(&spool.args,
tempData->i2f2[cb_idx]);
ff_apply_vector_2x2(enc, subX + 2*(k&1), subY + (k&2),
//.........这里部分代码省略.........
示例12: gif_image_write_header
/* GIF header */
static int gif_image_write_header(uint8_t **bytestream,
int width, int height, int loop_count,
uint32_t *palette)
{
int i;
unsigned int v;
bytestream_put_buffer(bytestream, "GIF", 3);
bytestream_put_buffer(bytestream, "89a", 3);
bytestream_put_le16(bytestream, width);
bytestream_put_le16(bytestream, height);
bytestream_put_byte(bytestream, 0xf7); /* flags: global clut, 256 entries */
bytestream_put_byte(bytestream, 0x1f); /* background color index */
bytestream_put_byte(bytestream, 0); /* aspect ratio */
/* the global palette */
if (!palette) {
bytestream_put_buffer(bytestream, (const unsigned char *)gif_clut, 216*3);
for(i=0;i<((256-216)*3);i++)
bytestream_put_byte(bytestream, 0);
} else {
for(i=0;i<256;i++) {
v = palette[i];
bytestream_put_be24(bytestream, v);
}
}
/* update: this is the 'NETSCAPE EXTENSION' that allows for looped animated gif
see http://members.aol.com/royalef/gifabout.htm#net-extension
byte 1 : 33 (hex 0x21) GIF Extension code
byte 2 : 255 (hex 0xFF) Application Extension Label
byte 3 : 11 (hex (0x0B) Length of Application Block
(eleven bytes of data to follow)
bytes 4 to 11 : "NETSCAPE"
bytes 12 to 14 : "2.0"
byte 15 : 3 (hex 0x03) Length of Data Sub-Block
(three bytes of data to follow)
byte 16 : 1 (hex 0x01)
bytes 17 to 18 : 0 to 65535, an unsigned integer in
lo-hi byte format. This indicate the
number of iterations the loop should
be executed.
bytes 19 : 0 (hex 0x00) a Data Sub-block Terminator
*/
/* application extension header */
#ifdef GIF_ADD_APP_HEADER
if (loop_count >= 0 && loop_count <= 65535) {
bytestream_put_byte(bytestream, 0x21);
bytestream_put_byte(bytestream, 0xff);
bytestream_put_byte(bytestream, 0x0b);
bytestream_put_buffer(bytestream, "NETSCAPE2.0", 11); // bytes 4 to 14
bytestream_put_byte(bytestream, 0x03); // byte 15
bytestream_put_byte(bytestream, 0x01); // byte 16
bytestream_put_le16(bytestream, (uint16_t)loop_count);
bytestream_put_byte(bytestream, 0x00); // byte 19
}
#endif
return 0;
}
示例13: ff_rtmp_packet_write
/**
 * Send an RTMP packet over the connection, splitting it into chunks of
 * at most chunk_size bytes. The chunk header is compressed against the
 * previous packet sent on the same channel (12/8/4/1-byte header modes),
 * and the per-channel history is updated on success.
 *
 * @param h            connected URL context to write to
 * @param pkt          packet to send
 * @param chunk_size   maximum payload bytes per chunk
 * @param prev_pkt_ptr per-channel packet history (may be reallocated)
 * @param nb_prev_pkt  number of entries in the history array
 * @return total number of bytes written on success, a negative error
 *         code on failure
 */
int ff_rtmp_packet_write(URLContext *h, RTMPPacket *pkt,
                         int chunk_size, RTMPPacket **prev_pkt_ptr,
                         int *nb_prev_pkt)
{
    uint8_t pkt_hdr[16], *p = pkt_hdr;
    int mode = RTMP_PS_TWELVEBYTES;
    int off = 0;
    int written = 0;
    int ret;
    RTMPPacket *prev_pkt;
    int use_delta; // flag if using timestamp delta, not RTMP_PS_TWELVEBYTES
    uint32_t timestamp; // full 32-bit timestamp or delta value

    /* make sure the history array is large enough for this channel */
    if ((ret = ff_rtmp_check_alloc_array(prev_pkt_ptr, nb_prev_pkt,
                                         pkt->channel_id)) < 0)
        return ret;
    prev_pkt = *prev_pkt_ptr;

    //if channel_id = 0, this is first presentation of prev_pkt, send full hdr.
    use_delta = prev_pkt[pkt->channel_id].channel_id &&
        pkt->extra == prev_pkt[pkt->channel_id].extra &&
        pkt->timestamp >= prev_pkt[pkt->channel_id].timestamp;

    timestamp = pkt->timestamp;
    if (use_delta) {
        timestamp -= prev_pkt[pkt->channel_id].timestamp;
    }
    /* the 24-bit header field saturates; larger values use the
     * extended-timestamp field appended after the header */
    if (timestamp >= 0xFFFFFF) {
        pkt->ts_field = 0xFFFFFF;
    } else {
        pkt->ts_field = timestamp;
    }

    if (use_delta) {
        if (pkt->type == prev_pkt[pkt->channel_id].type &&
            pkt->size == prev_pkt[pkt->channel_id].size) {
            mode = RTMP_PS_FOURBYTES;
            if (pkt->ts_field == prev_pkt[pkt->channel_id].ts_field)
                mode = RTMP_PS_ONEBYTE;
        } else {
            mode = RTMP_PS_EIGHTBYTES;
        }
    }

    /* chunk basic header: 1, 2 or 3 bytes depending on the channel id */
    if (pkt->channel_id < 64) {
        bytestream_put_byte(&p, pkt->channel_id | (mode << 6));
    } else if (pkt->channel_id < 64 + 256) {
        bytestream_put_byte(&p, 0 | (mode << 6));
        bytestream_put_byte(&p, pkt->channel_id - 64);
    } else {
        bytestream_put_byte(&p, 1 | (mode << 6));
        bytestream_put_le16(&p, pkt->channel_id - 64);
    }
    /* chunk message header, size depending on the selected mode */
    if (mode != RTMP_PS_ONEBYTE) {
        bytestream_put_be24(&p, pkt->ts_field);
        if (mode != RTMP_PS_FOURBYTES) {
            bytestream_put_be24(&p, pkt->size);
            bytestream_put_byte(&p, pkt->type);
            if (mode == RTMP_PS_TWELVEBYTES)
                bytestream_put_le32(&p, pkt->extra);
        }
    }
    /* extended timestamp when the 24-bit field saturated */
    if (pkt->ts_field == 0xFFFFFF)
        bytestream_put_be32(&p, timestamp);
    // save history
    prev_pkt[pkt->channel_id].channel_id = pkt->channel_id;
    prev_pkt[pkt->channel_id].type       = pkt->type;
    prev_pkt[pkt->channel_id].size       = pkt->size;
    prev_pkt[pkt->channel_id].timestamp  = pkt->timestamp;
    prev_pkt[pkt->channel_id].ts_field   = pkt->ts_field;
    prev_pkt[pkt->channel_id].extra      = pkt->extra;

    if ((ret = ffurl_write(h, pkt_hdr, p - pkt_hdr)) < 0)
        return ret;
    written = p - pkt_hdr + pkt->size;

    /* write the payload in chunks, separated by continuation markers */
    while (off < pkt->size) {
        int towrite = FFMIN(chunk_size, pkt->size - off);
        if ((ret = ffurl_write(h, pkt->data + off, towrite)) < 0)
            return ret;
        off += towrite;
        if (off < pkt->size) {
            /* type-3 (one-byte) continuation header */
            uint8_t marker = 0xC0 | pkt->channel_id;
            if ((ret = ffurl_write(h, &marker, 1)) < 0)
                return ret;
            written++;
        }
    }
    return written;
}
示例14: ff_rtmp_packet_write
/**
 * Send an RTMP packet over the connection, splitting it into chunks of
 * at most chunk_size bytes. The chunk header is compressed against the
 * previous packet sent on the same channel (12/8/4/1-byte header modes),
 * and the per-channel history is updated.
 *
 * @param h          connected URL context to write to
 * @param pkt        packet to send
 * @param chunk_size maximum payload bytes per chunk
 * @param prev_pkt   per-channel history of previously sent packets
 * @return total number of bytes written on success, a negative error
 *         code if a network write fails
 */
int ff_rtmp_packet_write(URLContext *h, RTMPPacket *pkt,
                         int chunk_size, RTMPPacket *prev_pkt)
{
    uint8_t pkt_hdr[16], *p = pkt_hdr;
    int mode = RTMP_PS_TWELVEBYTES;
    int off = 0;
    int size = 0;
    int ret;

    pkt->ts_delta = pkt->timestamp - prev_pkt[pkt->channel_id].timestamp;

    //if channel_id = 0, this is first presentation of prev_pkt, send full hdr.
    if (prev_pkt[pkt->channel_id].channel_id &&
        pkt->extra == prev_pkt[pkt->channel_id].extra) {
        if (pkt->type == prev_pkt[pkt->channel_id].type &&
            pkt->data_size == prev_pkt[pkt->channel_id].data_size) {
            mode = RTMP_PS_FOURBYTES;
            if (pkt->ts_delta == prev_pkt[pkt->channel_id].ts_delta)
                mode = RTMP_PS_ONEBYTE;
        } else {
            mode = RTMP_PS_EIGHTBYTES;
        }
    }

    /* chunk basic header: 1, 2 or 3 bytes depending on the channel id */
    if (pkt->channel_id < 64) {
        bytestream_put_byte(&p, pkt->channel_id | (mode << 6));
    } else if (pkt->channel_id < 64 + 256) {
        bytestream_put_byte(&p, 0 | (mode << 6));
        bytestream_put_byte(&p, pkt->channel_id - 64);
    } else {
        bytestream_put_byte(&p, 1 | (mode << 6));
        bytestream_put_le16(&p, pkt->channel_id - 64);
    }
    /* chunk message header, size depending on the selected mode */
    if (mode != RTMP_PS_ONEBYTE) {
        uint32_t timestamp = pkt->timestamp;
        if (mode != RTMP_PS_TWELVEBYTES)
            timestamp = pkt->ts_delta;
        /* the 24-bit field saturates; the full value then follows as an
         * extended timestamp */
        bytestream_put_be24(&p, timestamp >= 0xFFFFFF ? 0xFFFFFF : timestamp);
        if (mode != RTMP_PS_FOURBYTES) {
            bytestream_put_be24(&p, pkt->data_size);
            bytestream_put_byte(&p, pkt->type);
            if (mode == RTMP_PS_TWELVEBYTES)
                bytestream_put_le32(&p, pkt->extra);
        }
        if (timestamp >= 0xFFFFFF)
            bytestream_put_be32(&p, timestamp);
    }
    // save history
    prev_pkt[pkt->channel_id].channel_id = pkt->channel_id;
    prev_pkt[pkt->channel_id].type       = pkt->type;
    prev_pkt[pkt->channel_id].data_size  = pkt->data_size;
    prev_pkt[pkt->channel_id].timestamp  = pkt->timestamp;
    if (mode != RTMP_PS_TWELVEBYTES) {
        prev_pkt[pkt->channel_id].ts_delta = pkt->ts_delta;
    } else {
        prev_pkt[pkt->channel_id].ts_delta = pkt->timestamp;
    }
    prev_pkt[pkt->channel_id].extra = pkt->extra;

    /* Fix: the previous version ignored ffurl_write() results, silently
     * swallowing network failures; propagate them like the rest of the
     * RTMP code does. */
    if ((ret = ffurl_write(h, pkt_hdr, p - pkt_hdr)) < 0)
        return ret;
    size = p - pkt_hdr + pkt->data_size;

    /* write the payload in chunks, separated by continuation markers */
    while (off < pkt->data_size) {
        int towrite = FFMIN(chunk_size, pkt->data_size - off);
        if ((ret = ffurl_write(h, pkt->data + off, towrite)) < 0)
            return ret;
        off += towrite;
        if (off < pkt->data_size) {
            /* type-3 (one-byte) continuation header */
            uint8_t marker = 0xC0 | pkt->channel_id;
            if ((ret = ffurl_write(h, &marker, 1)) < 0)
                return ret;
            size++;
        }
    }
    return size;
}
示例15: xkcd_encode_frame
/*
 * Encode one frame in the toy "XKCD" format: a 14-byte header (magic,
 * file size, width, height, bit depth) followed by the pixel rows
 * top-to-bottom, each row zero-padded to a multiple of 4 bytes.
 * Returns 0 on success or a negative error code from packet allocation.
 */
static int xkcd_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                             const AVFrame *pict, int *got_packet)
{
    const AVFrame * const frame = pict;      /* actual image data */
    int bits_per_pixel = avctx->bits_per_coded_sample;
    int row_bytes, row_pad, image_bytes, hdr_bytes, file_bytes, row, ret;
    uint8_t *dst, *src;

    /* Cite: BMP encoder */
    avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
    avctx->coded_frame->key_frame = 1;

    /* bytes per row of pixel data: width * bpp in bits, rounded up to
       whole bytes (the + 7 guards against truncation in the shift) */
    /* Cite: BMP encoder */
    row_bytes = ((int64_t)avctx->width * (int64_t)bits_per_pixel + 7LL) >> 3LL;
    /* End cite */

    /* padding needed to bring each row up to a multiple of 4 bytes */
    row_pad = (4 - row_bytes) & 3;

    image_bytes = avctx->height * (row_bytes + row_pad);
    hdr_bytes   = 14;
    file_bytes  = image_bytes + hdr_bytes;

    /* Cite: BMP encoder */
    if ((ret = ff_alloc_packet2(avctx, pkt, file_bytes)) < 0)
        return ret;
    dst = pkt->data;
    /* End cite */

    /* 14-byte header */
    bytestream_put_byte(&dst, 'X');              // filetype magic
    bytestream_put_byte(&dst, 'K');              // filetype magic
    bytestream_put_byte(&dst, 'C');              // filetype magic
    bytestream_put_byte(&dst, 'D');              // filetype magic
    bytestream_put_le32(&dst, file_bytes);       // size of entire file
    bytestream_put_le16(&dst, avctx->width);     // width of image in pixels
    bytestream_put_le16(&dst, avctx->height);    // height of image in pixels
    bytestream_put_le16(&dst, bits_per_pixel);   // bits per pixel

    /* copy the image row by row, zeroing the padding after each row */
    /* Cite: BMP encoder */
    src = frame->data[0];
    for (row = 0; row < avctx->height; row++) {
        memcpy(dst, src, row_bytes);
        dst += row_bytes;
        memset(dst, 0, row_pad);
        dst += row_pad;
        src += frame->linesize[0];
    }

    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;
    /* End cite */
    return 0;
}