本文整理汇总了C++中put_le32函数的典型用法代码示例。如果您正苦于以下问题:C++ put_le32函数的具体用法?C++ put_le32怎么用?C++ put_le32使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了put_le32函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: v210_convert
/*
 * Pack three planar 10-bit 4:2:2 planes (uint16_t samples) into the V210
 * wire format: three clipped 10-bit components per little-endian 32-bit
 * word, six pixels per group of four words, each output line zero-padded
 * out to dst_stride bytes.
 */
static void v210_convert(void *frame_bytes, picture_t *pic, int dst_stride)
{
    int width = pic->format.i_width;
    int height = pic->format.i_height;
    /* bytes of zero padding after the ((width*8+11)/12)*4 payload bytes */
    int line_padding = dst_stride - ((width * 8 + 11) / 12) * 4;
    int h, w;
    uint8_t *data = (uint8_t*)frame_bytes;
    const uint16_t *y = (const uint16_t*)pic->p[0].p_pixels;
    const uint16_t *u = (const uint16_t*)pic->p[1].p_pixels;
    const uint16_t *v = (const uint16_t*)pic->p[2].p_pixels;

/* Emit one 32-bit word holding three clipped 10-bit samples
 * (a -> bits 0-9, b -> bits 10-19, c -> bits 20-29) and advance all
 * three source pointers. Multiple evaluation of the arguments is
 * intentional here. */
#define WRITE_PIXELS(a, b, c) \
    do { \
        val = clip(*a++); \
        val |= (clip(*b++) << 10) | \
               (clip(*c++) << 20); \
        put_le32(&data, val); \
    } while (0)

    for (h = 0; h < height; h++) {
        uint32_t val = 0;
        /* full 6-pixel groups: 4 words in U Y V / Y U Y / V Y U / Y V Y order */
        for (w = 0; w < width - 5; w += 6) {
            WRITE_PIXELS(u, y, v);
            WRITE_PIXELS(y, u, y);
            WRITE_PIXELS(v, y, u);
            WRITE_PIXELS(y, v, y);
        }
        /* 2- or 4-pixel tail: first word is always U Y V */
        if (w < width - 1) {
            WRITE_PIXELS(u, y, v);
            val = clip(*y++);
            if (w == width - 2)
                put_le32(&data, val); /* 2-pixel tail: flush lone Y now */
#undef WRITE_PIXELS
        }
        /* 4-pixel tail: two more words finish the group */
        if (w < width - 3) {
            val |= (clip(*u++) << 10) | (clip(*y++) << 20);
            put_le32(&data, val);
            val = clip(*v++) | (clip(*y++) << 10);
            put_le32(&data, val);
        }
        memset(data, 0, line_padding);
        data += line_padding;
        /* step over the pitch remainder to the next source row:
         * pitches are in bytes, samples are 16-bit, chroma is half width */
        y += pic->p[0].i_pitch / 2 - width;
        u += pic->p[1].i_pitch / 2 - width / 2;
        v += pic->p[2].i_pitch / 2 - width / 2;
    }
}
示例2: avi_write_idx1
/*
 * Write the legacy AVI "idx1" index chunk: per-stream index entries are
 * merged into one chunk ordered by file position. Only done on seekable
 * output; a streamed output gets no idx1. Returns 0.
 */
static int avi_write_idx1(AVFormatContext *s)
{
    ByteIOContext *pb = s->pb;
    AVIContext *avi = s->priv_data;
    int64_t idx_chunk;
    int i;
    char tag[5];

    if (!url_is_streamed(pb)) {
        AVIStream *avist;
        AVIIentry* ie = 0, *tie;
        int empty, stream_id = -1;

        idx_chunk = ff_start_tag(pb, "idx1");
        /* reset each stream's cursor into its index entry list */
        for(i=0; i<s->nb_streams; i++){
            avist= s->streams[i]->priv_data;
            avist->entry=0;
        }
        /* n-way merge: repeatedly emit the pending entry with the
         * smallest file offset until every stream is exhausted */
        do {
            empty = 1;
            for (i=0; i<s->nb_streams; i++) {
                avist= s->streams[i]->priv_data;
                if (avist->indexes.entry <= avist->entry)
                    continue; /* all of this stream's entries written */
                tie = avi_get_ientry(&avist->indexes, avist->entry);
                /* short-circuit on `empty` keeps ie from being
                 * dereferenced while it is still NULL */
                if (empty || tie->pos < ie->pos) {
                    ie = tie;
                    stream_id = i;
                }
                empty = 0;
            }
            if (!empty) {
                avist= s->streams[stream_id]->priv_data;
                avi_stream2fourcc(&tag[0], stream_id,
                                  s->streams[stream_id]->codec->codec_type);
                put_tag(pb, &tag[0]);    /* dwChunkId */
                put_le32(pb, ie->flags); /* dwFlags */
                put_le32(pb, ie->pos);   /* dwOffset */
                put_le32(pb, ie->len);   /* dwSize */
                avist->entry++;
            }
        } while (!empty);
        ff_end_tag(pb, idx_chunk);
        avi_write_counters(s, avi->riff_id);
    }
    return 0;
}
示例3: gxf_write_umf_media_description
/*
 * Write one UMF media description record per stream, plus one extra
 * record for the internal timecode track (the i == s->nb_streams
 * iteration). Each record's 16-bit length field is back-patched once
 * its size is known. Returns the number of bytes written.
 */
static int gxf_write_umf_media_description(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    ByteIOContext *pb = s->pb;
    int64_t pos;
    int i, j;

    pos = url_ftell(pb);
    gxf->umf_media_offset = pos - gxf->umf_start_offset;
    /* `<=` on purpose: the final iteration emits the timecode track */
    for (i = 0; i <= s->nb_streams; ++i) {
        GXFStreamContext *sc;
        int64_t startpos, curpos;

        if (i == s->nb_streams)
            sc = &gxf->timecode_track;
        else
            sc = s->streams[i]->priv_data;

        startpos = url_ftell(pb);
        put_le16(pb, 0); /* length — back-patched below */
        put_le16(pb, sc->media_info);
        put_le16(pb, 0); /* reserved */
        put_le16(pb, 0); /* reserved */
        put_le32(pb, gxf->nb_fields);
        put_le32(pb, 0); /* attributes rw, ro */
        put_le32(pb, 0); /* mark in */
        put_le32(pb, gxf->nb_fields); /* mark out */
        /* 88-byte name field: pattern + big-endian media_info, zero-padded */
        put_buffer(pb, ES_NAME_PATTERN, strlen(ES_NAME_PATTERN));
        put_be16(pb, sc->media_info);
        for (j = strlen(ES_NAME_PATTERN)+2; j < 88; j++)
            put_byte(pb, 0);
        put_le32(pb, sc->track_type);
        put_le32(pb, sc->sample_rate);
        put_le32(pb, sc->sample_size);
        put_le32(pb, 0); /* reserved */

        if (sc == &gxf->timecode_track)
            gxf_write_umf_media_timecode(pb, sc); /* 8 zero bytes */
        else {
            AVStream *st = s->streams[i];
            /* codec-specific trailing section of the record */
            switch (st->codec->codec_id) {
            case CODEC_ID_MPEG2VIDEO:
                gxf_write_umf_media_mpeg(pb, st);
                break;
            case CODEC_ID_PCM_S16LE:
                gxf_write_umf_media_audio(pb, sc);
                break;
            case CODEC_ID_DVVIDEO:
                gxf_write_umf_media_dv(pb, sc);
                break;
            }
        }

        /* back-patch the record length written as 0 at startpos */
        curpos = url_ftell(pb);
        url_fseek(pb, startpos, SEEK_SET);
        put_le16(pb, curpos - startpos);
        url_fseek(pb, curpos, SEEK_SET);
    }
    return url_ftell(pb) - pos;
}
示例4: gxf_write_umf_user_data
/* Write the fixed 20-byte, all-zero-payload UMF user data section and
 * record its offset relative to the UMF packet start. Returns the
 * section length (20). */
static int gxf_write_umf_user_data(ByteIOContext *pb, GXFContext *ctx)
{
    int64_t start = url_ftell(pb);
    int i;

    ctx->umf_user_data_offset = start - ctx->umf_start_offset;

    put_le32(pb, 20); /* section length */
    put_le32(pb, 0);
    put_le16(pb, 0);
    put_le16(pb, 0);
    put_le32(pb, 0);
    for (i = 0; i < 4; i++)
        put_byte(pb, 0);

    return 20;
}
示例5: gxf_write_flt_packet
/* Write a FLT packet: header, two count words, then 1000 zero
 * placeholder entries. Returns the final packet size via
 * updatePacketSize(). */
static int gxf_write_flt_packet(ByteIOContext *pb, GXFContext *ctx)
{
    int64_t packet_start = url_ftell(pb);
    int entry;

    gxf_write_packet_header(pb, PKT_FLT);

    put_le32(pb, 1000); /* number of fields */
    put_le32(pb, 0);    /* number of active flt entries */

    for (entry = 0; entry < 1000; entry++)
        put_le32(pb, 0); /* placeholder entry */

    return updatePacketSize(pb, packet_start);
}
示例6: swf_write_trailer
/*
 * Finish the SWF file: free the audio fifo, emit the END tag, then (on
 * seekable output with a video stream) back-patch the header fields
 * that were written as placeholders. Returns 0.
 */
static int swf_write_trailer(AVFormatContext *s)
{
    SWFContext *swf = s->priv_data;
    ByteIOContext *pb = s->pb;
    AVCodecContext *enc, *video_enc;
    int file_size, i;

    video_enc = NULL;
    for(i=0;i<s->nb_streams;i++) {
        enc = s->streams[i]->codec;
        if (enc->codec_type == CODEC_TYPE_VIDEO)
            video_enc = enc;
        else
            /* NOTE(review): frees the single shared audio fifo from
             * inside the stream loop — would double-free if more than
             * one non-video stream existed; presumably the muxer only
             * ever accepts one audio stream. Verify at init. */
            av_fifo_free(&swf->audio_fifo);
    }

    put_swf_tag(s, TAG_END);
    put_swf_end_tag(s);
    put_flush_packet(s->pb);

    /* patch file size and number of frames if not streamed */
    if (!url_is_streamed(s->pb) && video_enc) {
        file_size = url_ftell(pb);
        url_fseek(pb, 4, SEEK_SET); /* FileLength field of the SWF header */
        put_le32(pb, file_size);
        url_fseek(pb, swf->duration_pos, SEEK_SET);
        put_le16(pb, swf->video_frame_number);
        url_fseek(pb, swf->vframes_pos, SEEK_SET);
        put_le16(pb, swf->video_frame_number);
        url_fseek(pb, file_size, SEEK_SET); /* restore position at EOF */
    }
    return 0;
}
示例7: btd_error_invalid_args
/* D-Bus method handler: queue a "set cumulative wheel revolutions"
 * write to the CSC control point characteristic. The reply is sent
 * asynchronously from controlpoint_write_cb, so NULL is returned. */
static DBusMessage *set_cumulative_wheel_rev(DBusConnection *conn,
                                             DBusMessage *msg, void *data)
{
    struct csc *csc = data;
    struct controlpoint_req *request;
    dbus_uint32_t wheel_rev;
    uint8_t att_val[5]; /* uint8 opcode + uint32 value */

    if (!dbus_message_get_args(msg, NULL, DBUS_TYPE_UINT32, &wheel_rev,
                               DBUS_TYPE_INVALID))
        return btd_error_invalid_args(msg);

    /* only one control-point operation may be in flight */
    if (csc->pending_req != NULL)
        return btd_error_in_progress(msg);

    request = g_new(struct controlpoint_req, 1);
    request->csc = csc;
    request->opcode = SET_CUMULATIVE_VALUE;
    request->msg = dbus_message_ref(msg);

    csc->pending_req = request;

    att_val[0] = SET_CUMULATIVE_VALUE;
    put_le32(wheel_rev, att_val + 1);

    gatt_write_char(csc->attrib, csc->controlpoint_val_handle, att_val,
                    sizeof(att_val), controlpoint_write_cb, request);

    return NULL;
}
示例8: avi_write_ix
/*
 * Write one OpenDML "ix??" leaf index chunk per stream and hook each
 * into its stream's master "indx" index via back-patching. Requires a
 * seekable output. Returns 0 on success, -1 once the master index has
 * no room left.
 */
static int avi_write_ix(AVFormatContext *s)
{
    ByteIOContext *pb = s->pb;
    AVIContext *avi = s->priv_data;
    char tag[5];
    char ix_tag[] = "ix00";
    int i, j;

    assert(!url_is_streamed(pb));

    if (avi->riff_id > AVI_MASTER_INDEX_SIZE)
        return -1; /* master index full */

    for (i=0;i<s->nb_streams;i++) {
        AVIStream *avist= s->streams[i]->priv_data;
        int64_t ix, pos;

        avi_stream2fourcc(&tag[0], i, s->streams[i]->codec->codec_type);
        ix_tag[3] = '0' + i; /* e.g. "ix00", "ix01", ... */

        /* Writing AVI OpenDML leaf index chunk */
        ix = url_ftell(pb);
        put_tag(pb, &ix_tag[0]); /* ix?? */
        put_le32(pb, avist->indexes.entry * 8 + 24);
                                 /* chunk size: 8 bytes/entry + header */
        put_le16(pb, 2);         /* wLongsPerEntry */
        put_byte(pb, 0);         /* bIndexSubType (0 == frame index) */
        put_byte(pb, 1);         /* bIndexType (1 == AVI_INDEX_OF_CHUNKS) */
        put_le32(pb, avist->indexes.entry);
                                 /* nEntriesInUse */
        put_tag(pb, &tag[0]);    /* dwChunkId */
        put_le64(pb, avi->movi_list);/* qwBaseOffset */
        put_le32(pb, 0);         /* dwReserved_3 (must be 0) */

        for (j=0; j<avist->indexes.entry; j++) {
            AVIIentry* ie = avi_get_ientry(&avist->indexes, j);
            put_le32(pb, ie->pos + 8); /* offset past the chunk header */
            /* OpenDML: top bit of the size word set means "not a
             * keyframe"; flag 0x10 here appears to mark keyframes */
            put_le32(pb, ((uint32_t)ie->len & ~0x80000000) |
                         (ie->flags & 0x10 ? 0 : 0x80000000));
        }
        put_flush_packet(pb);
        pos = url_ftell(pb);

        /* Updating one entry in the AVI OpenDML master index */
        url_fseek(pb, avist->indexes.indx_start - 8, SEEK_SET);
        put_tag(pb, "indx");        /* enabling this entry */
        url_fskip(pb, 8);
        put_le32(pb, avi->riff_id); /* nEntriesInUse */
        url_fskip(pb, 16*avi->riff_id); /* 16 bytes per master entry */
        put_le64(pb, ix);           /* qwOffset */
        put_le32(pb, pos - ix);     /* dwSize */
        put_le32(pb, avist->indexes.entry); /* dwDuration */

        url_fseek(pb, pos, SEEK_SET); /* back to end of leaf chunk */
    }
    return 0;
}
示例9: gxf_write_umf_material_description
/* Emit the fixed 48-byte UMF material description section, including a
 * BCD-like packed HH:MM:SS:FF mark-out timecode derived from the total
 * field count. Returns the section length (48). */
static int gxf_write_umf_material_description(ByteIOContext *pb, GXFContext *ctx)
{
    // XXX drop frame
    uint32_t hours    = ctx->nb_fields / (ctx->sample_rate * 3600) % 24;
    uint32_t minutes  = ctx->nb_fields / (ctx->sample_rate * 60) % 60;
    uint32_t seconds  = ctx->nb_fields / ctx->sample_rate % 60;
    uint32_t fields   = ctx->nb_fields % ctx->sample_rate;
    uint32_t timecode = hours << 24 | minutes << 16 | seconds << 8 | fields;

    put_le32(pb, ctx->flags);
    put_le32(pb, ctx->nb_fields);     /* length of the longest track */
    put_le32(pb, ctx->nb_fields);     /* length of the shortest track */
    put_le32(pb, 0);                  /* mark in */
    put_le32(pb, ctx->nb_fields);     /* mark out */
    put_le32(pb, 0);                  /* timecode mark in */
    put_le32(pb, timecode);           /* timecode mark out */
    put_le64(pb, ctx->fc->timestamp); /* modification time */
    put_le64(pb, ctx->fc->timestamp); /* creation time */
    put_le16(pb, 0);                  /* reserved */
    put_le16(pb, 0);                  /* reserved */
    put_le16(pb, ctx->audio_tracks);
    put_le16(pb, 0);                  /* timecode track count */
    put_le16(pb, 0);                  /* reserved */
    put_le16(pb, ctx->mpeg_tracks);
    return 48;
}
示例10: gxf_write_umf_material_description
/* Emit the fixed 48-byte UMF material description section. The packed
 * HH:MM:SS:FF mark-out timecode is derived from the field count using
 * a 60- or 50-field base chosen by the stream time base. Returns 48. */
static int gxf_write_umf_material_description(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    ByteIOContext *pb = s->pb;
    int timecode_base = gxf->time_base.den == 60000 ? 60 : 50;
    // XXX drop frame
    uint32_t hours    = gxf->nb_fields / (timecode_base * 3600) % 24;
    uint32_t minutes  = gxf->nb_fields / (timecode_base * 60) % 60;
    uint32_t seconds  = gxf->nb_fields / timecode_base % 60;
    uint32_t fields   = gxf->nb_fields % timecode_base;
    uint32_t timecode = hours << 24 | minutes << 16 | seconds << 8 | fields;

    put_le32(pb, gxf->flags);
    put_le32(pb, gxf->nb_fields); /* length of the longest track */
    put_le32(pb, gxf->nb_fields); /* length of the shortest track */
    put_le32(pb, 0);              /* mark in */
    put_le32(pb, gxf->nb_fields); /* mark out */
    put_le32(pb, 0);              /* timecode mark in */
    put_le32(pb, timecode);       /* timecode mark out */
    put_le64(pb, s->timestamp);   /* modification time */
    put_le64(pb, s->timestamp);   /* creation time */
    put_le16(pb, 0);              /* reserved */
    put_le16(pb, 0);              /* reserved */
    put_le16(pb, gxf->audio_tracks);
    put_le16(pb, 1);              /* timecode track count */
    put_le16(pb, 0);              /* reserved */
    put_le16(pb, gxf->mpeg_tracks);
    return 48;
}
示例11: asf_write_index
/*
 * Write the ASF Simple Index Object: GUID + object size header followed
 * by one (packet_number, packet_count) pair per index entry.
 * `max` is the maximum packet count per entry, `count` the number of
 * entries. Returns 0.
 *
 * Fix: the loop counter was a signed `int` compared against the
 * unsigned 32-bit `count`; using uint32_t avoids the signed/unsigned
 * comparison and the signed-overflow UB if count ever exceeds INT_MAX.
 */
static int asf_write_index(AVFormatContext *s, ASFIndex *index, uint16_t max, uint32_t count)
{
    ByteIOContext *pb = s->pb;
    uint32_t i;

    put_guid(pb, &ff_asf_simple_index_header);
    put_le64(pb, 24 + 16 + 8 + 4 + 4 + (4 + 2)*count); /* object size */
    put_guid(pb, &ff_asf_my_guid);
    put_le64(pb, ASF_INDEXED_INTERVAL);
    put_le32(pb, max);   /* maximum packet count */
    put_le32(pb, count); /* index entry count */
    for (i = 0; i < count; i++) {
        put_le32(pb, index[i].packet_number);
        put_le16(pb, index[i].packet_count);
    }
    return 0;
}
示例12: end_tag
/* Back-patch the little-endian 32-bit chunk size field that sits four
 * bytes before `start`, using the current write position as the chunk
 * end, then restore the position. */
void end_tag(ByteIOContext *pb, offset_t start)
{
    offset_t end = url_ftell(pb);

    url_fseek(pb, start - 4, SEEK_SET);
    put_le32(pb, (uint32_t)(end - start));
    url_fseek(pb, end, SEEK_SET);
}
示例13: avi_write_idx1
/*
 * Older variant of the "idx1" index writer: per-stream entry cursors
 * live in a local array bounded by MAX_STREAMS instead of the stream
 * context. Entries are merged into one chunk ordered by file position;
 * only done on seekable output. Returns 0.
 */
static int avi_write_idx1(AVFormatContext *s)
{
    ByteIOContext *pb = &s->pb;
    AVIContext *avi = s->priv_data;
    offset_t idx_chunk;
    int i;
    char tag[5];

    if (!url_is_streamed(pb)) {
        AVIIentry* ie = 0, *tie;
        int entry[MAX_STREAMS]; /* next unwritten entry per stream */
        int empty, stream_id = -1;

        idx_chunk = start_tag(pb, "idx1");
        memset(&entry[0], 0, sizeof(entry));
        /* n-way merge: repeatedly emit the pending entry with the
         * smallest file offset until every stream is exhausted */
        do {
            empty = 1;
            for (i=0; i<s->nb_streams; i++) {
                if (avi->indexes[i].entry <= entry[i])
                    continue; /* stream i exhausted */
                tie = avi_get_ientry(&avi->indexes[i], entry[i]);
                /* short-circuit on `empty` keeps ie from being
                 * dereferenced while it is still NULL */
                if (empty || tie->pos < ie->pos) {
                    ie = tie;
                    stream_id = i;
                }
                empty = 0;
            }
            if (!empty) {
                avi_stream2fourcc(&tag[0], stream_id,
                                  s->streams[stream_id]->codec->codec_type);
                put_tag(pb, &tag[0]);    /* dwChunkId */
                put_le32(pb, ie->flags); /* dwFlags */
                put_le32(pb, ie->pos);   /* dwOffset */
                put_le32(pb, ie->len);   /* dwSize */
                entry[stream_id]++;
            }
        } while (!empty);
        end_tag(pb, idx_chunk);
        avi_write_counters(s, avi->riff_id);
    }
    return 0;
}
示例14: sox_write_header
/*
 * Write a SoX native format header. The whole header's endianness is
 * chosen by codec: pcm_s32le -> ".SoX" magic, little-endian fields;
 * pcm_s32be -> reversed magic "XoS.", big-endian fields. An optional
 * "comment" metadata string is appended, zero-padded to an 8-byte
 * boundary. Returns 0, or -1 for any other codec.
 */
static int sox_write_header(AVFormatContext *s)
{
    SoXContext *sox = s->priv_data;
    ByteIOContext *pb = s->pb;
    AVCodecContext *enc = s->streams[0]->codec;
    AVMetadataTag *comment;
    size_t comment_len = 0, comment_size;

    comment = av_metadata_get(s->metadata, "comment", NULL, 0);
    if (comment)
        comment_len = strlen(comment->value);
    /* round comment field up to a multiple of 8 bytes.
     * NOTE(review): when comment_len is already a multiple of 8 no NUL
     * terminator is stored — presumably readers use the stored size;
     * verify against the demuxer */
    comment_size = (comment_len + 7) & ~7;

    sox->header_size = SOX_FIXED_HDR + comment_size;

    if (enc->codec_id == CODEC_ID_PCM_S32LE) {
        put_tag(pb, ".SoX");
        put_le32(pb, sox->header_size);
        put_le64(pb, 0); /* number of samples */
        put_le64(pb, av_dbl2int(enc->sample_rate)); /* rate as IEEE double bits */
        put_le32(pb, enc->channels);
        put_le32(pb, comment_size);
    } else if (enc->codec_id == CODEC_ID_PCM_S32BE) {
        put_tag(pb, "XoS.");
        put_be32(pb, sox->header_size);
        put_be64(pb, 0); /* number of samples */
        put_be64(pb, av_dbl2int(enc->sample_rate)); /* rate as IEEE double bits */
        put_be32(pb, enc->channels);
        put_be32(pb, comment_size);
    } else {
        av_log(s, AV_LOG_ERROR, "invalid codec; use pcm_s32le or pcm_s32be\n");
        return -1;
    }

    if (comment_len)
        put_buffer(pb, comment->value, comment_len);
    /* zero-pad the comment field out to comment_size bytes */
    for ( ; comment_size > comment_len; comment_len++)
        put_byte(pb, 0);

    put_flush_packet(pb);
    return 0;
}
示例15: put_uuid_le
/* Serialize a Bluetooth UUID into little-endian wire form at dst:
 * 16- and 32-bit UUIDs are written directly; 128-bit UUIDs (stored
 * big-endian) are byte-swapped. */
static inline void put_uuid_le(const bt_uuid_t *src, void *dst)
{
    switch (src->type) {
    case BT_UUID16:
        put_le16(src->value.u16, dst);
        break;
    case BT_UUID32:
        put_le32(src->value.u32, dst);
        break;
    default:
        /* Convert from 128-bit BE to LE */
        bswap_128(&src->value.u128, dst);
        break;
    }
}