本文整理汇总了C++中GF_LOG函数的典型用法代码示例。如果您正苦于以下问题:C++ GF_LOG函数的具体用法?C++ GF_LOG怎么用?C++ GF_LOG使用的例子?那么, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了GF_LOG函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: ISOR_ServiceCommand
/*Service/channel command handler for the ISOBMFF (MP4/3GP) reader input service.
Dispatches service-level queries (meta-data info, audio-track presence) and
per-channel commands (padding, pull mode, buffering, duration, play, stop).
NOTE(review): the tail of this function was omitted in this excerpt (the switch
is not closed here); only the visible cases are documented.*/
GF_Err ISOR_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com)
{
Double track_dur, media_dur;
ISOMChannel *ch;
ISOMReader *read;
if (!plug || !plug->priv || !com) return GF_SERVICE_ERROR;
read = (ISOMReader *) plug->priv;
/*service info query: expose iTunes-style tags stored in the movie*/
if (com->command_type==GF_NET_SERVICE_INFO) {
u32 tag_len;
const char *tag;
if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_NAME, &tag, &tag_len)==GF_OK) com->info.name = tag;
if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_ARTIST, &tag, &tag_len)==GF_OK) com->info.artist = tag;
if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_ALBUM, &tag, &tag_len)==GF_OK) com->info.album = tag;
if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COMMENT, &tag, &tag_len)==GF_OK) com->info.comment = tag;
if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_TRACK, &tag, &tag_len)==GF_OK) {
/*NOTE(review): packs bytes 2..5 of the binary 'trkn' payload as
(track_number<<16)|track_count; assumes tag_len >= 6 - confirm the getter
guarantees a minimum payload size*/
com->info.track_info = (((tag[2]<<8)|tag[3]) << 16) | ((tag[4]<<8)|tag[5]);
}
if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COMPOSER, &tag, &tag_len)==GF_OK) com->info.composer = tag;
if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_WRITER, &tag, &tag_len)==GF_OK) com->info.writer = tag;
if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_GENRE, &tag, &tag_len)==GF_OK) {
/*NOTE(review): a non-zero first byte is treated as a textual genre (numeric id
cleared); the numeric id is only decoded when tag[0]==0 - verify against the
'gnre' payload layout*/
if (tag[0]) {
com->info.genre = 0;
} else {
com->info.genre = (tag[0]<<8) | tag[1];
}
}
return GF_OK;
}
/*service-level probe: report whether the movie carries at least one audio track*/
if (com->command_type==GF_NET_SERVICE_HAS_AUDIO) {
u32 i, count;
count = gf_isom_get_track_count(read->mov);
for (i=0; i<count; i++) {
if (gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_AUDIO) return GF_OK;
}
return GF_NOT_SUPPORTED;
}
/*everything below is channel-specific: resolve the target channel first*/
if (!com->base.on_channel) return GF_NOT_SUPPORTED;
ch = isor_get_channel(read, com->base.on_channel);
if (!ch) return GF_STREAM_NOT_FOUND;
switch (com->command_type) {
case GF_NET_CHAN_SET_PADDING:
/*forward the output-padding request to the sample reader*/
if (!ch->track) return GF_OK;
gf_isom_set_sample_padding(read->mov, ch->track, com->pad.padding_bytes);
return GF_OK;
case GF_NET_CHAN_SET_PULL:
ch->is_pulling = 1;
return GF_OK;
case GF_NET_CHAN_INTERACTIVE:
return GF_OK;
case GF_NET_CHAN_BUFFER:
/*no network buffering needed for this reader*/
com->buffer.max = com->buffer.min = 0;
return GF_OK;
case GF_NET_CHAN_DURATION:
if (!ch->track) {
com->duration.duration = 0;
return GF_OK;
}
ch->duration = gf_isom_get_track_duration(read->mov, ch->track);
track_dur = (Double) (s64) ch->duration;
track_dur /= read->time_scale;
if (gf_isom_get_edit_segment_count(read->mov, ch->track)) {
/*edit list present: trust the track duration, rescaled to the media timescale*/
com->duration.duration = (Double) track_dur;
ch->duration = (u32) (track_dur * ch->time_scale);
} else {
/*some file indicate a wrong TrackDuration, get the longest*/
ch->duration = gf_isom_get_media_duration(read->mov, ch->track);
media_dur = (Double) (s64) ch->duration;
media_dur /= ch->time_scale;
com->duration.duration = MAX(track_dur, media_dur);
}
return GF_OK;
case GF_NET_CHAN_PLAY:
/*only pull mode is supported by this reader*/
if (!ch->is_pulling) return GF_NOT_SUPPORTED;
assert(!ch->is_playing);
isor_reset_reader(ch);
ch->speed = com->play.speed;
ch->start = ch->end = 0;
if (com->play.speed>0) {
/*forward playback: convert start/end ranges to media timescale, snapping to
sample boundaries via check_round*/
if (com->play.start_range>=0) {
ch->start = (u64) (s64) (com->play.start_range * ch->time_scale);
ch->start = check_round(ch, ch->start, com->play.start_range, 1);
}
if (com->play.end_range >= com->play.start_range) {
ch->end = (u64) (s64) (com->play.end_range*ch->time_scale);
ch->end = check_round(ch, ch->end, com->play.end_range, 0);
}
} else if (com->play.speed<0) {
/*reverse playback: ranges are converted as-is, without rounding*/
if (com->play.end_range>=com->play.start_range) ch->start = (u64) (s64) (com->play.start_range * ch->time_scale);
if (com->play.end_range >= 0) ch->end = (u64) (s64) (com->play.end_range*ch->time_scale);
}
ch->is_playing = 1;
if (com->play.dash_segment_switch) ch->wait_for_segment_switch = 1;
GF_LOG(GF_LOG_DEBUG, GF_LOG_NETWORK, ("[IsoMedia] Starting channel playback "LLD" to "LLD" (%g to %g)\n", ch->start, ch->end, com->play.start_range, com->play.end_range));
return GF_OK;
case GF_NET_CHAN_STOP:
isor_reset_reader(ch);
return GF_OK;
//.........这里部分代码省略.........
示例2: gf_sm_load_init_isom
/*Initializes an ISOBMFF-based scene loader (BIFS or LASeR): loads the IOD,
locates the root scene stream's ESD, extracts scene size / pixel-metrics info
from the BIFS config, then installs the isom run/done/suspend callbacks.
Returns GF_BAD_PARAM when no ISO file is attached, otherwise GF_OK (including
when no usable scene stream is found).
Fixes vs original: guards against a NULL ESD returned by gf_isom_get_esd
(previously dereferenced unchecked) and against a NULL BIFS config.*/
GF_Err gf_sm_load_init_isom(GF_SceneLoader *load)
{
	u32 i;
	GF_BIFSConfig *bc;
	GF_ESD *esd;
	GF_Err e;
	char *scene_msg = "MPEG-4 BIFS Scene Parsing";
	if (!load->isom) return GF_BAD_PARAM;

	/*load IOD*/
	load->ctx->root_od = (GF_ObjectDescriptor *) gf_isom_get_root_od(load->isom);
	if (!load->ctx->root_od) {
		e = gf_isom_last_error(load->isom);
		if (e) return e;
	} else if ((load->ctx->root_od->tag != GF_ODF_OD_TAG) && (load->ctx->root_od->tag != GF_ODF_IOD_TAG)) {
		gf_odf_desc_del((GF_Descriptor *) load->ctx->root_od);
		load->ctx->root_od = NULL;
	}

	esd = NULL;
	/*get root scene stream: first scene track declared in the root OD*/
	for (i=0; i<gf_isom_get_track_count(load->isom); i++) {
		u32 type = gf_isom_get_media_type(load->isom, i+1);
		if (type != GF_ISOM_MEDIA_SCENE) continue;
		if (! gf_isom_is_track_in_root_od(load->isom, i+1) ) continue;
		esd = gf_isom_get_esd(load->isom, i+1, 1);
		/*fix: gf_isom_get_esd may return NULL - skip such tracks instead of
		dereferencing a NULL pointer below*/
		if (!esd) continue;
		/*skip remote streams*/
		if (esd->URLString) {
			gf_odf_desc_del((GF_Descriptor *)esd);
			esd = NULL;
			continue;
		}
		/*make sure we load the root BIFS stream first*/
		if (esd->dependsOnESID && (esd->dependsOnESID!=esd->ESID) ) {
			u32 track = gf_isom_get_track_by_id(load->isom, esd->dependsOnESID);
			if (gf_isom_get_media_type(load->isom, track) != GF_ISOM_MEDIA_OD) {
				gf_odf_desc_del((GF_Descriptor *)esd);
				esd = NULL;
				continue;
			}
		}
		if (esd->decoderConfig->objectTypeIndication==0x09) scene_msg = "MPEG-4 LASeR Scene Parsing";
		break;
	}
	if (!esd) return GF_OK;

	GF_LOG(GF_LOG_INFO, GF_LOG_PARSER, ("%s\n", scene_msg));

	/*BIFS: update size & pixel metrics info*/
	if (esd->decoderConfig->objectTypeIndication<=2) {
		bc = gf_odf_get_bifs_config(esd->decoderConfig->decoderSpecificInfo, esd->decoderConfig->objectTypeIndication);
		/*fix: guard against a NULL config (missing/corrupted decoder specific info)*/
		if (bc) {
			if (!bc->elementaryMasks && bc->pixelWidth && bc->pixelHeight) {
				load->ctx->scene_width = bc->pixelWidth;
				load->ctx->scene_height = bc->pixelHeight;
				load->ctx->is_pixel_metrics = bc->pixelMetrics;
			}
			gf_odf_desc_del((GF_Descriptor *) bc);
		}
		/*note we don't load the first BIFS AU to avoid storing the BIFS decoder, needed to properly handle quantization*/
	}
	/*LASeR*/
	else if (esd->decoderConfig->objectTypeIndication==0x09) {
		load->ctx->is_pixel_metrics = 1;
	}
	gf_odf_desc_del((GF_Descriptor *) esd);
	esd = NULL;

	load->process = gf_sm_load_run_isom;
	load->done = gf_sm_load_done_isom;
	load->suspend = gf_sm_isom_suspend;
	return GF_OK;
}
示例3: OSVC_AttachStream
/*Attaches an elementary stream to the OpenSVC decoder: on the base layer,
initializes the codec and feeds all SPS/PPS NAL units from the AVC decoder
configuration to discover resolution and pixel aspect ratio; computes the
output buffer size (YUV 4:2:0 with a 32-pixel stride margin).
Fix vs original: the pixel-aspect-ratio packing used the logical operator '||'
instead of bitwise '|', collapsing ctx->pixel_ar to 0 or 1.*/
static GF_Err OSVC_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd)
{
	u32 i, count;
	s32 res;
	OPENSVCFRAME Picture;
	int Layer[4];
	OSVCDec *ctx = (OSVCDec*) ifcg->privateStack;

	/*todo: we should check base layer of this stream is indeed our base layer*/
	if (!ctx->ES_ID) {
		ctx->ES_ID = esd->ESID;
		ctx->width = ctx->height = ctx->out_size = 0;
		if (!esd->dependsOnESID) ctx->baseES_ID = esd->ESID;
	}

	if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
		GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
		if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;
		if (!esd->dependsOnESID) {
			ctx->nalu_size_length = cfg->nal_unit_size;
			if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) return GF_IO_ERR;
		}

		/*decode all NALUs*/
		count = gf_list_count(cfg->sequenceParameterSets);
		SetCommandLayer(Layer, 255, 0, &res, 0);//bufindex can be reset without pb
		for (i=0; i<count; i++) {
			u32 w=0, h=0, sid;
			s32 par_n=0, par_d=0;
			GF_AVCConfigSlot *slc = (GF_AVCConfigSlot*)gf_list_get(cfg->sequenceParameterSets, i);
#ifndef GPAC_DISABLE_AV_PARSERS
			gf_avc_get_sps_info(slc->data, slc->size, &sid, &w, &h, &par_n, &par_d);
#endif
			/*by default use the base layer*/
			if (!i) {
				if ((ctx->width<w) || (ctx->height<h)) {
					ctx->width = w;
					ctx->height = h;
					/*fix: pack PAR as (num<<16)|den with bitwise OR, not logical '||'*/
					if ( ((s32)par_n>0) && ((s32)par_d>0) )
						ctx->pixel_ar = (par_n<<16) | par_d;
				}
			}
			res = decodeNAL(ctx->codec, (unsigned char *) slc->data, slc->size, &Picture, Layer);
			if (res<0) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding SPS %d\n", res));
			}
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[SVC Decoder] Attach: SPS id=\"%d\" code=\"%d\" size=\"%d\"\n", slc->id, slc->data[0] & 0x1F, slc->size));
		}

		count = gf_list_count(cfg->pictureParameterSets);
		for (i=0; i<count; i++) {
			u32 sps_id, pps_id;
			GF_AVCConfigSlot *slc = (GF_AVCConfigSlot*)gf_list_get(cfg->pictureParameterSets, i);
			gf_avc_get_pps_info(slc->data, slc->size, &pps_id, &sps_id);
			res = decodeNAL(ctx->codec, (unsigned char *) slc->data, slc->size, &Picture, Layer);
			if (res<0) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding PPS %d\n", res));
			}
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[SVC Decoder] Attach: PPS id=\"%d\" code=\"%d\" size=\"%d\" sps_id=\"%d\"\n", pps_id, slc->data[0] & 0x1F, slc->size, sps_id));
		}
		ctx->state_found = GF_TRUE;
		gf_odf_avc_cfg_del(cfg);
	} else {
		/*no decoder config: inband parameter sets expected (not supported once a
		NALU length size has been set)*/
		if (ctx->nalu_size_length) {
			return GF_NOT_SUPPORTED;
		}
		ctx->nalu_size_length = 0;
		if (!esd->dependsOnESID) {
			if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) return GF_IO_ERR;
		}
		/*fix: default square-pixel AR 1:1 packed with bitwise OR, not logical '||'*/
		ctx->pixel_ar = (1<<16) | 1;
	}
	ctx->stride = ctx->width + 32;
	ctx->CurrentDqId = ctx->MaxDqId = 0;
	ctx->out_size = ctx->stride * ctx->height * 3 / 2;
	return GF_OK;
}
示例4: gf_enum_directory
//.........这里部分代码省略.........
/*NOTE(review): fragment of gf_enum_directory - the enclosing function header and
the tail of the enumeration loop were omitted in this excerpt. The visible code
builds the platform search path, opens the directory (FindFirstFileW on Windows,
opendir elsewhere) and begins filtering entries.*/
case '/':
case '\\':
/*directory string already ends with a separator: just append the wildcard*/
swprintf(path, MAX_PATH, L"%s*", w_dir);
break;
default:
swprintf(path, MAX_PATH, L"%s%c*", w_dir, GF_PATH_SEPARATOR);
break;
}
{
const char* tmpfilter = filter;
gf_utf8_mbstowcs(w_filter, sizeof(w_filter), &tmpfilter);
}
#else
strcpy(path, dir);
if (path[strlen(path)-1] != '/') strcat(path, "/");
#endif
#ifdef WIN32
SearchH= FindFirstFileW(path, &FindData);
if (SearchH == INVALID_HANDLE_VALUE) return GF_IO_ERR;
/*strip the trailing '*' wildcard used for the search pattern*/
#if defined (_WIN32_WCE)
_path[strlen(_path)-1] = 0;
#else
path[wcslen(path)-1] = 0;
#endif
while (SearchH != INVALID_HANDLE_VALUE) {
#else
the_dir = opendir(path);
if (the_dir == NULL) {
GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] Cannot open directory %s for enumeration: %d\n", path, errno));
return GF_IO_ERR;
}
the_file = readdir(the_dir);
while (the_file) {
#endif
memset(&file_info, 0, sizeof(GF_FileEnumInfo) );
/*skip the "." / ".." pseudo-entries (and, on POSIX, all dot-files)*/
#if defined (_WIN32_WCE)
if (!wcscmp(FindData.cFileName, _T(".") )) goto next;
if (!wcscmp(FindData.cFileName, _T("..") )) goto next;
#elif defined(WIN32)
if (!wcscmp(FindData.cFileName, L".")) goto next;
if (!wcscmp(FindData.cFileName, L"..")) goto next;
#else
if (!strcmp(the_file->d_name, "..")) goto next;
if (the_file->d_name[0] == '.') goto next;
#endif
#ifdef WIN32
/*honor the directory-only vs file-only enumeration mode*/
file_info.directory = (FindData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) ? GF_TRUE : GF_FALSE;
if (!enum_directory && file_info.directory) goto next;
if (enum_directory && !file_info.directory) goto next;
#endif
if (filter) {
#if defined (_WIN32_WCE)
short ext[30];
short *sep = wcsrchr(FindData.cFileName, (wchar_t) '.');
if (!sep) goto next;
示例5: svg_drawable_pick
/*Performs pointer picking on an SVG drawable: flattens the node's attributes,
applies property inheritance and the local transform, tests the ray origin
against the drawable's path and, on a hit, records the hit state (local point,
world/local matrices, normal, texture coordinates, active sensors) in the
compositor. Restores the parent transform and inherited properties on exit.*/
void svg_drawable_pick(GF_Node *node, Drawable *drawable, GF_TraverseState *tr_state)
{
DrawAspect2D asp;
GF_Matrix2D inv_2d;
Fixed x, y;
Bool picked = 0;
GF_Compositor *compositor = tr_state->visual->compositor;
SVGPropertiesPointers backup_props;
GF_Matrix2D backup_matrix;
GF_Matrix mx_3d;
SVGAllAttributes all_atts;
if (!drawable->path) return;
gf_svg_flatten_attributes((SVG_Element *)node, &all_atts);
/*save inherited properties so the other exit paths can restore them*/
memcpy(&backup_props, tr_state->svg_props, sizeof(SVGPropertiesPointers));
gf_svg_apply_inheritance(&all_atts, tr_state->svg_props);
/*NOTE(review): this early return skips the backup_props restore performed on
the other exit paths - confirm callers reset svg_props after traversal*/
if (compositor_svg_is_display_off(tr_state->svg_props)) return;
compositor_svg_apply_local_transformation(tr_state, &all_atts, &backup_matrix, &mx_3d);
memset(&asp, 0, sizeof(DrawAspect2D));
drawable_get_aspect_2d_svg(node, &asp, tr_state);
#ifndef GPAC_DISABLE_3D
if (tr_state->visual->type_3d) {
/*3D visual: delegate to the 3D picking path, then restore state and leave*/
svg_drawable_3d_pick(drawable, tr_state, &asp);
compositor_svg_restore_parent_transformation(tr_state, &backup_matrix, &mx_3d);
memcpy(tr_state->svg_props, &backup_props, sizeof(SVGPropertiesPointers));
return;
}
#endif
/*2D picking: bring the ray origin into the drawable's local coordinates*/
gf_mx2d_copy(inv_2d, tr_state->transform);
gf_mx2d_inverse(&inv_2d);
x = tr_state->ray.orig.x;
y = tr_state->ray.orig.y;
gf_mx2d_apply_coords(&inv_2d, &x, &y);
picked = svg_drawable_is_over(drawable, x, y, &asp, tr_state, NULL);
if (picked) {
u32 count, i;
compositor->hit_local_point.x = x;
compositor->hit_local_point.y = y;
compositor->hit_local_point.z = 0;
gf_mx_from_mx2d(&compositor->hit_world_to_local, &tr_state->transform);
gf_mx_from_mx2d(&compositor->hit_local_to_world, &inv_2d);
compositor->hit_node = drawable->node;
compositor->hit_use_dom_events = 1;
compositor->hit_normal.x = compositor->hit_normal.y = 0; compositor->hit_normal.z = FIX_ONE;
/*map the local hit point into texture coordinates centered on the bbox*/
compositor->hit_texcoords.x = gf_divfix(x, drawable->path->bbox.width) + FIX_ONE/2;
compositor->hit_texcoords.y = gf_divfix(y, drawable->path->bbox.height) + FIX_ONE/2;
svg_clone_use_stack(compositor, tr_state);
/*not use in SVG patterns*/
compositor->hit_appear = NULL;
/*also stack any VRML sensors present at the current level. If the event is not catched
by a listener in the SVG tree, the event will be forwarded to the VRML tree*/
gf_list_reset(tr_state->visual->compositor->sensors);
count = gf_list_count(tr_state->vrml_sensors);
for (i=0; i<count; i++) {
gf_list_add(tr_state->visual->compositor->sensors, gf_list_get(tr_state->vrml_sensors, i));
}
GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SVG Picking] node %s is under mouse - hit %g %g 0\n", gf_node_get_log_name(drawable->node), FIX2FLT(x), FIX2FLT(y)));
}
compositor_svg_restore_parent_transformation(tr_state, &backup_matrix, &mx_3d);
memcpy(tr_state->svg_props, &backup_props, sizeof(SVGPropertiesPointers));
}
示例6: AC3_ProcessData
/*Decodes one AC-3 access unit with liba52 into interleaved 16-bit PCM.
On the first frame, only probes the stream (channel layout, sample rate) and
reports the required output size via GF_BUFFER_TOO_SMALL; afterwards decodes
the 6 blocks of the frame (6 * 256 samples per channel) into outBuffer.
Returns GF_BAD_PARAM on ES id mismatch, GF_BUFFER_TOO_SMALL when the caller's
buffer is too small, GF_NON_COMPLIANT_BITSTREAM on sync/decode failure.*/
static GF_Err AC3_ProcessData(GF_MediaDecoder *ifcg,
		char *inBuffer, u32 inBufferLength,
		u16 ES_ID, u32 *CTS,
		char *outBuffer, u32 *outBufferLength,
		u8 PaddingBits, u32 mmlevel)
{
	short *pcm_out;
	int blk, frame_len, brate;
	sample_t gain;
	A52CTX();

	/*check not using scalabilty*/
	if (ctx->ES_ID != ES_ID) return GF_BAD_PARAM;

	/*if late or seeking don't decode*/
	if ((mmlevel == GF_CODEC_LEVEL_SEEK) || (mmlevel == GF_CODEC_LEVEL_DROP)) {
		*outBufferLength = 0;
		return GF_OK;
	}

	/*output buffer too small for a full frame: ask the caller to grow it*/
	if (ctx->out_size > *outBufferLength) {
		*outBufferLength = ctx->out_size;
		return GF_BUFFER_TOO_SMALL;
	}

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[A52] Decoding AU\n"));
	frame_len = a52_syncinfo(inBuffer, &ctx->flags, &ctx->sample_rate, &brate);
	if (!frame_len) return GF_NON_COMPLIANT_BITSTREAM;

	/*first frame: derive channel count from the sync info and report the
	required output size (6 blocks * 256 samples * channels * 16 bits)*/
	if (!ctx->out_size) {
		ctx->num_channels = ac3_channels[ctx->flags & 7];
		if (ctx->flags & A52_LFE) ctx->num_channels++;
		ctx->flags |= A52_ADJUST_LEVEL;
		ctx->out_size = ctx->num_channels * sizeof(short) * 1536;
		*outBufferLength = ctx->out_size;
		return GF_BUFFER_TOO_SMALL;
	}

	gain = 1;
	if (a52_frame(ctx->codec, inBuffer, &ctx->flags, &gain, 384)) {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[A52] Error decoding AU\n" ));
		*outBufferLength = 0;
		return GF_NON_COMPLIANT_BITSTREAM;
	}

	/*decode the 6 blocks of the frame, converting floats to interleaved s16*/
	pcm_out = (short*)outBuffer;
	for (blk=0; blk<6; blk++) {
		if (a52_block(ctx->codec))
			return GF_NON_COMPLIANT_BITSTREAM;
		float_to_int(ctx->samples, pcm_out + blk * 256 * ctx->num_channels, ctx->num_channels);
	}
	*outBufferLength = 6 * ctx->num_channels * 256 * sizeof(short);
	return GF_OK;
}
示例7: ts_amux_new
/*Creates and configures an FFmpeg MPEG-TS muxer for the AV redirect module:
allocates the output format context, declares the audio and video streams with
the requested bitrates/geometry, opens both codecs and creates the audio/video
mutexes plus the interleaving thread.
NOTE(review): the tail of this function was omitted in this excerpt.
NOTE(review): gf_malloc and av_new_stream results are not NULL-checked, and the
error paths return NULL without releasing 'ts' / 'ts->oc' - confirm ownership
and cleanup expectations with the caller.*/
GF_AbstractTSMuxer * ts_amux_new(GF_AVRedirect * avr, u32 videoBitrateInBitsPerSec, u32 width, u32 height, u32 audioBitRateInBitsPerSec) {
GF_AbstractTSMuxer * ts = gf_malloc( sizeof(GF_AbstractTSMuxer));
memset( ts, 0, sizeof( GF_AbstractTSMuxer));
ts->oc = avformat_alloc_context();
ts->destination = avr->destination;
av_register_all();
/*guess the container from the destination, falling back to mpegts*/
ts->oc->oformat = GUESS_FORMAT(NULL, avr->destination, NULL);
if (!ts->oc->oformat)
ts->oc->oformat = GUESS_FORMAT("mpegts", NULL, NULL);
assert( ts->oc->oformat);
#if REDIRECT_AV_AUDIO_ENABLED
ts->audio_st = av_new_stream(ts->oc, avr->audioCodec->id);
{
AVCodecContext * c = ts->audio_st->codec;
c->codec_id = avr->audioCodec->id;
c->codec_type = AVMEDIA_TYPE_AUDIO;
/* put sample parameters */
c->sample_fmt = SAMPLE_FMT_S16;
c->bit_rate = audioBitRateInBitsPerSec;
c->sample_rate = avr->audioSampleRate;
c->channels = 2;
c->time_base.num = 1;
c->time_base.den = 1000;
// some formats want stream headers to be separate
if (ts->oc->oformat->flags & AVFMT_GLOBALHEADER)
c->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
#endif
ts->video_st = av_new_stream(ts->oc, avr->videoCodec->id);
{
AVCodecContext * c = ts->video_st->codec;
c->codec_id = avr->videoCodec->id;
c->codec_type = AVMEDIA_TYPE_VIDEO;
/* put sample parameters */
c->bit_rate = videoBitrateInBitsPerSec;
/* resolution must be a multiple of two */
c->width = width;
c->height = height;
/* time base: this is the fundamental unit of time (in seconds) in terms
of which frame timestamps are represented. for fixed-fps content,
timebase should be 1/framerate and timestamp increments should be
identically 1. */
c->time_base.den = STREAM_FRAME_RATE;
c->time_base.num = 1;
c->gop_size = 12; /* emit one intra frame every twelve frames at most */
c->pix_fmt = STREAM_PIX_FMT;
if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
/* just for testing, we also add B frames */
c->max_b_frames = 2;
}
if (c->codec_id == CODEC_ID_MPEG1VIDEO) {
/* Needed to avoid using macroblocks in which some coeffs overflow.
This does not happen with normal video, it just happens here as
the motion of the chroma plane does not match the luma plane. */
c->mb_decision=2;
}
// some formats want stream headers to be separate
if (ts->oc->oformat->flags & AVFMT_GLOBALHEADER)
c->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
//av_set_pts_info(ts->audio_st, 33, 1, audioBitRateInBitsPerSec);
#ifndef AVIO_FLAG_WRITE
/* set the output parameters (must be done even if no
parameters). */
if (av_set_parameters(ts->oc, NULL) < 0) {
fprintf(stderr, "Invalid output format parameters\n");
return NULL;
}
#endif
dump_format(ts->oc, 0, avr->destination, 1);
GF_LOG(GF_LOG_INFO, GF_LOG_MODULE, ("[AVRedirect] DUMPING to %s...\n", ts->destination));
/*open the codecs; the avcodec_open vs avcodec_open2 split covers old/new lavc*/
#if (LIBAVCODEC_VERSION_MAJOR<55)
if (avcodec_open(ts->video_st->codec, avr->videoCodec) < 0) {
#else
if (avcodec_open2(ts->video_st->codec, avr->videoCodec, NULL) < 0) {
#endif
GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[AVRedirect] failed to open video codec\n"));
return NULL;
}
#if REDIRECT_AV_AUDIO_ENABLED
#if (LIBAVCODEC_VERSION_MAJOR<55)
if (avcodec_open(ts->audio_st->codec, avr->audioCodec) < 0) {
#else
if (avcodec_open2(ts->audio_st->codec, avr->audioCodec, NULL) < 0) {
#endif
GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[AVRedirect] failed to open audio codec\n"));
return NULL;
}
ts->audioMx = gf_mx_new("TS_AudioMx");
#endif
ts->videoMx = gf_mx_new("TS_VideoMx");
ts->tsEncodingThread = gf_th_new("ts_interleave_thread_run");
ts->encode = 1;
ts->audioPackets = NULL;
//.........这里部分代码省略.........
示例8: dc_video_decoder_open
/*Opens the video input for DashCast: builds an FFmpeg options dictionary
(size, framerate, pixel format, v4l2 input format, demux buffer size), opens
the input with avformat_open_input (with fallbacks for buggy screen capture
and unsupported options) and probes stream information.
Returns 0 on success, -1 on error.
NOTE(review): the tail of this function was omitted in this excerpt; the
'options' dictionary does not appear to be freed on every early-return path -
confirm against the full source.*/
int dc_video_decoder_open(VideoInputFile *video_input_file, VideoDataConf *video_data_conf, int mode, int no_loop, int nb_consumers)
{
s32 ret;
u32 i;
s32 open_res;
AVInputFormat *in_fmt = NULL;
AVDictionary *options = NULL;
AVCodecContext *codec_ctx;
AVCodec *codec;
memset(video_input_file, 0, sizeof(VideoInputFile));
/*requested capture/decode resolution*/
if (video_data_conf->width > 0 && video_data_conf->height > 0) {
char vres[16];
snprintf(vres, sizeof(vres), "%dx%d", video_data_conf->width, video_data_conf->height);
ret = av_dict_set(&options, "video_size", vres, 0);
if (ret < 0) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Could not set video size %s.\n", vres));
return -1;
}
}
/*requested capture framerate*/
if (video_data_conf->framerate > 0) {
char vfr[16];
snprintf(vfr, sizeof(vfr), "%d", video_data_conf->framerate);
ret = av_dict_set(&options, "framerate", vfr, 0);
if (ret < 0) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Could not set video framerate %s.\n", vfr));
return -1;
}
}
if (strlen(video_data_conf->pixel_format)) {
ret = av_dict_set(&options, "pixel_format", video_data_conf->pixel_format, 0);
if (ret < 0) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Could not set pixel format %s.\n", video_data_conf->pixel_format));
return -1;
}
}
#ifndef WIN32
/*v4l2 input format (POSIX capture devices only)*/
if (strcmp(video_data_conf->v4l2f, "") != 0) {
ret = av_dict_set(&options, "input_format", video_data_conf->v4l2f, 0);
if (ret < 0) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Could not set input format %s.\n", video_data_conf->v4l2f));
return -1;
}
}
#endif
/*explicit demuxer name, if any*/
if (video_data_conf->format && strcmp(video_data_conf->format, "") != 0) {
in_fmt = av_find_input_format(video_data_conf->format);
if (in_fmt == NULL) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot find the format %s.\n", video_data_conf->format));
return -1;
}
}
video_input_file->av_fmt_ctx = NULL;
if (video_data_conf->demux_buffer_size) {
char szBufSize[100];
sprintf(szBufSize, "%d", video_data_conf->demux_buffer_size);
ret = av_dict_set(&options, "buffer_size", szBufSize, 0);
if (ret < 0) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Could not set demuxer's input buffer size.\n"));
return -1;
}
}
/* Open video */
open_res = avformat_open_input(&video_input_file->av_fmt_ctx, video_data_conf->filename, in_fmt, options ? &options : NULL);
/*workaround: this capture source rejects explicit resolutions - retry without*/
if ( (open_res < 0) && !stricmp(video_data_conf->filename, "screen-capture-recorder") ) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Buggy screen capture input (open failed with code %d), retrying without specifying resolution\n", open_res));
av_dict_set(&options, "video_size", NULL, 0);
open_res = avformat_open_input(&video_input_file->av_fmt_ctx, video_data_conf->filename, in_fmt, options ? &options : NULL);
}
/*last resort: retry with no options at all*/
if ( (open_res < 0) && options) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Error %d opening input - retrying without options\n", open_res));
av_dict_free(&options);
open_res = avformat_open_input(&video_input_file->av_fmt_ctx, video_data_conf->filename, in_fmt, NULL);
}
if (open_res < 0) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot open file %s\n", video_data_conf->filename));
return -1;
}
/* Retrieve stream information */
if (avformat_find_stream_info(video_input_file->av_fmt_ctx, NULL) < 0) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot find stream information\n"));
return -1;
}
av_dump_format(video_input_file->av_fmt_ctx, 0, video_data_conf->filename, 0);
/* Find the first video stream */
video_input_file->vstream_idx = -1;
for (i = 0; i < video_input_file->av_fmt_ctx->nb_streams; i++) {
//.........这里部分代码省略.........
示例9: gf_isom_add_meta_item_extended
/*Adds an item to a meta box (file-, movie- or track-level): creates the item
info entry ('infe') with a unique item_ID, name, type and MIME info, and the
corresponding item location entry ('iloc'), creating the iinf/iloc/mdat boxes
on demand. The item payload comes from 'data', from 'resource_path', or is a
self-reference / external URL-URN reference.
NOTE(review): the tail of this function was omitted in this excerpt.*/
GF_Err gf_isom_add_meta_item_extended(GF_ISOFile *file, Bool root_meta, u32 track_num, Bool self_reference, char *resource_path,
const char *item_name, u32 item_id, u32 item_type, const char *mime_type, const char *content_encoding,
GF_ImageItemProperties *image_props,
const char *URL, const char *URN,
char *data, u32 data_len, GF_List *item_extent_refs)
{
u32 i;
GF_Err e;
GF_ItemLocationEntry *location_entry;
GF_ItemInfoEntryBox *infe;
GF_MetaBox *meta;
u32 lastItemID = 0;
if (!self_reference && !resource_path && !data) return GF_BAD_PARAM;
e = CanAccessMovie(file, GF_ISOM_OPEN_WRITE);
if (e) return e;
meta = gf_isom_get_meta(file, root_meta, track_num);
if (!meta) {
GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("Trying to add item, but missing meta box"));
return GF_BAD_PARAM;
}
e = FlushCaptureMode(file);
if (e) return e;
/*check file exists */
if (!URN && !URL && !self_reference && !data) {
FILE *src = gf_fopen(resource_path, "rb");
if (!src) return GF_URL_ERROR;
gf_fclose(src);
}
/*scan existing items: track the highest ID and reject a duplicate item_id*/
if (meta->item_infos) {
u32 item_count = gf_list_count(meta->item_infos->item_infos);
for (i = 0; i < item_count; i++) {
GF_ItemInfoEntryBox *e = (GF_ItemInfoEntryBox *)gf_list_get(meta->item_infos->item_infos, i);
if (e->item_ID > lastItemID) lastItemID = e->item_ID;
if (item_id == e->item_ID) {
GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[IsoMedia] Item with id %d already exists, ignoring id\n", item_id));
item_id = 0;
}
}
}
infe = (GF_ItemInfoEntryBox *)infe_New();
if (item_id) {
infe->item_ID = item_id;
}
else {
infe->item_ID = ++lastItemID;
}
/*get relative name*/
if (item_name) {
infe->item_name = gf_strdup(item_name);
}
else if (resource_path) {
if (strrchr(resource_path, GF_PATH_SEPARATOR)) {
infe->item_name = gf_strdup(strrchr(resource_path, GF_PATH_SEPARATOR) + 1);
}
else {
infe->item_name = gf_strdup(resource_path);
}
}
infe->item_type = item_type;
/*default MIME when the caller gave none*/
if (mime_type) {
infe->content_type = gf_strdup(mime_type);
}
else {
infe->content_type = gf_strdup("application/octet-stream");
}
if (content_encoding) infe->content_encoding = gf_strdup(content_encoding);
/*Creation of the ItemLocation */
location_entry = (GF_ItemLocationEntry*)gf_malloc(sizeof(GF_ItemLocationEntry));
if (!location_entry) {
gf_isom_box_del((GF_Box *)infe);
return GF_OUT_OF_MEM;
}
memset(location_entry, 0, sizeof(GF_ItemLocationEntry));
location_entry->extent_entries = gf_list_new();
/*Creates an mdat if it does not exist*/
if (!file->mdat) {
file->mdat = (GF_MediaDataBox *)mdat_New();
gf_list_add(file->TopBoxes, file->mdat);
}
/*Creation an ItemLocation Box if it does not exist*/
if (!meta->item_locations) meta->item_locations = (GF_ItemLocationBox *)iloc_New();
gf_list_add(meta->item_locations->location_entries, location_entry);
location_entry->item_ID = infe->item_ID;
if (!meta->item_infos) meta->item_infos = (GF_ItemInfoBox *)iinf_New();
e = gf_list_add(meta->item_infos->item_infos, infe);
if (e) return e;
if (image_props) {
//.........这里部分代码省略.........
示例10: dc_video_decoder_read
/*Reads and decodes video frames from the demuxer for DashCast's producer/
consumer pipeline: forwards non-video packets to the audio thread via the
shared packet list, handles end-of-file (looping for live media, otherwise
flushing the decoder), and pushes decoded frames into the circular buffer.
Returns 0 on a decoded frame at EOF flush, -2 on end of stream.
NOTE(review): the tail of this function was omitted in this excerpt.*/
int dc_video_decoder_read(VideoInputFile *video_input_file, VideoInputData *video_input_data, int source_number, int use_source_timing, int is_live_capture, const int *exit_signal_addr)
{
#ifdef DASHCAST_DEBUG_TIME_
struct timeval start, end;
long elapsed_time;
#endif
AVPacket packet;
int ret, got_frame, already_locked = 0;
AVCodecContext *codec_ctx;
VideoDataNode *video_data_node;
/* Get a pointer to the codec context for the video stream */
codec_ctx = video_input_file->av_fmt_ctx->streams[video_input_file->vstream_idx]->codec;
/* Read frames */
while (1) {
#ifdef DASHCAST_DEBUG_TIME_
gf_gettimeofday(&start, NULL);
#endif
memset(&packet, 0, sizeof(AVPacket));
ret = av_read_frame(video_input_file->av_fmt_ctx, &packet);
#ifdef DASHCAST_DEBUG_TIME_
gf_gettimeofday(&end, NULL);
elapsed_time = (end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec);
fprintf(stdout, "fps: %f\n", 1000000.0/elapsed_time);
#endif
/* If we demux for the audio thread, send the packet to the audio */
if (video_input_file->av_fmt_ctx_ref_cnt && ((packet.stream_index != video_input_file->vstream_idx) || (ret == AVERROR_EOF))) {
AVPacket *packet_copy = NULL;
if (ret != AVERROR_EOF) {
/*NOTE(review): GF_SAFEALLOC result is not checked - memcpy would crash on
allocation failure; a NULL entry in the list signals EOF to the consumer*/
GF_SAFEALLOC(packet_copy, AVPacket);
memcpy(packet_copy, &packet, sizeof(AVPacket));
}
assert(video_input_file->av_pkt_list);
gf_mx_p(video_input_file->av_pkt_list_mutex);
gf_list_add(video_input_file->av_pkt_list, packet_copy);
gf_mx_v(video_input_file->av_pkt_list_mutex);
if (ret != AVERROR_EOF) {
continue;
}
}
if (ret == AVERROR_EOF) {
/*live media with looping enabled: rewind and keep going*/
if (video_input_file->mode == LIVE_MEDIA && video_input_file->no_loop == 0) {
av_seek_frame(video_input_file->av_fmt_ctx, video_input_file->vstream_idx, 0, 0);
av_free_packet(&packet);
continue;
}
dc_producer_lock(&video_input_data->producer, &video_input_data->circular_buf);
dc_producer_unlock_previous(&video_input_data->producer, &video_input_data->circular_buf);
video_data_node = (VideoDataNode *) dc_producer_produce(&video_input_data->producer, &video_input_data->circular_buf);
video_data_node->source_number = source_number;
/* Flush decoder */
memset(&packet, 0, sizeof(AVPacket));
#ifndef FF_API_AVFRAME_LAVC
avcodec_get_frame_defaults(video_data_node->vframe);
#else
av_frame_unref(video_data_node->vframe);
#endif
avcodec_decode_video2(codec_ctx, video_data_node->vframe, &got_frame, &packet);
if (got_frame) {
dc_producer_advance(&video_input_data->producer, &video_input_data->circular_buf);
return 0;
}
/*no more buffered frames: signal end of stream to consumers*/
dc_producer_end_signal(&video_input_data->producer, &video_input_data->circular_buf);
dc_producer_unlock(&video_input_data->producer, &video_input_data->circular_buf);
return -2;
}
else if (ret < 0)
{
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot read video frame.\n"));
continue;
}
/* Is this a packet from the video stream? */
if (packet.stream_index == video_input_file->vstream_idx) {
/*try to grab a producer slot, dropping the frame after ~100ms of retries*/
u32 nb_retry = 10;
while (!already_locked) {
if (dc_producer_lock(&video_input_data->producer, &video_input_data->circular_buf) < 0) {
if (!nb_retry) break;
gf_sleep(10);
nb_retry--;
continue;
}
dc_producer_unlock_previous(&video_input_data->producer, &video_input_data->circular_buf);
already_locked = 1;
}
if (!already_locked) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[dashcast] Live system dropped a video frame\n"));
continue;
}
video_data_node = (VideoDataNode *) dc_producer_produce(&video_input_data->producer, &video_input_data->circular_buf);
video_data_node->source_number = source_number;
//.........这里部分代码省略.........
示例11: gf_isom_extract_meta_item_extended
GF_EXPORT
/*Extracts a meta-box item by id, concatenating all its extents either into a
memory buffer (out_data/out_size) or into a file (dump_file_name, or a name
derived from the item). Also reports the item's MIME type via out_mime.
Returns GF_BAD_PARAM for unknown items/empty self-references, GF_OK for items
stored in an external resource (nothing to extract), GF_IO_ERR when the dump
file cannot be created.
Fixes vs original: checks gf_fopen/gf_bs_new results before use (previously a
failed fopen fed a NULL FILE* to gf_bs_from_file) and bounds the path copy to
szPath's size instead of using unbounded strcpy/sprintf.*/
GF_Err gf_isom_extract_meta_item_extended(GF_ISOFile *file, Bool root_meta, u32 track_num, u32 item_id, const char *dump_file_name, char **out_data, u32 *out_size, const char **out_mime)
{
	GF_BitStream *item_bs;
	char szPath[1024];
	GF_ItemExtentEntry *extent_entry;
	FILE *resource = NULL;
	u32 i, count;
	GF_ItemLocationEntry *location_entry;
	u32 item_num;
	char *item_name = NULL;

	GF_MetaBox *meta = gf_isom_get_meta(file, root_meta, track_num);
	if (!meta || !meta->item_infos || !meta->item_locations) return GF_BAD_PARAM;

	if (out_mime) *out_mime = NULL;

	/*resolve name and MIME type from the item info entry, when present*/
	item_num = gf_isom_get_meta_item_by_id(file, root_meta, track_num, item_id);
	if (item_num) {
		GF_ItemInfoEntryBox *item_entry = (GF_ItemInfoEntryBox *)gf_list_get(meta->item_infos->item_infos, item_num - 1);
		item_name = item_entry->item_name;
		if (out_mime) *out_mime = item_entry->content_type;
	}

	/*locate the iloc entry for this item*/
	location_entry = NULL;
	count = gf_list_count(meta->item_locations->location_entries);
	for (i = 0; i<count; i++) {
		location_entry = (GF_ItemLocationEntry *)gf_list_get(meta->item_locations->location_entries, i);
		if (location_entry->item_ID == item_id) break;
		location_entry = NULL;
	}
	if (!location_entry) return GF_BAD_PARAM;

	/*FIXME*/
	if (location_entry->data_reference_index) {
		char *item_url = NULL, *item_urn = NULL;
		GF_Box *a = (GF_Box *)gf_list_get(meta->file_locations->dref->other_boxes, location_entry->data_reference_index - 1);
		if (a->type == GF_ISOM_BOX_TYPE_URL) {
			item_url = ((GF_DataEntryURLBox*)a)->location;
		}
		else if (a->type == GF_ISOM_BOX_TYPE_URN) {
			item_url = ((GF_DataEntryURNBox*)a)->location;
			item_urn = ((GF_DataEntryURNBox*)a)->nameURN;
		}
		GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[IsoMedia] Item already outside the ISO file at URL: %s, URN: %s\n", (item_url ? item_url : "N/A"), (item_urn ? item_urn : "N/A")));
		return GF_OK;
	}

	/*don't extract self-reference item*/
	count = gf_list_count(location_entry->extent_entries);
	if (!location_entry->base_offset && (count == 1)) {
		extent_entry = (GF_ItemExtentEntry *)gf_list_get(location_entry->extent_entries, 0);
		if (!extent_entry->extent_length
#ifndef GPAC_DISABLE_ISOM_WRITE
		        && !extent_entry->original_extent_offset
#endif
		   ) return GF_BAD_PARAM;
	}

	item_bs = NULL;
	if (out_data) {
		/*extract to memory*/
		item_bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	} else {
		/*extract to file: explicit name, item name, or a generated fallback.
		fix: bounded copies + fopen failure check (original passed a NULL FILE*
		to gf_bs_from_file when the open failed)*/
		if (dump_file_name) {
			snprintf(szPath, sizeof(szPath), "%s", dump_file_name);
		} else if (item_name) {
			snprintf(szPath, sizeof(szPath), "%s", item_name);
		} else {
			snprintf(szPath, sizeof(szPath), "item_id%02d", item_id);
		}
		resource = gf_fopen(szPath, "wb");
		if (!resource) return GF_IO_ERR;
		item_bs = gf_bs_from_file(resource, GF_BITSTREAM_WRITE);
	}
	if (!item_bs) {
		if (resource) gf_fclose(resource);
		return GF_OUT_OF_MEM;
	}

	/*concatenate all extents into the output bitstream, 4KB at a time*/
	for (i = 0; i<count; i++) {
		char buf_cache[4096];
		u64 remain;
		extent_entry = (GF_ItemExtentEntry *)gf_list_get(location_entry->extent_entries, i);
		gf_bs_seek(file->movieFileMap->bs, location_entry->base_offset + extent_entry->extent_offset);
		remain = extent_entry->extent_length;
		while (remain) {
			u32 cache_size = (remain>4096) ? 4096 : (u32)remain;
			gf_bs_read_data(file->movieFileMap->bs, buf_cache, cache_size);
			gf_bs_write_data(item_bs, buf_cache, cache_size);
			remain -= cache_size;
		}
	}
	if (out_data) {
		gf_bs_get_content(item_bs, out_data, out_size);
	}
	if (resource) {
		gf_fclose(resource);
	}
	gf_bs_del(item_bs);
	return GF_OK;
}
示例12: gf_bifs_enc_sf_field
/*
 * Encodes one single-valued (SF*) VRML/BIFS field into the bitstream.
 * When the field belongs to a node, quantized encoding is attempted first;
 * gf_bifs_enc_quant_field returning GF_EOS means "field not quantized, use
 * plain encoding below" — any other code (success or error) is returned
 * to the caller as-is.
 * NOTE: this excerpt is truncated (remaining cases omitted by the source page).
 */
GF_Err gf_bifs_enc_sf_field(GF_BifsEncoder *codec, GF_BitStream *bs, GF_Node *node, GF_FieldInfo *field)
{
GF_Err e;
if (node) {
/*GF_EOS here is the "not quantized" signal, not an error*/
e = gf_bifs_enc_quant_field(codec, bs, node, field);
if (e != GF_EOS) return e;
}
switch (field->fieldType) {
case GF_SG_VRML_SFBOOL:
GF_BIFS_WRITE_INT(codec, bs, * ((SFBool *)field->far_ptr), 1, "SFBool", NULL);
break;
case GF_SG_VRML_SFCOLOR:
BE_WriteSFFloat(codec, ((SFColor *)field->far_ptr)->red, bs, "color.red");
BE_WriteSFFloat(codec, ((SFColor *)field->far_ptr)->green, bs, "color.green");
BE_WriteSFFloat(codec, ((SFColor *)field->far_ptr)->blue, bs, "color.blue");
break;
case GF_SG_VRML_SFFLOAT:
BE_WriteSFFloat(codec, * ((SFFloat *)field->far_ptr), bs, NULL);
break;
case GF_SG_VRML_SFINT32:
GF_BIFS_WRITE_INT(codec, bs, * ((SFInt32 *)field->far_ptr), 32, "SFInt32", NULL);
break;
case GF_SG_VRML_SFROTATION:
BE_WriteSFFloat(codec, ((SFRotation *)field->far_ptr)->x, bs, "rot.x");
BE_WriteSFFloat(codec, ((SFRotation *)field->far_ptr)->y, bs, "rot.y");
BE_WriteSFFloat(codec, ((SFRotation *)field->far_ptr)->z, bs, "rot.z");
BE_WriteSFFloat(codec, ((SFRotation *)field->far_ptr)->q, bs, "rot.theta");
break;
case GF_SG_VRML_SFSTRING:
/*CacheTexture fields 0..2: embed the raw bytes of the referenced file
instead of the URL string itself*/
if (node && (node->sgprivate->tag==TAG_MPEG4_CacheTexture) && (field->fieldIndex<=2)) {
u32 size, val;
char buf[4096];
FILE *f = gf_f64_open(((SFString*)field->far_ptr)->buffer, "rb");
if (!f) return GF_URL_ERROR;
/*measure the file: lengths are written as a 5-bit bit-count followed by
the length itself on that many bits*/
gf_f64_seek(f, 0, SEEK_END);
size = (u32) gf_f64_tell(f);
val = gf_get_bit_size(size);
GF_BIFS_WRITE_INT(codec, bs, val, 5, "nbBits", NULL);
GF_BIFS_WRITE_INT(codec, bs, size, val, "length", NULL);
gf_f64_seek(f, 0, SEEK_SET);
while (size) {
/*NOTE(review): if fread returns 0 (I/O error or short file) this loop
never terminates; and no fclose(f) is visible after the loop in this
excerpt — confirm against the full source*/
u32 read = fread(buf, 1, 4096, f);
gf_bs_write_data(bs, buf, read);
size -= read;
}
} else {
/*regular SFString: 5-bit bit-count, length, then one byte per character*/
u32 i;
char *str = (char *) ((SFString*)field->far_ptr)->buffer;
u32 len = str ? strlen(str) : 0;
u32 val = gf_get_bit_size(len);
GF_BIFS_WRITE_INT(codec, bs, val, 5, "nbBits", NULL);
GF_BIFS_WRITE_INT(codec, bs, len, val, "length", NULL);
for (i=0; i<len; i++) gf_bs_write_int(bs, str[i], 8);
GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] string\t\t%d\t\t%s\n", 8*len, str) );
}
break;
case GF_SG_VRML_SFTIME:
/*SFTime is always a 64-bit double*/
gf_bs_write_double(bs, *((SFTime *)field->far_ptr));
GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] SFTime\t\t%d\t\t%g\n", 64, *((SFTime *)field->far_ptr)));
break;
case GF_SG_VRML_SFVEC2F:
BE_WriteSFFloat(codec, ((SFVec2f *)field->far_ptr)->x, bs, "vec2f.x");
BE_WriteSFFloat(codec, ((SFVec2f *)field->far_ptr)->y, bs, "vec2f.y");
break;
case GF_SG_VRML_SFVEC3F:
BE_WriteSFFloat(codec, ((SFVec3f *)field->far_ptr)->x, bs, "vec3f.x");
BE_WriteSFFloat(codec, ((SFVec3f *)field->far_ptr)->y, bs, "vec3f.y");
BE_WriteSFFloat(codec, ((SFVec3f *)field->far_ptr)->z, bs, "vec3f.z");
break;
case GF_SG_VRML_SFURL:
{
SFURL *url = (SFURL *) field->far_ptr;
/*a URL is either an OD ID (10 bits) or an inline string, flagged by 1 bit*/
GF_BIFS_WRITE_INT(codec, bs, (url->OD_ID>0) ? 1 : 0, 1, "hasODID", "SFURL");
if (url->OD_ID>0) {
GF_BIFS_WRITE_INT(codec, bs, url->OD_ID, 10, "ODID", "SFURL");
} else {
u32 i;
u32 len = url->url ? strlen(url->url) : 0;
u32 val = gf_get_bit_size(len);
GF_BIFS_WRITE_INT(codec, bs, val, 5, "nbBits", NULL);
GF_BIFS_WRITE_INT(codec, bs, len, val, "length", NULL);
for (i=0; i<len; i++) gf_bs_write_int(bs, url->url[i], 8);
GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] string\t\t%d\t\t%s\t\t//SFURL\n", 8*len, url->url));
}
}
break;
case GF_SG_VRML_SFIMAGE:
{
u32 size, i;
SFImage *img = (SFImage *)field->far_ptr;
GF_BIFS_WRITE_INT(codec, bs, img->width, 12, "width", "SFImage");
GF_BIFS_WRITE_INT(codec, bs, img->height, 12, "height", "SFImage");
GF_BIFS_WRITE_INT(codec, bs, img->numComponents - 1, 2, "nbComp", "SFImage");
size = img->width * img->height * img->numComponents;
//......... part of the code omitted here .........
示例13: gf_sc_texture_update_frame
GF_EXPORT
/*
 * Fetches the next decoded frame for a texture and pushes it to the
 * graphics layer. Handles: stream re-init (object changed), same-frame
 * detection via timestamp, frame-delay bookkeeping for the compositor,
 * and raw-memory (planar) texture upload.
 * NOTE: this excerpt is truncated (tail omitted by the source page).
 */
void gf_sc_texture_update_frame(GF_TextureHandler *txh, Bool disable_resync)
{
Bool needs_reload = 0;
u32 size, ts;
s32 ms_until_pres, ms_until_next;
/*already refreshed*/
if ((txh->stream_finished && txh->tx_io) || txh->needs_refresh) return;
if (!txh->stream) {
txh->data = NULL;
return;
}
/*should never happen!!*/
if (txh->needs_release) gf_mo_release_data(txh->stream, 0xFFFFFFFF, 0);
/*check init flag: an uninitialized object forces a texture re-setup*/
if (!(gf_mo_get_flags(txh->stream) & GF_MO_IS_INIT)) {
needs_reload = 1;
txh->data = NULL;
if (txh->tx_io) {
gf_sc_texture_release(txh);
}
}
/*fetch current frame data, timestamp and presentation deadlines*/
txh->data = gf_mo_fetch_data(txh->stream, disable_resync ? GF_MO_FETCH : GF_MO_FETCH_RESYNC, &txh->stream_finished, &ts, &size, &ms_until_pres, &ms_until_next);
/*a frame size change also means the underlying object changed*/
if (!(gf_mo_get_flags(txh->stream) & GF_MO_IS_INIT)) {
needs_reload = 1;
} else if (size && txh->size && (size != txh->size)) {
needs_reload = 1;
}
if (needs_reload) {
/*if we had a texture this means the object has changed - delete texture and resetup. Do not skip
texture update as this may lead to an empty rendering pass (blank frame for this object), especially in DASH*/
if (txh->tx_io) {
gf_sc_texture_release(txh);
txh->needs_refresh = 1;
}
if (gf_mo_is_private_media(txh->stream)) {
setup_texture_object(txh, 1);
gf_node_dirty_set(txh->owner, 0, 0);
}
}
/*if no frame or muted don't draw*/
if (!txh->data || !size) {
GF_LOG(GF_LOG_INFO, GF_LOG_COMPOSE, ("[Visual Texture] No output frame available \n"));
/*TODO - check if this is needed */
if (txh->flags & GF_SR_TEXTURE_PRIVATE_MEDIA) {
//txh->needs_refresh = 1;
gf_sc_invalidate(txh->compositor, NULL);
}
return;
}
/*track the tightest presentation deadline across all textures*/
if (txh->compositor->frame_delay > ms_until_pres)
txh->compositor->frame_delay = ms_until_pres;
/*if setup and same frame return*/
if (txh->tx_io && (txh->stream_finished || (txh->last_frame_time==ts)) ) {
gf_mo_release_data(txh->stream, 0xFFFFFFFF, 0);
txh->needs_release = 0;
if (!txh->stream_finished) {
GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Visual Texture] Same frame fetched (TS %d)\n", ts));
if (txh->compositor->ms_until_next_frame > ms_until_next)
txh->compositor->ms_until_next_frame = ms_until_next;
}
return;
}
/*new frame: remember its timestamp/size and mark the data for release*/
txh->stream_finished = 0;
txh->needs_release = 1;
txh->last_frame_time = ts;
txh->size = size;
if (txh->raw_memory) {
gf_mo_get_raw_image_planes(txh->stream, (u8 **) &txh->data, (u8 **) &txh->pU, (u8 **) &txh->pV);
}
if (gf_mo_is_muted(txh->stream)) return;
/*compensate the presentation deadline by the average GPU upload time*/
if (txh->nb_frames) {
s32 push_delay = txh->upload_time / txh->nb_frames;
if (push_delay > ms_until_pres) ms_until_pres = 0;
else ms_until_pres -= push_delay;
}
if (txh->compositor->ms_until_next_frame > ms_until_next)
txh->compositor->ms_until_next_frame = ms_until_next;
if (!txh->tx_io) {
setup_texture_object(txh, 0);
}
/*try to push texture on graphics but don't complain if failure*/
gf_sc_texture_set_data(txh);
txh->needs_refresh = 1;
gf_sc_invalidate(txh->compositor, NULL);
//......... part of the code omitted here .........
示例14: text_Read
/*this is a quicktime specific box - see apple documentation*/
/*
 * Parses a QuickTime 'text' sample entry.
 *
 * Layout: 51 bytes of fixed fields, then an optional Pascal string carrying
 * the font name. Some writers (iPod-style streams) omit the Pascal string
 * entirely, and some store a plain C string instead; both are handled.
 *
 * Returns GF_OK on success, GF_ISOM_INVALID_FILE on truncated or malformed
 * payloads, GF_OUT_OF_MEM on allocation failure.
 *
 * Fixes vs. previous version: both gf_malloc results are now checked before
 * use; the allocation size expression is properly parenthesized; the local
 * loop variable no longer shadows the GF_Box parameter 's'.
 */
GF_Err text_Read(GF_Box *s, GF_BitStream *bs)
{
	u16 pSize;
	GF_TextSampleEntryBox *ptr = (GF_TextSampleEntryBox*)s;

	gf_bs_read_data(bs, ptr->reserved, 6);
	ptr->dataReferenceIndex = gf_bs_read_u16(bs);
	ptr->displayFlags = gf_bs_read_u32(bs);        /*Display flags*/
	ptr->textJustification = gf_bs_read_u32(bs);   /*Text justification*/
	gf_bs_read_data(bs, ptr->background_color, 6); /*Background color*/
	gpp_read_box(bs, &ptr->default_box);           /*Default text box*/
	gf_bs_read_data(bs, ptr->reserved1, 8);        /*Reserved*/
	ptr->fontNumber = gf_bs_read_u16(bs);          /*Font number*/
	ptr->fontFace = gf_bs_read_u16(bs);            /*Font face*/
	ptr->reserved2 = gf_bs_read_u8(bs);            /*Reserved*/
	ptr->reserved3 = gf_bs_read_u16(bs);           /*Reserved*/
	gf_bs_read_data(bs, ptr->foreground_color, 6); /*Foreground color*/

	/*the fixed fields above amount to 51 bytes*/
	if (ptr->size < 51)
		return GF_ISOM_INVALID_FILE;
	ptr->size -= 51;
	if (!ptr->size)
		return GF_OK; /*ffmpeg compatibility with iPod streams: no pascal string*/

	pSize = gf_bs_read_u8(bs); /*a Pascal string begins with its size: get textName size*/
	ptr->size -= 1;

	if (ptr->size < pSize) {
		/*declared length exceeds the remaining payload: this is not a Pascal
		string - try to recover by reading a NUL-terminated C string whose
		first character is the length byte we already consumed*/
		u32 b = pSize;
		size_t i = 0;
		GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[iso file] text box doesn't use a Pascal string: trying to decode anyway.\n"));
		/*worst case: first char + ptr->size payload chars + NUL terminator*/
		ptr->textName = (char*)gf_malloc((u32) ptr->size + 1 + 1);
		if (!ptr->textName)
			return GF_OUT_OF_MEM;
		do {
			char c = (char)b;
			if (c == '\0') {
				break;
			} else if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')) {
				ptr->textName[i] = c;
			} else {
				/*non-alphabetic byte: abandon the recovery attempt*/
				gf_free(ptr->textName);
				ptr->textName = NULL;
				GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[iso file] text box doesn't use a Pascal string and contains non-chars. Abort.\n"));
				return GF_ISOM_INVALID_FILE;
			}
			i++;
			if (!ptr->size)
				break;
			ptr->size--;
			b = gf_bs_read_u8(bs);
		} while (b);
		ptr->textName[i] = '\0'; /*Font name*/
		GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[iso file] text box doesn't use a Pascal string: \"%s\" detected.\n", ptr->textName));
		return GF_OK;
	}
	if (pSize) {
		/*was gf_malloc(pSize+1 * sizeof(char)) - only correct because
		sizeof(char)==1; parenthesize the intended (pSize+1)*/
		ptr->textName = (char*) gf_malloc((pSize + 1) * sizeof(char));
		if (!ptr->textName)
			return GF_OUT_OF_MEM;
		if (gf_bs_read_data(bs, ptr->textName, pSize) != pSize) {
			gf_free(ptr->textName);
			ptr->textName = NULL;
			return GF_ISOM_INVALID_FILE;
		}
		ptr->textName[pSize] = '\0'; /*Font name*/
	}
	ptr->size -= pSize;
	return GF_OK;
}
示例15: term_on_media_add
/*
 * Service callback invoked when a service declares a new media object
 * (media_desc is an OD/IOD), or asks for a scene-graph regeneration
 * (media_desc is NULL). Objects declared this way are treated as dynamic;
 * the function tries to match the incoming OD against media objects already
 * present in the scene before creating anything new.
 * NOTE: this excerpt is truncated (tail omitted by the source page).
 */
static void term_on_media_add(GF_ClientService *service, GF_Descriptor *media_desc, Bool no_scene_check)
{
u32 i, min_od_id;
GF_MediaObject *the_mo;
GF_Scene *scene;
GF_ObjectManager *odm, *root;
GF_ObjectDescriptor *od;
GF_Terminal *term = service->term;
root = service->owner;
if (!root) {
GF_LOG(GF_LOG_ERROR, GF_LOG_MEDIA, ("[Service %s] has not root, aborting !\n", service->url));
return;
}
if (root->flags & GF_ODM_DESTROYED) {
GF_LOG(GF_LOG_ERROR, GF_LOG_MEDIA, ("[Service %s] root has been scheduled for destruction - aborting !\n", service->url));
return;
}
scene = root->subscene ? root->subscene : root->parentscene;
/*a main addon replacing the scene forces a full scene regeneration*/
if (scene->root_od->addon && (scene->root_od->addon->addon_type == GF_ADDON_TYPE_MAIN)) {
no_scene_check = 1;
scene->root_od->flags |= GF_ODM_REGENERATE_SCENE;
}
GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Service %s] %s\n", service->url, media_desc ? "Adding new media object" : "Regenerating scene graph"));
/*NULL descriptor: regeneration request only*/
if (!media_desc) {
if (!no_scene_check)
gf_scene_regenerate(scene);
return;
}
/*only (I)ODs coming from the root's own service are accepted; note the
intentional fall-through into default when the service doesn't match*/
switch (media_desc->tag) {
case GF_ODF_OD_TAG:
case GF_ODF_IOD_TAG:
if (root && (root->net_service == service)) {
od = (GF_ObjectDescriptor *) media_desc;
break;
}
default:
gf_odf_desc_del(media_desc);
return;
}
gf_term_lock_net(term, 1);
/*object declared this way are not part of an OD stream and are considered as dynamic*/
/* od->objectDescriptorID = GF_MEDIA_EXTERNAL_ID; */
/*check if we have a mediaObject in the scene not attached and matching this object*/
the_mo = NULL;
odm = NULL;
min_od_id = 0;
for (i=0; i<gf_list_count(scene->scene_objects); i++) {
char *frag, *ext;
GF_ESD *esd;
char *url;
u32 match_esid = 0;
GF_MediaObject *mo = gf_list_get(scene->scene_objects, i);
/*track the highest explicit OD ID seen, for later ID assignment*/
if ((mo->OD_ID != GF_MEDIA_EXTERNAL_ID) && (min_od_id<mo->OD_ID))
min_od_id = mo->OD_ID;
if (!mo->odm) continue;
/*if object is attached to a service, don't bother looking in a different one*/
if (mo->odm->net_service && (mo->odm->net_service != service)) continue;
/*already assigned object - this may happen since the compositor has no control on when objects are declared by the service,
therefore opening file#video and file#audio may result in the objects being declared twice if the service doesn't
keep track of declared objects*/
if (mo->odm->OD) {
if (od->objectDescriptorID && is_same_od(mo->odm->OD, od)) {
/*reassign OD ID*/
if (mo->OD_ID != GF_MEDIA_EXTERNAL_ID) {
od->objectDescriptorID = mo->OD_ID;
} else {
mo->OD_ID = od->objectDescriptorID;
}
gf_odf_desc_del(media_desc);
gf_term_lock_net(term, 0);
return;
}
continue;
}
/*direct match on the explicit OD ID*/
if (mo->OD_ID != GF_MEDIA_EXTERNAL_ID) {
if (mo->OD_ID == od->objectDescriptorID) {
the_mo = mo;
odm = mo->odm;
break;
}
continue;
}
/*external-ID object: fall back to URL matching; strip any '#' fragment
before comparing*/
if (!mo->URLs.count || !mo->URLs.vals[0].url) continue;
frag = NULL;
ext = strrchr(mo->URLs.vals[0].url, '#');
if (ext) {
frag = strchr(ext, '=');
ext[0] = 0;
}
url = mo->URLs.vals[0].url;
if (!strnicmp(url, "file://localhost", 16)) url += 16;
//......... part of the code omitted here .........