This article collects typical usage examples of the C++ ADM_warning function. If you are wondering what ADM_warning does, how to call it, and what real call sites look like, the hand-picked code examples below should help.
Fifteen code examples of the ADM_warning function are shown below, sorted by popularity by default.
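For context, ADM_warning is Avidemux's printf-style logging helper: it takes a format string plus variadic arguments and writes a warning line to the application log. Its exact prototype is not shown on this page, so the sketch below is only an approximation of such a logger, not the real declaration:

// Rough sketch of an ADM_warning-style logger (approximation, not Avidemux's actual code).
#include <cstdarg>
#include <cstdio>

void ADM_warning(const char *fmt, ...)
{
    va_list args;
    va_start(args, fmt);
    fprintf(stderr, "[warning] ");   // prefix so warnings stand out in the log
    vfprintf(stderr, fmt, args);     // forward the printf-style format and arguments
    va_end(args);
}

All the examples that follow use it exactly like printf, typically with a trailing \n and the <cinttypes> PRIu32/PRIu64 macros for fixed-width integers.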
Example 1: ADM_warning
/**
    \fn goToTimeVideo
    \brief Seek video to the given time. Must be an exact time.
*/
bool ADM_Composer::goToTimeVideo(uint64_t startTime)
{
    uint64_t segTime;
    uint32_t seg;
    if(false==_segments.convertLinearTimeToSeg(startTime,&seg,&segTime))
    {
        ADM_warning("Cannot find segment for time %"PRIu64" ms\n",startTime/1000);
        return false;
    }
    // Try to seek...
    _SEGMENT *s=_segments.getSegment(seg);
    _VIDEOS *v=_segments.getRefVideo(s->_reference);
    if(!s->_reference && !segTime && s->_refStartTimeUs<v->firstFramePts)
    {
        segTime=v->firstFramePts;
        ADM_warning("Fixating start time to %"PRIu64" ms\n",segTime/1000);
    }
    uint64_t to=segTime+s->_refStartTimeUs;
    if(false==seektoTime(s->_reference,to))
    {
        ADM_warning("Cannot seek to beginning of segment %"PRIu32" at %"PRIu64" ms\n",seg,to/1000);
        return false;
    }
    _currentSegment=seg;
    int64_t newTime=(int64_t)v->lastDecodedPts+(int64_t)s->_startTimeUs-(int64_t)s->_refStartTimeUs;
    ADM_info("Seek done, in reference, gone to %"PRIu64" with segment start at %"PRIu64"\n",v->lastDecodedPts,s->_refStartTimeUs);
    SET_CURRENT_PTS(newTime);
    return true;
}
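A quick aside on the %"PRIu64" pattern that appears throughout these examples: PRIu32 and PRIu64 are format-specifier macros from <cinttypes> that expand to the right printf length modifier for the platform, and the compiler joins the adjacent string literals into one format string. A minimal, self-contained illustration:

// Stand-alone illustration of the <cinttypes> PRI* macros used in the ADM_warning calls above.
#include <cinttypes>
#include <cstdio>

int main()
{
    uint32_t segment = 3;
    uint64_t timeUs  = 1234567;
    // "%" PRIu64 expands to "%llu" (or "%lu"), so the format always matches uint64_t.
    printf("segment %" PRIu32 " at %" PRIu64 " ms\n", segment, timeUs / 1000);
    return 0;
}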
Example 2: switch
bool AUDMEncoder_DcaEnc::initialize(void)
{
    int chan_config=0;
    switch(wavheader.channels)
    {
        case 1: chan_config=DCAENC_CHANNELS_MONO;break;
        case 2: chan_config=DCAENC_CHANNELS_STEREO;break;
        case 6: chan_config=DCAENC_CHANNELS_3FRONT_2REAR_1OV;break;
        case 5: chan_config=DCAENC_CHANNELS_3FRONT_2REAR;break;
        default:
            ADM_warning("Unsupported channel configuration\n");
            return false;
    }
    wavheader.byterate=(config.bitrate*1000)>>3;
    ADM_info("Starting dcaenc with channels=%d, bitrate=%d\n",wavheader.channels,config.bitrate);
    context=dcaenc_create(wavheader.frequency,chan_config,config.bitrate*1000,DCAENC_FLAG_BIGENDIAN);
    if(!context)
    {
        ADM_warning("Cannot create dcaenc context\n");
        return false;
    }
    inputSize=dcaenc_input_size(context);
    outputSize=dcaenc_output_size(context);
    ADM_info("Converting %d samples to %d bytes\n",inputSize,outputSize);
    return true;
}
Example 3: ADM_warning
/**
 * \fn switchToNextAudioSegment
 *
 */
bool ADM_edAudioTrackFromVideo::switchToNextAudioSegment(void)
{
    // Try to switch segment
    if(_audioSeg+1>=parent->_segments.getNbSegments()) return false;
    ADM_warning("Switching to segment %"PRIu32"\n",_audioSeg+1);
    _audioSeg++;
    _SEGMENT *seg=parent->_segments.getSegment(_audioSeg);
    ADM_audioStreamTrack *trk=getTrackAtVideoNumber(seg->_reference);
    if(!trk)
        return false; // no audio track attached to that reference video
    ADM_Audiocodec *codec=trk->codec;
    if(codec)
    {
        codec->resetAfterSeek();
    }
    // Go to beginning of the stream
    if(false==trk->stream->goToTime(seg->_refStartTimeUs))
    {
        ADM_warning("Failed to seek audio to %"PRIu64" ms\n",seg->_refStartTimeUs/1000);
        return false;
    }
    ADM_info("Switched ok to audio segment %"PRIu32", with a ref time=%s\n",
             _audioSeg,ADM_us2plain(seg->_refStartTimeUs));
    return true;
}
Example 4: ADM_warning
/**
    \fn displayImage
*/
bool libvaRender::displayImage(ADMImage *pic)
{
    // if input is already a VA surface, no need to re-upload it...
    if(pic->refType==ADM_HW_LIBVA)
    {
        ADM_vaSurface *img=(ADM_vaSurface *)pic->refDescriptor.refInstance;
        admLibVA::putX11Surface(img,info.systemWindowId,displayWidth,displayHeight);
        lastSurface=img;
    }else
    {
        if(!mySurface[0] || !mySurface[1])
        {
            ADM_warning("[VARender] No surface\n");
            return false;
        }
        ADM_vaSurface *dest=mySurface[toggle];
        toggle^=1;
        if(false==dest->fromAdmImage(pic))
        {
            ADM_warning("[VARender] Failed to upload pic\n");
            return false;
        }
        admLibVA::putX11Surface(dest,info.systemWindowId,displayWidth,displayHeight);
        lastSurface=dest;
    }
    return true;
}
Example 5: ADMImage_To_argbSurface
/**
    \fn ADMImage_To_argbSurface
*/
static bool ADMImage_To_argbSurface(ADMImage *pic, IDirect3DSurface9 *surface, ADMColorScalerFull *scaler)
{
    D3DLOCKED_RECT lock;
    if (ADM_FAILED(IDirect3DSurface9_LockRect(surface, &lock, NULL, 0)))
    {
        ADM_warning("D3D Cannot lock surface\n");
        return false;
    }
    // RGB
    uint8_t *src[3];
    uint8_t *dst[3];
    pic->GetReadPlanes(src);
    dst[0]=(uint8_t *)lock.pBits;
    dst[1]=dst[2]=NULL;
    int sourcePitch[3],dstPitch[3];
    pic->GetPitches(sourcePitch);
    dstPitch[0]=lock.Pitch;
    dstPitch[1]=dstPitch[2]=0;
    scaler->convertPlanes(sourcePitch,dstPitch,src,dst);
    if (ADM_FAILED(IDirect3DSurface9_UnlockRect(surface)))
    {
        ADM_warning("D3D Cannot unlock surface\n");
        return false;
    }
    return true;
}
Example 6: ADMImage_To_yv12Surface
/**
    \fn ADMImage_To_yv12Surface
*/
static bool ADMImage_To_yv12Surface(ADMImage *pic, IDirect3DSurface9 *surface)
{
    D3DLOCKED_RECT lock;
    if (ADM_FAILED(IDirect3DSurface9_LockRect(surface, &lock, NULL, 0)))
    {
        ADM_warning("D3D Cannot lock surface\n");
        return false;
    }
    // copy the three planes, one after the other, into the locked surface
    uint8_t *dst=(uint8_t *)lock.pBits;
    int dStride=lock.Pitch;
    int width=pic->GetWidth(PLANAR_Y);
    int height=pic->GetHeight(PLANAR_Y);
    d3dBlit(pic, PLANAR_Y, dst, dStride,    width,    height);
    dst+=height*dStride;
    d3dBlit(pic, PLANAR_U, dst, dStride>>1, width>>1, height>>1);
    dst+=(height/2)*(dStride/2);
    d3dBlit(pic, PLANAR_V, dst, dStride>>1, width>>1, height>>1);
    if (ADM_FAILED(IDirect3DSurface9_UnlockRect(surface)))
    {
        ADM_warning("D3D Cannot unlock surface\n");
        return false;
    }
    return true;
}
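The d3dBlit helper used in Example 6 is not shown on this page. Assuming it is a plain per-plane copy that honors the source and destination pitches, a hypothetical version could look like the sketch below; the ADMImage accessors GetReadPlanes/GetPitches are the ones used in Example 5, and everything else is illustration only, not the real Avidemux helper:

// Hypothetical sketch of a d3dBlit-like helper: copy one plane of an ADMImage
// row by row into a destination buffer with its own pitch.
#include <cstdint>
#include <cstring>   // memcpy

static void d3dBlitSketch(ADMImage *pic, ADM_PLANE plane, uint8_t *dst, int dstPitch, int width, int height)
{
    uint8_t *planes[3];
    int      pitches[3];
    pic->GetReadPlanes(planes);          // accessors as used in Example 5
    pic->GetPitches(pitches);
    uint8_t *src      = planes[plane];   // assumes PLANAR_Y/U/V index the arrays 0..2
    int      srcPitch = pitches[plane];
    for (int y = 0; y < height; y++)
    {
        memcpy(dst, src, width);         // one row of 8-bit samples
        src += srcPitch;
        dst += dstPitch;
    }
}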
Example 7: ADM_warning
/**
    \fn displayImage_argb
    \brief manually do the yv12 -> RGB conversion + rescaling and the upload to the backbuffer
*/
bool dxvaRender::displayImage_argb(ADMImage *pic)
{
    IDirect3DSurface9 *bBuffer;
    // 1- grab the backbuffer
    if( ADM_FAILED(IDirect3DDevice9_GetBackBuffer(d3dDevice, 0, 0,
                        D3DBACKBUFFER_TYPE_MONO,
                        &bBuffer)))
    {
        ADM_warning("D3D Cannot get backBuffer\n");
        return false;
    }
    // 2- convert + rescale directly into it
    if(!ADMImage_To_argbSurface(pic,bBuffer,scaler))
    {
        ADM_warning("Image to argb surface failed\n");
        IDirect3DSurface9_Release(bBuffer);
        return false;
    }
    IDirect3DDevice9_BeginScene(d3dDevice);
    IDirect3DDevice9_EndScene(d3dDevice);
    IDirect3DSurface9_Release(bBuffer); // GetBackBuffer adds a reference we must drop
    // 3- present
    if( ADM_FAILED(IDirect3DDevice9_Present(d3dDevice, &targetRect, 0, 0, 0)))
    {
        ADM_warning("D3D Present failed\n");
    }
    return true;
}
Example 8: ADM_warning
/**
    \fn uploadImage
    \brief upload an image to a vdpau surface
*/
bool vdpauVideoFilterDeint::uploadImage(ADMImage *next, VdpVideoSurface surface)
{
    if(!next) // empty image
    {
        ADM_warning("VdpauDeint: No image to upload\n");
        return true;
    }
    if(surface==VDP_INVALID_HANDLE)
    {
        ADM_error("Surface provided is invalid\n");
        return false;
    }
    // Blit our image to the surface
    uint32_t pitches[3];
    uint8_t *planes[3];
    next->GetPitches(pitches);
    next->GetReadPlanes(planes);
    aprintf("Putting image in surface %d\n",(int)surface);
    // Put our stuff in input...
#if VDP_DEBUG
    printf("Uploading image to surface %d\n",surfaceIndex%ADM_NB_SURFACES);
#endif
    if(VDP_STATUS_OK!=admVdpau::surfacePutBits(
                surface,
                planes,pitches))
    {
        ADM_warning("[Vdpau] video surface : Cannot putbits\n");
        return false;
    }
    return true;
}
Example 9: ADM_vaSurface
bool ADM_libvaEncoder::setup(void)
{
    int width=getWidth();
    int height=getHeight();
    // Allocate VAImage
    for(int i=0;i<VA_ENC_NB_SURFACE;i++)
    {
        vaSurface[i]=new ADM_vaSurface(NULL,width,height);
        if(!vaSurface[i]->image) // allocation failed
        {
            ADM_warning("Cannot allocate surface\n");
            return false;
        }
    }
    context=new ADM_vaEncodingContext();
    if(!context->init(width,height,VA_ENC_NB_SURFACE,vaSurface))
    {
        ADM_warning("Cannot initialize vaEncoder context\n");
        return false;
    }
    encodingBuffer=new ADM_vaEncodingBuffer(context,(width*height*400)/256);
    return true;
}
Example 10: ADM_warning
/**
    \fn encode
*/
bool ADM_libvaEncoder::encode(ADMBitstream *out)
{
    uint32_t fn;
    if(source->getNextFrame(&fn,image)==false)
    {
        ADM_warning("[LIBVA] Cannot get next image\n");
        return false;
    }
    if(!vaSurface[0]->fromAdmImage(image))
    {
        ADM_warning("Cannot upload image to surface\n");
        return false;
    }
    //
    if(!context->encode(vaSurface[0],out,encodingBuffer))
    {
        ADM_warning("Error encoding picture\n");
        return false;
    }
    out->len=plane;
    out->pts=out->dts=image->Pts;
    out->flags=AVI_KEY_FRAME;
    return true;
}
Example 11: ADM_warning
/**
    \fn getPacket
*/
uint8_t ADM_audioStreamConstantChunk::getPacket(uint8_t *buffer, uint32_t *size, uint32_t sizeMax, uint32_t *nbSample, uint64_t *dts)
{
    *size=0;
    *nbSample=0;
    if(sizeMax>=chunkSize)
    {
        uint32_t mSize;
        uint64_t mDts;
        if(!access->getPacket(buffer,&mSize,sizeMax,&mDts))
        {
            ADM_warning("Can't get packet\n");
            return 0;
        }
        ADM_info("Got packet : chunk=%d size=%d dts=%s\n",chunkSize,mSize,ADM_us2plain(mDts));
        if(!*size)
            *dts=mDts;
        *size+=mSize;
        *nbSample+=samplesPerChunk;
        if(mSize!=chunkSize)
        {
            ADM_warning("Expected chunk of size =%d, got %d\n",chunkSize,mSize);
        }
        buffer+=mSize;
        sizeMax-=mSize;
    }
    if(!*size) return 0;
    return 1;
}
Example 12: ADM_info
/**
    \fn init
*/
bool libvaRender::init(GUI_WindowInfo *window, uint32_t w, uint32_t h, renderZoom zoom)
{
    ADM_info("[libva] Xv start\n");
    info=*window;
    if(admLibVA::isOperationnal()==false)
    {
        ADM_warning("[libva] Not operational\n");
        return false;
    }
    for(int i=0;i<2;i++)
    {
        VASurfaceID surface=admLibVA::allocateSurface(w,h);
        if(surface==VA_INVALID)
        {
            ADM_warning("[libva] cannot allocate surface\n");
            return false;
        }
        mySurface[i]=new ADM_vaSurface(NULL,w,h);
        mySurface[i]->surface=surface;
    }
    baseInit(w,h,zoom);
    return true;
}
Example 13: xdebug
bool ADM_latm2aac::pushData(int incomingLen, uint8_t *inData, uint64_t dts)
{
    // Look up sync
    uint8_t *end=inData+incomingLen;
    uint8_t *start=inData;
    xdebug("Pushing data %d bytes\n",incomingLen);
    while(start+3<=end) // need at least a 3-byte LOAS header
    {
        int key=(start[0]<<8)+start[1];
        if((key & 0xffe0)!=0x56e0)
        {
            ADM_warning("Sync lost\n");
            return true;
        }
        uint32_t len=start[2]+((key & 0x1f)<<8);
        start+=3;
        if(start+len>end)
        {
            ADM_warning("Not enough data, need %d, got %d\n",len,(int)(end-start));
            return true;
        }
        xdebug("Found LATM : size %d\n",len);
        demuxLatm(dts,start,len);
        dts=ADM_NO_PTS;
        // LATM demux
        start+=len;
    }
    xdebug("-- end of this LOAS frame --\n");
    return true;
}
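For readers decoding the bit-twiddling above: (key & 0xffe0) == 0x56e0 checks the 11-bit LOAS/LATM syncword 0x2B7 left-aligned in the first two bytes, and the following 13 bits carry the payload length in bytes. A small stand-alone helper, written here purely as an illustration of that layout (it is not part of the sources shown on this page), could be:

// Illustration only: parse a 3-byte LOAS AudioSyncStream header.
// Returns the payload length in bytes, or -1 if the syncword is missing.
#include <cstdint>

static int loasPayloadLength(const uint8_t *hdr)
{
    uint32_t bits = ((uint32_t)hdr[0] << 16) | ((uint32_t)hdr[1] << 8) | hdr[2]; // first 24 bits
    if ((bits >> 13) != 0x2B7)     // top 11 bits: syncword, same test as (key & 0xffe0) == 0x56e0
        return -1;
    return (int)(bits & 0x1FFF);   // low 13 bits: audioMuxLengthBytes
}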
Example 14: while
/**
    \fn fillAudio
    \brief Push audio data until targetDts is reached
*/
bool muxerAvi::fillAudio(uint64_t targetDts)
{
    // Now send audio until all streams have DTS > lastVideoDts+increment
    for(int audioIndex=0;audioIndex<nbAStreams;audioIndex++)
    {
        ADM_audioStream *a=aStreams[audioIndex];
        uint32_t fq=a->getInfo()->frequency;
        int nb=0;
        audioClock *clk=clocks[audioIndex];
        aviAudioPacket *aPacket=audioPackets+audioIndex;
        if(true==aPacket->eos) return true;
        while(1)
        {
            if(false==aPacket->present)
            {
                if(!a->getPacket(aPacket->buffer,
                                 &(aPacket->sizeInBytes),
                                 AUDIO_BUFFER_SIZE,
                                 &(aPacket->nbSamples),
                                 &(aPacket->dts)))
                {
                    ADM_warning("Cannot get audio packet for stream %d\n",audioIndex);
                    aPacket->eos=true;
                    break;
                }
                if(aPacket->dts!=ADM_NO_PTS)
                {
                    aPacket->dts+=audioDelay;
                    aPacket->dts-=firstPacketOffset;
                }
                aprintf("[Audio] Packet size %"PRIu32" sample:%"PRIu32" dts:%"PRIu64" target :%"PRIu64"\n",
                        aPacket->sizeInBytes,aPacket->nbSamples,aPacket->dts,targetDts);
                if(aPacket->dts!=ADM_NO_PTS)
                {
                    int64_t skew=(int64_t)aPacket->dts-(int64_t)clk->getTimeUs(); // signed difference of unsigned timestamps
                    if(skew>32000 || skew<-32000)
                    {
                        ADM_warning("[AviMuxer] Audio skew!\n");
                        clk->setTimeUs(aPacket->dts);
#warning FIXME add padding
                    }
                }
                aPacket->present=true;
            }
            // We now have a packet stored
            aprintf("Audio packet dts =%s\n",ADM_us2plain(aPacket->dts));
            if(aPacket->dts!=ADM_NO_PTS)
                if(aPacket->dts>targetDts)
                {
                    aprintf("In the future..\n");
                    break; // this one is in the future
                }
            nb=writter.saveAudioFrame(audioIndex,aPacket->sizeInBytes,aPacket->buffer);
            encoding->pushAudioFrame(aPacket->sizeInBytes);
            aprintf("writing audio packet\n");
            clk->advanceBySample(aPacket->nbSamples);
            aPacket->present=false;
            //printf("%u vs %u\n",audioDts/1000,(lastVideoDts+videoIncrement)/1000);
        }
    }
    return true;
}
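The audioClock object used above (getTimeUs, setTimeUs, advanceBySample) is not shown on this page. Assuming it simply tracks elapsed time in microseconds from the number of samples written, a minimal sketch might look like the following; the class name and rounding behavior are guesses, not the real Avidemux implementation:

// Minimal sketch of an audioClock-like helper (hypothetical, for illustration only).
#include <cstdint>

class audioClockSketch
{
public:
    explicit audioClockSketch(uint32_t frequency) : frequency(frequency), timeUs(0) {}
    uint64_t getTimeUs() const { return timeUs; }
    void     setTimeUs(uint64_t t) { timeUs = t; }     // hard resync, as in the skew branch above
    void     advanceBySample(uint32_t samples)         // convert samples to microseconds at the stream rate
    {
        timeUs += ((uint64_t)samples * 1000000) / frequency;
    }
private:
    uint32_t frequency;  // samples per second
    uint64_t timeUs;     // running clock in microseconds
};

A production version would also carry the division remainder so rounding errors do not accumulate over many packets.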
Example 15: ADM_info
/**
    \fn removeChunk
    \brief
*/
bool ADM_EditorSegment::removeChunk(uint64_t from, uint64_t to)
{
    uint32_t startSeg,endSeg;
    uint64_t startOffset,endOffset;
    ADM_info("Cutting from %"PRIu64" to %"PRIu64" ms\n",from/1000,to/1000);
    dump();
    if(false==convertLinearTimeToSeg(from,&startSeg,&startOffset))
    {
        ADM_warning("Cannot get starting point (%"PRIu64" ms)\n",from/1000);
        return false;
    }
    if(false==convertLinearTimeToSeg(to,&endSeg,&endOffset))
    {
        ADM_warning("Cannot get ending point (%"PRIu64" ms)\n",to/1000);
        return false;
    }
    ADM_info("Start, seg %"PRIu32" Offset :%"PRIu64" ms\n",startSeg,startOffset/1000);
    ADM_info("End  , seg %"PRIu32" Offset :%"PRIu64" ms\n",endSeg,endOffset/1000);
    ListOfSegments tmp=segments;
    if(startSeg==endSeg)
    {
        // Split the segment into two...
        segments.insert(segments.begin()+startSeg+1,*getSegment(startSeg));
        endSeg=startSeg+1;
    }
    _SEGMENT *first=getSegment(startSeg);
    // The cut may span several segments...
    // 1- shorten the start segment
    first->_durationUs=startOffset;
    // 2- shorten the last segment
    _SEGMENT *last=getSegment(endSeg);
    last->_refStartTimeUs+=endOffset;
    last->_durationUs-=endOffset;
    // 3- remove the segments in between
    for(int i=startSeg+1;i<endSeg;i++)
    {
        segments.erase(segments.begin()+startSeg+1);
    }
    updateStartTime();
    removeEmptySegments();
    if(isEmpty())
    {
        GUI_Error_HIG(QT_TRANSLATE_NOOP("adm","Error"),QT_TRANSLATE_NOOP("adm","You cannot remove *all* the video\n"));
        segments=tmp;
        updateStartTime();
        return false;
    }
    undoSegments.push_back(tmp);
    dump();
    return true;
}
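Finally, convertLinearTimeToSeg, which both Example 1 and Example 15 rely on, is not listed on this page. Conceptually it walks the segment list and subtracts each segment's duration until the linear editor time falls inside one. The sketch below illustrates that idea only; it is not the actual Avidemux implementation, and the container and field names are assumptions:

// Illustration of the idea behind convertLinearTimeToSeg (not the real Avidemux code):
// map a linear editor time to (segment index, offset inside that segment).
#include <cstdint>
#include <vector>

struct SegmentSketch { uint64_t durationUs; };   // stand-in for the real _SEGMENT

static bool convertLinearTimeToSegSketch(const std::vector<SegmentSketch> &segments,
                                         uint64_t linearTimeUs,
                                         uint32_t *segIndex, uint64_t *offsetUs)
{
    uint64_t start = 0;
    for (size_t i = 0; i < segments.size(); i++)
    {
        uint64_t duration = segments[i].durationUs;
        if (linearTimeUs < start + duration)
        {
            *segIndex = (uint32_t)i;
            *offsetUs = linearTimeUs - start;    // offset relative to this segment's start
            return true;
        }
        start += duration;
    }
    return false;   // time is past the end of the edit list
}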