This article collects typical C++ usage examples of the vdrefptr::asStream method: what asStream does, how it is called, and what real-world calls look like. The curated examples below may help answer those questions; you can also explore further usage examples of the vdrefptr class itself.
In the examples, asStream() is called through a vdrefptr-held video source (inputVideo) and yields an IVDStreamSource* that is then queried for the stream's start, end, and rate.
Five code examples of vdrefptr::asStream are shown below, ordered by popularity.
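Before the individual examples, here is a minimal sketch of the pattern they all share, assuming a global vdrefptr<IVDVideoSource> inputVideo as the examples below suggest. The clampToStream helper name and the omission of VirtualDub's include directives are assumptions for illustration; only the asStream()/getStart()/getEnd()/getRate() calls are taken from the examples themselves.

// Sketch only: inputVideo is assumed to be a vdrefptr<IVDVideoSource> global,
// as the examples below imply; VirtualDub's own headers are assumed to supply
// vdrefptr, IVDVideoSource, IVDStreamSource, VDPosition and VDFraction.
extern vdrefptr<IVDVideoSource> inputVideo;

VDPosition clampToStream(VDPosition pos) {       // hypothetical helper
    if (!inputVideo)
        return -1;

    // asStream() narrows the video source to its generic stream interface.
    IVDStreamSource *pVSS = inputVideo->asStream();

    const VDPosition start = pVSS->getStart();   // first valid frame
    const VDPosition end   = pVSS->getEnd();     // one past the last valid frame
    const VDFraction rate(pVSS->getRate());      // frame rate as a rational
    (void)rate;                                  // queried here only to show the call

    if (pos < start) pos = start;
    if (pos >= end)  pos = end - 1;
    return pos;
}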
Example 1: guiPositionHandleCommand
VDPosition guiPositionHandleCommand(WPARAM wParam, IVDPositionControl *pc) {
    if (!inputVideo)
        return -1;

    // The stream interface exposes the valid frame range of the video source.
    IVDStreamSource *pVSS = inputVideo->asStream();

    switch(HIWORD(wParam)) {
    case PCN_START:        // jump to the first frame
        pc->SetPosition(pVSS->getStart());
        return pVSS->getStart();

    case PCN_BACKWARD:     // step one frame back, clamped to the start
        {
            VDPosition pos = pc->GetPosition();

            if (pos > pVSS->getStart()) {
                pc->SetPosition(pos - 1);
                return pos - 1;
            }
        }
        break;

    case PCN_FORWARD:      // step one frame forward, clamped to the end
        {
            VDPosition pos = pc->GetPosition();

            if (pos < pVSS->getEnd()) {
                pc->SetPosition(pos + 1);
                return pos + 1;
            }
        }
        break;

    case PCN_END:          // jump to the end position
        pc->SetPosition(pVSS->getEnd());
        return pVSS->getEnd();

    case PCN_KEYPREV:      // seek to the previous key frame
        {
            VDPosition lSample = inputVideo->prevKey(pc->GetPosition());

            if (lSample < 0)
                lSample = pVSS->getStart();

            pc->SetPosition(lSample);
            return lSample;
        }
        break;

    case PCN_KEYNEXT:      // seek to the next key frame
        {
            VDPosition lSample = inputVideo->nextKey(pc->GetPosition());

            if (lSample < 0)
                lSample = pVSS->getEnd();

            pc->SetPosition(lSample);
            return lSample;
        }
        break;
    }

    return -1;
}
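The HIWORD(wParam) switch above suggests that the position control delivers its PCN_* notifications through WM_COMMAND, so a hypothetical caller could forward those notifications as sketched below. The onCommand wrapper, IDC_POSITION identifier, and redrawFrame helper are invented names, and they are not part of the example.

// Hypothetical forwarding of position-control notifications (sketch only):
// onCommand, IDC_POSITION and redrawFrame are invented names, and obtaining
// the IVDPositionControl* is left out because the example does not show it.
LRESULT onCommand(HWND hdlg, WPARAM wParam, IVDPositionControl *pc) {
    if (LOWORD(wParam) == IDC_POSITION) {            // notification from the position control
        VDPosition newFrame = guiPositionHandleCommand(wParam, pc);
        if (newFrame >= 0)
            redrawFrame(hdlg, newFrame);             // invented helper: repaint at the new frame
    }
    return 0;
}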
Example 2: guiPositionInitFromStream
void guiPositionInitFromStream(IVDPositionControl *pc) {
    if (!inputVideo)
        return;

    IVDStreamSource *pVSS = inputVideo->asStream();
    const VDFraction videoRate(pVSS->getRate());

    pc->SetRange(pVSS->getStart(), pVSS->getEnd());
    pc->SetFrameRate(videoRate);
}
Example 3: AppendAVI
void AppendAVI(const wchar_t *pszFile) {
    if (inputAVI) {
        IVDStreamSource *pVSS = inputVideo->asStream();
        VDPosition lTail = pVSS->getEnd();    // timeline length before the append

        if (inputAVI->Append(pszFile)) {
            // Extend the edit subset with the newly appended frame range.
            g_project->BeginTimelineUpdate();
            FrameSubset& s = g_project->GetTimeline().GetSubset();
            s.insert(s.end(), FrameSubsetNode(lTail, pVSS->getEnd() - lTail, false, 0));
            g_project->EndTimelineUpdate();
        }
    }
}
Example 4: guiPositionBlit
void guiPositionBlit(HWND hWndClipping, VDPosition lFrame, int w, int h) {
    if (lFrame < 0)
        return;

    try {
        BITMAPINFOHEADER *dcf;

        if (!inputVideo)
            SendMessage(hWndClipping, CCM_BLITFRAME2, 0, (LPARAM)NULL);
        else {
            dcf = (BITMAPINFOHEADER *)inputVideo->getDecompressedFormat();

            IVDStreamSource *pVSS = inputVideo->asStream();
            if (lFrame < pVSS->getStart() || lFrame >= pVSS->getEnd())
                SendMessage(hWndClipping, CCM_BLITFRAME2, 0, (LPARAM)NULL);
            else {
                Pixel32 *tmpmem;
                const void *pFrame = inputVideo->getFrame(lFrame);
                int dch = abs(dcf->biHeight);

                // If a target size was given and differs from the source, resize
                // through a temporary 32-bit buffer before blitting.
                if (w>0 && h>0 && w!=dcf->biWidth && h != dch && (tmpmem = new Pixel32[((w+1)&~1)*h + ((dcf->biWidth+1)&~1)*dch])) {
                    VBitmap vbt(tmpmem, w, h, 32);
                    VBitmap vbs(tmpmem+((w+1)&~1)*h, dcf->biWidth, dch, 32);
                    VBitmap srcbm((void *)pFrame, dcf);

                    vbs.BitBlt(0, 0, &srcbm, 0, 0, -1, -1);
                    vbt.StretchBltBilinearFast(0, 0, w, h, &vbs, 0, 0, vbs.w, vbs.h);

                    VDPixmap px(VDAsPixmap(vbt));
                    SendMessage(hWndClipping, CCM_BLITFRAME2, 0, (LPARAM)&px);

                    delete[] tmpmem;
                } else
                    SendMessage(hWndClipping, CCM_BLITFRAME2, 0, (LPARAM)&inputVideo->getTargetFormat());
            }
        }
    } catch(const MyError&) {
        _RPT0(0,"Exception!!!\n");
    }
}
Example 5: AppendAVIAutoscan
void AppendAVIAutoscan(const wchar_t *pszFile) {
    wchar_t buf[MAX_PATH];
    wchar_t *s = buf, *t;
    int count = 0;

    if (!inputAVI)
        return;

    IVDStreamSource *pVSS = inputVideo->asStream();
    VDPosition originalCount = pVSS->getEnd();   // timeline length before appending

    wcscpy(buf, pszFile);

    t = VDFileSplitExt(VDFileSplitPath(s));

    if (t>buf)
        --t;

    try {
        for(;;) {
            if (!VDDoesPathExist(buf))
                break;

            if (!inputAVI->Append(buf))
                break;

            ++count;

            // Increment the numeric suffix of the filename in place (with carry),
            // so "cap1.avi" becomes "cap2.avi", "cap9.avi" becomes "cap10.avi", etc.
            s = t;

            for(;;) {
                if (s<buf || !isdigit(*s)) {
                    // Ran out of digits: insert a leading '1'.
                    memmove(s+2, s+1, sizeof(wchar_t) * wcslen(s));
                    s[1] = L'1';
                    ++t;
                } else {
                    if (*s == L'9') {
                        *s-- = L'0';   // carry into the next digit to the left
                        continue;
                    }
                    ++*s;
                }
                break;
            }
        }
    } catch(const MyError& e) {
        // if the first segment failed, turn the warning into an error
        if (!count)
            throw;

        // log append errors, but otherwise eat them
        VDLog(kVDLogWarning, VDTextAToW(e.gets()));
    }

    guiSetStatus("Appended %d segments (stopped at \"%s\")", 255, count, VDTextWToA(buf).c_str());

    if (count) {
        FrameSubset& s = g_project->GetTimeline().GetSubset();

        g_project->BeginTimelineUpdate();
        s.insert(s.end(), FrameSubsetNode(originalCount, pVSS->getEnd() - originalCount, false, 0));
        g_project->EndTimelineUpdate();
    }
}
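The least obvious part of Example 5 is the in-place filename increment performed with memmove. The sketch below re-implements just that step on a std::wstring, purely to illustrate the carry logic; it is not VirtualDub code, and nextSegmentName is an invented name.

// Standalone illustration of the numeric-suffix increment used above,
// rewritten on std::wstring (not VirtualDub code).
#include <cwctype>
#include <string>

std::wstring nextSegmentName(std::wstring name, size_t extPos) {
    // extPos is the index of the '.' starting the extension (or name.size()).
    size_t i = extPos;                      // position just past the digit run
    // Walk left across the digit run, carrying '9' -> '0'.
    while (i > 0 && std::iswdigit(name[i - 1])) {
        if (name[i - 1] != L'9') {
            ++name[i - 1];                  // simple increment, no carry needed
            return name;
        }
        name[i - 1] = L'0';                 // '9' rolls over, keep carrying left
        --i;
    }
    // Ran out of digits: a new leading '1' is inserted, as the memmove above does.
    name.insert(i, 1, L'1');
    return name;
}

// nextSegmentName(L"cap1.avi", 4) -> L"cap2.avi"
// nextSegmentName(L"cap9.avi", 4) -> L"cap10.avi"
// nextSegmentName(L"cap.avi",  3) -> L"cap1.avi"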