本文整理汇总了C++中IGraphBuilder::Release方法的典型用法代码示例。如果您正苦于以下问题:C++ IGraphBuilder::Release方法的具体用法?C++ IGraphBuilder::Release怎么用?C++ IGraphBuilder::Release使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类IGraphBuilder
的用法示例。
在下文中一共展示了IGraphBuilder::Release方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: main
// Simple DirectShow playback demo: builds a filter graph, renders one of
// several hard-coded media files chosen by the user, runs the graph and
// waits for playback to complete before releasing every COM interface.
// Fixes vs. original: standard-conforming `int main` (was `void main`),
// CoUninitialize on the CoCreateInstance failure path, and the two
// QueryInterface HRESULTs are checked before the pointers are dereferenced.
int main()
{
    int option;
    IGraphBuilder *pGraph = NULL;   // filter-graph manager
    IMediaControl *pControl = NULL; // starts/stops the graph
    IMediaEvent *pEvent = NULL;     // delivers filter-graph events (e.g. completion)
    HRESULT hr = CoInitialize(NULL); // initialize the COM library
    if (FAILED(hr)) {
        printf("ERROR - Could not initialize COM library");
        return 1;
    }
    // Create the filter graph manager and query for interfaces.
    hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&pGraph);
    if (FAILED(hr))
    {
        printf("ERROR - Could not create the Filter Graph Manager.");
        CoUninitialize(); // balance the successful CoInitialize above
        return 1;
    }
    // Check both queries instead of dereferencing possibly-NULL pointers later.
    hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pControl);
    if (SUCCEEDED(hr))
        hr = pGraph->QueryInterface(IID_IMediaEvent, (void **)&pEvent);
    if (FAILED(hr))
    {
        printf("ERROR - Could not query graph interfaces.");
        if (pControl) pControl->Release();
        pGraph->Release();
        CoUninitialize();
        return 1;
    }
    // Build the graph. IMPORTANT: Change these strings to files on your system.
    cout << "introduce 1:chicken 2:futbol 3: video futbol audio chicken: \n";
    cin >> option;
    switch (option)
    {
    case 1: hr = pGraph->RenderFile(L"C:\\Users\\Victor\\Downloads\\chicken.wmv", NULL);
        break;
    case 2: hr = pGraph->RenderFile(L"C:\\Users\\Victor\\Downloads\\futbol.mpg", NULL);
        break;
    case 3: // Create the Sample Grabber filter.
        break;
    }
    if (SUCCEEDED(hr))
    {
        // Run the graph.
        hr = pControl->Run();
        if (SUCCEEDED(hr))
        {
            // Wait for completion.
            long evCode;
            pEvent->WaitForCompletion(INFINITE, &evCode);
            // Note: Do not use INFINITE in a real application, because it
            // can block indefinitely.
        }
    }
    pControl->Release();
    pEvent->Release();
    pGraph->Release();
    CoUninitialize();
    return 0;
}
示例2: play_movie
// Plays "e:\\alpha\\running.avi" embedded as a child of 'hwnd' and blocks
// until the clip finishes, then detaches the video window and tears the
// graph down. Fix vs. original: width/height/grc members are 'long', so the
// printf format specifiers must be %ld (using %d is undefined behavior
// where long is wider than int).
void play_movie( HWND hwnd )
{
    IGraphBuilder *pGraph;
    IMediaControl *pMediaControl;
    IMediaEvent *pEvent;
    IBasicVideo *pBasic;
    IVideoWindow *pVidWin = NULL;
    RECT grc;
    long width, height;
    CoInitialize(NULL);
    // Create the filter graph manager and query for interfaces.
    CoCreateInstance(
        CLSID_FilterGraph,
        NULL,
        CLSCTX_INPROC_SERVER,
        IID_IGraphBuilder,
        (void **)&pGraph);
    pGraph->QueryInterface(IID_IVideoWindow, (void **)&pVidWin);
    pGraph->QueryInterface(IID_IMediaControl, (void **)&pMediaControl);
    pGraph->QueryInterface(IID_IMediaEvent, (void **)&pEvent);
    pGraph->QueryInterface(IID_IBasicVideo, (void**)&pBasic );
    // Build the graph. IMPORTANT: Change string to a file on your system.
    pGraph->RenderFile(L"e:\\alpha\\running.avi", NULL);
    pBasic->GetVideoSize( &width, &height );
    printf( "video frames are %ld x %ld\n", width, height );
    // Embed the video window in the caller's window at (10,10).
    pVidWin->put_Owner((OAHWND)hwnd);
    pVidWin->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS);
    GetClientRect( hwnd, &grc );
    pVidWin->SetWindowPosition(10, 10, width, height);
    printf( "window is %ld x %ld\n", grc.right, grc.bottom );
    // Run the graph.
    pMediaControl->Run();
    // Wait for completion.
    long evCode;
    pEvent->WaitForCompletion(INFINITE, &evCode);
    // Detach from the owner window before releasing (see IVideoWindow docs).
    pVidWin->put_Visible(OAFALSE);
    pVidWin->put_Owner(NULL);
    // Clean up.
    pBasic->Release();
    pVidWin->Release();
    pMediaControl->Release();
    pEvent->Release();
    pGraph->Release();
    CoUninitialize();
}
示例3: playNormalVideo
// Renders and plays "Rscreen_shortFinger.avi" twice in a row, printing how
// the first playback finished (timeout / complete / abort).
// Fix vs. original: pMediaEvent was queried but never released (interface
// leak); it is now released with the other interfaces.
void playNormalVideo()
{
    IGraphBuilder *pGraphBuilder;
    IMediaControl *pMediaControl;
    IMediaEvent *pMediaEvent;
    long eventCode;
    CoInitialize(NULL);
    CoCreateInstance(CLSID_FilterGraph,
                     NULL,
                     CLSCTX_INPROC,
                     IID_IGraphBuilder,
                     (LPVOID *)&pGraphBuilder);
    pGraphBuilder->QueryInterface(IID_IMediaControl,
                                  (LPVOID *)&pMediaControl);
    pGraphBuilder->QueryInterface(IID_IMediaEvent,
                                  (LPVOID *)&pMediaEvent);
    // IMediaControl::RenderFile builds the whole playback graph for the file.
    pMediaControl->RenderFile(L"Rscreen_shortFinger.avi");
    pMediaControl->Run();
    FILTER_STATE fs;
    HRESULT hr = pMediaControl->GetState(100, (OAFilterState*)&fs);
    // The first argument is the timeout value in milliseconds.
    // Passing -1 blocks until the graph raises a completion event.
    pMediaEvent->WaitForCompletion(-1, &eventCode);
    switch (eventCode) {
    case 0:
        printf("timeout\n");
        break;
    case EC_COMPLETE:
        printf("complete\n");
        break;
    case EC_ERRORABORT:
        printf("errorabort\n");
        break;
    case EC_USERABORT:
        printf("userabort\n");
        break;
    }
    // Play the clip a second time and wait for it again.
    hr = pMediaControl->GetState(100, (OAFilterState*)&fs);
    pMediaControl->Run();
    hr = pMediaControl->GetState(100, (OAFilterState*)&fs);
    pMediaEvent->WaitForCompletion(-1, &eventCode);
    pMediaEvent->Release(); // was leaked in the original
    pMediaControl->Release();
    pGraphBuilder->Release();
    CoUninitialize();
}
示例4: PlayMovie
// Plays the movie at 'path' inside this object's 'window' HWND and blocks
// until playback finishes.
// Fix vs. original: the video window's owner is now detached
// (put_Visible(OAFALSE) + put_Owner(NULL)) before the interfaces are
// released — per the IVideoWindow documentation, releasing the graph while
// it still owns the HWND can send messages to a destroyed window.
void Video::PlayMovie(string path)
{
    IGraphBuilder *pGraph = NULL;
    IMediaControl *pControl = NULL;
    IMediaEvent *pEvent = NULL;
    IVideoWindow *pVideo = NULL;
    // Initialize the COM library.
    CoInitialize(NULL);
    // Create the filter graph manager and query for interfaces.
    CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&pGraph);
    // Convert the narrow path to a wide string for RenderFile.
    int slength = (int)path.length() + 1;
    int len = MultiByteToWideChar(CP_ACP, 0, path.c_str(), slength, 0, 0);
    wchar_t* buf = new wchar_t[len];
    MultiByteToWideChar(CP_ACP, 0, path.c_str(), slength, buf, len);
    std::wstring r(buf);
    delete[] buf;
    // Build the graph.
    pGraph->RenderFile(LPCWSTR(r.c_str()), NULL);
    // Make the video a child of our window, positioned at the top-left corner.
    pGraph->QueryInterface(IID_IVideoWindow, (void **) &pVideo);
    pVideo->put_Owner((OAHWND)window);
    pVideo->put_WindowStyle( WS_CHILD );
    pVideo->put_Left(0);
    pVideo->put_Top(0);
    pGraph->QueryInterface(IID_IMediaControl, (void **)&pControl);
    pGraph->QueryInterface(IID_IMediaEvent, (void **)&pEvent);
    pControl->Run();
    long evCode;
    pEvent->WaitForCompletion(-1, &evCode);
    // Detach the video window from our HWND before tearing the graph down.
    pVideo->put_Visible(OAFALSE);
    pVideo->put_Owner(NULL);
    // release controls
    pControl->Release();
    pEvent->Release();
    pVideo->Release();
    pGraph->Release();
    CoUninitialize();
}
示例5: texture
// Constructor: builds a DirectShow graph that renders the media file at
// 'path' through a custom "textureGrabber" filter, which feeds decoded
// frames into this object's OpenGL texture. On success it caches the clip
// length in seconds, rewinds to position 0 and sets 'rendered' = true;
// on failure it only logs a message.
video::video(std::wstring path): texture(1,1,false,GL_LINEAR,GL_REPEAT), rendered(false), isPlaying(false) {
printf("Opening video \"%ls\"\n", path.c_str());
IGraphBuilder * graph;
IBaseFilter * base;
__int64 clipLength;
CoInitialize(0);
CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC, IID_IGraphBuilder, (void**)&graph);
HRESULT hr = S_OK;
// mediaControl / mediaSeeking are members kept for later playback control.
graph->QueryInterface(IID_IMediaControl, (void**)&mediaControl);
graph->QueryInterface(IID_IMediaSeeking, (void**)&mediaSeeking);
// 'grabber' is a project-defined filter; AddRef because the member keeps a
// reference after the local 'base' interface is released below.
grabber = new textureGrabber(0, &hr);
grabber->AddRef();
grabber->QueryInterface(IID_IBaseFilter, (void**)&base);
graph->AddFilter(base, L"peisikVideoSystem OpenGL texture renderer");
hr = graph->RenderFile(path.c_str(), 0);
grabber->setTexture(&texture);
graph->Release();
base->Release();
// grabber->width != 0 is used as the signal that the grabber was actually
// connected into the rendered graph.
if(SUCCEEDED(hr) && grabber->width)
{
printf("Video succesfully rendered\n");
mediaSeeking->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME);
mediaSeeking->GetDuration(&clipLength);
// Media time is in 100-ns units; convert to seconds.
length = (long double)(clipLength)/(long double)10000000.0;
__int64 position = 0;
mediaSeeking->SetPositions(&position, AM_SEEKING_AbsolutePositioning, &position, AM_SEEKING_NoPositioning);
rendered = true;
}
else
printf("Couldn't find a working video graph\n");
}
示例6: Cleanup
void CHogVideo::Cleanup()
{
this->UnHog();
if (m_pEvent)
{
IMediaEvent *pEvent = (IMediaEvent*)m_pEvent;
pEvent->Release();
m_pEvent = NULL;
}
if (m_pVideoWindow)
{
IVideoWindow *pVideoWindow = (IVideoWindow*)m_pVideoWindow;
pVideoWindow->Release();
m_pVideoWindow = NULL;
}
if (m_pMediaControl)
{
IMediaControl *pMediaControl = (IMediaControl*)m_pMediaControl;
pMediaControl->Release();
m_pMediaControl = NULL;
}
if (m_pGraph)
{
IGraphBuilder *pGraph = (IGraphBuilder*)m_pGraph;
pGraph->Release();
m_pGraph = NULL;
}
if (m_bCOMInitialized)
{
CoUninitialize();
m_bCOMInitialized = false;
}
}
示例7: main
// Recompresses an AVI file with DirectShow: SetOutputFileName creates an
// AVI mux + file writer for C:\STDIUE1.avi, the source AVI's video stream
// is routed through the AVICo compressor (100 kb/s, key frame every 4
// frames) and the audio stream is muxed in alongside it.
// NOTE: this listing is truncated inside the progress-polling loop — the
// site's marker "这里部分代码省略" means "part of the code is omitted here".
int main(int argc, char* argv[])
{
ICaptureGraphBuilder2 *pCaptureGraphBuilder = NULL;
IGraphBuilder *pGraphBuilder = NULL;
IBaseFilter *pSource = NULL;
IBaseFilter *pMux = NULL;
IBaseFilter *pVideoCompressor = NULL;
IBaseFilter *pAudioCompressor = NULL;
IAMStreamConfig *pAMStreamConfig = NULL;
IAMVideoCompression *pAMVideoCompression = NULL;
IMediaControl *pControl = NULL;
IMediaSeeking *pSeek = NULL;
IMediaEvent *pEvent = NULL;
HRESULT hr;
DWORD pdwRegister=0;
CoInitialize(NULL);
// Create the capture graph builder.
CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder);
// Make the rendering section of the graph.
pCaptureGraphBuilder->SetOutputFileName(
&MEDIASUBTYPE_Avi, // File type.
L"C:\\STDIUE1.avi", // File name.
&pMux, // pointer to the multiplexer.
NULL); // pointer to the file writer.
// Load the source file.
pCaptureGraphBuilder->GetFiltergraph(&pGraphBuilder);
pGraphBuilder->AddSourceFilter(L"C:\\Program Files\\Microsoft Money\\Media\\STDIUE1.avi", L"Source Filter", &pSource);
// Add the compressor filter.
CoCreateInstance(CLSID_AVICo, NULL, CLSCTX_INPROC,
IID_IBaseFilter, (void **)&pVideoCompressor);
pGraphBuilder->AddFilter(pVideoCompressor, L"Video Compressor");
// Render the video stream, through the compressor.
pCaptureGraphBuilder->RenderStream(
NULL, // Output pin category
NULL, // Media type
pSource, // Source filter
pVideoCompressor, // Compressor filter
pMux); // Sink filter (the AVI Mux)
/* CoCreateInstance(CLSID_GSM, NULL, CLSCTX_INPROC,
IID_IBaseFilter, (void **)&pAudioCompressor);
pGraphBuilder->AddFilter(pAudioCompressor, L"Audio Compressor");*/
// Render the audio stream.
// NOTE(review): pAudioCompressor is still NULL here because the block
// above is commented out, so the audio stream is muxed uncompressed.
pCaptureGraphBuilder->RenderStream(
NULL,
NULL,
pSource,
pAudioCompressor,
pMux);
// Compress at 100k/second data rate.
AM_MEDIA_TYPE *pmt;
pCaptureGraphBuilder->FindInterface(NULL, NULL, pVideoCompressor, IID_IAMStreamConfig, (void **)&pAMStreamConfig);
pAMStreamConfig->GetFormat(&pmt);
if (pmt->formattype == FORMAT_VideoInfo)
{
((VIDEOINFOHEADER *)(pmt->pbFormat))->dwBitRate = 100000;
pAMStreamConfig->SetFormat(pmt);
}
// Request key frames every four frames.
pAMStreamConfig->QueryInterface(IID_IAMVideoCompression, (void **)&pAMVideoCompression);
pAMVideoCompression->put_KeyFrameRate(4);
pAMVideoCompression->Release();
pAMStreamConfig->Release();
// Run the graph.
pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pControl);
pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&pEvent);
hr = pMux->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
pControl->Run();
printf("Recompressing... \n");
long evCode;
if (SUCCEEDED(hr))
{
REFERENCE_TIME rtTotal, rtNow = 0;
pSeek->GetDuration(&rtTotal);
// Poll once a second; E_ABORT from WaitForCompletion means "still running".
while ((pEvent->WaitForCompletion(1000, &evCode)) == E_ABORT)
{
//.........这里部分代码省略.........
示例8: SplashThread
//.........这里部分代码省略.........
// (The opening of SplashThread is omitted by the example site — "这里部分
// 代码省略" = "part of the code is omitted here". The visible portion:
// scans the splash bitmap for alpha "marker" pixels to locate where the
// version string goes, draws the version text, shows the splash as a
// layered window with optional fade-in, arms auto-hide timers, pumps
// messages, and finally releases the DirectShow interfaces that were
// created — in the omitted part — to play the splash sound.)
splashWithMarkers = false;
break;
}
}
int splashHeight = SplashBmp->getHeight();
// Scan the first pixel column for a second (vertical) marker line.
for (i = 0; splashWithMarkers && (i < splashHeight); ++i)
if(SplashBmp->getRow(i)[0] & 0xFF000000)
{
if (y < 0)
{
y = i-1; // 1 pixel for marker line
splashWithMarkers = true;
} else
{
// More than one marker found: treat the bitmap as marker-less.
y = -1;
splashWithMarkers = false;
break;
}
}
// Compose the version string ("<prefix><miranda version>").
TCHAR verString[256] = {0};
TCHAR* mirandaVerString = mir_a2t(szVersion);
mir_sntprintf(verString, SIZEOF(verString), _T("%s%s"), szPrefix, mirandaVerString);
mir_free(mirandaVerString);
LOGFONT lf = {0};
lf.lfHeight = 14;
_tcscpy_s(lf.lfFaceName, _T("Verdana"));
SelectObject(SplashBmp->getDC(), CreateFontIndirect(&lf));
if (!splashWithMarkers)
{
// No markers: center the version string horizontally, 90% down the bitmap.
SIZE v_sz = {0,0};
GetTextExtentPoint32(SplashBmp->getDC(), verString, (int)_tcslen(verString), &v_sz);
x = SplashBmp->getWidth()/2-(v_sz.cx/2);
y = SplashBmp->getHeight()-(SplashBmp->getHeight()*(100-90)/100);
}
// Pick a text color that contrasts with the pixel at the draw position.
SetTextColor(SplashBmp->getDC(), (0xFFFFFFFFUL-SplashBmp->getRow(y)[x])&0x00FFFFFFUL);
//SplashBmp->DrawText(verString,SplashBmp->getWidth()/2-(v_sz.cx/2),SplashBmp->getHeight()-30);
SetBkMode(SplashBmp->getDC(), TRANSPARENT);
SplashBmp->DrawText(verString, x, y);
//free (ptr_verString);
}
// Show the splash as a per-pixel layered window.
SetWindowLongPtr(hwndSplash, GWL_EXSTYLE, GetWindowLongPtr(hwndSplash, GWL_EXSTYLE) | WS_EX_LAYERED);
UpdateLayeredWindow(hwndSplash, NULL, &ptDst, &sz, SplashBmp->getDC(), &ptSrc, 0xffffffff, &blend, LWA_ALPHA);
ShowWindow(hwndSplash, SW_SHOWNORMAL);
if (options.fadein)
{
// Fade in
int i;
for (i = 0; i < 255; i += options.fisteps)
{
blend.SourceConstantAlpha = i;
UpdateLayeredWindow(hwndSplash, NULL, &ptDst, &sz, SplashBmp->getDC(), &ptSrc, 0xffffffff, &blend, LWA_ALPHA);
Sleep(1);
}
}
blend.SourceConstantAlpha = 255;
UpdateLayeredWindow(hwndSplash, NULL, &ptDst, &sz, SplashBmp->getDC(), &ptSrc, 0xffffffff, &blend, LWA_ALPHA);
// 'arg' carries the requested display time in ms; 0 means "hide once the
// modules-loaded timer fires" — presumably set by the caller; TODO confirm.
if (DWORD(arg) > 0)
{
if (SetTimer(hwndSplash, 6, DWORD(arg), 0))
{
#ifdef _DEBUG
logMessage(_T("Timer TimeToShow"), _T("set"));
#endif
}
}
else
if (bmodulesloaded)
{
if (SetTimer(hwndSplash, 8, 2000, 0))
{
#ifdef _DEBUG
logMessage(_T("Timer Modules loaded"), _T("set"));
#endif
}
}
// The Message Pump
MSG msg;
while (GetMessage(&msg, NULL, 0, 0) == TRUE) //NULL means every window in the thread; == TRUE means a safe pump.
{
TranslateMessage(&msg);
DispatchMessage(&msg);
}
// If the splash sound was played, tear down the DirectShow graph used for it.
if (options.playsnd)
{
pControl->Release();
pGraph->Release();
CoUninitialize();
}
ExitThread(0);
return 1;
}
示例9: if
// Enumerates the capture formats supported by one (or every) DirectShow
// video input device and returns them as CameraConfig entries.
// dev_id < 0 lists every device; otherwise only the matching device index.
// Each failure path releases everything acquired so far before returning.
// NOTE: this listing is truncated inside the per-format loop — the site's
// marker "这里部分代码省略" means "part of the code is omitted here".
std::vector<CameraConfig> videoInputCamera::getCameraConfigs(int dev_id) {
std::vector<CameraConfig> cfg_list;
int count = getDeviceCount();
if (count==0) return cfg_list;
comInit();
HRESULT hr;
ICaptureGraphBuilder2 *lpCaptureGraphBuilder;
IGraphBuilder *lpGraphBuilder;
IBaseFilter *lpInputFilter;
IAMStreamConfig *lpStreamConfig;
char nDeviceName[255];
WCHAR wDeviceName[255];
for (int cam_id=0;cam_id<count;cam_id++) {
if ((dev_id>=0) && (dev_id!=cam_id)) continue;
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&lpCaptureGraphBuilder);
if (FAILED(hr)) // FAILED is a macro that tests the return value
{
printf("ERROR - Could not create the Filter Graph Manager\n");
comUnInit();
return cfg_list;
}
// Create the Filter Graph Manager.
hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&lpGraphBuilder);
if (FAILED(hr))
{
printf("ERROR - Could not add the graph builder!\n");
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
hr = lpCaptureGraphBuilder->SetFiltergraph(lpGraphBuilder);
if (FAILED(hr))
{
printf("ERROR - Could not set filtergraph\n");
lpGraphBuilder->Release();
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
memset(wDeviceName, 0, sizeof(WCHAR) * 255);
memset(nDeviceName, 0, sizeof(char) * 255);
// Bind the capture device with index cam_id and add it to the graph.
hr = getDevice(&lpInputFilter, cam_id, wDeviceName, nDeviceName);
if (SUCCEEDED(hr)){
hr = lpGraphBuilder->AddFilter(lpInputFilter, wDeviceName);
}else{
printf("ERROR - Could not find specified video device\n");
lpGraphBuilder->Release();
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
hr = lpCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, lpInputFilter, IID_IAMStreamConfig, (void **)&lpStreamConfig);
if(FAILED(hr)){
printf("ERROR: Couldn't config the stream!\n");
lpInputFilter->Release();
lpGraphBuilder->Release();
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
// Fill in the device-level fields; formats are appended per capability below.
CameraConfig cam_cfg;
CameraTool::initCameraConfig(&cam_cfg);
cam_cfg.driver = DRIVER_DEFAULT;
cam_cfg.device = cam_id;
sprintf(cam_cfg.name, "%s", nDeviceName);
int iCount = 0;
int iSize = 0;
hr = lpStreamConfig->GetNumberOfCapabilities(&iCount, &iSize);
std::vector<CameraConfig> fmt_list;
if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
{
GUID lastFormat = MEDIASUBTYPE_None;
// NOTE(review): iFormat += 2 skips every other capability entry —
// presumably intentional de-duplication, but verify against the device list.
for (int iFormat = 0; iFormat < iCount; iFormat+=2)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
hr = lpStreamConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
if (SUCCEEDED(hr)){
// Flush the per-subtype format batch (sorted) when the subtype changes.
if ( pmtConfig->subtype != lastFormat) {
if (fmt_list.size()>0) {
std::sort(fmt_list.begin(), fmt_list.end());
cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() );
fmt_list.clear();
//.........这里部分代码省略.........
示例10: _tmain
int _tmain(int argc, _TCHAR* argv[])
{
int y;
cin>>y;
//
IGraphBuilder* locGraphBuilder = NULL;
IMediaControl* locMediaControl = NULL;
IBaseFilter* locDemuxer = NULL;
ICustomSource* locCustomSourceSetter = NULL;
HRESULT locHR = S_FALSE;;
CoInitialize(NULL);
locHR = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&locGraphBuilder);
locHR = CoCreateInstance(CLSID_OggDemuxPacketSourceFilter, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&locDemuxer);
locGraphBuilder->AddFilter(locDemuxer, L"Custom Ogg Source");
locDemuxer->QueryInterface(IID_ICustomSource, (void**)&locCustomSourceSetter);
CustomSourceClass* locCustomFileSourceInterface = new CustomSourceClass;
locCustomFileSourceInterface->open("D:\\testfile.ogg");
locCustomSourceSetter->setCustomSourceAndLoad(locCustomFileSourceInterface);
//Do not release, it's not really a COM interface
//locCustomSourceSetter->Release();
IEnumPins* locPinEnum = NULL;
locDemuxer->EnumPins(&locPinEnum);
IPin* locPin = NULL;
ULONG locHowMany = 0;
while (locPinEnum->Next(1, &locPin, &locHowMany) == S_OK) {
locHR = locGraphBuilder->Render(locPin);
locPin->Release();
locPin = NULL;
}
//locHR = locGraphBuilder->RenderFile(L"g:\\a.ogg", NULL);
locHR = locGraphBuilder->QueryInterface(IID_IMediaControl, (void**)&locMediaControl);
locHR = locMediaControl->Run();
IMediaEvent* locMediaEvent = NULL;
locHR = locGraphBuilder->QueryInterface(IID_IMediaEvent, (void**)&locMediaEvent);
HANDLE hEvent;
long evCode, param1, param2;
BOOLEAN bDone = FALSE;
HRESULT hr = S_OK;
hr = locMediaEvent->GetEventHandle((OAEVENT*)&hEvent);
if (FAILED(hr))
{
/* Insert failure-handling code here. */
}
while(!bDone)
{
if (WAIT_OBJECT_0 == WaitForSingleObject(hEvent, 100))
{
while (hr = locMediaEvent->GetEvent(&evCode, ¶m1, ¶m2, 0), SUCCEEDED(hr))
{
//printf("Event code: %#04x\n Params: %d, %d\n", evCode, param1, param2);
cout<<"Event : "<<evCode<<" Params : "<<param1<<", "<<param2<<endl;
locMediaEvent->FreeEventParams(evCode, param1, param2);
bDone = (EC_COMPLETE == evCode);
}
}
}
cout<<"Finished..."<<endl;
int x;
cin>>x;
locMediaControl->Release();
locGraphBuilder->Release();
CoUninitialize();
return 0;
}
示例11: sizeof
// Enumerates every video input device, prints each friendly name and lets
// the user pick one with 'y'; the chosen device's filter is then put into a
// new filter graph for playback.
// NOTE: the playback/cleanup tail of this listing is omitted by the example
// site ("这里部分代码省略" = "part of the code is omitted here").
int main()
{
// for playing
IGraphBuilder *pGraphBuilder;
ICaptureGraphBuilder2 *pCaptureGraphBuilder2;
IMediaControl *pMediaControl;
IBaseFilter *pDeviceFilter = NULL;
// to select a video input device
ICreateDevEnum *pCreateDevEnum = NULL;
IEnumMoniker *pEnumMoniker = NULL;
IMoniker *pMoniker = NULL;
ULONG nFetched = 0;
// initialize COM
CoInitialize(NULL);
//
// selecting a device
//
// Create CreateDevEnum to list device
CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
// Create EnumMoniker to list VideoInputDevice
pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
&pEnumMoniker, 0);
if (pEnumMoniker == NULL) {
// this will be shown if there is no capture device
printf("no device\n");
return 0;
}
// reset EnumMoniker
pEnumMoniker->Reset();
// get each Moniker
while (pEnumMoniker->Next(1, &pMoniker, &nFetched) == S_OK)
{
IPropertyBag *pPropertyBag;
TCHAR devname[256];
// bind to IPropertyBag
pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
(void **)&pPropertyBag);
VARIANT var;
// get FriendlyName
var.vt = VT_BSTR;
pPropertyBag->Read(L"FriendlyName", &var, 0);
// NOTE(review): sizeof(devname) is a byte count; correct only when TCHAR
// is char (non-UNICODE build) — confirm the project's character set.
WideCharToMultiByte(CP_ACP, 0,
var.bstrVal, -1, devname, sizeof(devname), 0, 0);
VariantClear(&var);
printf("%s\r\n", devname);
printf(" select this device ? [y] or [n]\r\n");
int ch = getchar();
// you can start playing by 'y' + return key
// if you press the other key, it will not be played.
if (ch == 'y')
{
// Bind Monkier to Filter
pMoniker->BindToObject(0, 0, IID_IBaseFilter,
(void**)&pDeviceFilter );
}
else
{
getchar();
}
// release
pMoniker->Release();
pPropertyBag->Release();
if (pDeviceFilter != NULL)
{
// go out of loop if getchar() returns 'y'
break;
}
}
if (pDeviceFilter != NULL) {
//
// PLAY
//
// create FilterGraph
CoCreateInstance(CLSID_FilterGraph,
NULL,
CLSCTX_INPROC,
IID_IGraphBuilder,
(LPVOID *)&pGraphBuilder);
// create CaptureGraphBuilder2
CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
IID_ICaptureGraphBuilder2,
(LPVOID *)&pCaptureGraphBuilder2);
//.........这里部分代码省略.........
示例12: main
// A very simple program to capture a webcam & audio to a file using DirectShow
//
// Records "Logitech" audio + video capture devices to C:\MyWebcam.ASF via
// the WM ASF writer, with a live preview stream, until the user presses
// Enter. NOTE: the tail of this listing (stop/cleanup) is omitted by the
// example site ("这里部分代码省略" = "part of the code is omitted here").
int main(int argc, char* argv[])
{
ICaptureGraphBuilder2 *pCaptureGraph = NULL; // Capture graph builder object
IGraphBuilder *pGraph = NULL; // Graph builder object
IMediaControl *pControl = NULL; // Media control object
IFileSinkFilter *pSink = NULL; // File sink object
IBaseFilter *pAudioInputFilter = NULL; // Audio Capture filter
IBaseFilter *pVideoInputFilter = NULL; // Video Capture filter
IBaseFilter *pASFWriter = NULL; // WM ASF File config interface
// Initialize the COM library.
HRESULT hr = CoInitialize(NULL);
if (FAILED(hr))
{
// We'll send our error messages to the console.
printf("ERROR - Could not initialize COM library");
return hr;
}
// Create the filter graph manager and query for interfaces.
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
IID_ICaptureGraphBuilder2, (void **)&pCaptureGraph);
if (FAILED(hr)) // FAILED is a macro that tests the return value
{
printf("ERROR - Could not create the Filter Graph Manager.");
return hr;
}
// Use a method of the capture graph builder
// To create an output path for the stream
hr = pCaptureGraph->SetOutputFileName(&MEDIASUBTYPE_Asf,
L"C:\\MyWebcam.ASF", &pASFWriter, &pSink);
// Now configure the ASF Writer
// Present the property pages for this filter
hr = ShowFilterPropertyPages(pASFWriter);
// Now get the filter graph manager
// That's part of the capture graph builder
hr = pCaptureGraph->GetFiltergraph(&pGraph);
// Using QueryInterface on the graph builder,
// Get the Media Control object.
hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pControl);
if (FAILED(hr))
{
printf("ERROR - Could not create the Media Control object.");
pGraph->Release(); // Clean up after ourselves.
CoUninitialize(); // And uninitalize COM
return hr;
}
// Get an AudioCapture filter.
// But there are several to choose from
// So we need to enumerate them, and pick one.
// Then add the audio capture filter to the filter graph.
hr = GetAudioInputFilter(&pAudioInputFilter, L"Logitech");
if (SUCCEEDED(hr)) {
hr = pGraph->AddFilter(pAudioInputFilter, L"Webcam Audio Capture");
}
// Now create the video input filter from the webcam
hr = GetVideoInputFilter(&pVideoInputFilter, L"Logitech");
if (SUCCEEDED(hr)) {
hr = pGraph->AddFilter(pVideoInputFilter, L"Webcam Video Capture");
}
// Add a video renderer
//IBaseFilter *pVideoRenderer = NULL;
//hr = AddFilterByCLSID(pGraph, CLSID_VideoRenderer, L"Video Renderer", &pVideoRenderer);
// Use another method of the capture graph builder
// To provide a render path for video preview
IBaseFilter *pIntermediate = NULL;
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
pVideoInputFilter, NULL, NULL);
// Now add the video capture to the output file
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
pVideoInputFilter, NULL, pASFWriter);
// And do the same for the audio
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio,
pAudioInputFilter, NULL, pASFWriter);
if (SUCCEEDED(hr))
{
// Run the graph.
hr = pControl->Run();
if (SUCCEEDED(hr))
{
// Wait patiently for completion of the recording
wprintf(L"Started recording...press Enter to stop recording.\n");
// Wait for completion.
char ch;
ch = getchar(); // We wait for keyboard input
}
//.........这里部分代码省略.........
示例13: CheckMediaType
//.........这里部分代码省略.........
// (The start of TffdshowVideoInputPin::CheckMediaType is omitted by the
// example site — 'mt', 'res', 'buf', 'dwType', 'regErr' and
// 'doPostProcessing' are declared/initialized in the missing part.
// The visible logic: when ffdshow cannot/should not handle the input
// format, search the registry of DirectShow filters for another filter
// that accepts the same major/subtype and insert it into the graph so the
// stream can still be rendered.)
if (regErr != ERROR_SUCCESS || dwType != REG_BINARY) {
return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK;
}
if (buf[2] >= 0x0b) { // Third byte is the major version number
doPostProcessing = true;
}
}
if (doPostProcessing) {
DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: input format disabled or not supported. Trying to maintain in the graph..."));
IFilterMapper2 *pMapper = NULL;
IEnumMoniker *pEnum = NULL;
HRESULT hr = CoCreateInstance(CLSID_FilterMapper2,
NULL, CLSCTX_INPROC, IID_IFilterMapper2,
(void **) &pMapper);
if (FAILED(hr)) {
// Error handling omitted for clarity.
}
// Look for any filter that accepts this exact major type + subtype.
GUID arrayInTypes[2];
arrayInTypes[0] = mt->majortype;//MEDIATYPE_Video;
arrayInTypes[1] = mt->subtype;//MEDIASUBTYPE_dvsd;
hr = pMapper->EnumMatchingFilters(
&pEnum,
0, // Reserved.
TRUE, // Use exact match?
MERIT_DO_NOT_USE + 1, // Minimum merit.
TRUE, // At least one input pin?
1, // Number of major type/subtype pairs for input.
arrayInTypes, // Array of major type/subtype pairs for input.
NULL, // Input medium.
NULL, // Input pin category.
FALSE, // Must be a renderer?
TRUE, // At least one output pin?
0, // Number of major type/subtype pairs for output.
NULL, // Array of major type/subtype pairs for output.
NULL, // Output medium.
NULL); // Output pin category.
// Enumerate the monikers.
IMoniker *pMoniker;
ULONG cFetched;
while (pEnum->Next(1, &pMoniker, &cFetched) == S_OK) {
IPropertyBag *pPropBag = NULL;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
(void **)&pPropBag);
if (SUCCEEDED(hr)) {
// To retrieve the friendly name of the filter, do the following:
VARIANT varName;
VariantInit(&varName);
hr = pPropBag->Read(L"FriendlyName", &varName, 0);
if (SUCCEEDED(hr)) {
// Skip ffdshow itself (name starts with FFDSHOW_NAME_L) to avoid
// inserting another instance of this very filter.
if (varName.pbstrVal == NULL || _strnicmp(FFDSHOW_NAME_L, varName.bstrVal, 22) != 0) {
// Display the name in your UI somehow.
DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: compatible filter found (%s)"), varName.pbstrVal);
hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pCompatibleFilter);
}
}
// Now add the filter to the graph. Remember to release pFilter later.
// NOTE(review): 'pGraph' obtained from getGraph() is never released here,
// and pGraphBuilder->Release() runs even on the QI-failure branch where
// it may be NULL — verify against the full (omitted) function body.
IFilterGraph *pGraph = NULL;
fv->deci->getGraph(&pGraph);
IGraphBuilder *pGraphBuilder = NULL;
hr = pGraph->QueryInterface(IID_IGraphBuilder, (void **)&pGraphBuilder);
if (hr == S_OK) {
pGraphBuilder->AddFilter(pCompatibleFilter, varName.bstrVal);
} else {
pCompatibleFilter->Release();
pCompatibleFilter = NULL;
}
// Clean up.
VariantClear(&varName);
pGraphBuilder->Release();
pPropBag->Release();
}
pMoniker->Release();
if (pCompatibleFilter != NULL) {
break;
}
}
// Clean up.
pMapper->Release();
pEnum->Release();
}
}
// Accept the type if a substitute filter was inserted, or if ffdshow
// recognized the codec (res != AV_CODEC_ID_NONE).
if (pCompatibleFilter != NULL) {
return S_OK;
}
return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK;
}
示例14: Open
// Open multimedia stream.
// Opens pFileName as an AM multimedia stream: primary video renders to the
// given DirectDraw object, audio gets the default renderer. Afterwards the
// underlying filter graph is walked to find the "WMAudio Decoder DMO" and
// its volume is set from the MusicGain registry value. Returns FALSE on any
// failure. NOTE: the tail of this listing is omitted by the example site
// ("这里部分代码省略" = "part of the code is omitted here").
BOOL CDShow::Open(ZString& pFileName, IDirectDraw7 *pDD)
{
// Multimedia stream pointer
IAMMultiMediaStream *pAMStream;
IGraphBuilder *pGb; // 7/10 #110
IEnumFilters *pEfs;
IBasicAudio *pBa;
//7/29/09 we can now do stuff while the video plays
CoInitializeEx(NULL,COINIT_MULTITHREADED);
// Create Multimedia stream object
if (FAILED(CoCreateInstance(CLSID_AMMultiMediaStream, NULL, CLSCTX_INPROC_SERVER,
IID_IAMMultiMediaStream, (void **)&pAMStream)))
{
// Return FALSE to let caller know we failed.
return FALSE;
}
// Initialize Multimedia stream object
if (FAILED(pAMStream->Initialize(STREAMTYPE_READ, 0, NULL)))
{
// There are alot of possiblities to fail.....
return FALSE;
}
// Add primary video stream.
if (FAILED((pAMStream->AddMediaStream(pDD, &MSPID_PrimaryVideo, 0, NULL))))
{
return FALSE;
}
// Add default sound render to primary video stream,
// so sound will be played back automatically.
if (FAILED(pAMStream->AddMediaStream(NULL, &MSPID_PrimaryAudio, AMMSF_ADDDEFAULTRENDERER, NULL)))
{
// Return FALSE to let caller know we failed.
return FALSE;
}
// Convert filename to UNICODE.
// Notice the safe way to get the actual size of a string.
WCHAR wPath[MAX_PATH];
MultiByteToWideChar(CP_ACP, 0, pFileName, -1, wPath, sizeof(wPath)/sizeof(wPath[0]));
// Build the filter graph for our multimedia stream.
if (FAILED((pAMStream->OpenFile(wPath, 0))))
{
// Return FALSE to let caller know we failed.
return FALSE;
}
//7/10 #110
// Walk the graph's filters looking for the WM audio decoder so the music
// volume preference can be applied to it.
FILTER_INFO FilterInfo;
pAMStream->GetFilterGraph(&pGb);
pGb->EnumFilters(&pEfs);
IBaseFilter *pFilter;
unsigned long cFetched;
while(pEfs->Next(1, &pFilter, &cFetched) == S_OK) {
// NOTE(review): this inner FILTER_INFO shadows the outer declaration above.
FILTER_INFO FilterInfo;
pFilter->QueryFilterInfo(&FilterInfo);
char szName[MAX_FILTER_NAME];
long cch = WideCharToMultiByte(CP_ACP,0,FilterInfo.achName,MAX_FILTER_NAME,szName,MAX_FILTER_NAME,0,0);
if (cch > 0) {
if (!strcmp("WMAudio Decoder DMO",szName)) {
// set the volume to music level
FilterInfo.pGraph->QueryInterface(IID_IBasicAudio,(void**)&pBa);
HKEY hKey;
DWORD dwResult = 0;
if (ERROR_SUCCESS == ::RegOpenKeyEx(HKEY_LOCAL_MACHINE, ALLEGIANCE_REGISTRY_KEY_ROOT,0, KEY_READ, &hKey)) {
DWORD dwSize = sizeof(dwResult);
DWORD dwType = REG_DWORD;
::RegQueryValueEx(hKey, "MusicGain", NULL, &dwType, (BYTE*)&dwResult, &dwSize);
::RegCloseKey(hKey);
if (dwType != REG_DWORD)
dwResult = 0;
}
// MusicGain is stored as positive dB attenuation; IBasicAudio expects
// hundredths of dB, negative = quieter, -10000 = silence.
long vol = (dwResult * -1) * 100;
if (vol < -5000) {
vol = -10000;
}
pBa->put_Volume(vol);
pBa->Release();
}
if (FilterInfo.pGraph != NULL)
FilterInfo.pGraph->Release();
pFilter->Release();
}
}
pEfs->Release();
pGb->Release();
// Assign member to temperary stream pointer.
m_pMMStream = pAMStream;
// Add a reference to the file
//.........这里部分代码省略.........
示例15: _tmain
// Simple DirectShow player for "cuc_ieschool.mov". When OUTPUT_INFO is set
// it also prints resolution, frame rate and duration, retitles the video
// window, and polls/prints playback progress once a second until
// EC_COMPLETE. NOTE: the tail of this listing (final release/return) is
// omitted by the example site ("这里部分代码省略" = "code omitted here").
int _tmain(int argc, _TCHAR* argv[])
{
IGraphBuilder *pGraph = NULL;
IMediaControl *pControl = NULL;
IMediaEvent *pEvent = NULL;
//Get some param--------------
HRESULT hr1;
IBasicVideo *pVideo=NULL;
IBasicAudio *pAudio=NULL;
IVideoWindow *pWindow=NULL;
IMediaSeeking *pSeeking=NULL;
// Init COM
HRESULT hr = CoInitialize(NULL);
if (FAILED(hr)){
printf("Error - Can't init COM.");
return -1;
}
// Create FilterGraph
hr=CoCreateInstance(CLSID_FilterGraph, NULL,CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void **)&pGraph);
if (FAILED(hr)){
printf("Error - Can't create Filter Graph.");
return -1;
}
// Query Interface
hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pControl);
hr = pGraph->QueryInterface(IID_IMediaEvent, (void **)&pEvent);
// RenderFile
hr = pGraph->RenderFile(L"cuc_ieschool.mov", NULL);
if (FAILED(hr)){
printf("Error - Can't Render File.");
return -1;
}
#if OUTPUT_INFO
//Get some information----------
long video_w=0,video_h=0,video_bitrate=0,audio_volume=0;
long long duration_1=0,position_1=0;
REFTIME avgtimeperframe=0;
float framerate=0,duration_sec=0,progress=0,position_sec=0;
//Video
hr1=pGraph->QueryInterface(IID_IBasicVideo, (void **)&pVideo);
pVideo->get_VideoWidth(&video_w);
pVideo->get_VideoHeight(&video_h);
pVideo->get_AvgTimePerFrame(&avgtimeperframe);
// REFTIME is seconds per frame, so fps is its reciprocal.
framerate=1/avgtimeperframe;
//pVideo->get_BitRate(&video_bitrate);
//Audio
hr1=pGraph->QueryInterface(IID_IBasicAudio, (void **)&pAudio);
//Mute
//pAudio->put_Volume(-10000);
printf("Some Information:\n");
printf("Video Resolution:\t%dx%d\n",video_w,video_h);
printf("Video Framerate:\t%.3f\n",framerate);
//Window
hr1=pGraph->QueryInterface(IID_IVideoWindow, (void **)&pWindow);
pWindow->put_Caption(L"Simplest DirectShow Player");
//pWindow->put_Width(480);
//pWindow->put_Height(272);
//Seek
hr1=pGraph->QueryInterface(IID_IMediaSeeking, (void **)&pSeeking);
pSeeking->GetDuration(&duration_1);
//time unit:100ns=0.0000001s
duration_sec=(float)duration_1/10000000.0;
printf("Duration:\t%.2f s\n",duration_sec);
//pSeeking->SetPositions();
//PlayBack Rate
//pSeeking->SetRate(2.0);
//Show Filter in FilterGpagh
show_filters_in_filtergraph(pGraph);
//----------------------
#endif
printf("Progress Info\n");
printf("Position\tProgress\n");
if (SUCCEEDED(hr)){
// Run
hr = pControl->Run();
if (SUCCEEDED(hr)){
long evCode=0;
//pEvent->WaitForCompletion(INFINITE, &evCode);
// Poll once a second: WaitForCompletion(1000, ...) leaves evCode
// unchanged until the graph posts EC_COMPLETE.
while(evCode!=EC_COMPLETE){
//Info
#if OUTPUT_INFO
pSeeking->GetCurrentPosition(&position_1);
position_sec=(float)position_1/10000000.0;
progress=position_sec*100/duration_sec;
printf("%7.2fs\t%5.2f%%\n",position_sec,progress);
#endif
//1000ms
pEvent->WaitForCompletion(1000, &evCode);
}
}
}
// Release resource
pControl->Release();
pEvent->Release();
pGraph->Release();
//.........这里部分代码省略.........