本文整理汇总了C++中IGraphBuilder::AddFilter方法的典型用法代码示例。如果您正苦于以下问题:C++ IGraphBuilder::AddFilter方法的具体用法?C++ IGraphBuilder::AddFilter怎么用?C++ IGraphBuilder::AddFilter使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类IGraphBuilder
的用法示例。
在下文中一共展示了IGraphBuilder::AddFilter方法的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: texture
// Build a DirectShow filter graph that renders a video file into an OpenGL
// texture via the custom sample-grabber filter (textureGrabber).
//
// path: filesystem path of the clip to open.
// On success: `rendered` is set, `length` holds the clip duration in seconds,
// and the graph is rewound to position 0.
//
// BUGFIX: the original never checked whether CoCreateInstance succeeded, so a
// failed graph creation dereferenced an uninitialized pointer. Pointers are
// now initialized and the graph creation result is checked.
video::video(std::wstring path): texture(1,1,false,GL_LINEAR,GL_REPEAT), rendered(false), isPlaying(false) {
    printf("Opening video \"%ls\"\n", path.c_str());
    IGraphBuilder * graph = 0;
    IBaseFilter * base = 0;
    __int64 clipLength = 0;
    CoInitialize(0);
    HRESULT hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC, IID_IGraphBuilder, (void**)&graph);
    if (FAILED(hr) || !graph) {
        // Leave the object in a safe, non-rendered state.
        mediaControl = 0;
        mediaSeeking = 0;
        grabber = 0;
        printf("Couldn't find a working video graph\n");
        return;
    }
    graph->QueryInterface(IID_IMediaControl, (void**)&mediaControl);
    graph->QueryInterface(IID_IMediaSeeking, (void**)&mediaSeeking);
    hr = S_OK;
    grabber = new textureGrabber(0, &hr);
    grabber->AddRef();
    grabber->QueryInterface(IID_IBaseFilter, (void**)&base);
    // Insert our grabber so RenderFile routes decoded frames through it.
    graph->AddFilter(base, L"peisikVideoSystem OpenGL texture renderer");
    hr = graph->RenderFile(path.c_str(), 0);
    grabber->setTexture(&texture);
    graph->Release(); // mediaControl/mediaSeeking keep their own references
    if (base) base->Release();
    if (SUCCEEDED(hr) && grabber->width)
    {
        printf("Video succesfully rendered\n");
        // Media time is expressed in 100 ns units; convert to seconds.
        mediaSeeking->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME);
        mediaSeeking->GetDuration(&clipLength);
        length = (long double)(clipLength)/(long double)10000000.0;
        __int64 position = 0;
        // Rewind to the start so playback begins at frame 0.
        mediaSeeking->SetPositions(&position, AM_SEEKING_AbsolutePositioning, &position, AM_SEEKING_NoPositioning);
        rendered = true;
    }
    else
        printf("Couldn't find a working video graph\n");
}
示例2: eHandler
// Camera constructor: builds a DirectShow capture graph around the first
// webcam found, lets the graph builder auto-complete the filter chain, and
// then hooks the renderer's IMemInputPin so every decoded frame is handed to
// our own Receive handler.
Camera::Camera(bool Show,bool Start) : eHandler(this),_realData(false),_UpdateWindow(Show),_LastData(0),_CurData(0) {
DWORD no;
IGraphBuilder *filterGraph = 0;
ICreateDevEnum *deviceEnum = 0;
IEnumMoniker *camList = 0;
IMoniker *moniker = 0;
IBaseFilter *camFilter = 0;
IEnumPins *pinEnum = 0;
IPin *curPin = 0;
IEnumFilters *filterEnum = 0;
IBaseFilter *renderer = 0;
IMemInputPin *memInput = 0;
ctrl = 0;
curCamera = this;
_isOn = Start;
// Create the empty graph and grab its media-control interface.
CoCreateInstance( CLSID_FilterGraph, 0, CLSCTX_INPROC,IID_IGraphBuilder, (void **)&filterGraph );
filterGraph->QueryInterface( IID_IMediaControl, (void **)&ctrl );
// Enumerate video input devices and take the first capture device (webcam).
CoCreateInstance (CLSID_SystemDeviceEnum, 0, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &deviceEnum);
deviceEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &camList, 0);
camList->Next (1,&moniker,0);
moniker->BindToObject(0,0,IID_IBaseFilter, (void**)&camFilter);
// Insert the webcam as the graph's source filter.
filterGraph->AddFilter(camFilter, L"Capture Source");
// Render the webcam's output pin; the graph builder auto-inserts whatever
// is needed (e.g. MJPG decompression on some webcams) plus a renderer.
camFilter->EnumPins(&pinEnum);
pinEnum->Next(1,&curPin, 0);
filterGraph->Render(curPin);
// Walk the newly populated graph to locate the renderer, because the data
// we want is pumped into the renderer's input pin.
filterGraph->EnumFilters(&filterEnum);
filterEnum->Next(1,&renderer,0);
renderer->EnumPins(&pinEnum);
pinEnum->Next(1,&curPin, 0);
// Frames arrive via IMemInputPin::Receive on that pin, so patch vtable
// slot 6 (Receive) to divert them into our own grabber proc.
curPin->QueryInterface(IID_IMemInputPin,(void**)&memInput);
DsHook(memInput,6,Receive);
if (Start) this->Start();
}
示例3: _tmain
int _tmain(int argc, _TCHAR* argv[])
{
int y;
cin>>y;
//
IGraphBuilder* locGraphBuilder = NULL;
IMediaControl* locMediaControl = NULL;
IBaseFilter* locDemuxer = NULL;
ICustomSource* locCustomSourceSetter = NULL;
HRESULT locHR = S_FALSE;;
CoInitialize(NULL);
locHR = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&locGraphBuilder);
locHR = CoCreateInstance(CLSID_OggDemuxPacketSourceFilter, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&locDemuxer);
locGraphBuilder->AddFilter(locDemuxer, L"Custom Ogg Source");
locDemuxer->QueryInterface(IID_ICustomSource, (void**)&locCustomSourceSetter);
CustomSourceClass* locCustomFileSourceInterface = new CustomSourceClass;
locCustomFileSourceInterface->open("D:\\testfile.ogg");
locCustomSourceSetter->setCustomSourceAndLoad(locCustomFileSourceInterface);
//Do not release, it's not really a COM interface
//locCustomSourceSetter->Release();
IEnumPins* locPinEnum = NULL;
locDemuxer->EnumPins(&locPinEnum);
IPin* locPin = NULL;
ULONG locHowMany = 0;
while (locPinEnum->Next(1, &locPin, &locHowMany) == S_OK) {
locHR = locGraphBuilder->Render(locPin);
locPin->Release();
locPin = NULL;
}
//locHR = locGraphBuilder->RenderFile(L"g:\\a.ogg", NULL);
locHR = locGraphBuilder->QueryInterface(IID_IMediaControl, (void**)&locMediaControl);
locHR = locMediaControl->Run();
IMediaEvent* locMediaEvent = NULL;
locHR = locGraphBuilder->QueryInterface(IID_IMediaEvent, (void**)&locMediaEvent);
HANDLE hEvent;
long evCode, param1, param2;
BOOLEAN bDone = FALSE;
HRESULT hr = S_OK;
hr = locMediaEvent->GetEventHandle((OAEVENT*)&hEvent);
if (FAILED(hr))
{
/* Insert failure-handling code here. */
}
while(!bDone)
{
if (WAIT_OBJECT_0 == WaitForSingleObject(hEvent, 100))
{
while (hr = locMediaEvent->GetEvent(&evCode, ¶m1, ¶m2, 0), SUCCEEDED(hr))
{
//printf("Event code: %#04x\n Params: %d, %d\n", evCode, param1, param2);
cout<<"Event : "<<evCode<<" Params : "<<param1<<", "<<param2<<endl;
locMediaEvent->FreeEventParams(evCode, param1, param2);
bDone = (EC_COMPLETE == evCode);
}
}
}
cout<<"Finished..."<<endl;
int x;
cin>>x;
locMediaControl->Release();
locGraphBuilder->Release();
CoUninitialize();
return 0;
}
示例4: main
// Recompress an existing AVI file: source AVI -> video compressor -> AVI mux
// -> new file (C:\STDIUE1.avi), using the capture graph builder to wire the
// streams. Sets the video bitrate to 100 kB/s and a key frame every 4 frames.
// NOTE(review): no HRESULT from the graph-building calls is checked before
// the interfaces are used; a missing codec or file would crash here.
int main(int argc, char* argv[])
{
ICaptureGraphBuilder2 *pCaptureGraphBuilder = NULL;
IGraphBuilder *pGraphBuilder = NULL;
IBaseFilter *pSource = NULL;
IBaseFilter *pMux = NULL;
IBaseFilter *pVideoCompressor = NULL;
IBaseFilter *pAudioCompressor = NULL;
IAMStreamConfig *pAMStreamConfig = NULL;
IAMVideoCompression *pAMVideoCompression = NULL;
IMediaControl *pControl = NULL;
IMediaSeeking *pSeek = NULL;
IMediaEvent *pEvent = NULL;
HRESULT hr;
DWORD pdwRegister=0;
CoInitialize(NULL);
// Create the capture graph builder.
CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder);
// Make the rendering section of the graph (AVI mux + file writer).
pCaptureGraphBuilder->SetOutputFileName(
&MEDIASUBTYPE_Avi, // File type.
L"C:\\STDIUE1.avi", // File name.
&pMux, // pointer to the multiplexer.
NULL); // pointer to the file writer.
// Load the source file.
pCaptureGraphBuilder->GetFiltergraph(&pGraphBuilder);
pGraphBuilder->AddSourceFilter(L"C:\\Program Files\\Microsoft Money\\Media\\STDIUE1.avi", L"Source Filter", &pSource);
// Add the compressor filter (AVICo = the VFW compression wrapper).
CoCreateInstance(CLSID_AVICo, NULL, CLSCTX_INPROC,
IID_IBaseFilter, (void **)&pVideoCompressor);
pGraphBuilder->AddFilter(pVideoCompressor, L"Video Compressor");
// Render the video stream, through the compressor.
pCaptureGraphBuilder->RenderStream(
NULL, // Output pin category
NULL, // Media type
pSource, // Source filter
pVideoCompressor, // Compressor filter
pMux); // Sink filter (the AVI Mux)
/* CoCreateInstance(CLSID_GSM, NULL, CLSCTX_INPROC,
IID_IBaseFilter, (void **)&pAudioCompressor);
pGraphBuilder->AddFilter(pAudioCompressor, L"Audio Compressor");*/
// Render the audio stream.
// NOTE(review): pAudioCompressor is still NULL here because its creation
// above is commented out, so the audio stream is rendered uncompressed.
pCaptureGraphBuilder->RenderStream(
NULL,
NULL,
pSource,
pAudioCompressor,
pMux);
// Compress at 100k/second data rate.
AM_MEDIA_TYPE *pmt;
pCaptureGraphBuilder->FindInterface(NULL, NULL, pVideoCompressor, IID_IAMStreamConfig, (void **)&pAMStreamConfig);
pAMStreamConfig->GetFormat(&pmt);
if (pmt->formattype == FORMAT_VideoInfo)
{
((VIDEOINFOHEADER *)(pmt->pbFormat))->dwBitRate = 100000;
pAMStreamConfig->SetFormat(pmt);
}
// Request key frames every four frames.
pAMStreamConfig->QueryInterface(IID_IAMVideoCompression, (void **)&pAMVideoCompression);
pAMVideoCompression->put_KeyFrameRate(4);
pAMVideoCompression->Release();
pAMStreamConfig->Release();
// Run the graph.
pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pControl);
pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&pEvent);
hr = pMux->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
pControl->Run();
printf("Recompressing... \n");
long evCode;
if (SUCCEEDED(hr))
{
REFERENCE_TIME rtTotal, rtNow = 0;
pSeek->GetDuration(&rtTotal);
// Poll once per second until the graph completes (E_ABORT = timed out).
while ((pEvent->WaitForCompletion(1000, &evCode)) == E_ABORT)
{
//......... (rest of the code omitted in this excerpt) .........
示例5: main
// Capture audio from the first available input device into a filter graph
// (rendered through the default renderer, since no file sink is configured)
// until the user dismisses the "Stop Recording" message box.
int main(int argc, char* argv[])
{
    IGraphBuilder *pGraph = NULL;
    ICaptureGraphBuilder2 *pBuilder = NULL;
    IBaseFilter *pSrc = NULL;
    IBaseFilter *ppf = NULL;       // would hold the mux if SetOutputFileName were enabled
    IFileSinkFilter *pSink = NULL; // likewise the file writer
    IMediaControl *pMC = NULL;
    HRESULT hr;
    CoInitialize (NULL);
    // Create the filter graph.
    CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC,
    IID_IGraphBuilder, (void **)&pGraph);
    // Create the capture graph builder and attach the graph to it.
    CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
    IID_ICaptureGraphBuilder2, (void **)&pBuilder);
    pBuilder->SetFiltergraph(pGraph);
    pSrc=GetAudioDevice ();
    // Add the first audio capture filter in the list.
    // BUGFIX: the filter was previously mislabeled L"Video Capture" even
    // though it is an audio capture device.
    pGraph->AddFilter(pSrc, L"Audio Capture");
    /* pBuilder->SetOutputFileName(
    &MEDIASUBTYPE_Avi,
    L"C:\\Example.avi",
    &ppf,
    &pSink);*/
    // pBuilder->AllocCapFile (L"C:\\temp.avi", _MAX_PATH);
    // ppf is NULL here, so RenderStream connects the capture pin to the
    // default renderer rather than a multiplexer/file writer.
    pBuilder->RenderStream(
    &PIN_CATEGORY_CAPTURE, // Pin category
    &MEDIATYPE_Audio, // Media type
    pSrc, // Capture filter
    NULL, // Compression filter (optional)
    ppf // Multiplexer or renderer filter
    );
    // These times are only consumed by the commented-out ControlStream call.
    REFERENCE_TIME rtStart = 20000000,
    rtStop = 50000000;
    /* pBuilder->ControlStream(
    &PIN_CATEGORY_CAPTURE,
    &MEDIATYPE_Audio,
    pSrc, // Source filter
    &rtStart, // Start time
    &rtStop, // Stop time
    0, // Start cookie
    0 // Stop cookie
    );*/
    pGraph->QueryInterface (IID_IMediaControl, (void **) &pMC);
    pMC->Run ();
    MessageBox (NULL, "Stop Recording", NULL, NULL);
    pMC->Stop ();
    /* CProgress *pProg = new CProgress(TEXT(""), NULL, &hr);
    IAMCopyCaptureFileProgress *pIProg = NULL;
    hr = pProg->QueryInterface(IID_IAMCopyCaptureFileProgress,
    (void **)&pIProg);
    //pBuilder->CopyCaptureFile (L"C:\\temp.avi", L"C:\\final.avi", TRUE, pIProg);*/
    // BUGFIX: release every acquired interface before CoUninitialize;
    // previously all of them leaked.
    pMC->Release ();
    if (pSrc) pSrc->Release ();
    pBuilder->Release ();
    pGraph->Release ();
    CoUninitialize ();
    return 0;
}
示例6: sizeof
//......... (preceding part of this function omitted in the excerpt) .........
// This fragment configures a capture device's exposure/focus and video proc
// amp settings, inserts a custom transform filter ("Dif trans") plus the
// device filter into the graph, renders the capture stream, and runs it
// until a message box is dismissed. pDeviceFilter, pGraphBuilder,
// pCaptureGraphBuilder2, pMediaControl and hr are declared in the omitted
// part above.
// Query the capture filter for the IAMCameraControl interface.
IAMCameraControl *pCameraControl = 0;
hr = pDeviceFilter->QueryInterface(IID_IAMCameraControl, (void**)&pCameraControl);
if (FAILED(hr))
{
// The device does not support IAMCameraControl
}
else
{
long Min, Max, Step, Default, Flags, Val;
// Get the range and default values
hr = pCameraControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
hr = pCameraControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
// NOTE(review): only the Focus GetRange result is checked here; the
// Exposure result on the previous line is overwritten.
if (SUCCEEDED(hr))
{
hr = pCameraControl->Set(CameraControl_Exposure, -11, CameraControl_Flags_Manual ); // Min = -11, Max = 1, Step = 1
hr = pCameraControl->Set(CameraControl_Focus, 12, CameraControl_Flags_Manual );
}
}
// Query the capture filter for the IAMVideoProcAmp interface.
IAMVideoProcAmp *pProcAmp = 0;
hr = pDeviceFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
if (FAILED(hr))
{
// The device does not support IAMVideoProcAmp
}
else
{
long Min, Max, Step, Default, Flags, Val;
// Get the range and default values
hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
hr = pProcAmp->GetRange(VideoProcAmp_BacklightCompensation, &Min, &Max, &Step, &Default, &Flags);
hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
hr = pProcAmp->GetRange(VideoProcAmp_Sharpness, &Min, &Max, &Step, &Default, &Flags);
hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
// Force fixed manual values for all proc-amp properties.
if (SUCCEEDED(hr))
{
hr = pProcAmp->Set(VideoProcAmp_Brightness, 142, VideoProcAmp_Flags_Manual);
hr = pProcAmp->Set(VideoProcAmp_BacklightCompensation, 0, VideoProcAmp_Flags_Manual);
hr = pProcAmp->Set(VideoProcAmp_Contrast, 4, VideoProcAmp_Flags_Manual);
hr = pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);
hr = pProcAmp->Set(VideoProcAmp_Sharpness, 0, VideoProcAmp_Flags_Manual);
hr = pProcAmp->Set(VideoProcAmp_WhiteBalance, 2800, VideoProcAmp_Flags_Manual);
}
}
//============================================================
//=========== END MY CODE ======================================
//=============================================================
hr = S_OK;
// Wrap the custom CTransformer in an IBaseFilter pointer for the graph.
CTransformer* trans = new CTransformer( "Dif trans", 0, CLSID_DIFFilter, &hr );
IBaseFilter * ttt = 0;
trans->QueryInterface(IID_IBaseFilter, (LPVOID *)&ttt);
// set FilterGraph
hr = pCaptureGraphBuilder2->SetFiltergraph(pGraphBuilder);
// get MediaControl interface
hr = pGraphBuilder->QueryInterface(IID_IMediaControl,
(LPVOID *)&pMediaControl);
// add device filter to FilterGraph
hr = pGraphBuilder->AddFilter(ttt, L"Dif trans");
hr = pGraphBuilder->AddFilter(pDeviceFilter, L"Device Filter");
// create Graph
hr = pCaptureGraphBuilder2->RenderStream(&PIN_CATEGORY_CAPTURE,
NULL, pDeviceFilter, NULL, NULL);
// start playing
hr = pMediaControl->Run();
// to block execution
// without this messagebox, the graph will be stopped immediately
MessageBox(NULL,
"Block Execution",
"Block",
MB_OK);
// release
// NOTE(review): pCameraControl, pProcAmp and ttt are never released here.
pMediaControl->Release();
pCaptureGraphBuilder2->Release();
pGraphBuilder->Release();
}
// release
pEnumMoniker->Release();
pCreateDevEnum->Release();
// finalize COM
CoUninitialize();
return 0;
}
示例7: main
// A very simple program to capture a webcam & audio to a file using DirectShow
//
// Output goes to C:\MyWebcam.ASF via the WM ASF writer; both the audio and
// the video capture filters are looked up by the device name "Logitech".
// Recording runs until the user presses Enter.
int main(int argc, char* argv[])
{
ICaptureGraphBuilder2 *pCaptureGraph = NULL; // Capture graph builder object
IGraphBuilder *pGraph = NULL; // Graph builder object
IMediaControl *pControl = NULL; // Media control object
IFileSinkFilter *pSink = NULL; // File sink object
IBaseFilter *pAudioInputFilter = NULL; // Audio Capture filter
IBaseFilter *pVideoInputFilter = NULL; // Video Capture filter
IBaseFilter *pASFWriter = NULL; // WM ASF File config interface
// Initialize the COM library.
HRESULT hr = CoInitialize(NULL);
if (FAILED(hr))
{
// We'll send our error messages to the console.
printf("ERROR - Could not initialize COM library");
return hr;
}
// Create the filter graph manager and query for interfaces.
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
IID_ICaptureGraphBuilder2, (void **)&pCaptureGraph);
if (FAILED(hr)) // FAILED is a macro that tests the return value
{
printf("ERROR - Could not create the Filter Graph Manager.");
return hr;
}
// Use a method of the capture graph builder
// To create an output path for the stream
hr = pCaptureGraph->SetOutputFileName(&MEDIASUBTYPE_Asf,
L"C:\\MyWebcam.ASF", &pASFWriter, &pSink);
// Now configure the ASF Writer
// Present the property pages for this filter
hr = ShowFilterPropertyPages(pASFWriter);
// Now get the filter graph manager
// That's part of the capture graph builder
hr = pCaptureGraph->GetFiltergraph(&pGraph);
// Using QueryInterface on the graph builder,
// Get the Media Control object.
hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pControl);
if (FAILED(hr))
{
printf("ERROR - Could not create the Media Control object.");
pGraph->Release(); // Clean up after ourselves.
CoUninitialize(); // And uninitalize COM
return hr;
}
// Get an AudioCapture filter.
// But there are several to choose from
// So we need to enumerate them, and pick one.
// Then add the audio capture filter to the filter graph.
hr = GetAudioInputFilter(&pAudioInputFilter, L"Logitech");
if (SUCCEEDED(hr)) {
hr = pGraph->AddFilter(pAudioInputFilter, L"Webcam Audio Capture");
}
// Now create the video input filter from the webcam
hr = GetVideoInputFilter(&pVideoInputFilter, L"Logitech");
if (SUCCEEDED(hr)) {
hr = pGraph->AddFilter(pVideoInputFilter, L"Webcam Video Capture");
}
// Add a video renderer
//IBaseFilter *pVideoRenderer = NULL;
//hr = AddFilterByCLSID(pGraph, CLSID_VideoRenderer, L"Video Renderer", &pVideoRenderer);
// Use another method of the capture graph builder
// To provide a render path for video preview
// NOTE(review): pIntermediate is declared but never used by the calls below.
IBaseFilter *pIntermediate = NULL;
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
pVideoInputFilter, NULL, NULL);
// Now add the video capture to the output file
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
pVideoInputFilter, NULL, pASFWriter);
// And do the same for the audio
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio,
pAudioInputFilter, NULL, pASFWriter);
if (SUCCEEDED(hr))
{
// Run the graph.
hr = pControl->Run();
if (SUCCEEDED(hr))
{
// Wait patiently for completion of the recording
wprintf(L"Started recording...press Enter to stop recording.\n");
// Wait for completion.
char ch;
ch = getchar(); // We wait for keyboard input
}
//......... (rest of the code omitted in this excerpt) .........
示例8: CheckMediaType
//......... (preceding part of this function omitted in the excerpt) .........
// Fragment of TffdshowVideoInputPin::CheckMediaType: when ffdshow cannot (or
// is configured not to) decode the input format, it searches the registry of
// DirectShow filters for another decoder accepting the same major/subtype and
// inserts the first non-ffdshow match into the graph, so playback can
// continue without ffdshow. `res`, `mt`, `buf`, `regErr`, `dwType`,
// `doPostProcessing`, `fv` and `pCompatibleFilter` come from the omitted part.
if (regErr != ERROR_SUCCESS || dwType != REG_BINARY) {
return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK;
}
if (buf[2] >= 0x0b) { // Third byte is the major version number
doPostProcessing = true;
}
}
if (doPostProcessing) {
DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: input format disabled or not supported. Trying to maintain in the graph..."));
IFilterMapper2 *pMapper = NULL;
IEnumMoniker *pEnum = NULL;
HRESULT hr = CoCreateInstance(CLSID_FilterMapper2,
NULL, CLSCTX_INPROC, IID_IFilterMapper2,
(void **) &pMapper);
if (FAILED(hr)) {
// Error handling omitted for clarity.
}
// Match any registered filter that accepts exactly this input type.
GUID arrayInTypes[2];
arrayInTypes[0] = mt->majortype;//MEDIATYPE_Video;
arrayInTypes[1] = mt->subtype;//MEDIASUBTYPE_dvsd;
hr = pMapper->EnumMatchingFilters(
&pEnum,
0, // Reserved.
TRUE, // Use exact match?
MERIT_DO_NOT_USE + 1, // Minimum merit.
TRUE, // At least one input pin?
1, // Number of major type/subtype pairs for input.
arrayInTypes, // Array of major type/subtype pairs for input.
NULL, // Input medium.
NULL, // Input pin category.
FALSE, // Must be a renderer?
TRUE, // At least one output pin?
0, // Number of major type/subtype pairs for output.
NULL, // Array of major type/subtype pairs for output.
NULL, // Output medium.
NULL); // Output pin category.
// Enumerate the monikers.
IMoniker *pMoniker;
ULONG cFetched;
while (pEnum->Next(1, &pMoniker, &cFetched) == S_OK) {
IPropertyBag *pPropBag = NULL;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
(void **)&pPropBag);
if (SUCCEEDED(hr)) {
// To retrieve the friendly name of the filter, do the following:
VARIANT varName;
VariantInit(&varName);
hr = pPropBag->Read(L"FriendlyName", &varName, 0);
if (SUCCEEDED(hr)) {
// Skip ffdshow itself (matching FFDSHOW_NAME_L) to avoid a loop.
// NOTE(review): the NULL check reads varName.pbstrVal while the
// comparison uses varName.bstrVal — confirm which member is meant.
if (varName.pbstrVal == NULL || _strnicmp(FFDSHOW_NAME_L, varName.bstrVal, 22) != 0) {
// Display the name in your UI somehow.
DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: compatible filter found (%s)"), varName.pbstrVal);
hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pCompatibleFilter);
}
}
// Now add the filter to the graph. Remember to release pFilter later.
IFilterGraph *pGraph = NULL;
fv->deci->getGraph(&pGraph);
IGraphBuilder *pGraphBuilder = NULL;
hr = pGraph->QueryInterface(IID_IGraphBuilder, (void **)&pGraphBuilder);
if (hr == S_OK) {
pGraphBuilder->AddFilter(pCompatibleFilter, varName.bstrVal);
} else {
pCompatibleFilter->Release();
pCompatibleFilter = NULL;
}
// Clean up.
VariantClear(&varName);
// NOTE(review): this Release is reached even when the QueryInterface
// above failed and pGraphBuilder is still NULL — potential crash.
pGraphBuilder->Release();
pPropBag->Release();
}
pMoniker->Release();
// Stop at the first compatible filter successfully bound.
if (pCompatibleFilter != NULL) {
break;
}
}
// Clean up.
pMapper->Release();
pEnum->Release();
}
}
if (pCompatibleFilter != NULL) {
return S_OK;
}
return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK;
}
示例9: if
// Enumerate the supported capture formats of one camera (dev_id >= 0) or of
// all cameras (dev_id < 0). For each device a temporary capture graph is
// built, the device filter is added, and IAMStreamConfig is queried for its
// stream capabilities. Returns the collected CameraConfig entries; on any
// setup failure, returns whatever was collected so far after releasing the
// interfaces acquired for that device.
std::vector<CameraConfig> videoInputCamera::getCameraConfigs(int dev_id) {
std::vector<CameraConfig> cfg_list;
int count = getDeviceCount();
if (count==0) return cfg_list;
comInit();
HRESULT hr;
ICaptureGraphBuilder2 *lpCaptureGraphBuilder;
IGraphBuilder *lpGraphBuilder;
IBaseFilter *lpInputFilter;
IAMStreamConfig *lpStreamConfig;
char nDeviceName[255];
WCHAR wDeviceName[255];
for (int cam_id=0;cam_id<count;cam_id++) {
if ((dev_id>=0) && (dev_id!=cam_id)) continue;
// Build a fresh capture graph builder + filter graph for this device.
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&lpCaptureGraphBuilder);
if (FAILED(hr)) // FAILED is a macro that tests the return value
{
printf("ERROR - Could not create the Filter Graph Manager\n");
comUnInit();
return cfg_list;
}
// Create the Filter Graph Manager.
hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&lpGraphBuilder);
if (FAILED(hr))
{
printf("ERROR - Could not add the graph builder!\n");
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
hr = lpCaptureGraphBuilder->SetFiltergraph(lpGraphBuilder);
if (FAILED(hr))
{
printf("ERROR - Could not set filtergraph\n");
lpGraphBuilder->Release();
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
// Look up the capture filter for this device index and add it.
memset(wDeviceName, 0, sizeof(WCHAR) * 255);
memset(nDeviceName, 0, sizeof(char) * 255);
hr = getDevice(&lpInputFilter, cam_id, wDeviceName, nDeviceName);
if (SUCCEEDED(hr)){
hr = lpGraphBuilder->AddFilter(lpInputFilter, wDeviceName);
}else{
printf("ERROR - Could not find specified video device\n");
lpGraphBuilder->Release();
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
hr = lpCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, lpInputFilter, IID_IAMStreamConfig, (void **)&lpStreamConfig);
if(FAILED(hr)){
printf("ERROR: Couldn't config the stream!\n");
lpInputFilter->Release();
lpGraphBuilder->Release();
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
// Template config for every format entry of this device.
CameraConfig cam_cfg;
CameraTool::initCameraConfig(&cam_cfg);
cam_cfg.driver = DRIVER_DEFAULT;
cam_cfg.device = cam_id;
sprintf(cam_cfg.name, "%s", nDeviceName);
int iCount = 0;
int iSize = 0;
hr = lpStreamConfig->GetNumberOfCapabilities(&iCount, &iSize);
std::vector<CameraConfig> fmt_list;
if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
{
GUID lastFormat = MEDIASUBTYPE_None;
// NOTE(review): the loop steps by 2, visiting only every other
// capability entry — confirm this is intentional.
for (int iFormat = 0; iFormat < iCount; iFormat+=2)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
hr = lpStreamConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
if (SUCCEEDED(hr)){
// When the subtype changes, flush the sorted batch collected so far.
if ( pmtConfig->subtype != lastFormat) {
if (fmt_list.size()>0) {
std::sort(fmt_list.begin(), fmt_list.end());
cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() );
fmt_list.clear();
//......... (rest of the code omitted in this excerpt) .........