本文整理汇总了C++中IFilterGraph类的典型用法代码示例。如果您正苦于以下问题:C++ IFilterGraph类的具体用法?C++ IFilterGraph怎么用?C++ IFilterGraph使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了IFilterGraph类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: cObjectLock
//
// Run
//
STDMETHODIMP CDVBSub::Run( REFERENCE_TIME tStart )
{
CAutoLock cObjectLock( m_pLock );
LogDebug( "CDVBSub::Run" );
m_startTimestamp = tStart;
HRESULT hr = CBaseFilter::Run( tStart );
if( hr != S_OK )
{
LogDebug( "CDVBSub::Run - BaseFilter returned %i", hr );
return hr;
}
// Get media seeking interface if missing
if( !m_pIMediaSeeking )
{
IFilterGraph *pGraph = GetFilterGraph();
if( pGraph )
{
pGraph->QueryInterface( &m_pIMediaSeeking );
pGraph->Release();
}
}
LogDebug( "CDVBSub::Run - done" );
return hr;
}
示例2: DECLARE_PTR
//
// SetFormat
//
// Accepts the media type the caller wants this pin to produce from now on
// and, if the pin is already connected, asks the graph to reconnect so the
// new format takes effect.
//
// Fixes over the original: a NULL pmt is rejected instead of dereferenced,
// the pin returned by ConnectedTo() (which AddRef's its result) is released,
// the graph pointer is checked before use, and the unused pvi local was
// removed.
HRESULT STDMETHODCALLTYPE CVCamPin::SetFormat(AM_MEDIA_TYPE *pmt)
{
    if (pmt == NULL)
        return E_POINTER;

    // Remember the requested type as the pin's current media type.
    m_mt = *pmt;

    IPin* pin = NULL;
    ConnectedTo(&pin);
    if (pin)
    {
        // Only renegotiate while we are actually in a graph.
        IFilterGraph *pGraph = m_pParent->GetGraph();
        if (pGraph)
            pGraph->Reconnect(this);
        pin->Release();   // ConnectedTo() AddRef'd the peer pin - fix leak
    }
    return S_OK;
}
示例3: ASSERT
//
// OnConnect
//
// Called when the property page connects to a filter
//
// Called when the property page connects to the PSI-parser filter: caches the
// parser's IMpeg2PsiParser interface in m_pProgram, then walks from the parser
// filter to its graph to obtain IGraphBuilder, the demux filter and
// IMediaControl, and finally reads the initial transport-stream values.
HRESULT CProgramProperties::OnConnect(IUnknown *pUnknown)
{
ASSERT(m_pProgram == NULL);
CheckPointer(pUnknown,E_POINTER);
// The object we are handed must expose the parser's private interface.
HRESULT hr = pUnknown->QueryInterface(IID_IMpeg2PsiParser, (void **) &m_pProgram);
if(FAILED(hr))
{
return E_NOINTERFACE;
}
ASSERT(m_pProgram);
// Climb from the private interface to the hosting filter...
IBaseFilter * pParserFilter ;
hr = m_pProgram->QueryInterface(IID_IBaseFilter, (void **) &pParserFilter);
RETURN_FALSE_IF_FAILED(TEXT("CProgramProperties::OnUpdate() QueryInterface() failed."), hr);
// ...and from the filter to the graph it lives in. QueryFilterInfo()
// AddRef's Info.pGraph; it is released further below.
// NOTE(review): any RETURN_FALSE_IF_FAILED exit between here and that
// pGraph->Release() leaks the graph reference - confirm whether the macro
// is expected to clean up, or whether these paths are considered fatal.
FILTER_INFO Info;
IFilterGraph * pGraph;
hr = pParserFilter->QueryFilterInfo(&Info);
RETURN_FALSE_IF_FAILED(TEXT("CProgramProperties::OnUpdate() QueryFilterInfo() failed."), hr);
pGraph = Info.pGraph;
pParserFilter->Release();
hr = pGraph->QueryInterface(IID_IGraphBuilder, (void **) & m_pGraphBuilder);
RETURN_FALSE_IF_FAILED(TEXT("CProgramProperties::OnUpdate() QueryInterface() failed."), hr);
// get demux filter
hr = GetDemuxFilter(pGraph, &m_pDemux);
RETURN_FALSE_IF_FAILED(TEXT("CProgramProperties::OnUpdate() GetDemuxFilter() failed."), hr);
pGraph->Release();
// if there is no streaming, the following variables will not be initialized.
if(m_pDemux != NULL && m_pGraphBuilder != NULL){
hr = m_pGraphBuilder->QueryInterface(IID_IMediaControl, (void **) & m_pMediaControl);
RETURN_FALSE_IF_FAILED( TEXT(" CProgramProperties::OnUpdate():Failed to QI IMediaControl."), hr);
// Get the initial Program value
m_pProgram->GetTransportStreamId( &m_stream_id);
m_pProgram->GetPatVersionNumber( &m_pat_version);
m_pProgram->GetCountOfPrograms( &m_number_of_programs );
}
// Refresh the page's UI; a FALSE result propagates as the failure code.
if(!OnUpdate())
return FALSE;
return NOERROR;
}
示例4: DECLARE_PTR
// SetFormat: remember the media type requested by the downstream caller and,
// if we are already connected, renegotiate the connection with the new type.
//
// Fixes over the original: a NULL pmt is rejected instead of dereferenced,
// the pin returned by ConnectedTo() (which AddRef's its result) is released,
// the graph pointer is checked, and the unused pvi local was removed.
HRESULT STDMETHODCALLTYPE CVCamStream::SetFormat(AM_MEDIA_TYPE *pmt)
{
    if (pmt == NULL)
        return E_POINTER;

    // m_mt is the media type of the pin (CMediaType); adopt the new one.
    m_mt = *pmt;

    IPin* pin = NULL;
    ConnectedTo(&pin);
    // if we are currently connected to a pin, ask the graph to reconnect us
    if (pin)
    {
        IFilterGraph *pGraph = m_pParent->GetGraph(); // owning graph of the parent filter
        if (pGraph)
            pGraph->Reconnect(this);
        pin->Release(); // ConnectedTo() AddRef'd the peer pin - fix leak
    }
    return S_OK;
}
示例5: cAutoLock
// SetFormat: under the filter's state lock, adopt the requested media type
// and renegotiate the connection if the pin is already connected.
//
// Fixes over the original: the pin returned by ConnectedTo() (which AddRef's
// its result) is released, the graph pointer is checked before use, and the
// unused pvi local was removed.
HRESULT STDMETHODCALLTYPE UVCamStream::SetFormat(AM_MEDIA_TYPE *pmt)
{
    if(!pmt) return E_POINTER;

    CAutoLock cAutoLock(m_pFilter->pStateLock());

    // Adopt the requested type as the pin's current media type.
    m_mt = *pmt;

    IPin* pin = NULL;
    ConnectedTo(&pin);
    if(pin)
    {
        // Already connected: ask the graph to reconnect with the new type.
        IFilterGraph *pGraph = m_pParent->GetGraph();
        if (pGraph)
            pGraph->Reconnect(this);
        pin->Release(); // ConnectedTo() AddRef'd the peer pin - fix leak
    }
    return S_OK;
}
示例6: LogCurrentPosition
// Debug helper: queries the graph for IMediaSeeking and logs the current
// playback position in seconds.
//
// Fixes over the original: pMediaSeeking is no longer dereferenced when it is
// NULL (no graph, or QueryInterface failed), and the acquired IMediaSeeking
// reference is released instead of leaked (the Release was commented out).
void CSubtitlePin::LogCurrentPosition()
{
  IFilterGraph* pGraph = m_pTsReaderFilter->GetFilterGraph();
  IMediaSeeking* pMediaSeeking( NULL );
  if( pGraph )
  {
    pGraph->QueryInterface( &pMediaSeeking );
    pGraph->Release();
  }
  if( !pMediaSeeking )
  {
    return; // nothing to log without a seeking interface
  }
  LONGLONG pos( 0 );
  pMediaSeeking->GetCurrentPosition( &pos );
  pMediaSeeking->Release(); // drop the reference QueryInterface() added
  // REFERENCE_TIME units are 100 ns; divide to get seconds.
  float fPos = (float)pos;
  fPos = ( ( fPos / 10000000 ) );
  LogDebug("sub current position %f", fPos );
}
示例7: vcamLog
// SetFormat: adopt the requested media type and, if connected, reconnect the
// pin so the new type takes effect. The outcome is traced via vcamLog.
//
// Fixes over the original: a NULL pmt is rejected instead of dereferenced,
// and the graph pointer is checked before use. (pin is already released via
// SafeRelease.)
HRESULT STDMETHODCALLTYPE CVCamStream::SetFormat(AM_MEDIA_TYPE *pmt)
{
    vcamLog(50, "CVCamStream::SetFormat");
    if (pmt == NULL)
        return E_POINTER;
    m_mt = *pmt;
    IPin* pin;
    ConnectedTo(&pin);
    if(pin)
    {
        IFilterGraph *pGraph = m_pParent->GetGraph();
        if (pGraph)
            pGraph->Reconnect(this);
        vcamLog(50, "  CVCamStream::SetFormat returning S_OK (reconnected to graph)");
    } else {
        vcamLog(50, "  CVCamStream::SetFormat returning S_OK (pin not connected)");
    }
    SafeRelease(&pin);
    return S_OK;
}
示例8: XN_METHOD_CHECK_POINTER
// SetFormat: validate that the requested type matches one of our enumerated
// capabilities, make it the preferred mode, and reconnect if needed. On a
// failed reconnect the previous preferred mode is restored.
//
// Fix over the original: the pin returned by ConnectedTo() (which AddRef's
// its result) is now released on both the success and the rollback path.
HRESULT STDMETHODCALLTYPE XnVideoStream::SetFormat(AM_MEDIA_TYPE *pmt)
{
	XN_METHOD_START;
	XN_METHOD_CHECK_POINTER(pmt);
	if (pmt == NULL)
	{
		XN_METHOD_RETURN(E_INVALIDARG);
	}
	xnLogVerbose(XN_MASK_FILTER, "SetFormat was called");
	// check if this format is supported
	CMediaType mediaType(*pmt);
	int index = FindCapability(mediaType);
	if (index == -1)
	{
		XN_METHOD_RETURN(VFW_E_INVALIDMEDIATYPE);
	}
	// keep previous one (so we can rollback)
	int prevPreferred = m_nPreferredMode;
	// set the preferred mode
	m_nPreferredMode = index;
	// try to reconnect (if needed)
	IPin* pin = NULL;
	ConnectedTo(&pin);
	if (pin)
	{
		IFilterGraph *pGraph = ((XnVideoSource*)m_pFilter)->GetGraph();
		HRESULT hr = pGraph->Reconnect(this);
		pin->Release(); // ConnectedTo() AddRef'd the peer pin - fix leak
		if (FAILED(hr))
		{
			// rollback
			m_nPreferredMode = prevPreferred;
			XN_METHOD_RETURN(hr);
		}
	}
	XN_METHOD_RETURN(S_OK);
}
示例9: ConnectedTo
// SetFormat: the caller demands we use this media type from now on. A NULL
// pmt conventionally means "reset to default"; that is accepted as a no-op
// here (LODO handle it someday).
//
// Fixes over the original: the pin returned by ConnectedTo() (which AddRef's
// its result) is released, and the graph pointer is checked before use.
HRESULT STDMETHODCALLTYPE CVCamStream::SetFormat(AM_MEDIA_TYPE *pmt)
{
    if(!pmt) {
        return S_OK; // *sure* we reset..yeah...sure we did...
    }
    // Validate before accepting the type.
    if(CheckMediaType((CMediaType *) pmt) != S_OK) {
        return E_FAIL; // just in case :P [FME...]
    }
    m_mt = *pmt;
    IPin* pin = NULL;
    ConnectedTo(&pin);
    if(pin)
    {
        IFilterGraph *pGraph = m_pParent->GetGraph();
        if(pGraph)
            pGraph->Reconnect(this);
        pin->Release(); // ConnectedTo() AddRef'd the peer pin - fix leak
    }
    return S_OK;
}
示例10: cAutoLock
// sets fps, size, (etc.) maybe, or maybe just saves it away for later use...
HRESULT STDMETHODCALLTYPE CPushPinDesktop::SetFormat(AM_MEDIA_TYPE *pmt)
{
CAutoLock cAutoLock(m_pFilter->pStateLock());
// I *think* it can go back and forth, then. You can call GetStreamCaps to enumerate, then call
// SetFormat, then later calls to GetMediaType/GetStreamCaps/EnumMediatypes will all "have" to just give this one
// though theoretically they could also call EnumMediaTypes, then SetMediaType, and not call SetFormat
// does flash call both? what order for flash/ffmpeg/vlc calling both?
// LODO update msdn
// "they" [can] call this...see msdn for SetFormat
// NULL means reset to default type...
if(pmt != NULL)
{
if(pmt->formattype != FORMAT_VideoInfo) // same as {CLSID_KsDataTypeHandlerVideo}
return E_FAIL;
// LODO I should do more here...http://msdn.microsoft.com/en-us/library/dd319788.aspx I guess [meh]
// LODO should fail if we're already streaming... [?]
VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) pmt->pbFormat;
if(CheckMediaType((CMediaType *) pmt) != S_OK) {
return E_FAIL; // just in case :P [did skype get here once?]
}
// for FMLE's benefit, only accept a setFormat of our "final" width [force setting via registry I guess, otherwise it only shows 80x60 whoa!]
// flash media live encoder uses setFormat to determine widths [?] and then only displays the smallest? oh man that is messed up
if( pvi->bmiHeader.biWidth != getCaptureDesiredFinalWidth() ||
pvi->bmiHeader.biHeight != getCaptureDesiredFinalHeight())
{
return E_INVALIDARG;
}
// ignore other things like cropping requests for now...
// now save it away...for being able to re-offer it later. We could use SetMediaType but we're just being lazy and re-using m_mt for many things I guess
m_mt = *pmt;
// The frame rate at which your filter should produce data is determined by the AvgTimePerFrame field of VIDEOINFOHEADER
if(pvi->AvgTimePerFrame)
m_rtFrameLength = pvi->AvgTimePerFrame; // allow them to set whatever fps they request, i.e. if it's less than the max default. VLC command line can specify this, for instance...
// also setup scaling here, as WFMLE and ffplay and VLC all get here...
m_rScreen.right = m_rScreen.left + pvi->bmiHeader.biWidth; // allow them to set whatever "scaling size" they want [set m_rScreen is negotiated right here]
m_rScreen.bottom = m_rScreen.top + pvi->bmiHeader.biHeight;
}
IPin* pin;
ConnectedTo(&pin);
if(pin)
{
IFilterGraph *pGraph = m_pParent->GetGraph();
HRESULT res = pGraph->Reconnect(this);
if(res != S_OK) // LODO check first, and then just re-use the old one?
return res; // else return early...not really sure how to handle this...since we already set m_mt...but it's a pretty rare case I think...
// plus ours is a weird case...
} else {
// graph hasn't been built yet...
// so we're ok with "whatever" format they pass us, we're just in the setup phase...
}
// success of some type
if(pmt == NULL) {
m_bFormatAlreadySet = false;
} else {
m_bFormatAlreadySet = true;
}
return S_OK;
}
开发者ID:dkaminski,项目名称:screen-capture-recorder-to-video-windows-free,代码行数:74,代码来源:PushSourceDesktopAccessories.cpp
示例11: DebugLog
HRESULT CWavPackDSSplitter::TryToLoadCorrectionFile()
{
// Here is the nasty hacky stuff :>
HRESULT hr = S_FALSE;
IPin *pPinOutSrc = NULL;
IFileSourceFilter *pFSF = NULL;
LPOLESTR pszFileName = NULL;
IBaseFilter* pSrcFilterCorr = NULL;
IFileSourceFilter* pFSFCorr = NULL;
IEnumPins *pEnum = NULL;
IPin *pPinNew = NULL;
BOOL bCorrectionFileLoaded = FALSE;
IEnumFilters* pEnumFilers = NULL;
BOOL bSrcFileAlreadyLoaded = FALSE;
DebugLog("===> Entering CWavPackDSSplitter::TryToLoadCorrectionFile... 0x%08X", GetCurrentThreadId());
if((m_bDontTryToLoadCorrectionFileAgain == TRUE) ||
(m_pInputPinCorr == NULL) ||
(m_pInputPinCorr->IsConnected() == TRUE))
{
DebugLog("<=== Leaving CWavPackDSSplitter::TryToLoadCorrectionFile already loaded ?... 0x%08X", GetCurrentThreadId());
return hr;
}
if((m_pInputPin->m_pWavPackParser->first_wphdr.flags & HYBRID_FLAG) != HYBRID_FLAG)
{
// Not an hybrid file, don't even try
m_bDontTryToLoadCorrectionFileAgain = TRUE;
DebugLog("<=== Leaving CWavPackDSSplitter::TryToLoadCorrectionFile not hybrid... 0x%08X", GetCurrentThreadId());
return hr;
}
#define IF_FAIL_BREAK(x) if(FAILED(x)) { break; }
do {
hr = m_pInputPin->ConnectedTo(&pPinOutSrc);
IF_FAIL_BREAK(hr);
// Get a pointer on the source filter
PIN_INFO pi;
pi.pFilter = NULL;
hr = pPinOutSrc->QueryPinInfo(&pi);
IF_FAIL_BREAK(hr);
// Get source filter IFileSourceFilter interface
hr = pi.pFilter->QueryInterface(IID_IFileSourceFilter, (void **)&pFSF);
IF_FAIL_BREAK(hr);
// Get filename
hr = pFSF->GetCurFile(&pszFileName, NULL);
IF_FAIL_BREAK(hr);
// Create correction file filename
WCHAR pszFileNameC[MAX_PATH];
ZeroMemory(pszFileNameC, sizeof(WCHAR)*MAX_PATH);
int cch = lstrlenW(pszFileName);
CopyMemory(pszFileNameC, pszFileName, cch*sizeof(WCHAR));
pszFileNameC[cch] = 'c';
IFilterGraph* pFG = GetFilterGraph();
// Search in the graph in case the source filter with correction file already exist
hr = pFG->EnumFilters(&pEnumFilers);
IF_FAIL_BREAK(hr);
while(pEnumFilers->Next(1, &pSrcFilterCorr, 0) == S_OK)
{
HRESULT lhr;
lhr = pSrcFilterCorr->QueryInterface(IID_IFileSourceFilter, (void**)&pFSFCorr);
if(SUCCEEDED(lhr))
{
LPOLESTR pszFileNameCandidate = NULL;
pFSFCorr->GetCurFile(&pszFileNameCandidate, NULL);
if(memcmp(pszFileNameCandidate,pszFileNameC,(cch+1)*sizeof(WCHAR)) == 0)
{
// This is the good file
bSrcFileAlreadyLoaded = TRUE;
if(pszFileNameCandidate != NULL)
{
CoTaskMemFree(pszFileNameCandidate);
}
break;
}
if(pszFileNameCandidate != NULL)
{
CoTaskMemFree(pszFileNameCandidate);
}
}
pSrcFilterCorr->Release();
pSrcFilterCorr = NULL;
}
if(bSrcFileAlreadyLoaded == FALSE)
{
// Create new file source filter
hr = CoCreateInstance(CLSID_AsyncReader,
NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter,
(void**)&pSrcFilterCorr);
IF_FAIL_BREAK(hr);
//.........这里部分代码省略.........
示例12: PushDataThread
DWORD WINAPI PushDataThread(PVOID param)
{
PushParam* pushParam = (PushParam*)param;
HANDLE PushSemaphore = pushParam->PushSemaphore;
HANDLE PushDataMutex = pushParam->PushDataMutex;
NetReceiveFilter* filter = pushParam->filter;
std::map<REFERENCE_TIME, IMediaSample*>& SampleList = *pushParam->SampleList;
delete pushParam;
REFERENCE_TIME startTime,endTime;
CRefTime streamTime(LONG(0)),lastStreamTime(LONG(0));
bool first = true;
AM_MEDIA_TYPE mediaType;
IMediaSample* sample ;
while (SampleList.size() == 0) //等待足够多的数据
{
WaitForSingleObject(PushSemaphore,INFINITE);
}
CBasePin* pin = filter->GetPin(0);
pin->ConnectionMediaType(&mediaType);
IFilterGraph* filterGraph = filter->GetFilterGraph();
ComReleaser filterGraphReleaser(filterGraph);
HRESULT hr;
IMediaControl* mediaControl;
hr = filterGraph->QueryInterface(IID_IMediaControl, (void**)&mediaControl);
if(FAILED(hr))
{
ErrorPrint("Get media control error", hr);
return false;
}
ComReleaser mediaControlReleaser(mediaControl);
while (true)
{
WaitForSingleObject(PushDataMutex, INFINITE);
if (filter->getPlayMode() == 0) // 如果只是尽快播放,则不考虑时间戳,而且一次一sample的往下传
{
if (SampleList.size() == 0)
{
ReleaseMutex(PushDataMutex);
while (SampleList.size() == 0)
{
WaitForSingleObject(PushSemaphore,INFINITE);
}
WaitForSingleObject(PushDataMutex, INFINITE);
}
sample = SampleList.begin()->second;
}
else if (filter->getPlayMode() == 1) //需要考虑时间戳
{
NetReceiveFilter::State state = filter->getState();
if (SampleList.size() == 0)
{
g_ReferenceTimeFilter->pauseTime(); //暂停时钟
ReleaseMutex(PushDataMutex);
while (SampleList.size() == 0) //等待足够多的数据
{
WaitForSingleObject(PushSemaphore,INFINITE);
}
WaitForSingleObject(PushDataMutex, INFINITE);
g_ReferenceTimeFilter->startTime(); //启动时钟
}
if (state == NetReceiveFilter::Stopped)
{
ReleaseMutex(PushDataMutex);
Sleep(50);
continue;
}
if(g_ReferenceTimeFilter->isStop())
{
ReleaseMutex(PushDataMutex);
Sleep(50);
continue;
}
sample = SampleList.begin()->second;
sample->GetTime(&startTime,&endTime);
filter->StreamTime(streamTime); //得到当前的流时间
g_ReferenceTimeFilter->GetTime(&startTime);
g_ReferenceTimeFilter->GetTime(&endTime);
if (mediaType.majortype == MEDIATYPE_Video)
{
int a = 0;
}
else
{
int b = 0;
}
if(state != NetReceiveFilter::Paused) //pause时不修正
{
//.........这里部分代码省略.........
示例13: Close
// Builds a DirectShow playback graph around the partial-file source reader:
// attaches the file to the stream, creates the filter graph, adds and renders
// our reader filter, caches the control/event/seeking (and optional video/
// audio) interfaces, wires up window notifications, and leaves the player in
// the stopped state.
HRESULT fsPartMediaPlayer::Open(HANDLE hFile, UINT64 uMaxAvail)
{
HRESULT hr;
// Tear down any previous graph before building a new one.
Close ();
m_stream.Attach (hFile, uMaxAvail);
if (m_stream.Get_MediaType () == NULL)
return E_FAIL;
m_reader.Set_MediaType (m_stream.Get_MediaType ());
// RIF presumably returns from this function on FAILED(hr) - TODO confirm.
RIF (CoCreateInstance (CLSID_FilterGraph, NULL, CLSCTX_INPROC,
IID_IGraphBuilder, (void**) &m_pGB));
IFilterGraph* pFG = NULL;
RIF (m_pGB->QueryInterface (IID_IFilterGraph, (void**)&pFG));
// NOTE(review): if AddFilter fails, the RIF early-return leaks pFG - confirm
// whether RIF performs any cleanup.
RIF (pFG->AddFilter (&m_reader, NULL));
pFG->Release ();
// Let the graph builder construct the rest of the chain from our output pin.
RIF (m_pGB->Render (m_reader.GetPin (0)));
RIF (m_pGB->QueryInterface (IID_IMediaControl, (void**)&m_pMC));
RIF (m_pGB->QueryInterface (IID_IMediaEventEx, (void**)&m_pME));
RIF (m_pGB->QueryInterface (IID_IMediaSeeking, (void**)&m_pMS));
// Video window and audio interfaces are optional (audio-only media has none),
// so their QueryInterface results are deliberately not checked.
m_pGB->QueryInterface (IID_IVideoWindow, (void**)&m_pVW);
m_pGB->QueryInterface (IID_IBasicAudio, (void**)&m_pBA);
IBasicVideo* pBV = NULL;
m_pGB->QueryInterface (IID_IBasicVideo, (void**) &pBV);
if (pBV != NULL)
{
// Remember the native aspect ratio for later resizing.
long nW, nH;
pBV->get_VideoWidth (&nW);
pBV->get_VideoHeight (&nH);
m_fVideoRatio = (double)nW / nH;
pBV->Release ();
}
else
{
m_fVideoRatio = 1;
}
if (m_pVW)
{
// Forward window messages to our own window; without that the video window
// is unusable to us, so drop it entirely on failure.
if (FAILED (m_pVW->put_MessageDrain ((OAHWND)m_hOutWnd)))
{
SAFE_RELEASE (m_pVW);
}
}
// NOTE(review): LONG(this) truncates the pointer on 64-bit builds - confirm
// this target is 32-bit only, or switch to a registered cookie value.
RIF (m_pME->SetNotifyWindow ((OAHWND)m_hOutWnd, WM_VIDEONOTIFY, LONG(this)));
if (m_pVW)
{
// Re-parent the video window into our output window and bring it forward.
m_pVW->put_Visible (OAFALSE);
m_pVW->put_WindowStyle (WS_CHILD);
m_pVW->put_Owner ((OAHWND)m_hOutWnd);
AutoSize ();
m_pVW->put_Visible (OATRUE);
m_pVW->SetWindowForeground (-1);
}
m_state = VFPS_STOPPED;
return S_OK;
}
示例14: SetThreadName
DWORD WINAPI CBDReaderFilter::CommandThread()
{
SetThreadName(-1, "BDReader_COMMAND");
IFilterGraph* pGraph = NULL;
pGraph = GetFilterGraph();
if (pGraph)
{
pGraph->QueryInterface(&m_pMediaSeeking);
pGraph->Release();
}
HANDLE handles[2];
handles[0] = m_hStopCommandThreadEvent;
handles[1] = m_hCommandEvent;
if (m_pMediaSeeking)
{
while(1)
{
//DWORD result = WaitForMultipleObjects(2, handles, false, 40);
DWORD result = WaitForMultipleObjects(2, handles, false, INFINITE);
if (result == WAIT_OBJECT_0) // exit event
{
LogDebug("CBDReaderFilter::Command thread: closing down");
return 0;
}
/*
else if (result == WAIT_TIMEOUT)
{
LONGLONG pos = 0;
HRESULT hr = m_pMediaSeeking->GetCurrentPosition(&pos);
if (SUCCEEDED(hr))
{
lib.ProvideUserInput(CONVERT_DS_90KHz(pos), BD_VK_NONE);
}
}
*/
else if (result == WAIT_OBJECT_0 + 1) // command in queue
{
LONGLONG posEnd = 0;
LONGLONG zeroPos = 0;
m_pMediaSeeking->GetDuration(&posEnd);
ivecCommandQueue it;
DS_CMD cmd;
{ // just fetch the command and release the lock
CAutoLock lock(&m_csCommandQueue);
it = m_commandQueue.begin();
cmd = (*it);
m_commandQueue.erase(it);
if (m_commandQueue.empty())
{
ResetEvent(m_hCommandEvent);
}
}
switch (cmd.id)
{
case REBUILD:
{
LogDebug("CBDReaderFilter::Command thread: issue rebuild!");
LONGLONG pos = 0;
if (cmd.refTime.m_time < 0)
{
CAutoLock lock(&m_csClock);
pos = m_rtCurrentTime;
}
else
pos = cmd.refTime.m_time;
m_eRebuild.Reset();
TriggerOnMediaChanged();
m_eRebuild.Wait();
if (m_bRebuildOngoing)
{
LogDebug("CBDReaderFilter::Command thread: graph rebuild has failed?");
return 0;
}
m_bUpdateStreamPositionOnly = true;
LogDebug("CBDReaderFilter::Command thread: seek - pos: %06.3f (rebuild)", cmd.refTime.Millisecs() / 1000.0);
m_pMediaSeeking->SetPositions(&pos, AM_SEEKING_AbsolutePositioning | AM_SEEKING_FakeSeek, &posEnd, AM_SEEKING_NoPositioning);
m_eSeekDone.Wait();
m_eSeekDone.Reset();
m_demultiplexer.SetMediaChanging(false);
m_demultiplexer.m_bRebuildOngoing = false;
break;
}
//.........这里部分代码省略.........
示例15: fixMPEGinAVI
//.........这里部分代码省略.........
if (regErr != ERROR_SUCCESS || dwType != REG_BINARY) {
return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK;
}
if (buf[2] >= 0x0b) { // Third byte is the major version number
doPostProcessing = true;
}
}
if (doPostProcessing) {
DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: input format disabled or not supported. Trying to maintain in the graph..."));
IFilterMapper2 *pMapper = NULL;
IEnumMoniker *pEnum = NULL;
HRESULT hr = CoCreateInstance(CLSID_FilterMapper2,
NULL, CLSCTX_INPROC, IID_IFilterMapper2,
(void **) &pMapper);
if (FAILED(hr)) {
// Error handling omitted for clarity.
}
GUID arrayInTypes[2];
arrayInTypes[0] = mt->majortype;//MEDIATYPE_Video;
arrayInTypes[1] = mt->subtype;//MEDIASUBTYPE_dvsd;
hr = pMapper->EnumMatchingFilters(
&pEnum,
0, // Reserved.
TRUE, // Use exact match?
MERIT_DO_NOT_USE + 1, // Minimum merit.
TRUE, // At least one input pin?
1, // Number of major type/subtype pairs for input.
arrayInTypes, // Array of major type/subtype pairs for input.
NULL, // Input medium.
NULL, // Input pin category.
FALSE, // Must be a renderer?
TRUE, // At least one output pin?
0, // Number of major type/subtype pairs for output.
NULL, // Array of major type/subtype pairs for output.
NULL, // Output medium.
NULL); // Output pin category.
// Enumerate the monikers.
IMoniker *pMoniker;
ULONG cFetched;
while (pEnum->Next(1, &pMoniker, &cFetched) == S_OK) {
IPropertyBag *pPropBag = NULL;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
(void **)&pPropBag);
if (SUCCEEDED(hr)) {
// To retrieve the friendly name of the filter, do the following:
VARIANT varName;
VariantInit(&varName);
hr = pPropBag->Read(L"FriendlyName", &varName, 0);
if (SUCCEEDED(hr)) {
if (varName.pbstrVal == NULL || _strnicmp(FFDSHOW_NAME_L, varName.bstrVal, 22) != 0) {
// Display the name in your UI somehow.
DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: compatible filter found (%s)"), varName.pbstrVal);
hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pCompatibleFilter);
}
}
// Now add the filter to the graph. Remember to release pFilter later.
IFilterGraph *pGraph = NULL;
fv->deci->getGraph(&pGraph);
IGraphBuilder *pGraphBuilder = NULL;
hr = pGraph->QueryInterface(IID_IGraphBuilder, (void **)&pGraphBuilder);
if (hr == S_OK) {
pGraphBuilder->AddFilter(pCompatibleFilter, varName.bstrVal);
} else {
pCompatibleFilter->Release();
pCompatibleFilter = NULL;
}
// Clean up.
VariantClear(&varName);
pGraphBuilder->Release();
pPropBag->Release();
}
pMoniker->Release();
if (pCompatibleFilter != NULL) {
break;
}
}
// Clean up.
pMapper->Release();
pEnum->Release();
}
}
if (pCompatibleFilter != NULL) {
return S_OK;
}
return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK;
}