本文整理汇总了C++中IPin::QueryInterface方法的典型用法代码示例。如果您正苦于以下问题:C++ IPin::QueryInterface方法的具体用法?C++ IPin::QueryInterface怎么用?C++ IPin::QueryInterface使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类IPin
的用法示例。
在下文中一共展示了IPin::QueryInterface方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: FindPinInterface
// Searches every pin of the given filter for the requested interface.
// Returns TRUE as soon as any pin exposes it; the (AddRef'd) interface
// pointer is written to *ppUnk and the caller must Release() it.
// Returns FALSE on invalid arguments, enumeration failure, or when no
// pin supports the interface.
BOOL FindPinInterface(IBaseFilter* pFilter, REFGUID iid, void** ppUnk)
{
    if (pFilter == NULL || ppUnk == NULL)
        return FALSE;

    IEnumPins* pPinEnum = NULL;
    if (FAILED(pFilter->EnumPins(&pPinEnum)))
        return FALSE;

    BOOL bFound = FALSE;
    IPin* pCurPin = NULL;
    // Probe each pin in turn; stop at the first one that answers.
    while (!bFound && pPinEnum->Next(1, &pCurPin, NULL) == S_OK)
    {
        if (SUCCEEDED(pCurPin->QueryInterface(iid, ppUnk)))
            bFound = TRUE;
        pCurPin->Release();
    }

    pPinEnum->Release();
    return bFound;
}
示例2: GetConnected
// Negotiates DXVA1 uncompressed surface info for the given decoder profile.
//
// profileID     - DXVA decoder profile GUID to probe.
// uncompBufInfo - receives min/max surface counts and the negotiated
//                 uncompressed pixel format.
//
// Returns E_INVALIDARG when the decoder rejects the profile, E_UNEXPECTED
// when this pin is not connected, otherwise the result of confirming the
// format with the downstream video accelerator.
HRESULT CH264DecoderOutputPin::GetUncompSurfacesInfo(
const GUID* profileID, AMVAUncompBufferInfo* uncompBufInfo)
{
HRESULT r = E_INVALIDARG;
if (m_decoder->IsFormatSupported(*profileID))
{
// The video accelerator is exposed by the downstream (connected) pin.
intrusive_ptr<IAMVideoAccelerator> accel;
IPin* connected = GetConnected();
if (!connected)
return E_UNEXPECTED;
r = connected->QueryInterface(IID_IAMVideoAccelerator,
reinterpret_cast<void**>(&accel));
if (SUCCEEDED(r) && accel)
{
// Report the same value as both min and max so the accelerator
// allocates exactly the decoder's required surface count.
const int surfCount = getDecodeSurfacesCount();
uncompBufInfo->dwMaxNumSurfaces = surfCount;
uncompBufInfo->dwMinNumSurfaces = surfCount;
r = m_decoder->ConfirmDXVA1UncompFormat(
accel.get(), profileID,
&uncompBufInfo->ddUncompPixelFormat);
if (SUCCEEDED(r))
{
// Cache the agreed pixel format and chosen profile for later use.
memcpy(&m_uncompPixelFormat,
&uncompBufInfo->ddUncompPixelFormat,
sizeof(m_uncompPixelFormat));
m_DXVA1DecoderID = *profileID;
}
}
}
return r;
}
示例3: GetAMConfigForSinglePin
// Finds the first pin of the given direction on a filter and returns its
// IAMStreamConfig interface.
//
// pUnk      - object to search; queried for IID_IBaseFilter.
// direction - pin direction to match (PINDIR_INPUT / PINDIR_OUTPUT).
// ppConfig  - on S_OK receives the AddRef'd IAMStreamConfig; the caller
//             must Release() it.
//
// Returns S_OK only when a matching pin supporting IAMStreamConfig was
// found; otherwise a failure code.
// Fixes vs. original: the inner `if (SUCCEEDED(hr))` after Reset() tested
// a stale hr (Reset's result was discarded), and when no pin matched the
// function could return a leftover success code from QueryDirection with
// *ppConfig never set.
HRESULT GetAMConfigForSinglePin(IUnknown* pUnk, PIN_DIRECTION direction, IAMStreamConfig** ppConfig)
{
    IBaseFilter* pBaseFilter = NULL;
    HRESULT hr = pUnk->QueryInterface(IID_IBaseFilter, (void**)&pBaseFilter);
    if (SUCCEEDED(hr))
    {
        IEnumPins* pEnumPins = NULL;
        hr = pBaseFilter->EnumPins(&pEnumPins);
        if (SUCCEEDED(hr))
        {
            pEnumPins->Reset();
            IPin* pPin = NULL;
            BOOL bFound = FALSE;
            while ((pEnumPins->Next(1, &pPin, NULL) == S_OK) && !bFound)
            {
                PIN_DIRECTION fetchedDir;
                hr = pPin->QueryDirection(&fetchedDir);
                if (SUCCEEDED(hr) && (fetchedDir == direction))
                {
                    hr = pPin->QueryInterface(IID_IAMStreamConfig, (void**)ppConfig);
                    bFound = SUCCEEDED(hr);
                }
                pPin->Release();
            }
            if (!bFound)
                hr = E_FAIL;   // don't report stale success when nothing matched
            pEnumPins->Release();
        }
        pBaseFilter->Release();
    }
    return hr;
}
示例4: FindPinInterface
// Searches a filter's pins for one that exposes the given interface.
// On success *ppUnk receives the AddRef'd interface (caller releases)
// and the HRESULT of the successful QueryInterface is returned; if the
// arguments are invalid E_POINTER is returned, and E_FAIL when the pins
// cannot be enumerated or none supports the interface.
HRESULT FindPinInterface(
    IBaseFilter *pFilter, // Pointer to the filter to search.
    REFGUID iid,          // IID of the interface.
    void **ppUnk)         // Receives the interface pointer.
{
    if (!pFilter || !ppUnk) return E_POINTER;

    IEnumPins *pPinEnum = 0;
    if (FAILED(pFilter->EnumPins(&pPinEnum)))
        return E_FAIL;

    HRESULT hrFound = E_FAIL;
    IPin *pCandidate = 0;
    // Walk every pin, stopping at the first that answers the query.
    for (;;)
    {
        if (pPinEnum->Next(1, &pCandidate, 0) != S_OK)
            break;
        hrFound = pCandidate->QueryInterface(iid, ppUnk);
        pCandidate->Release();
        if (SUCCEEDED(hrFound))
            break;
    }

    pPinEnum->Release();
    return hrFound;
}
示例5: GetAMConfigForMultiPin
// Finds the CAPTURE-category pin of the given direction on a multi-pin
// filter and returns its IAMStreamConfig interface.
//
// pUnk     - object to search; queried for IID_IBaseFilter.
// direct   - pin direction to match.
// ppConfig - on S_OK receives the AddRef'd IAMStreamConfig; the caller
//            must Release() it.
//
// Returns S_OK only when a capture pin supporting IAMStreamConfig was
// found; otherwise a failure code.
// Fixes vs. original: the inner `if (SUCCEEDED(hr))` after Reset() tested
// a stale hr (Reset's result was discarded), and when no capture pin
// matched the function could return a leftover success code with
// *ppConfig never set.
HRESULT GetAMConfigForMultiPin(IUnknown* pUnk, PIN_DIRECTION direct, IAMStreamConfig** ppConfig)
{
    IBaseFilter* pBaseFilter = NULL;
    HRESULT hr = pUnk->QueryInterface(IID_IBaseFilter, (void**)&pBaseFilter);
    if (SUCCEEDED(hr))
    {
        IEnumPins* pEnumPins = NULL;
        hr = pBaseFilter->EnumPins(&pEnumPins);
        if (SUCCEEDED(hr))
        {
            pEnumPins->Reset();
            IPin* pPin = NULL;
            BOOL bFound = FALSE;
            while ((pEnumPins->Next(1, &pPin, NULL) == S_OK) && !bFound)
            {
                PIN_DIRECTION fetchedDir;
                hr = pPin->QueryDirection(&fetchedDir);
                if (SUCCEEDED(hr) && (fetchedDir == direct))
                {
                    // Check the pin category via IKsPropertySet: we only
                    // want the capture pin, not preview/still pins.
                    IKsPropertySet* pPS;
                    hr = pPin->QueryInterface(IID_IKsPropertySet, (void**)&pPS);
                    if (SUCCEEDED(hr))
                    {
                        GUID guid = { 0 };
                        DWORD dwReturn = 0;
                        hr = pPS->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, 0, 0, &guid, sizeof(guid), &dwReturn);
                        if (SUCCEEDED(hr) && ::IsEqualGUID(guid, PIN_CATEGORY_CAPTURE))
                        {
                            hr = pPin->QueryInterface(IID_IAMStreamConfig, (void**)ppConfig);
                            bFound = SUCCEEDED(hr);
                        }
                        pPS->Release();
                    }
                }
                pPin->Release();
            }
            if (!bFound)
                hr = E_FAIL;   // don't report stale success when nothing matched
            pEnumPins->Release();
        }
        pBaseFilter->Release();
    }
    return hr;
}
示例6: SetCaptureBufferSize
// Reconfigures the capture pin's audio format to 8-bit mono 11025 Hz
// (nAvgBytesPerSec recomputed accordingly, nBlockAlign forced to 1).
// NOTE(review): assumes the pin's format block is a WAVEFORMATEX (i.e.
// this is an audio capture pin) — pmt->formattype is not checked; confirm.
void CCaptureDevice::SetCaptureBufferSize(void)
{
IPin * pCapturePin = GetPin();
if (pCapturePin)
{
DWORD dwBytesPerSec = 0;
// Brace-init of a pointer: pmt starts out NULL; GetFormat allocates it.
AM_MEDIA_TYPE * pmt = {0};
IAMStreamConfig * pCfg = NULL;
HRESULT hr = pCapturePin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
if ( hr==S_OK )
{
hr = pCfg->GetFormat(&pmt);
if ( hr==S_OK )
{
// Remember the original rate (used by the commented-out allocator
// negotiation below), then overwrite the format in place.
WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
dwBytesPerSec = pWF->nAvgBytesPerSec;
pWF->nChannels = 1;
pWF->wBitsPerSample = 8;
pWF->nSamplesPerSec = 11025;
pWF->nAvgBytesPerSec = pWF->nSamplesPerSec * pWF->nChannels * pWF->wBitsPerSample / 8;
pWF->nBlockAlign = 1;
/*
info.cbSize = sizeof(WAVEFORMATEX);
info.wFormatTag = 1;
info.nChannels = 2;
info.nSamplesPerSec = 44100;
//info.nSamplesPerSec = 22050;
11025
info.wBitsPerSample = 16;
info.nAvgBytesPerSec = info.nSamplesPerSec * info.nChannels * info.wBitsPerSample / 8;
info.nBlockAlign = 4;
*/
// Push the modified format back to the pin, then free the media type.
pCfg->SetFormat( pmt );
DeleteMediaType(pmt);
}
pCfg->Release();
}
/* if (dwBytesPerSec)
{
IAMBufferNegotiation * pNeg = NULL;
hr = pCapturePin->QueryInterface(IID_IAMBufferNegotiation,
(void **)&pNeg);
if (SUCCEEDED(hr))
{
ALLOCATOR_PROPERTIES AllocProp;
AllocProp.cbAlign = -1; // -1 means no preference.
AllocProp.cbBuffer = dwBytesPerSec * dwLatencyInMilliseconds / 1000;
AllocProp.cbPrefix = -1;
AllocProp.cBuffers = -1;
hr = pNeg->SuggestAllocatorProperties(&AllocProp);
pNeg->Release();
}
}*/
}
}
示例7: setVideoHWND
/*
 * Class: sage_DShowMediaPlayer
 * Method: setVideoHWND0
 * Signature: (JJ)V
 *
 * Parents the DirectShow video window to the supplied native HWND and
 * forces stretched aspect-ratio mode on any filter pin named "Input0"
 * that exposes IMixerPinConfig (the player does its own aspect-ratio
 * handling, so DirectShow's must be disabled).
 */
JNIEXPORT void JNICALL Java_sage_DShowMediaPlayer_setVideoHWND0
(JNIEnv *env, jobject jo, jlong dataPtr, jlong vhwnd)
{
CPlayerData* playData = (CPlayerData*) dataPtr;
IGraphBuilder* pGraph = playData->GetGraph();
IVideoWindow* pVW = NULL;
HRESULT hr = pGraph->QueryInterface(IID_IVideoWindow, (void**)&pVW);
if (SUCCEEDED(hr))
{
slog((env, "DShowPlayer setVideoHWND(%d)\r\n", (int) vhwnd));
// Embed the video window in the Java-owned parent and route its
// window messages there.
pVW->put_AutoShow(OAFALSE);
pVW->put_Owner((OAHWND)vhwnd);
pVW->put_MessageDrain((OAHWND)vhwnd);
pVW->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS | WS_CLIPCHILDREN);
pVW->put_Visible(OATRUE);
// We do all of our own aspect ratio control, so don't let DShow do any for us
// by setting the aspect ratio mode on the video rendering filter's pin
IEnumFilters *pEnum = NULL;
hr = pGraph->EnumFilters(&pEnum);
if (SUCCEEDED(hr))
{
IBaseFilter *currFilt = NULL;
while (pEnum->Next(1, &currFilt, NULL) == S_OK)
{
IPin *overlayPin = NULL;
// NOTE(review): "Input0" presumably identifies the overlay mixer's
// first input pin — confirm against the filters used in this graph.
hr = currFilt->FindPin(L"Input0", &overlayPin);
if (SUCCEEDED(hr))
{
// Right pin name, let's see if it's overlay
IMixerPinConfig *pOverlayMix = NULL;
hr = overlayPin->QueryInterface(IID_IMixerPinConfig, (void**)&pOverlayMix);
if (SUCCEEDED(hr))
{
pOverlayMix->SetAspectRatioMode(AM_ARMODE_STRETCHED);
SAFE_RELEASE(pOverlayMix);
}
SAFE_RELEASE(overlayPin);
}
SAFE_RELEASE(currFilt);
}
SAFE_RELEASE(pEnum);
hr = S_OK;
}
SAFE_RELEASE(pVW);
}
HTESTPRINT(hr);
}
示例8: GetPin
// Returns the IAMStreamConfig interface of the capture output pin, or
// NULL when the filter is absent, the pin is unavailable, or the pin
// does not support the interface. The returned interface holds the
// reference added by QueryInterface; the CALLER must Release() it.
//
// BUG FIX: the original called pConfig->Release() and then returned the
// released pointer, handing callers a dangling COM interface. The
// reference is now kept for the caller.
IAMStreamConfig * CCaptureDevice::GetStreamConfig(void)
{
    IAMStreamConfig * pConfig = NULL;
    if (m_pBaseFilter)
    {
        // Get the capture output pin first.
        // NOTE(review): GetPin()'s ownership convention (AddRef'd or not)
        // is not visible here — mirrors the original, which did not
        // release it; confirm against GetPin's implementation.
        IPin * pCapture = GetPin();
        if (pCapture)
        {
            pCapture->QueryInterface(IID_IAMStreamConfig, (void **)&pConfig);
        }
    }
    return pConfig;
}
示例9:
// Obtains the IMediaSeeking interface of the pin our pin is connected
// to. Both "not connected" and "peer lacks IMediaSeeking" collapse to
// E_NOTIMPL so callers see a single "seeking unsupported" code. On
// success *ppMS holds the AddRef'd interface (caller releases).
HRESULT
CPosPassThru::GetPeerSeeking(IMediaSeeking ** ppMS)
{
    *ppMS = NULL;

    IPin *pPeer = NULL;
    if (FAILED(m_pPin->ConnectedTo(&pPeer))) {
        return E_NOTIMPL;
    }

    // QueryInterface writes straight into the caller's slot; on failure
    // COM guarantees it stays NULL.
    const HRESULT hr = pPeer->QueryInterface(IID_IMediaSeeking, (void **) ppMS);
    pPeer->Release();

    return SUCCEEDED(hr) ? S_OK : E_NOTIMPL;
}
示例10: GetColorKeyInternal
//
// CFilePlayer::GetColorKeyInternal(): Private method to query the color key
// value from the first input pin of the OverlayMixer.
//
// Also switches that pin to stretched aspect-ratio mode so we don't fight
// the overlay mixer over aspect-ratio correction.
//
// Fixes vs. original: failures of EnumPins() and of the IMixerPinConfig
// QueryInterface were only ASSERTed, then the (garbage/NULL) pointers were
// used anyway — a crash in release builds where ASSERT compiles out. Both
// are now guarded; the color key is only stored when GetColorKey succeeds.
//
HRESULT CFilePlayer::GetColorKeyInternal(IBaseFilter *pOvM)
{
    DbgLog((LOG_TRACE, 5, TEXT("CFilePlayer::GetColorKeyInternal() entered"))) ;
    if (NULL == pOvM)
        return E_INVALIDARG ;

    IEnumPins *pEnumPins ;
    IPin *pPin ;
    ULONG ul ;
    PIN_DIRECTION pd ;
    DWORD dwColorKey ;
    IMixerPinConfig *pMPC ;

    HRESULT hr = pOvM->EnumPins(&pEnumPins) ;
    ASSERT(SUCCEEDED(hr) && pEnumPins) ;
    if (FAILED(hr) || NULL == pEnumPins)  // guard: ASSERT vanishes in release builds
        return hr ;

    while (S_OK == pEnumPins->Next(1, &pPin, &ul) && 1 == ul)  // try all pins
    {
        pPin->QueryDirection(&pd) ;
        if (PINDIR_INPUT == pd)  // only the 1st in pin
        {
            hr = pPin->QueryInterface(IID_IMixerPinConfig, (LPVOID *) &pMPC) ;
            ASSERT(SUCCEEDED(hr) && pMPC) ;
            if (SUCCEEDED(hr) && pMPC)  // guard: don't deref on failure
            {
                hr = pMPC->GetColorKey(NULL, &dwColorKey) ; // just get the physical color
                ASSERT(SUCCEEDED(hr)) ;
                if (SUCCEEDED(hr))
                    SetColorKey(dwColorKey) ;
                // Set mode to stretch - that way we don't fight the overlay
                // mixer about the exact way to fix the aspect ratio
                pMPC->SetAspectRatioMode(AM_ARMODE_STRETCHED);
                pMPC->Release() ;
            }
            pPin->Release() ;  // exiting early; release pin
            break ;            // we are done
        }
        pPin->Release() ;
    }
    pEnumPins->Release() ;     // done with pin enum
    return S_OK ;
}
示例11: eHandler
// Builds a DirectShow capture graph around the first video input device
// (webcam) found: source filter -> auto-rendered decode/render chain, then
// hooks the final renderer's IMemInputPin::Receive so raw frame data can
// be grabbed by this object.
//
// Show  - whether the preview window updates (_UpdateWindow).
// Start - whether capture starts immediately.
//
// NOTE(review): no HRESULT is checked anywhere below — with no camera
// attached, or any failed bind, the NULL pointers are dereferenced.
// Confirm callers guarantee a capture device exists.
Camera::Camera(bool Show,bool Start) : eHandler(this),_realData(false),_UpdateWindow(Show),_LastData(0),_CurData(0) {
DWORD no;
IGraphBuilder *graph = 0;
ctrl = 0;
ICreateDevEnum *devs = 0;
IEnumMoniker *cams = 0;
IMoniker *mon = 0;
IBaseFilter *cam = 0;
IEnumPins *pins = 0;
IPin *pin = 0;
IEnumFilters *fil = 0;
IBaseFilter *rnd = 0;
IMemInputPin *mem = 0;
curCamera = this;
_isOn = Start;
CoCreateInstance( CLSID_FilterGraph, 0, CLSCTX_INPROC,IID_IGraphBuilder, (void **)&graph );
graph->QueryInterface( IID_IMediaControl, (void **)&ctrl );
CoCreateInstance (CLSID_SystemDeviceEnum, 0, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &devs);
devs->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &cams, 0);
cams->Next (1,&mon,0); // get first found capture device (webcam)
mon->BindToObject(0,0,IID_IBaseFilter, (void**)&cam);
graph->AddFilter(cam, L"Capture Source"); // add web cam to graph as source
cam->EnumPins(&pins); // we need output pin to autogenerate rest of the graph
pins->Next(1,&pin, 0); // via graph->Render
graph->Render(pin); // graph builder now builds whole filter chain including MJPG decompression on some webcams
graph->EnumFilters(&fil); // from all newly added filters
fil->Next(1,&rnd,0); // we find last one (renderer)
rnd->EnumPins(&pins); // because data we are intersted in are pumped to renderers input pin
pins->Next(1,&pin, 0); // via Receive member of IMemInputPin interface
pin->QueryInterface(IID_IMemInputPin,(void**)&mem);
DsHook(mem,6,Receive); // so we redirect it to our own proc to grab image data
if (Start) this->Start();
}
示例12: BuildPreview
//......... part of the code omitted here .........
// NOTE(review): this listing is a fragment of a larger function (its
// beginning and end are elided by the source page); only the visible
// portion is documented.
// emt->Release();
// }
// pin->Release();
// pin = NULL;
// }
// }
// RELEASE(ep);
//}
pSrcFilter = p_streams[1].p_device_filter;
// do not render local audio
//hr = p_capture_graph_builder2->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Audio,
// pSrcFilter, NULL, NULL);
//if (FAILED(hr))
//{
// Msg(TEXT("Couldn't render the video capture stream. hr=0x%x\r\n")
// TEXT("The capture device may already be in use by another application.\r\n\r\n")
// TEXT("The sample will now close."), hr);
// pSrcFilter->Release();
// return hr;
//}
// Suggest a 4096-byte allocator buffer on every pin of the audio source
// filter that supports IAMBufferNegotiation (4096 matches the encoder's
// frame length per the inline comment below).
{
IEnumPins *ep;
IPin *pin = NULL;
IAMBufferNegotiation *buffer_negotiation = NULL;
ALLOCATOR_PROPERTIES props = { -1, -1, -1, -1 };
pSrcFilter->EnumPins(&ep);
ep->Reset();
while (SUCCEEDED(hr = ep->Next(1, &pin, 0)) && hr != S_FALSE){
if (pin->QueryInterface(IID_IAMBufferNegotiation, (void **)&buffer_negotiation) == S_OK){
buffer_negotiation->GetAllocatorProperties(&props);
props.cbBuffer = 4096; // set to 4096 byte: acc encode frame length
buffer_negotiation->SuggestAllocatorProperties(&props);
RELEASE(buffer_negotiation);
}
RELEASE(pin);
}
RELEASE(ep);
}
//{
// IEnumPins *ep;
// IPin *inputpin = NULL;
// IPin *voutputpin = NULL;
// IPin *aoutputpin = NULL;
// IPin *pin = NULL;
// bool bFindI420 = false;
// bool bFindPCM = false;
// //pSrcFilter = p_streams[0].p_device_filter;
// pSrcFilter->EnumPins(&ep);
// if (SUCCEEDED(hr)){
// ep->Reset();
// while (SUCCEEDED(hr = ep->Next(1, &pin, 0)) && hr != S_FALSE){
// PIN_DIRECTION pinDir;
// pin->QueryDirection(&pinDir);
// if (pinDir == PINDIR_OUTPUT){
// AM_MEDIA_TYPE *pmt;
// IEnumMediaTypes *emt;
// pin->EnumMediaTypes(&emt);
// while (hr = emt->Next(1, &pmt, NULL), hr != S_FALSE){
示例13: AssembeAudioBranch
// Assembles the audio conversion branch of the FLV graph: determines the
// source sample count, inserts the custom AudioSkipper filter (to drop the
// intervals deleted during editing), and — when the source is not already
// MP3 at 11025 Hz — inserts an ACM Wrapper configured toward the target
// MP3 format.
// NOTE(review): the end of this function is elided in the source listing;
// only the visible portion is documented.
HRESULT CFLVConverter::AssembeAudioBranch(const CParameters & param, IPin* pSrc, IPin* pDest)
{
HRESULT hr=S_OK;
IPin* pEndPin = NULL;
AM_MEDIA_TYPE mtSrc;
AM_MEDIA_TYPE *pmtWrp = NULL;
WE_WAVEFORMATEX* pwfxSrc = NULL;
WE_WAVEFORMATEX* pwfxWrp = NULL;
ULONG ulFetched = 0;
if(!pSrc || !pDest)
return E_POINTER;
// Target format: MP3, mono, 11025 Hz (m_wfx doubles as the header of a
// MPEGLAYER3WAVEFORMAT, filled in via pMp3Format below).
m_wfx.wFormatTag = WAVE_FORMAT_MPEGLAYER3;
m_wfx.nChannels = 1;
m_wfx.nSamplesPerSec = 11025;
m_wfx.wBitsPerSample = 0;
m_wfx.nBlockAlign = 1;
m_wfx.nAvgBytesPerSec = 2500;
m_wfx.cbSize = MPEGLAYER3_WFX_EXTRA_BYTES;
MPEGLAYER3WAVEFORMAT* pMp3Format = (MPEGLAYER3WAVEFORMAT*) &m_wfx;
pMp3Format->wID = MPEGLAYER3_ID_MPEG;
pMp3Format->fdwFlags = MPEGLAYER3_FLAG_PADDING_ON;//MPEGLAYER3_FLAG_PADDING_OFF;
pMp3Format->nBlockSize = 132;
pMp3Format->nFramesPerBlock = 1;
pMp3Format->nCodecDelay = 1393;
// Measure the source duration in samples (stored in m_llAudioSamplesCount)
// so the AudioSkipper can map deleted intervals onto sample positions.
CComPtr<IMediaSeeking> cpMediaSeeking;
hr = pSrc->QueryInterface(IID_IMediaSeeking, (void **)&cpMediaSeeking);
if(SUCCEEDED(hr))
{
hr = cpMediaSeeking->SetTimeFormat(&TIME_FORMAT_SAMPLE);
if(SUCCEEDED(hr))
{
cpMediaSeeking->GetDuration(&m_llAudioSamplesCount);
}
}
// pEndPin tracks the current tail of the chain; JoinFilterToChain advances it.
pEndPin = pSrc;
pEndPin->AddRef();
CComPtr<IBaseFilter> cpAudioSkipFilter;
hr = CreateAndInsertFilter(pGB, CLSID_CAudioSkipper, &cpAudioSkipFilter, L"AudioSkipper");
if(SUCCEEDED(hr))
{
hr = JoinFilterToChain(pGB, cpAudioSkipFilter, &pEndPin);
CComPtr<IAudioSkip> cpAudioSkip;
hr = cpAudioSkipFilter->QueryInterface(IID_IAudioSkip, (void**) &cpAudioSkip);
if(SUCCEEDED(hr))
{
cpAudioSkip->SetSamplesCount(m_llVideoFramesCount, m_llAudioSamplesCount);
cpAudioSkip->SetIntervals((void*)param.GetAllDeletedInterval(), (void*)param.GetAudioDeletedInterval());
}
}
if(FAILED(hr))
{
SAFE_RELEASE(pEndPin);
return hr;
}
// If the source format already matches the target (MP3 @ 11025 Hz) the
// ACM Wrapper below is skipped entirely.
hr = pSrc->ConnectionMediaType(&mtSrc);
if(SUCCEEDED(hr))
{
pwfxSrc = (WE_WAVEFORMATEX*) mtSrc.pbFormat;
if((pwfxSrc->wFormatTag!=m_wfx.wFormatTag) || (pwfxSrc->nSamplesPerSec!=m_wfx.nSamplesPerSec))
{
CComPtr<IBaseFilter> cpAcmWrapper = NULL;
hr = CoCreateInstance(CLSID_ACMWrapper, NULL, CLSCTX_INPROC, IID_IBaseFilter, (LPVOID *)&cpAcmWrapper);
if(SUCCEEDED(hr))
{
hr = JoinFilterToChain(pGB, cpAcmWrapper, L"ACM Wrapper", &pEndPin);
if(SUCCEEDED(hr))
{
CComPtr<IAMStreamConfig> cpAudioStreamConfig;
hr = pEndPin->QueryInterface(IID_IAMStreamConfig,(void **)&cpAudioStreamConfig);
if(SUCCEEDED(hr))
{
hr = cpAudioStreamConfig->GetFormat(&pmtWrp);
if(SUCCEEDED(hr))
{
pwfxWrp = (WE_WAVEFORMATEX*) pmtWrp->pbFormat;
if(WAVE_FORMAT_PCM!=m_wfx.wFormatTag)
{
// Ask ACM for a suggested intermediate PCM format matching the
// target's channel count.
WAVEFORMATEX wfxSrs;
wfxSrs.nChannels = m_wfx.nChannels;
wfxSrs.wFormatTag = WAVE_FORMAT_PCM;
MMRESULT mmr = acmFormatSuggest(NULL, &m_wfx, &wfxSrs, sizeof(wfxSrs),ACM_FORMATSUGGESTF_NCHANNELS| ACM_FORMATSUGGESTF_WFORMATTAG);
if(MMSYSERR_NOERROR==mmr)
{
pwfxWrp->nChannels = wfxSrs.nChannels;
pwfxWrp->nSamplesPerSec = wfxSrs.nSamplesPerSec;
pwfxWrp->nAvgBytesPerSec = wfxSrs.nAvgBytesPerSec;
pwfxWrp->nBlockAlign = wfxSrs.nBlockAlign;
pwfxWrp->wBitsPerSample = wfxSrs.wBitsPerSample;
}
}
else
{
pwfxWrp->nChannels = m_wfx.nChannels;
pwfxWrp->nSamplesPerSec = m_wfx.nSamplesPerSec;
//......... part of the code omitted here .........
示例14: LoadFilters
//......... part of the code omitted here .........
// NOTE(review): this listing is a fragment of a larger function (its
// beginning and end are elided by the source page); only the visible
// portion is documented. colorType, expectedMediaType, devicePin, config,
// err and the goto target cleanFinish are declared in the elided part.
colorType = DeviceOutputType_HDYC;
else
{
colorType = DeviceOutputType_RGB;
expectedMediaType = MEDIASUBTYPE_RGB32;
}
// Non-RGB device output needs a color-space conversion pixel shader.
strShader = ChooseShader();
if(strShader.IsValid())
colorConvertShader = CreatePixelShaderFromFile(strShader);
if(colorType != DeviceOutputType_RGB && !colorConvertShader)
{
AppWarning(TEXT("DShowPlugin: Could not create color space conversion pixel shader"));
goto cleanFinish;
}
// Planar YUV formats are repacked on worker threads.
if(colorType == DeviceOutputType_YV12 || colorType == DeviceOutputType_I420)
{
for(int i=0; i<numThreads; i++)
hConvertThreads[i] = OSCreateThread((XTHREAD)PackPlanarThread, convertData+i);
}
//------------------------------------------------
// set chroma details
keyBaseColor = Color4().MakeFromRGBA(keyColor);
Matrix4x4TransformVect(keyChroma, (colorType == DeviceOutputType_HDYC) ? (float*)yuv709Mat : (float*)yuvMat, keyBaseColor);
keyChroma *= 2.0f;
//------------------------------------------------
// configure video pin
if(FAILED(err = devicePin->QueryInterface(IID_IAMStreamConfig, (void**)&config)))
{
AppWarning(TEXT("DShowPlugin: Could not get IAMStreamConfig for device pin, result = %08lX"), err);
goto cleanFinish;
}
// Push the chosen resolution/frame rate into a copy of the best media type.
AM_MEDIA_TYPE outputMediaType;
CopyMediaType(&outputMediaType, bestOutput->mediaType);
VIDEOINFOHEADER *vih = reinterpret_cast<VIDEOINFOHEADER*>(outputMediaType.pbFormat);
BITMAPINFOHEADER *bmi = GetVideoBMIHeader(&outputMediaType);
vih->AvgTimePerFrame = frameInterval;
bmi->biWidth = renderCX;
bmi->biHeight = renderCY;
bmi->biSizeImage = renderCX*renderCY*(bmi->biBitCount>>3);
// E_NOTIMPL is tolerated: some devices don't support SetFormat at all.
if(FAILED(err = config->SetFormat(&outputMediaType)))
{
if(err != E_NOTIMPL)
{
AppWarning(TEXT("DShowPlugin: SetFormat on device pin failed, result = %08lX"), err);
goto cleanFinish;
}
}
FreeMediaType(outputMediaType);
//------------------------------------------------
// get audio pin configuration, optionally configure audio pin to 44100
GUID expectedAudioType;
if(soundOutputType == 1)
示例15: GetSupportedFormats
// Enumerates the output formats supported by the audio compressor whose
// friendly name equals m_sAudComp.
//
// listFormats - receives heap-allocated copies of each WAVEFORMATEX
//               header advertised by the compressor's pins; ownership of
//               the copies passes to the caller (delete each entry).
//
// Returns the last HRESULT produced while walking the device/pin chain.
//
// Fixes vs. original: (1) the function fell off the end of a non-void
// function — undefined behavior — so a final return was added; (2) a
// pSCC buffer was allocated with new[] but never used, freed with scalar
// delete on one path and leaked on the other — removed; (3) on a
// friendly-name match the loop broke out without releasing pPropBag or
// clearing the VARIANT — both are now always released; (4) the VARIANT's
// VT_BSTR value is read via bstrVal instead of casting pbstrVal.
HRESULT CAudioCompressorFormats::GetSupportedFormats(std::vector<WAVEFORMATEX*>& listFormats)
{
    CStringW swDeviceName(m_sAudComp);
    HRESULT hr = m_pSysDevEnum->CreateClassEnumerator(CLSID_AudioCompressorCategory, &m_pEnumCat, 0);
    if (NULL == m_pEnumCat)
        return E_POINTER;

    if (S_OK == hr)
    {
        // Find the moniker whose FriendlyName matches the requested compressor.
        ULONG cFetched;
        while (m_pEnumCat->Next(1, &m_pMoniker, &cFetched) == S_OK)
        {
            BOOL bMatch = FALSE;
            IPropertyBag *pPropBag;
            hr = m_pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
                (void **)&pPropBag);
            if (SUCCEEDED(hr))
            {
                VARIANT varName;
                VariantInit(&varName);
                hr = pPropBag->Read(L"FriendlyName", &varName, 0);
                if (SUCCEEDED(hr))
                {
                    // Read() returns a VT_BSTR; compare via bstrVal.
                    if (wcscmp(varName.bstrVal, swDeviceName.GetBuffer()) == 0)
                    {
                        m_pMoniker->AddRef();   // keep the matched moniker alive in the member
                        bMatch = TRUE;
                    }
                }
                VariantClear(&varName);
                pPropBag->Release();
            }
            if (bMatch)
                break;                          // matched moniker retained by the AddRef above
            m_pMoniker->Release();
        }
    }

    if (m_pMoniker)
    {
        IBaseFilter *pFilter = 0;
        hr = m_pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pFilter);
        if (SUCCEEDED(hr))
        {
            IEnumPins *pEnum = NULL;
            hr = pFilter->EnumPins(&pEnum);
            if (SUCCEEDED(hr))
            {
                IPin *pPin = NULL;
                while (S_OK == pEnum->Next(1, &pPin, NULL))
                {
                    IAMStreamConfig *pConf;
                    hr = pPin->QueryInterface(IID_IAMStreamConfig, (void**)&pConf);
                    if (SUCCEEDED(hr))
                    {
                        int iCount = 0, iSize = 0;
                        hr = pConf->GetNumberOfCapabilities(&iCount, &iSize);
                        // Only the audio capabilities layout is understood here.
                        if (SUCCEEDED(hr) && iSize == sizeof(AUDIO_STREAM_CONFIG_CAPS))
                        {
                            for (int iFormat = 0; iFormat < iCount; iFormat++)
                            {
                                AUDIO_STREAM_CONFIG_CAPS scc;
                                AM_MEDIA_TYPE *pmtConfig;
                                hr = pConf->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
                                if (SUCCEEDED(hr))
                                {
                                    if (pmtConfig->formattype == FORMAT_WaveFormatEx)
                                    {
                                        // Copy the WAVEFORMATEX header for the caller.
                                        // (Any cbSize extra bytes beyond the struct are not copied.)
                                        WAVEFORMATEX *pFormat = new WAVEFORMATEX(*(reinterpret_cast<WAVEFORMATEX*>(pmtConfig->pbFormat)));
                                        if (pFormat)
                                        {
                                            listFormats.push_back(pFormat);
                                        }
                                    }
                                    FreeMediaType(*pmtConfig);
                                    CoTaskMemFree(pmtConfig);
                                }
                            }
                        }
                        pConf->Release();
                    }
                    pPin->Release();
                }
                pEnum->Release();
            }
            pFilter->Release();
        }
    }
    return hr;   // original fell off the end here (UB for a non-void function)
}