本文整理汇总了C++中IAMStreamConfig类的典型用法代码示例。如果您正苦于以下问题:C++ IAMStreamConfig类的具体用法?C++ IAMStreamConfig怎么用?C++ IAMStreamConfig使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了IAMStreamConfig类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: SetCaptureSize
// Sets the capture resolution and frame interval on a capture/preview output pin.
//
// @param capPreviewOutputPin  output pin exposing IAMStreamConfig
// @param width/height         desired frame dimensions in pixels
// @param avgTimePerFrame      frame interval in 100-ns units (REFERENCE_TIME)
// @return S_OK on success, or the failing HRESULT from QueryInterface /
//         GetStreamCaps / SetFormat.
HRESULT SetCaptureSize(IPin* capPreviewOutputPin, int width, int height, int avgTimePerFrame)
{
    HRESULT hr = S_OK;
    IAMStreamConfig *streamConfig = NULL;
    hr = capPreviewOutputPin->QueryInterface(IID_IAMStreamConfig, (void**)&streamConfig);
    if (FAILED(hr))
    {
        ErrorPrint("Get stream config interface error", hr);
        return hr;
    }
    AM_MEDIA_TYPE *mediaType = NULL;
    VIDEO_STREAM_CONFIG_CAPS configCaps;
    hr = streamConfig->GetStreamCaps(0, &mediaType, (BYTE*)&configCaps);
    if (FAILED(hr))
    {
        ErrorPrint("Get stream caps error");
        streamConfig->Release(); // BUGFIX: interface was leaked on this error path
        return hr;
    }
    // Patch the first advertised format with the requested geometry/frame rate.
    VIDEOINFOHEADER* videoHeader = (VIDEOINFOHEADER*)mediaType->pbFormat;
    videoHeader->bmiHeader.biWidth = width;
    videoHeader->bmiHeader.biHeight = height;
    videoHeader->bmiHeader.biSizeImage = DIBSIZE(videoHeader->bmiHeader);
    videoHeader->AvgTimePerFrame = avgTimePerFrame;
    // BUGFIX: SetFormat result was silently discarded; propagate it to callers.
    hr = streamConfig->SetFormat(mediaType);
    if (FAILED(hr))
        ErrorPrint("Set format error", hr);
    DeleteMediaType(mediaType);
    streamConfig->Release(); // BUGFIX: interface was leaked on the success path
    return hr;
}
示例2: BindFilter
// Enumerates, for every detected capture device, the resolutions/formats its
// capture pin advertises, and stores them in devices_resolutions[].
// NOTE(review): the previous contents of devices_resolutions are overwritten
// without being freed — possible leak if called twice; confirm ownership.
void VideoCapture::EnumResolutions()
{
int iCount, iSize, iChosen=-1;
IBaseFilter *pSource;
CComPtr <ICaptureGraphBuilder2> pCaptB;
VIDEO_STREAM_CONFIG_CAPS caps;
HRESULT hr;
bool response;
IAMStreamConfig *pConfig;
// One DeviceResolutions slot per device discovered earlier (nDevices is a member).
devices_resolutions = new DeviceResolutions[nDevices];
pCaptB.CoCreateInstance(CLSID_CaptureGraphBuilder2);
for (unsigned int iDevice=0; iDevice<nDevices; iDevice++)
{
// NOTE(review): `response` is never checked; if BindFilter fails, pSource may
// be invalid when passed to FindInterface below — confirm BindFilter's contract.
response = BindFilter(iDevice, &pSource);
// Locate the IAMStreamConfig interface on the device's capture pin.
hr = pCaptB->FindInterface(
&PIN_CATEGORY_CAPTURE,
&MEDIATYPE_Video,
pSource,
IID_IAMStreamConfig,
(void**)&pConfig);
if (!SUCCEEDED(hr))
{
// No stream config on this device: record zero resolutions and move on.
pSource->Release();
devices_resolutions[iDevice].nResolutions = 0;
continue;
}
pConfig->GetNumberOfCapabilities(&iCount, &iSize);
devices_resolutions[iDevice].SetNResolutions(iCount);
// Walk each advertised capability and record its size, subtype and compression.
for(int i=0; i < iCount; i++) {
AM_MEDIA_TYPE *pmt;
if( pConfig->GetStreamCaps(i, &pmt, reinterpret_cast<BYTE*>(&caps)) == S_OK ) {
VIDEOINFOHEADER *pVih =
reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
devices_resolutions[iDevice].x[i] = caps.InputSize.cx;
devices_resolutions[iDevice].y[i] = caps.InputSize.cy;
devices_resolutions[iDevice].color_space[i] = pmt->subtype;
devices_resolutions[iDevice].compression[i] = pVih->bmiHeader.biCompression;
// Caller owns the media type returned by GetStreamCaps; free it.
DeleteMediaType(pmt);
}
}
pSource->Release();
pConfig->Release();
pSource = 0;
}
}
示例3: __CONTEXT
// Applies a new capture frame rate FR (frames/second) to the channel.
// For CAM devices this requires stopping the graph, disconnecting the camera
// output pin, pushing the new format via IAMStreamConfig, then reconnecting
// and restarting — the statement order below is load-bearing.
// Returns the last DirectShow HRESULT observed (0/S_OK if nothing ran).
HRESULT
recChannel_t::set_rate(float FR)
{
__CONTEXT("recChannel_t::set_rate");
// Rates below 1 fps are ignored (treated as a no-op success).
if (FR<1)
{
return S_OK;
}
// frameRate is stored as a factor relative to 30 fps, clamped at 0.1.
float factorRate = FR/30;
int hr = 0;
if (factorRate<0.1) factorRate = 0.1;
frameRate = factorRate;
IAMStreamConfig *pConfig = NULL;
if ((camInfo->getKind() == SHARED ||
camInfo->getKind() == CAM) &&
actualFormat.pbFormat != NULL)
{
// Rewrite the cached format's frame interval (100-ns units per frame).
VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) actualFormat.pbFormat;
double newFR = 10000000.0/FR;
pVih->AvgTimePerFrame = newFR;
camInfo->setRate(pVih->AvgTimePerFrame);
if (camInfo->getKind() == CAM)
{
// Remember the downstream pin so the connection can be restored afterwards.
IPin * pInput = NULL;
get_camInfo()->output->ConnectedTo(&pInput);
// Stop the graph before touching pin connections (required by DirectShow).
if (mapping)
{
pControl->Stop();
}
if (pInput)
{
get_camInfo()->output->Disconnect();
pInput->Disconnect();
}
hr = get_camInfo()->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
if (pConfig)
{
// NOTE(review): inner `hr` shadows the outer one, so a SetFormat failure
// is reported via errorCheck but not returned to the caller — confirm intent.
int hr = pConfig->SetFormat(&actualFormat);
errorCheck(hr);
pConfig->Release();
}
// Reconnect the camera output to its previous downstream pin.
if (pInput)
{
hr = pGraph->Connect(get_camInfo()->output,pInput);
errorCheck(hr);
}
errorCheck(hr);
// Resume the graph if it was running before.
if (mapping)
{
pControl->Run();
}
}
}
return hr;
}
示例4: listGUIDS
// Fills `guids` with the media subtype GUIDs of every video format the capture
// device advertises through IAMStreamConfig.
//
// @param guids  cleared, then populated with one subtype GUID per video capability
// @return true on success; false (with error string set) if the stream-config
//         interface or its capability list cannot be obtained.
bool MIPDirectShowCapture::listGUIDS(std::list<GUID> &guids)
{
    guids.clear();

    HRESULT hr;
    IAMStreamConfig *pConfig = 0;
    hr = m_pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, 0, m_pCaptDevice, IID_IAMStreamConfig, (void**)&pConfig);
    if (HR_FAILED(hr))
    {
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECONFIG);
        return false;
    }

    int count = 0;
    int s = 0;
    hr = pConfig->GetNumberOfCapabilities(&count, &s);
    if (HR_FAILED(hr))
    {
        pConfig->Release();
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECAPS);
        return false;
    }

    if (s != sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        pConfig->Release();
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_INVALIDCAPS);
        return false;
    }

    for (int i = 0; i < count; i++)
    {
        VIDEO_STREAM_CONFIG_CAPS caps;
        AM_MEDIA_TYPE *pMediaType;

        hr = pConfig->GetStreamCaps(i, &pMediaType, (BYTE*)&caps);
        if (HR_SUCCEEDED(hr))
        {
            if (pMediaType->majortype == MEDIATYPE_Video)
                guids.push_back(pMediaType->subtype);

            // BUGFIX: the AM_MEDIA_TYPE returned by GetStreamCaps is owned by
            // the caller and was leaked every iteration. Free it the same way
            // the DirectShow DeleteMediaType helper does.
            if (pMediaType->cbFormat != 0 && pMediaType->pbFormat != NULL)
                CoTaskMemFree(pMediaType->pbFormat);
            if (pMediaType->pUnk != NULL)
                pMediaType->pUnk->Release();
            CoTaskMemFree(pMediaType);
        }
    }

    pConfig->Release(); // BUGFIX: interface was leaked on the success path
    return true;
}
示例5: GetOutputPin
/// Configures the audio capture format (sample rate, channel count, sample size).
/// If the capture filter already exists, the format is applied directly to its
/// output pin via IAMBufferNegotiation + IAMStreamConfig; otherwise the values
/// are only cached in members — presumably applied when the filter is created
/// later (TODO confirm against the filter-creation path).
/// Returns TRUE on success (or when only caching), FALSE when the pin/interface
/// plumbing fails.
BOOL CAudioCapture::SetAudioFormat(ENUM_FREQUENCY_TYPE enFrequency,
ENUM_CHANNEL_TYPE enChannel, ENUM_SAMPLE_TYPE enSample)
{
if(NULL != m_pCaptureFilter)
{
BOOL bResult = FALSE;
do
{
IPin* pOutPin = GetOutputPin(m_pCaptureFilter, (uint16_t)0);
if(NULL != pOutPin)
{
IAMBufferNegotiation *pNeg = NULL;
IAMStreamConfig *pCfg = NULL;
// Get buffer negotiation interface
HRESULT hr = pOutPin->QueryInterface(IID_IAMBufferNegotiation, (void **)&pNeg);
if (FAILED(hr))
{
pOutPin->Release();
break;
}
// Find number of bytes in one second
long lBytesPerSecond = (long) (enSample * enFrequency * enChannel);
// Buffer size tuned for the FAAC encoder (1024 samples per AAC frame).
long lBufferSize = 1024 * enSample * enChannel;
// Set the buffer size based on selected settings
ALLOCATOR_PROPERTIES prop={0};
prop.cbBuffer = lBufferSize;
prop.cBuffers = 6;
prop.cbAlign = enSample * enChannel;
hr = pNeg->SuggestAllocatorProperties(&prop);
pNeg->Release();
// Now set the actual format of the audio data
hr = pOutPin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
if (FAILED(hr))
{
pOutPin->Release();
break;
}
// Read current media type/format
AM_MEDIA_TYPE *pmt={0};
hr = pCfg->GetFormat(&pmt);
if (SUCCEEDED(hr))
{
// Fill in values for the new format
WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
pWF->nChannels = (WORD) enChannel;
pWF->nSamplesPerSec = enFrequency;
pWF->nAvgBytesPerSec = lBytesPerSecond;
pWF->wBitsPerSample = (WORD) (enSample * 8);
pWF->nBlockAlign = (WORD) (enSample * enChannel);
// Set the new formattype for the output pin
// NOTE(review): SetFormat's HRESULT is discarded and bResult is still set
// to TRUE below even if it fails — confirm this best-effort behavior.
hr = pCfg->SetFormat(pmt);
UtilDeleteMediaType(pmt);
}
// Release interfaces
pCfg->Release();
pOutPin->Release();
bResult = TRUE;
}
}while(FALSE);
return bResult;
}
else
{
// Filter not created yet: just remember the requested format.
m_enFrequency = enFrequency;
m_enChannel = enChannel;
m_enSample = enSample;
return TRUE;
}
}
示例6: GetPin
// Forces the audio capture pin to a fixed low-bandwidth format:
// mono, 8-bit, 11025 Hz PCM. The previous nAvgBytesPerSec is kept in
// dwBytesPerSec for the (currently disabled) allocator-sizing code below.
// NOTE(review): pCapturePin returned by GetPin() is never Released here —
// confirm whether GetPin() AddRef's the pin or returns a cached pointer.
void CCaptureDevice::SetCaptureBufferSize(void)
{
IPin * pCapturePin = GetPin();
if (pCapturePin)
{
DWORD dwBytesPerSec = 0;
AM_MEDIA_TYPE * pmt = {0};
IAMStreamConfig * pCfg = NULL;
HRESULT hr = pCapturePin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
if ( hr==S_OK )
{
// Read the pin's current format, then overwrite the WAVEFORMATEX fields.
hr = pCfg->GetFormat(&pmt);
if ( hr==S_OK )
{
WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
dwBytesPerSec = pWF->nAvgBytesPerSec;
pWF->nChannels = 1;
pWF->wBitsPerSample = 8;
pWF->nSamplesPerSec = 11025;
pWF->nAvgBytesPerSec = pWF->nSamplesPerSec * pWF->nChannels * pWF->wBitsPerSample / 8;
// nBlockAlign must equal nChannels * wBitsPerSample / 8 (= 1 here).
pWF->nBlockAlign = 1;
/*
info.cbSize = sizeof(WAVEFORMATEX);
info.wFormatTag = 1;
info.nChannels = 2;
info.nSamplesPerSec = 44100;
//info.nSamplesPerSec = 22050;
11025
info.wBitsPerSample = 16;
info.nAvgBytesPerSec = info.nSamplesPerSec * info.nChannels * info.wBitsPerSample / 8;
info.nBlockAlign = 4;
*/
// NOTE(review): SetFormat's HRESULT is ignored; failure is silent.
pCfg->SetFormat( pmt );
DeleteMediaType(pmt);
}
pCfg->Release();
}
/* if (dwBytesPerSec)
{
IAMBufferNegotiation * pNeg = NULL;
hr = pCapturePin->QueryInterface(IID_IAMBufferNegotiation,
(void **)&pNeg);
if (SUCCEEDED(hr))
{
ALLOCATOR_PROPERTIES AllocProp;
AllocProp.cbAlign = -1; // -1 means no preference.
AllocProp.cbBuffer = dwBytesPerSec * dwLatencyInMilliseconds / 1000;
AllocProp.cbPrefix = -1;
AllocProp.cBuffers = -1;
hr = pNeg->SuggestAllocatorProperties(&AllocProp);
pNeg->Release();
}
}*/
}
}
示例7: while
// Initializes the video window: tries to select a camera media type matching
// width x height, falls back to the pin's current/default format, records the
// resulting dimensions in m_nAncho/m_nAlto, and positions the renderer inside
// hWnd's client area.
// @return HRESULT of the final SetVideoPosition call.
HRESULT Captura::IniciarVentanaVideo(HWND hWnd,int width, int height)
{
    HRESULT hr;
    RECT rcDest;
    // CComPtr<IAMStreamConfig> pConfig;
    IAMStreamConfig * pConfig = NULL;
    IEnumMediaTypes *pMedia;
    AM_MEDIA_TYPE *pmt = NULL, *pfnt = NULL;

    hr = m_pCamOutPin->EnumMediaTypes( &pMedia );
    if(SUCCEEDED(hr))
    {
        // Scan the pin's media types for one whose dimensions match the request.
        while(pMedia->Next(1, &pmt, 0) == S_OK)
        {
            if( pmt->formattype == FORMAT_VideoInfo )
            {
                VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->pbFormat;
                if( vih->bmiHeader.biWidth == width && vih->bmiHeader.biHeight == height )
                {
                    pfnt = pmt;
                    break;
                }
            }
            // BUGFIX: non-VideoInfo types were never freed (leaked each pass).
            BorrarTipoMedio( pmt );
        }
        pMedia->Release();
    }

    hr = m_pCamOutPin->QueryInterface( IID_IAMStreamConfig, (void **) &pConfig );
    if(SUCCEEDED(hr))
    {
        // Apply the matching format if one was found.
        if( pfnt != NULL )
        {
            hr=pConfig->SetFormat( pfnt );
            BorrarTipoMedio( pfnt );
        }
        // Read back whatever format is now active to learn the real dimensions.
        hr = pConfig->GetFormat( &pfnt );
        if(SUCCEEDED(hr))
        {
            m_nAncho = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biWidth;
            m_nAlto = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biHeight;
            BorrarTipoMedio( pfnt );
        }
        pConfig->Release(); // BUGFIX: interface was leaked
    }
    else if( pfnt != NULL )
    {
        BorrarTipoMedio( pfnt ); // BUGFIX: leaked when QueryInterface failed
    }

    // Size the video renderer to fill the window's client area.
    ::GetClientRect (hWnd,&rcDest);
    hr = m_pWC->SetVideoPosition(NULL, &rcDest);
    return hr;
}
示例8: while
/* 设置捕获图像帧的格式,遍历所有格式是否有预定格式,若没有则以默认格式捕获 */
HRESULT CVMR_Capture::InitVideoWindow(HWND hWnd,int width, int height)
{
HRESULT hr;
RECT rcDest;
IAMStreamConfig *pConfig;
IEnumMediaTypes *pMedia;
AM_MEDIA_TYPE *pmt = NULL, *pfnt = NULL;
hr = m_pCamOutPin->EnumMediaTypes( &pMedia );
if(SUCCEEDED(hr))
{
//把所有视频的所有格式遍历一遍,看是否有预定的格式
while(pMedia->Next(1, &pmt, 0) == S_OK)
{
if( pmt->formattype == FORMAT_VideoInfo )
{
VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->pbFormat;
// 当前的格式是否与预定格式相同,即宽和高相同
if( vih->bmiHeader.biWidth == width && vih->bmiHeader.biHeight == height )
{
pfnt = pmt;
break;
}
DeleteMediaType( pmt );
}
}
pMedia->Release();
}
hr = m_pCamOutPin->QueryInterface( IID_IAMStreamConfig, (void **) &pConfig );
if(SUCCEEDED(hr))
{
// 有预定的格式
if( pfnt != NULL )
{
hr=pConfig->SetFormat( pfnt );
DeleteMediaType( pfnt );
}
// 没有预定的格式,读取缺省媒体格式
hr = pConfig->GetFormat( &pfnt );
if(SUCCEEDED(hr))
{
m_nWidth = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biWidth; //读取高
m_nHeight = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biHeight; //读取宽
DeleteMediaType( pfnt );
}
}
// 获取传入窗口的区域,以设置显示窗口
::GetClientRect (hWnd,&rcDest);
hr = m_pWC->SetVideoPosition(NULL, &rcDest);
return hr;
}
示例9: initSupportedFormats
void DSCaptureDevice::initSupportedFormats()
{
HRESULT ret;
IAMStreamConfig* streamConfig = NULL;
AM_MEDIA_TYPE* mediaType = NULL;
ret = m_captureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
m_srcFilter, IID_IAMStreamConfig, (void**)&streamConfig);
/* get to find all supported formats */
if(!FAILED(ret))
{
int nb = 0;
int size = 0;
BYTE* allocBytes = NULL;
streamConfig->GetNumberOfCapabilities(&nb, &size);
allocBytes = new BYTE[size];
for(int i = 0 ; i < nb ; i++)
{
if(streamConfig->GetStreamCaps(i, &mediaType, allocBytes) == S_OK)
{
struct DSFormat format;
VIDEOINFOHEADER* hdr = (VIDEOINFOHEADER*)mediaType->pbFormat;
if(hdr)
{
format.height = hdr->bmiHeader.biHeight;
format.width = hdr->bmiHeader.biWidth;
format.pixelFormat = mediaType->subtype.Data1;
format.mediaType = mediaType->subtype;
m_formats.push_back(format);
}
}
}
delete allocBytes;
}
}
示例10: SelectMediaType
// Picks the preferred media type (via SelectMediaType) and, when it is a
// VIDEOINFOHEADER format, rewrites it to the preferred resolution at 20 fps
// before pushing it to the device with IAMStreamConfig::SetFormat.
// NOTE(review): the SetFormat HRESULT is stored in hr but never checked, and
// pCfg from GetStreamConfig() is not Released — presumably GetStreamConfig()
// returns a cached, non-owning pointer (SelectMediaType does the same); confirm.
void CCaptureDevice::AdjustOutput(void)
{
HRESULT hr = S_OK;
AM_MEDIA_TYPE * pmt = NULL;
// AvgTimePerFrame is in 100-ns units: 2,000,000 * 100ns = 50 ms per frame.
LONGLONG avgTimePerFrame = 2000000; // 20fps
pmt = SelectMediaType();
if (pmt)
{
if (pmt->formattype == FORMAT_VideoInfo)
{
VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) pmt->pbFormat;
pvi->AvgTimePerFrame = avgTimePerFrame;
pvi->bmiHeader.biWidth = Preferred_Width;
pvi->bmiHeader.biHeight = Preferred_Height;
// Recompute the image buffer size for the new dimensions.
pvi->bmiHeader.biSizeImage = Preferred_Width * Preferred_Height * pvi->bmiHeader.biBitCount / 8;
IAMStreamConfig * pCfg = GetStreamConfig();
hr = pCfg->SetFormat(pmt);
}
// We own the media type returned by SelectMediaType; free it.
DeleteMediaType(pmt);
}
}
示例11: RX_ERROR
// Selects the stream-caps entry identified by cap.index on the device filter.
// Succeeds only when the entry exists, is a VIDEOINFOHEADER video format whose
// subtype matches cap.pixel_format, and IAMStreamConfig::SetFormat accepts it.
// @return true on success, false on any validation or DirectShow failure.
bool VideoCaptureDirectShow2::setDeviceFilterMediaType(ICaptureGraphBuilder2* captureBuilder, IBaseFilter* deviceFilter, AVCapability cap) {
  if(!captureBuilder) {
    RX_ERROR("Cannot set device filter media type because the given ICaptureGraphBuilder* is invalid");
    return false;
  }
  if(!deviceFilter) {
    RX_ERROR("Cannot set the media type for the device filter because the device filter is invalid");
    return false;
  }
  if(cap.index < 0) {
    RX_ERROR("Cannot set the media type for the device filter because the given AVCapability has not index. Iterate over the stream caps to retrieve the caps index that we need");
    return false;
  }

  IAMStreamConfig* conf = NULL;
  HRESULT hr = captureBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, deviceFilter, IID_IAMStreamConfig, (void**)&conf);
  if(FAILED(hr)) {
    RX_ERROR("Failed to retrieve a IAMStreamConfig to set the device filter media type");
    return false;
  }

  bool result = true;
  // BUGFIX: mt was uninitialized; when GetStreamCaps failed, the `goto done`
  // path called deleteMediaType() on a garbage pointer.
  AM_MEDIA_TYPE* mt = NULL;
  VIDEO_STREAM_CONFIG_CAPS caps;
  hr = conf->GetStreamCaps(cap.index, &mt, (BYTE*)&caps);
  if(FAILED(hr)) {
    RX_ERROR("Failed to retrieve the AM_MEDIA_TYPE for the AVCapabiltiy with stream caps index: %d", cap.index);
    result = false;
    goto done;
  }
  if(mt->majortype != MEDIATYPE_Video) {
    RX_ERROR("The AM_MEDIA_TYPE we found is not an Video type so we cannot use it to set the media format of the device filter");
    result = false;
    goto done;
  }
  if(mt->formattype != FORMAT_VideoInfo) {
    RX_ERROR("The AM_MEDIA_TYPE we found is not a Format_VideoInfo, so cannot set media type of device filter");
    result = false;
    goto done;
  }
  if(mt->cbFormat < sizeof(VIDEOINFOHEADER)) {
    RX_ERROR("The AMD_MEDIA_TYPE has an invalid cbFormat size");
    result = false;
    goto done;
  }
  if(mt->pbFormat == NULL) {
    RX_ERROR("The AM_MEDIA_TYPE.pbFormat is NULL; cannot set type of device filter");
    result = false;
    goto done;
  }
  // BUGFIX: the GUID local was declared after earlier `goto done` statements,
  // whose jumps skipped its initialization (ill-formed C++); compare inline.
  if(mt->subtype != libavPixelFormatToMediaSubTypeGUID(cap.pixel_format)) {
    RX_ERROR("The AM_MEDIA_TYPE.subtype is not the same as the one we want..");
    result = false;
    goto done;
  }
  hr = conf->SetFormat(mt);
  if(FAILED(hr)) {
    RX_ERROR("Failed to set the AM_MEDIA_TYPE for the device filter");
    result = false;
    goto done;
  }

 done:
  if(mt != NULL) {
    deleteMediaType(mt);
  }
  safeReleaseDirectShow(&conf);
  return result;
}
示例12: GetStreamConfig
// Chooses the device's media type by a fixed preference order and returns a
// freshly allocated AM_MEDIA_TYPE the caller must free with DeleteMediaType.
// Falls back to the device's current format when no recognized subtype exists.
// @return the selected media type, or NULL if the final retrieval fails.
AM_MEDIA_TYPE * CCaptureDevice::SelectMediaType(void)
{
    // Preferred sequence: UYVY, YUY2, RGB565, RGB555, RGB24, RGB32
    VIDEO_STREAM_CONFIG_CAPS pSCC;
    AM_MEDIA_TYPE * pmt = NULL;
    HRESULT hr = S_OK;
    int nCounts=0, nSize=0;
    int preferredIndex = -1;
    // Lower enumerator value = more preferred.
    enum {
        UYVY = 0, YUY2, RGB565, RGB555, RGB24, RGB32, Unknown
    } currentPreferred, temp;
    currentPreferred = Unknown;

    IAMStreamConfig * pCfg = GetStreamConfig();
    pCfg->GetNumberOfCapabilities(&nCounts, &nSize);
    // First pass: find the index of the most-preferred subtype.
    for (int i = 0; i < nCounts; i++)
    {
        if (pCfg->GetStreamCaps(i, &pmt, (BYTE *)&pSCC) == S_OK)
        {
            if (pmt->subtype == MEDIASUBTYPE_RGB32)
            {
                temp = RGB32;
            }
            else if (pmt->subtype == MEDIASUBTYPE_RGB24)
            {
                temp = RGB24;
            }
            else if (pmt->subtype == MEDIASUBTYPE_RGB565)
            {
                temp = RGB565;
            }
            else if (pmt->subtype == MEDIASUBTYPE_RGB555)
            {
                temp = RGB555;
            }
            else if (pmt->subtype == MEDIASUBTYPE_YUY2)
            {
                temp = YUY2;
            }
            else if (pmt->subtype == MEDIASUBTYPE_UYVY)
            {
                temp = UYVY;
            }
            else
            {
                temp = Unknown;
            }

            if (temp < currentPreferred)
            {
                currentPreferred = temp;
                preferredIndex = i;
            }
            DeleteMediaType(pmt);
        }
    }

    // BUGFIX: pmt still pointed at memory freed by DeleteMediaType above; if
    // the retrieval below failed, the dangling pointer was returned.
    pmt = NULL;

    // Second pass: fetch the preferred media type (or the current format).
    if (preferredIndex != -1)
    {
        hr = pCfg->GetStreamCaps(preferredIndex, &pmt, (BYTE *)&pSCC);
    }
    else
    {
        hr = pCfg->GetFormat(&pmt);
    }
    return SUCCEEDED(hr) ? pmt : NULL;
}
示例13: CoInitialize
bool CCameraDS::OpenCamera(int nCamID, bool bDisplayProperties, int nWidth, int nHeight)
{
HRESULT hr = S_OK;
CoInitialize(NULL);
// Create the Filter Graph Manager.
hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC,
IID_IGraphBuilder, (void **)&m_pGraph);
hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
IID_IBaseFilter, (LPVOID *)&m_pSampleGrabberFilter);
hr = m_pGraph->QueryInterface(IID_IMediaControl, (void **) &m_pMediaControl);
hr = m_pGraph->QueryInterface(IID_IMediaEvent, (void **) &m_pMediaEvent);
hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
IID_IBaseFilter, (LPVOID*) &m_pNullFilter);
hr = m_pGraph->AddFilter(m_pNullFilter, L"NullRenderer");
hr = m_pSampleGrabberFilter->QueryInterface(IID_ISampleGrabber, (void**)&m_pSampleGrabber);
AM_MEDIA_TYPE mt;
ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
mt.majortype = MEDIATYPE_Video;
mt.subtype = MEDIASUBTYPE_RGB24;
mt.formattype = FORMAT_VideoInfo;
hr = m_pSampleGrabber->SetMediaType(&mt);
MYFREEMEDIATYPE(mt);
m_pGraph->AddFilter(m_pSampleGrabberFilter, L"Grabber");
// Bind Device Filter. We know the device because the id was passed in
BindFilter(nCamID, &m_pDeviceFilter);
m_pGraph->AddFilter(m_pDeviceFilter, NULL);
CComPtr<IEnumPins> pEnum;
m_pDeviceFilter->EnumPins(&pEnum);
hr = pEnum->Reset();
hr = pEnum->Next(1, &m_pCameraOutput, NULL);
pEnum = NULL;
m_pSampleGrabberFilter->EnumPins(&pEnum);
pEnum->Reset();
hr = pEnum->Next(1, &m_pGrabberInput, NULL);
pEnum = NULL;
m_pSampleGrabberFilter->EnumPins(&pEnum);
pEnum->Reset();
pEnum->Skip(1);
hr = pEnum->Next(1, &m_pGrabberOutput, NULL);
pEnum = NULL;
m_pNullFilter->EnumPins(&pEnum);
pEnum->Reset();
hr = pEnum->Next(1, &m_pNullInputPin, NULL);
//SetCrossBar();
if (bDisplayProperties)
{
CComPtr<ISpecifyPropertyPages> pPages;
HRESULT hr = m_pCameraOutput->QueryInterface(IID_ISpecifyPropertyPages, (void**)&pPages);
if (SUCCEEDED(hr))
{
PIN_INFO PinInfo;
m_pCameraOutput->QueryPinInfo(&PinInfo);
CAUUID caGUID;
pPages->GetPages(&caGUID);
OleCreatePropertyFrame(NULL, 0, 0,
L"Property Sheet", 1,
(IUnknown **)&(m_pCameraOutput.p),
caGUID.cElems,
caGUID.pElems,
0, 0, NULL);
CoTaskMemFree(caGUID.pElems);
PinInfo.pFilter->Release();
}
pPages = NULL;
}
else
{
//////////////////////////////////////////////////////////////////////////////
// 加入由 lWidth和lHeight设置的摄像头的宽和高 的功能,默认320*240
// by flymanbox @2009-01-24
//////////////////////////////////////////////////////////////////////////////
int _Width = nWidth, _Height = nHeight;
IAMStreamConfig* iconfig;
iconfig = NULL;
hr = m_pCameraOutput->QueryInterface(IID_IAMStreamConfig, (void**)&iconfig);
AM_MEDIA_TYPE* pmt;
if(iconfig->GetFormat(&pmt) !=S_OK)
{
//.........这里部分代码省略.........
示例14: if
bool DeviceSource::LoadFilters()
{
if(bCapturing || bFiltersLoaded)
return false;
bool bSucceeded = false;
List<MediaOutputInfo> outputList;
IAMStreamConfig *config = NULL;
bool bAddedVideoCapture = false, bAddedAudioCapture = false, bAddedDevice = false;
GUID expectedMediaType;
IPin *devicePin = NULL, *audioPin = NULL;
HRESULT err;
String strShader;
bUseThreadedConversion = API->UseMultithreadedOptimizations() && (OSGetTotalCores() > 1);
//------------------------------------------------
// basic initialization vars
bool bCheckForceAudio = data->GetInt(TEXT("forceCustomAudioDevice")) != 0;
bUseCustomResolution = data->GetInt(TEXT("customResolution"));
strDevice = data->GetString(TEXT("device"));
strDeviceName = data->GetString(TEXT("deviceName"));
strDeviceID = data->GetString(TEXT("deviceID"));
strAudioDevice = data->GetString(TEXT("audioDevice"));
strAudioName = data->GetString(TEXT("audioDeviceName"));
strAudioID = data->GetString(TEXT("audioDeviceID"));
strAudioGUID = data->GetString(TEXT("audioDeviceCLSID"));
if(strAudioGUID.Compare(TEXT("CLSID_AudioInputDeviceCategory"))) matchGUID = CLSID_AudioInputDeviceCategory;
if(strAudioGUID.Compare(TEXT("CLSID_VideoInputDeviceCategory"))) matchGUID = CLSID_VideoInputDeviceCategory;
if(strAudioGUID.Compare(TEXT("CLSID_AudioRendererCategory"))) {
//Log(TEXT("Dese are spekers.\n"));
matchGUID = CLSID_AudioRendererCategory;
}
bFlipVertical = data->GetInt(TEXT("flipImage")) != 0;
bFlipHorizontal = data->GetInt(TEXT("flipImageHorizontal")) != 0;
opacity = data->GetInt(TEXT("opacity"), 100);
float volume = data->GetFloat(TEXT("volume"), 1.0f);
//------------------------------------------------
// chrom key stuff
bUseChromaKey = data->GetInt(TEXT("useChromaKey")) != 0;
keyColor = data->GetInt(TEXT("keyColor"), 0xFFFFFFFF);
keySimilarity = data->GetInt(TEXT("keySimilarity"));
keyBlend = data->GetInt(TEXT("keyBlend"), 80);
keySpillReduction = data->GetInt(TEXT("keySpillReduction"), 50);
if(keyBaseColor.x < keyBaseColor.y && keyBaseColor.x < keyBaseColor.z)
keyBaseColor -= keyBaseColor.x;
else if(keyBaseColor.y < keyBaseColor.x && keyBaseColor.y < keyBaseColor.z)
keyBaseColor -= keyBaseColor.y;
else if(keyBaseColor.z < keyBaseColor.x && keyBaseColor.z < keyBaseColor.y)
keyBaseColor -= keyBaseColor.z;
//------------------------------------------------
// get the device filter and pins
if(strAudioDevice.IsValid())
{
audioDeviceFilter = GetDeviceByValue(matchGUID, L"FriendlyName", strAudioName, L"DevicePath", strAudioID);
if(!audioDeviceFilter) {
AppWarning(TEXT("DShowAudioPlugin: Invalid audio device: name '%s', path '%s'"), strAudioName.Array(), strAudioID.Array());
}
}
soundOutputType = data->GetInt(TEXT("soundOutputType"));
if(soundOutputType != 0)
{
if(matchGUID == CLSID_AudioRendererCategory) {
err = capture->FindPin(audioDeviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
}
else {
err = capture->FindPin(audioDeviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
}
if(FAILED(err))
{
Log(TEXT("DShowAudioPlugin: No audio pin, result = %lX"), err);
soundOutputType = 0;
}
}
int soundTimeOffset = data->GetInt(TEXT("soundTimeOffset"));
//GetOutputList(devicePin, outputList);
//------------------------------------------------
// initialize the basic video variables and data
//------------------------------------------------
// log audio info
{
//.........这里部分代码省略.........
示例15: v4w_open_videodevice
//.........这里部分代码省略.........
{
return -14;
}
hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
if(FAILED(hr))
{
return -15;
}
// get null renderer
hr=CoCreateInstance (CLSID_NullRenderer,
NULL,
CLSCTX_INPROC_SERVER,
IID_IBaseFilter,
(void **)&s->m_pNullRenderer);
if(FAILED(hr))
{
return -16;
}
if (s->m_pNullRenderer!=NULL)
{
s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
}
hr = s->m_pBuilder->RenderStream(&pPinCategory,
&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
if (FAILED(hr))
{
return -17;
}
IAMStreamConfig *pConfig = NULL;
hr = s->m_pBuilder->FindInterface(
&pPinCategory, // Preview pin.
&MEDIATYPE_Video, // Any media type.
s->m_pDeviceFilter, // Pointer to the capture filter.
IID_IAMStreamConfig, (void**)&pConfig);
if (pConfig!=NULL)
{
AM_MEDIA_TYPE *pType = NULL;
int iCount, iSize;
pConfig->GetNumberOfCapabilities(&iCount, &iSize);
for (int i = 0; i < iCount; i++) {
VIDEO_STREAM_CONFIG_CAPS scc;
pType = NULL;
pConfig->GetStreamCaps(i, &pType, (BYTE *)&scc);
if (!((pType->formattype == FORMAT_VideoInfo) &&
(pType->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
(pType->pbFormat != NULL)))
continue;
VIDEOINFOHEADER & videoInfo = *(VIDEOINFOHEADER *)pType->pbFormat;
if (m != pType->subtype)
continue;
if (videoInfo.bmiHeader.biWidth != s->vsize.width)
continue;
if (videoInfo.bmiHeader.biHeight != s->vsize.height)
continue;