本文整理汇总了C++中IAMStreamConfig::SetFormat方法的典型用法代码示例。如果您正苦于以下问题:C++ IAMStreamConfig::SetFormat方法的具体用法?C++ IAMStreamConfig::SetFormat怎么用?C++ IAMStreamConfig::SetFormat使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类IAMStreamConfig
的用法示例。
在下文中一共展示了IAMStreamConfig::SetFormat方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: SetCaptureSize
// Configures a capture/preview output pin to the requested geometry and
// frame interval by patching the pin's first advertised stream capability
// and applying it via IAMStreamConfig::SetFormat.
//
// capPreviewOutputPin: the capture filter's output pin (must be valid).
// width/height:        desired frame dimensions in pixels.
// avgTimePerFrame:     frame interval in 100 ns units (REFERENCE_TIME).
// Returns S_OK on success, otherwise the failing HRESULT.
HRESULT SetCaptureSize(IPin* capPreviewOutputPin, int width, int height, int avgTimePerFrame)
{
    HRESULT hr = S_OK;
    IAMStreamConfig *streamConfig = NULL;
    hr = capPreviewOutputPin->QueryInterface(IID_IAMStreamConfig, (void**)&streamConfig);
    if (FAILED(hr))
    {
        ErrorPrint("Get stream config interface error", hr);
        return hr;
    }
    AM_MEDIA_TYPE *mediaType = NULL;
    VIDEO_STREAM_CONFIG_CAPS configCaps;
    hr = streamConfig->GetStreamCaps(0, &mediaType, (BYTE*)&configCaps);
    if (FAILED(hr))
    {
        ErrorPrint("Get stream caps error", hr);
        streamConfig->Release(); // fix: interface was leaked on this error path
        return hr;
    }
    // Patch the first advertised format with the requested size and rate.
    VIDEOINFOHEADER* videoHeader = (VIDEOINFOHEADER*)mediaType->pbFormat;
    videoHeader->bmiHeader.biWidth = width;
    videoHeader->bmiHeader.biHeight = height;
    videoHeader->bmiHeader.biSizeImage = DIBSIZE(videoHeader->bmiHeader);
    videoHeader->AvgTimePerFrame = avgTimePerFrame;
    hr = streamConfig->SetFormat(mediaType); // fix: result was silently ignored
    if (FAILED(hr))
    {
        ErrorPrint("Set format error", hr);
    }
    DeleteMediaType(mediaType);
    streamConfig->Release(); // fix: interface was also leaked on the success path
    return hr;
}
示例2: errorCheck
// Applies a new capture frame rate to this channel.
//
// FR: requested rate in frames per second; values below 1 are ignored.
// For CAM sources the output pin is disconnected, the stored actualFormat
// (with an updated AvgTimePerFrame) is pushed through IAMStreamConfig,
// and the pin is reconnected; the graph is stopped/restarted around the
// change when it is currently mapped.
// Returns the last DirectShow result (0 / S_OK when nothing failed).
HRESULT
recChannel_t::set_rate(float FR)
{
    __CONTEXT("recChannel_t::set_rate");
    if (FR < 1)
    {
        return S_OK;
    }
    float factorRate = FR/30;
    int hr = 0;
    if (factorRate < 0.1) factorRate = 0.1;
    frameRate = factorRate;
    IAMStreamConfig *pConfig = NULL;
    if ((camInfo->getKind() == SHARED ||
         camInfo->getKind() == CAM) &&
        actualFormat.pbFormat != NULL)
    {
        VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) actualFormat.pbFormat;
        double newFR = 10000000.0/FR; // AvgTimePerFrame is in 100 ns units
        pVih->AvgTimePerFrame = newFR;
        camInfo->setRate(pVih->AvgTimePerFrame);
        if (camInfo->getKind() == CAM)
        {
            IPin * pInput = NULL;
            get_camInfo()->output->ConnectedTo(&pInput);
            if (mapping)
            {
                pControl->Stop(); // graph must not run while re-formatting the pin
            }
            if (pInput)
            {
                get_camInfo()->output->Disconnect();
                pInput->Disconnect();
            }
            hr = get_camInfo()->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
            if (pConfig)
            {
                // fix: was "int hr = ...", which shadowed the outer hr so the
                // errorCheck() after reconnection never saw SetFormat's result
                hr = pConfig->SetFormat(&actualFormat);
                errorCheck(hr);
                pConfig->Release();
            }
            if (pInput)
            {
                hr = pGraph->Connect(get_camInfo()->output, pInput);
                errorCheck(hr);
                pInput->Release(); // fix: ConnectedTo() AddRef'd this pin; it was leaked
            }
            errorCheck(hr);
            if (mapping)
            {
                pControl->Run();
            }
        }
    }
    return hr;
}
示例3: SetCaptureBufferSize
// Forces the audio capture pin to 8-bit mono 11025 Hz PCM by rewriting the
// pin's current WAVEFORMATEX in place and re-applying it with SetFormat.
// NOTE(review): the SetFormat result is ignored, and dwBytesPerSec is only
// consumed by the commented-out IAMBufferNegotiation code at the bottom.
void CCaptureDevice::SetCaptureBufferSize(void)
{
// GetPin() is a project helper; presumably returns the capture output pin
// without transferring ownership -- verify against its declaration.
IPin * pCapturePin = GetPin();
if (pCapturePin)
{
DWORD dwBytesPerSec = 0;
AM_MEDIA_TYPE * pmt = {0};
IAMStreamConfig * pCfg = NULL;
HRESULT hr = pCapturePin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
if ( hr==S_OK )
{
// Read the current format and patch it in place; GetFormat allocates
// pmt, which is freed below with DeleteMediaType.
hr = pCfg->GetFormat(&pmt);
if ( hr==S_OK )
{
WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
// Remember the original data rate (only used by the commented-out
// buffer-negotiation code below).
dwBytesPerSec = pWF->nAvgBytesPerSec;
pWF->nChannels = 1;
pWF->wBitsPerSample = 8;
pWF->nSamplesPerSec = 11025;
pWF->nAvgBytesPerSec = pWF->nSamplesPerSec * pWF->nChannels * pWF->wBitsPerSample / 8;
// 1 byte per frame: 1 channel x 8 bits. Consistent with the values above.
pWF->nBlockAlign = 1;
/*
info.cbSize = sizeof(WAVEFORMATEX);
info.wFormatTag = 1;
info.nChannels = 2;
info.nSamplesPerSec = 44100;
//info.nSamplesPerSec = 22050;
11025
info.wBitsPerSample = 16;
info.nAvgBytesPerSec = info.nSamplesPerSec * info.nChannels * info.wBitsPerSample / 8;
info.nBlockAlign = 4;
*/
pCfg->SetFormat( pmt );
DeleteMediaType(pmt);
}
pCfg->Release();
}
/* if (dwBytesPerSec)
{
IAMBufferNegotiation * pNeg = NULL;
hr = pCapturePin->QueryInterface(IID_IAMBufferNegotiation,
(void **)&pNeg);
if (SUCCEEDED(hr))
{
ALLOCATOR_PROPERTIES AllocProp;
AllocProp.cbAlign = -1; // -1 means no preference.
AllocProp.cbBuffer = dwBytesPerSec * dwLatencyInMilliseconds / 1000;
AllocProp.cbPrefix = -1;
AllocProp.cBuffers = -1;
hr = pNeg->SuggestAllocatorProperties(&AllocProp);
pNeg->Release();
}
}*/
}
}
示例4: IniciarVentanaVideo
// Initializes the video window: enumerates the camera pin's media types
// looking for one with the requested width/height, applies it if found
// (otherwise the device default is kept), records the actual size in
// m_nAncho/m_nAlto, and fits the renderer to hWnd's client area.
// Returns the HRESULT of the final SetVideoPosition call.
HRESULT Captura::IniciarVentanaVideo(HWND hWnd,int width, int height)
{
    HRESULT hr;
    RECT rcDest;
    // CComPtr<IAMStreamConfig> pConfig;
    IAMStreamConfig * pConfig = NULL;
    IEnumMediaTypes *pMedia = NULL;
    AM_MEDIA_TYPE *pmt = NULL, *pfnt = NULL;
    hr = m_pCamOutPin->EnumMediaTypes( &pMedia );
    if(SUCCEEDED(hr))
    {
        // Walk every advertised type looking for the requested frame size.
        while(pMedia->Next(1, &pmt, 0) == S_OK)
        {
            if( pmt->formattype == FORMAT_VideoInfo )
            {
                VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->pbFormat;
                if( vih->bmiHeader.biWidth == width && vih->bmiHeader.biHeight == height )
                {
                    pfnt = pmt; // keep the match; freed after SetFormat below
                    break;
                }
            }
            // fix: non-FORMAT_VideoInfo types were previously leaked here
            BorrarTipoMedio( pmt );
        }
        pMedia->Release();
    }
    hr = m_pCamOutPin->QueryInterface( IID_IAMStreamConfig, (void **) &pConfig );
    if(SUCCEEDED(hr))
    {
        if( pfnt != NULL )
        {
            hr=pConfig->SetFormat( pfnt );
            BorrarTipoMedio( pfnt );
        }
        // Read back the (new or default) format to learn the actual size.
        hr = pConfig->GetFormat( &pfnt );
        if(SUCCEEDED(hr))
        {
            m_nAncho = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biWidth;
            m_nAlto = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biHeight;
            BorrarTipoMedio( pfnt );
        }
        pConfig->Release(); // fix: interface was leaked
    }
    else if( pfnt != NULL )
    {
        BorrarTipoMedio( pfnt ); // fix: selected type was leaked when QI failed
    }
    ::GetClientRect (hWnd,&rcDest);
    hr = m_pWC->SetVideoPosition(NULL, &rcDest);
    return hr;
}
示例5: InitVideoWindow
/* Sets the captured-frame format: walks all media types the pin offers
 * looking for the requested width/height; if none matches, the device's
 * current/default format is kept. Records the actual size in
 * m_nWidth/m_nHeight and fits the renderer to hWnd's client area. */
HRESULT CVMR_Capture::InitVideoWindow(HWND hWnd,int width, int height)
{
    HRESULT hr;
    RECT rcDest;
    IAMStreamConfig *pConfig = NULL;
    IEnumMediaTypes *pMedia = NULL;
    AM_MEDIA_TYPE *pmt = NULL, *pfnt = NULL;
    hr = m_pCamOutPin->EnumMediaTypes( &pMedia );
    if(SUCCEEDED(hr))
    {
        // Enumerate every video format, checking for the requested size.
        while(pMedia->Next(1, &pmt, 0) == S_OK)
        {
            if( pmt->formattype == FORMAT_VideoInfo )
            {
                VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->pbFormat;
                // Match when both width and height equal the request.
                if( vih->bmiHeader.biWidth == width && vih->bmiHeader.biHeight == height )
                {
                    pfnt = pmt; // keep the match; freed after SetFormat below
                    break;
                }
            }
            // fix: non-FORMAT_VideoInfo types were previously leaked here
            DeleteMediaType( pmt );
        }
        pMedia->Release();
    }
    hr = m_pCamOutPin->QueryInterface( IID_IAMStreamConfig, (void **) &pConfig );
    if(SUCCEEDED(hr))
    {
        // A matching format was found: apply it.
        if( pfnt != NULL )
        {
            hr=pConfig->SetFormat( pfnt );
            DeleteMediaType( pfnt );
        }
        // Read back the current (possibly default) format for the real size.
        hr = pConfig->GetFormat( &pfnt );
        if(SUCCEEDED(hr))
        {
            m_nWidth = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biWidth;   // actual width
            m_nHeight = ((VIDEOINFOHEADER *)pfnt->pbFormat)->bmiHeader.biHeight; // actual height
            DeleteMediaType( pfnt );
        }
        pConfig->Release(); // fix: interface was leaked
    }
    else if( pfnt != NULL )
    {
        DeleteMediaType( pfnt ); // fix: selected type was leaked when QI failed
    }
    // Size the display to the client area of the target window.
    ::GetClientRect (hWnd,&rcDest);
    hr = m_pWC->SetVideoPosition(NULL, &rcDest);
    return hr;
}
示例6: AdjustOutput
// Pushes the preferred width/height and a fixed frame interval into the
// device's stream configuration. SelectMediaType() (project helper)
// supplies the media type that is patched and applied here.
void CCaptureDevice::AdjustOutput(void)
{
HRESULT hr = S_OK;
AM_MEDIA_TYPE * pmt = NULL;
// 2,000,000 x 100 ns = 0.2 s per frame = 5 fps.
// NOTE(review): the original comment claimed "20fps", but 20 fps would be
// 500000 (10,000,000 / 20) -- confirm which value was intended.
LONGLONG avgTimePerFrame = 2000000;
pmt = SelectMediaType();
if (pmt)
{
if (pmt->formattype == FORMAT_VideoInfo)
{
VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) pmt->pbFormat;
pvi->AvgTimePerFrame = avgTimePerFrame;
pvi->bmiHeader.biWidth = Preferred_Width;
pvi->bmiHeader.biHeight = Preferred_Height;
// Recompute the image size from the patched dimensions and bit depth.
pvi->bmiHeader.biSizeImage = Preferred_Width * Preferred_Height * pvi->bmiHeader.biBitCount / 8;
// NOTE(review): pCfg is used without a NULL check, SetFormat's result is
// ignored, and the interface is not Released here -- presumably
// GetStreamConfig() returns a cached, class-owned pointer. Verify.
IAMStreamConfig * pCfg = GetStreamConfig();
hr = pCfg->SetFormat(pmt);
}
DeleteMediaType(pmt);
}
}
示例7: SetAudioFormat
/// Sets the audio capture format (sample rate / channel count / sample size).
/// If the capture filter already exists, the format is applied to its output
/// pin via IAMStreamConfig (with allocator properties sized for the FAAC
/// encoder); otherwise the requested values are stored for later use.
/// Returns TRUE on success.
/// fix: previously returned TRUE even when GetFormat/SetFormat failed.
BOOL CAudioCapture::SetAudioFormat(ENUM_FREQUENCY_TYPE enFrequency,
    ENUM_CHANNEL_TYPE enChannel, ENUM_SAMPLE_TYPE enSample)
{
    if(NULL != m_pCaptureFilter)
    {
        BOOL bResult = FALSE;
        do
        {
            IPin* pOutPin = GetOutputPin(m_pCaptureFilter, (uint16_t)0);
            if(NULL != pOutPin)
            {
                IAMBufferNegotiation *pNeg = NULL;
                IAMStreamConfig *pCfg = NULL;
                // Get buffer negotiation interface
                HRESULT hr = pOutPin->QueryInterface(IID_IAMBufferNegotiation, (void **)&pNeg);
                if (FAILED(hr))
                {
                    pOutPin->Release();
                    break;
                }
                // Bytes per second of the requested PCM format.
                // (enSample is assumed to be bytes per sample -- see the
                // wBitsPerSample computation below. Verify against the enum.)
                long lBytesPerSecond = (long) (enSample * enFrequency * enChannel);
                // Buffer holds 1024 samples per channel, matching the FAAC
                // encoder's frame size.
                long lBufferSize = 1024 * enSample * enChannel;
                // Set the buffer size based on selected settings
                ALLOCATOR_PROPERTIES prop={0};
                prop.cbBuffer = lBufferSize;
                prop.cBuffers = 6;
                prop.cbAlign = enSample * enChannel;
                hr = pNeg->SuggestAllocatorProperties(&prop);
                pNeg->Release();
                // Now set the actual format of the audio data
                hr = pOutPin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
                if (FAILED(hr))
                {
                    pOutPin->Release();
                    break;
                }
                // Read the current media type and patch it in place.
                AM_MEDIA_TYPE *pmt = NULL; // fix: was "= {0}" brace-init of a pointer
                hr = pCfg->GetFormat(&pmt);
                if (SUCCEEDED(hr))
                {
                    // Fill in values for the new format
                    WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
                    pWF->nChannels = (WORD) enChannel;
                    pWF->nSamplesPerSec = enFrequency;
                    pWF->nAvgBytesPerSec = lBytesPerSecond;
                    pWF->wBitsPerSample = (WORD) (enSample * 8);
                    pWF->nBlockAlign = (WORD) (enSample * enChannel);
                    // Set the new formattype for the output pin
                    hr = pCfg->SetFormat(pmt);
                    UtilDeleteMediaType(pmt);
                    // fix: report success only if the format was actually set
                    bResult = SUCCEEDED(hr);
                }
                // Release interfaces
                pCfg->Release();
                pOutPin->Release();
            }
        }while(FALSE);
        return bResult;
    }
    else
    {
        // Filter not created yet: just remember the requested format.
        m_enFrequency = enFrequency;
        m_enChannel = enChannel;
        m_enSample = enSample;
        return TRUE;
    }
}
示例8: setCaptureOutputFormat
void DirectShowGrabber::setCaptureOutputFormat() {
IAMStreamConfig *pConfig;
int iCount;
int iSize;
VIDEOINFOHEADER *pVih;
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
int formatSet;
HRESULT hr;
// Reference http://msdn.microsoft.com/library/default.asp?url=/library/en-us/directshow/htm/configurethevideooutputformat.asp
debug_msg("DirectShowGrabber::setCaptureOutputFormat(): enter...\n");
formatSet = 0;
pConfig = NULL;
hr = pBuild_->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
pCaptureFilter_, IID_IAMStreamConfig, (void**)&pConfig);
if (FAILED(hr)) {
Grabber::status_=-1;
return;
}
debug_msg("DirectShowGrabber::setCaptureOutputFormat(): IAMStreamConfig interface acquired\n");
iCount = iSize = 0;
hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
// Check the size to make sure we pass in the correct structure.
// The alternative output of iSize is AUDIO_STREAM_CONFIG_CAPS, btw.
if ( iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS) ) {
for (int iFormat = 0; iFormat < iCount; iFormat++) {
hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE *)&scc);
//showErrorMessage(hr);
if( SUCCEEDED(hr) ) {
if ((pmtConfig->majortype == MEDIATYPE_Video) &&
(pmtConfig->subtype == MEDIASUBTYPE_RGB24) &&
(pmtConfig->formattype == FORMAT_VideoInfo) &&
(pmtConfig->cbFormat >= sizeof (VIDEOINFOHEADER)) &&
(pmtConfig->pbFormat != NULL)) {
pVih = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
pVih->bmiHeader.biWidth = 320;
pVih->bmiHeader.biHeight = 240;
pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader);
debug_msg("Windows GDI BITMAPINFOHEADER follows:\n");
debug_msg("biWidth= %d\n", pVih->bmiHeader.biWidth);
debug_msg("biHeight= %d\n", pVih->bmiHeader.biHeight);
debug_msg("biSize= %d\n", pVih->bmiHeader.biSize);
debug_msg("biPlanes= %d\n", pVih->bmiHeader.biPlanes);
debug_msg("biBitCount= %d\n", pVih->bmiHeader.biBitCount);
debug_msg("biCompression= %d\n", pVih->bmiHeader.biCompression);
debug_msg("biSizeImage= %d\n", pVih->bmiHeader.biSizeImage);
debug_msg("biXPelsPerMeter=%d\n", pVih->bmiHeader.biXPelsPerMeter);
debug_msg("biYPelsPerMeter=%d\n", pVih->bmiHeader.biYPelsPerMeter);
debug_msg("biClrUsed= %d\n", pVih->bmiHeader.biClrUsed);
debug_msg("biClrImportant= %d\n", pVih->bmiHeader.biClrImportant);
hr = pConfig->SetFormat(pmtConfig);
//showErrorMessage(hr);
// XXX: leak. need to deal with this - msp
//DeleteMediaType(pmtConfig);
formatSet = 1;
break;
}
}
}
}
pConfig->Release();
if( formatSet )
debug_msg("DirectShowGrabber::setCaptureOutputFormat: format set\n");
else
debug_msg("DirectShowGrabber::setCaptureOutputFormat: format not set\n");
}
示例9: main
int main(int argc, char* argv[])
{
ICaptureGraphBuilder2 *pCaptureGraphBuilder = NULL;
IGraphBuilder *pGraphBuilder = NULL;
IBaseFilter *pSource = NULL;
IBaseFilter *pMux = NULL;
IBaseFilter *pVideoCompressor = NULL;
IBaseFilter *pAudioCompressor = NULL;
IAMStreamConfig *pAMStreamConfig = NULL;
IAMVideoCompression *pAMVideoCompression = NULL;
IMediaControl *pControl = NULL;
IMediaSeeking *pSeek = NULL;
IMediaEvent *pEvent = NULL;
HRESULT hr;
DWORD pdwRegister=0;
CoInitialize(NULL);
// Create the capture graph builder.
CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder);
// Make the rendering section of the graph.
pCaptureGraphBuilder->SetOutputFileName(
&MEDIASUBTYPE_Avi, // File type.
L"C:\\STDIUE1.avi", // File name.
&pMux, // pointer to the multiplexer.
NULL); // pointer to the file writer.
// Load the source file.
pCaptureGraphBuilder->GetFiltergraph(&pGraphBuilder);
pGraphBuilder->AddSourceFilter(L"C:\\Program Files\\Microsoft Money\\Media\\STDIUE1.avi", L"Source Filter", &pSource);
// Add the compressor filter.
CoCreateInstance(CLSID_AVICo, NULL, CLSCTX_INPROC,
IID_IBaseFilter, (void **)&pVideoCompressor);
pGraphBuilder->AddFilter(pVideoCompressor, L"Video Compressor");
// Render the video stream, through the compressor.
pCaptureGraphBuilder->RenderStream(
NULL, // Output pin category
NULL, // Media type
pSource, // Source filter
pVideoCompressor, // Compressor filter
pMux); // Sink filter (the AVI Mux)
/* CoCreateInstance(CLSID_GSM, NULL, CLSCTX_INPROC,
IID_IBaseFilter, (void **)&pAudioCompressor);
pGraphBuilder->AddFilter(pAudioCompressor, L"Audio Compressor");*/
// Render the audio stream.
pCaptureGraphBuilder->RenderStream(
NULL,
NULL,
pSource,
pAudioCompressor,
pMux);
// Compress at 100k/second data rate.
AM_MEDIA_TYPE *pmt;
pCaptureGraphBuilder->FindInterface(NULL, NULL, pVideoCompressor, IID_IAMStreamConfig, (void **)&pAMStreamConfig);
pAMStreamConfig->GetFormat(&pmt);
if (pmt->formattype == FORMAT_VideoInfo)
{
((VIDEOINFOHEADER *)(pmt->pbFormat))->dwBitRate = 100000;
pAMStreamConfig->SetFormat(pmt);
}
// Request key frames every four frames.
pAMStreamConfig->QueryInterface(IID_IAMVideoCompression, (void **)&pAMVideoCompression);
pAMVideoCompression->put_KeyFrameRate(4);
pAMVideoCompression->Release();
pAMStreamConfig->Release();
// Run the graph.
pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pControl);
pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&pEvent);
hr = pMux->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
pControl->Run();
printf("Recompressing... \n");
long evCode;
if (SUCCEEDED(hr))
{
REFERENCE_TIME rtTotal, rtNow = 0;
pSeek->GetDuration(&rtTotal);
while ((pEvent->WaitForCompletion(1000, &evCode)) == E_ABORT)
{
//.........这里部分代码省略.........
示例10: setFormat
// Selects a capture format matching the requested width, height and frame
// rate, restricted to the previously chosen pixel format (m_selectedGuid).
// On success the format is applied via IAMStreamConfig::SetFormat and true
// is returned; on failure an error string is set and false is returned.
bool MIPDirectShowCapture::setFormat(int w, int h, real_t rate)
{
    HRESULT hr;
    IAMStreamConfig *pConfig = 0;
    hr = m_pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, 0, m_pCaptDevice, IID_IAMStreamConfig, (void**)&pConfig);
    if (HR_FAILED(hr))
    {
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECONFIG);
        return false;
    }
    int count = 0;
    int s = 0;
    hr = pConfig->GetNumberOfCapabilities(&count, &s);
    if (HR_FAILED(hr))
    {
        pConfig->Release();
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECAPS);
        return false;
    }
    if (s != sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        pConfig->Release();
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_INVALIDCAPS);
        return false;
    }
    for (int i = 0; i < count; i++)
    {
        VIDEO_STREAM_CONFIG_CAPS caps;
        AM_MEDIA_TYPE *pMediaType;
        hr = pConfig->GetStreamCaps(i, &pMediaType, (BYTE*)&caps);
        if (HR_SUCCEEDED(hr))
        {
            if ((pMediaType->majortype == MEDIATYPE_Video) &&
                (pMediaType->subtype == m_selectedGuid) &&
                (pMediaType->formattype == FORMAT_VideoInfo) &&
                (pMediaType->cbFormat >= sizeof (VIDEOINFOHEADER)) &&
                (pMediaType->pbFormat != 0))
            {
                // Patch the capability with the requested geometry and rate.
                VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pMediaType->pbFormat;
                pVih->bmiHeader.biWidth = w;
                pVih->bmiHeader.biHeight = h;
                pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader);
                pVih->AvgTimePerFrame = (REFERENCE_TIME)(10000000.0/rate); // 100 ns units
                hr = pConfig->SetFormat(pMediaType);
                if (HR_SUCCEEDED(hr))
                {
                    CoTaskMemFree(pMediaType->pbFormat);
                    // fix: the AM_MEDIA_TYPE block itself (also CoTaskMem-
                    // allocated by GetStreamCaps) was leaked before
                    CoTaskMemFree(pMediaType);
                    pConfig->Release();
                    return true;
                }
            }
            if (pMediaType->pbFormat != 0)
                CoTaskMemFree(pMediaType->pbFormat);
            CoTaskMemFree(pMediaType); // fix: leaked for every enumerated type
        }
    }
    pConfig->Release();
    setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTSETCAPS);
    return false;
}
示例11: if
//.........这里部分代码省略.........
return -11;
ZeroMemory(pvi, sizeof(VIDEOINFO));
if (s->pix_fmt == MS_YUV420P)
pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
else if (s->pix_fmt == MS_YUY2)
pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
else if (s->pix_fmt == MS_YUYV)
pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
else if (s->pix_fmt == MS_UYVY)
pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
else if (s->pix_fmt == MS_RGB24)
pvi->bmiHeader.biCompression = BI_RGB;
if (s->pix_fmt == MS_YUV420P)
pvi->bmiHeader.biBitCount = 12;
else if (s->pix_fmt == MS_YUY2)
pvi->bmiHeader.biBitCount = 16;
else if (s->pix_fmt == MS_YUYV)
pvi->bmiHeader.biBitCount = 16;
else if (s->pix_fmt == MS_UYVY)
pvi->bmiHeader.biBitCount = 16;
else if (s->pix_fmt == MS_RGB24)
pvi->bmiHeader.biBitCount = 24;
pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
pvi->bmiHeader.biWidth = s->vsize.width;
pvi->bmiHeader.biHeight = s->vsize.height;
pvi->bmiHeader.biPlanes = 1;
pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
pvi->bmiHeader.biClrImportant = 0;
mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));
hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
if(FAILED(hr))
{
return -12;
}
hr = s->m_pDXFilter->SetCallback(Callback);
if(FAILED(hr))
{
return -13;
}
hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
(LPVOID *)&s->m_pIDXFilter);
if(FAILED(hr))
{
return -14;
}
hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
if(FAILED(hr))
{
return -15;
}
// get null renderer
hr=CoCreateInstance (CLSID_NullRenderer,
NULL,
CLSCTX_INPROC_SERVER,
IID_IBaseFilter,
示例12: libavPixelFormatToMediaSubTypeGUID
// Applies the media type described by the given AVCapability (looked up by
// its stream-caps index) to the device filter via IAMStreamConfig.
// Validates that the capability is a video / FORMAT_VideoInfo type whose
// subtype matches the capability's pixel format before applying it.
// Returns true on success, false (with an RX_ERROR log) otherwise.
bool VideoCaptureDirectShow2::setDeviceFilterMediaType(ICaptureGraphBuilder2* captureBuilder, IBaseFilter* deviceFilter, AVCapability cap) {
    if(!captureBuilder) {
        RX_ERROR("Cannot set device filter media type because the given ICaptureGraphBuilder* is invalid");
        return false;
    }
    if(!deviceFilter) {
        RX_ERROR("Cannot set the media type for the device filter because the device filter is invalid");
        return false;
    }
    if(cap.index < 0) {
        RX_ERROR("Cannot set the media type for the device filter because the given AVCapability has not index. Iterate over the stream caps to retrieve the caps index that we need");
        return false;
    }
    IAMStreamConfig* conf = NULL;
    HRESULT hr = captureBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, deviceFilter, IID_IAMStreamConfig, (void**)&conf);
    if(FAILED(hr)) {
        RX_ERROR("Failed to retrieve a IAMStreamConfig to set the device filter media type");
        return false;
    }
    bool result = true;
    // fix: mt was uninitialized, so an early "goto done" passed a garbage
    // pointer to deleteMediaType() (undefined behavior)
    AM_MEDIA_TYPE* mt = NULL;
    VIDEO_STREAM_CONFIG_CAPS caps;
    // fix: declared (vacuously initialized) before the first goto -- the
    // original initialized declaration below was jumped over by the gotos,
    // which is ill-formed C++
    GUID guid_pixfmt;
    hr = conf->GetStreamCaps(cap.index, &mt, (BYTE*)&caps);
    if(FAILED(hr)) {
        RX_ERROR("Failed to retrieve the AM_MEDIA_TYPE for the AVCapabiltiy with stream caps index: %d", cap.index);
        result = false;
        goto done;
    }
    if(mt->majortype != MEDIATYPE_Video) {
        RX_ERROR("The AM_MEDIA_TYPE we found is not an Video type so we cannot use it to set the media format of the device filter");
        result = false;
        goto done;
    }
    if(mt->formattype != FORMAT_VideoInfo) {
        RX_ERROR("The AM_MEDIA_TYPE we found is not a Format_VideoInfo, so cannot set media type of device filter");
        result = false;
        goto done;
    }
    if(mt->cbFormat < sizeof(VIDEOINFOHEADER)) {
        RX_ERROR("The AM_MEDIA_TYPE has an invalid cbFormat size");
        result = false;
        goto done;
    }
    if(mt->pbFormat == NULL) {
        RX_ERROR("The AM_MEDIA_TYPE.pbFormat is NULL; cannot set type of device filter");
        result = false;
        goto done;
    }
    guid_pixfmt = libavPixelFormatToMediaSubTypeGUID(cap.pixel_format);
    if(mt->subtype != guid_pixfmt) {
        RX_ERROR("The AM_MEDIA_TYPE.subtype is not the same as the one we want..");
        result = false;
        goto done;
    }
    hr = conf->SetFormat(mt);
    if(FAILED(hr)) {
        RX_ERROR("Failed to set the AM_MEDIA_TYPE for the device filter");
        result = false;
        goto done;
    }
 done:
    if(mt) {
        deleteMediaType(mt);
    }
    safeReleaseDirectShow(&conf);
    return result;
}
示例13: CaptureVideo
HRESULT CaptureVideo()
{
HRESULT hr;
IBaseFilter *pSrcFilter=NULL;
// Get DirectShow interfaces
hr = GetInterfaces();
if (FAILED(hr))
{
Msg(TEXT("Failed to get video interfaces! hr=0x%x"), hr);
return hr;
}
// Attach the filter graph to the capture graph
hr = g_pCapture->SetFiltergraph(g_pGraph);
if (FAILED(hr))
{
Msg(TEXT("Failed to set capture filter graph! hr=0x%x"), hr);
return hr;
}
// Use the system device enumerator and class enumerator to find
// a video capture/preview device, such as a desktop USB video camera.
hr = FindCaptureDevice(&pSrcFilter);
if (FAILED(hr))
{
// Don't display a message because FindCaptureDevice will handle it
return hr;
}
// Add Capture filter to our graph.
hr = g_pGraph->AddFilter(pSrcFilter, L"Video Capture");
if (FAILED(hr))
{
Msg(TEXT("Couldn't add the capture filter to the graph! hr=0x%x\r\n\r\n")
TEXT("If you have a working video capture device, please make sure\r\n")
TEXT("that it is connected and is not being used by another application.\r\n\r\n")
TEXT("The sample will now close."), hr);
pSrcFilter->Release();
return hr;
}
// Copied code
//========================================
IAMStreamConfig *pSC;
hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW,
&MEDIATYPE_Interleaved,
pSrcFilter, IID_IAMStreamConfig, (void **)&pSC);
if(FAILED(hr))
hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW,
&MEDIATYPE_Video, pSrcFilter,
IID_IAMStreamConfig, (void **)&pSC);
if (!pSC) {
return hr;
}
int iCount = 0, iSize = 0;
hr = pSC->GetNumberOfCapabilities(&iCount, &iSize);
// Check the size to make sure we pass in the correct structure.
if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
{
// Use the video capabilities structure.
int i = 0;
for (int iFormat = 0; iFormat < iCount; iFormat++)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
hr = pSC->GetFormat(&pmtConfig);
VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
double fps = 30;
pvi->AvgTimePerFrame = (LONGLONG)(10000000/fps);
pvi->bmiHeader.biSizeImage = DIBSIZE(pvi->bmiHeader);
pvi->bmiHeader.biWidth = 1920;
pvi->bmiHeader.biHeight = 1080;
hr = pSC->SetFormat(pmtConfig);
//hr = pSC->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
//if (SUCCEEDED(hr))
//{
// /* Examine the format, and possibly use it. */
// if (pmtConfig->formattype == FORMAT_VideoInfo) {
// long width = HEADER(pmtConfig->pbFormat)->biWidth;
// long height = HEADER(pmtConfig->pbFormat)->biHeight;
//.........这里部分代码省略.........
示例14: strcmp
int
recChannel_t::source_format(char* newFormat)
{
__CONTEXT("recChannel_t::source_format");
int hr = 0;
bool formatFound = false;
IAMStreamConfig *pConfig = NULL;
AM_MEDIA_TYPE * format = NULL;
pControl->StopWhenReady();
ql_t<AM_MEDIA_TYPE *> auxFormats = camInfo->getFormatList();
for(int i = 0; i<auxFormats.len() ; i++)
{
AM_MEDIA_TYPE format = *(auxFormats.nth(i));
IAMStreamConfig *pConfig = NULL;
IVideoWindow * pWindow = NULL;
char subtypeName [100];
memset(subtypeName,0,100);
GetGUIDString(subtypeName,&format.subtype);
VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) format.pbFormat;
if((pVih==NULL && strcmp(newFormat,sourceFormat)==0 )||
(pVih->bmiHeader.biHeight == capInfo.heigth &&
pVih->bmiHeader.biWidth == capInfo.width &&
strcmp(subtypeName,newFormat)==0) ||
camInfo->getKind() == SHARED
)
{
if (strcmp(sourceFormat,newFormat))
{
memset(sourceFormat,0,100);
strcpy(sourceFormat,newFormat);
}
if (!hr && (camInfo->getKind() == CAM || camInfo->getKind() == SHARED)){
camInfo->output->Disconnect();
hr = camInfo->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
//pVih->AvgTimePerFrame = 666666;//
pVih->AvgTimePerFrame = 333333/(frameRate);
int hr = pConfig->SetFormat(&format);
actualFormat = format;
pConfig->Release();
}
formatFound = true;
break;
}
}
if (!formatFound)
{
IAMStreamConfig *pConfig = NULL;
if (camInfo->getKind() == CAM ||
camInfo->getKind() == SHARED)
{
VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) actualFormat.pbFormat;
camInfo->output->Disconnect();
hr = camInfo->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
//pVih->AvgTimePerFrame = 666666;
if (pConfig)
{
int hr = pConfig->SetFormat(&actualFormat);
pConfig->Release();
}
}
}
NOTIFY("reChannel_t"
"\r\n=========================================\r\n"
"Channel %d : Source Description...\r\n"
"- sourceName: %s\r\n"
"- capture Size: %dx%d\r\n"
"- supported Formats: %s\r\n"
"- Window Info: (%d,%d,%d,%d)\r\n"
"- Title: %s\r\n"
"=========================================\r\n",
getId(),
camInfo->getCamName(),
capInfo.width,
capInfo.heigth,
camInfo->getSupportedFormats(),
windowInfo.top,
windowInfo.left,
windowInfo.width,
windowInfo.heigth,
title);
remap();
if (mapping){
map();
}
return 0;
//.........这里部分代码省略.........
示例15: sizeof
//.........这里部分代码省略.........
HRESULT hr = CoInitialize(0);
IAMStreamConfig *pConfig = NULL;
hr = pCaptureGraphBuilder2->FindInterface(&PIN_CATEGORY_CAPTURE, 0, pDeviceFilter, IID_IAMStreamConfig, (void**)&pConfig);
int iCount = 0, iSize = 0;
hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
// Check the size to make sure we pass in the correct structure.
if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
{
// Use the video capabilities structure.
for (int iFormat = 0; iFormat < iCount; iFormat++)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
if (SUCCEEDED(hr))
{
/* Examine the format, and possibly use it. */
if ((pmtConfig->majortype == MEDIATYPE_Video) &&
(pmtConfig->subtype == MEDIASUBTYPE_RGB24) &&
(pmtConfig->formattype == FORMAT_VideoInfo) &&
(pmtConfig->cbFormat >= sizeof (VIDEOINFOHEADER)) &&
(pmtConfig->pbFormat != NULL))
{
VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
// pVih contains the detailed format information.
LONG lWidth = pVih->bmiHeader.biWidth;
LONG lHeight = pVih->bmiHeader.biHeight;
if( lWidth == 1280 )
// if (iFormat == 26)
{ //2 = '1280x720YUV' YUV, 22 = '1280x800YUV', 26 = '1280x720RGB'
hr = pConfig->SetFormat(pmtConfig);
}
}
// Delete the media type when you are done.
DeleteMediaType(pmtConfig);
}
}
}
// Query the capture filter for the IAMCameraControl interface.
IAMCameraControl *pCameraControl = 0;
hr = pDeviceFilter->QueryInterface(IID_IAMCameraControl, (void**)&pCameraControl);
if (FAILED(hr))
{
// The device does not support IAMCameraControl
}
else
{
long Min, Max, Step, Default, Flags, Val;
// Get the range and default values
hr = pCameraControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
hr = pCameraControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
if (SUCCEEDED(hr))
{
hr = pCameraControl->Set(CameraControl_Exposure, -11, CameraControl_Flags_Manual ); // Min = -11, Max = 1, Step = 1
hr = pCameraControl->Set(CameraControl_Focus, 12, CameraControl_Flags_Manual );
}
}
// Query the capture filter for the IAMVideoProcAmp interface.