This article collects and summarizes typical usage examples of the C++ method IAMStreamConfig::Release. If you are unsure what IAMStreamConfig::Release does, how to call it, or what real-world uses look like, the curated examples below may help. You can also explore further usage examples of the IAMStreamConfig class itself.
The following presents 15 code examples of IAMStreamConfig::Release, sorted by popularity by default.
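All of the examples follow the same lifecycle: acquire IAMStreamConfig through FindInterface or QueryInterface (which adds a reference), query or set a stream format, then balance that reference with exactly one Release() on every exit path. Below is a minimal sketch of that pattern; pBuilder and pCaptureFilter are hypothetical stand-ins for an already-built capture graph, and error handling is reduced to early returns.

#include <dshow.h>

// Sketch only: assumes pBuilder (ICaptureGraphBuilder2*) and
// pCaptureFilter (IBaseFilter*) were created elsewhere.
void EnumerateCaps(ICaptureGraphBuilder2 *pBuilder, IBaseFilter *pCaptureFilter)
{
    IAMStreamConfig *pConfig = NULL;
    HRESULT hr = pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                         pCaptureFilter, IID_IAMStreamConfig,
                                         (void**)&pConfig);
    if (FAILED(hr))
        return;   // nothing was acquired, so there is nothing to release

    int count = 0, size = 0;
    hr = pConfig->GetNumberOfCapabilities(&count, &size);
    if (SUCCEEDED(hr) && size == sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        for (int i = 0; i < count; i++)
        {
            VIDEO_STREAM_CONFIG_CAPS caps;
            AM_MEDIA_TYPE *pmt = NULL;
            if (SUCCEEDED(pConfig->GetStreamCaps(i, &pmt, (BYTE*)&caps)))
            {
                // ... inspect pmt, or call pConfig->SetFormat(pmt) ...
                if (pmt->pbFormat)
                    CoTaskMemFree(pmt->pbFormat);   // free the format block
                CoTaskMemFree(pmt);                 // then the structure itself
            }
        }
    }
    pConfig->Release();   // balance the reference added by FindInterface
}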
Example 1: listGUIDS
bool MIPDirectShowCapture::listGUIDS(std::list<GUID> &guids)
{
    guids.clear();

    HRESULT hr;

    IAMStreamConfig *pConfig = 0;
    hr = m_pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, 0, m_pCaptDevice, IID_IAMStreamConfig, (void**)&pConfig);
    if (HR_FAILED(hr))
    {
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECONFIG);
        return false;
    }

    int count = 0;
    int s = 0;
    hr = pConfig->GetNumberOfCapabilities(&count, &s);
    if (HR_FAILED(hr))
    {
        pConfig->Release();
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECAPS);
        return false;
    }

    if (s != sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        pConfig->Release();
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_INVALIDCAPS);
        return false;
    }

    for (int i = 0; i < count; i++)
    {
        VIDEO_STREAM_CONFIG_CAPS caps;
        AM_MEDIA_TYPE *pMediaType;

        hr = pConfig->GetStreamCaps(i, &pMediaType, (BYTE*)&caps);
        if (HR_SUCCEEDED(hr))
        {
            if (pMediaType->majortype == MEDIATYPE_Video)
            {
                GUID subType = pMediaType->subtype;
                guids.push_back(subType);
                // uint8_t *pSubType = (uint8_t *)&subType;
                //
                // printf("0x%02x%02x%02x%02x %c%c%c%c\n",(int)pSubType[0],(int)pSubType[1],(int)pSubType[2],(int)pSubType[3],
                //        (char)pSubType[0],(char)pSubType[1],(char)pSubType[2],(char)pSubType[3]);
            }
            if (pMediaType->pbFormat != 0)
                CoTaskMemFree(pMediaType->pbFormat);   // free the returned format block
        }
    }

    pConfig->Release();   // balance the reference added by FindInterface
    return true;
}
Example 2: errorCheck
HRESULT
recChannel_t::set_rate(float FR)
{
    __CONTEXT("recChannel_t::set_rate");

    if (FR < 1)
    {
        return S_OK;
    }

    float factorRate = FR / 30;
    HRESULT hr = S_OK;

    if (factorRate < 0.1f) factorRate = 0.1f;
    frameRate = factorRate;

    IAMStreamConfig *pConfig = NULL;
    if ((camInfo->getKind() == SHARED ||
         camInfo->getKind() == CAM) &&
        actualFormat.pbFormat != NULL)
    {
        VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) actualFormat.pbFormat;
        pVih->AvgTimePerFrame = (REFERENCE_TIME)(10000000.0 / FR);
        camInfo->setRate(pVih->AvgTimePerFrame);

        if (camInfo->getKind() == CAM)
        {
            // Disconnect the capture pin before changing its format
            IPin *pInput = NULL;
            get_camInfo()->output->ConnectedTo(&pInput);

            if (mapping)
            {
                pControl->Stop();
            }

            if (pInput)
            {
                get_camInfo()->output->Disconnect();
                pInput->Disconnect();
            }

            hr = get_camInfo()->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
            if (pConfig)
            {
                hr = pConfig->SetFormat(&actualFormat);
                errorCheck(hr);
                pConfig->Release();
            }

            if (pInput)
            {
                hr = pGraph->Connect(get_camInfo()->output, pInput);
                errorCheck(hr);
            }
            errorCheck(hr);

            if (mapping)
            {
                pControl->Run();
            }
        }
    }
    return hr;
}
Example 3: EnumResolutions
void VideoCapture::EnumResolutions()
{
    int iCount, iSize, iChosen = -1;
    IBaseFilter *pSource;
    CComPtr<ICaptureGraphBuilder2> pCaptB;
    VIDEO_STREAM_CONFIG_CAPS caps;
    HRESULT hr;
    bool response;
    IAMStreamConfig *pConfig;

    devices_resolutions = new DeviceResolutions[nDevices];

    pCaptB.CoCreateInstance(CLSID_CaptureGraphBuilder2);

    for (unsigned int iDevice = 0; iDevice < nDevices; iDevice++)
    {
        response = BindFilter(iDevice, &pSource);

        hr = pCaptB->FindInterface(
            &PIN_CATEGORY_CAPTURE,
            &MEDIATYPE_Video,
            pSource,
            IID_IAMStreamConfig,
            (void**)&pConfig);
        if (!SUCCEEDED(hr))
        {
            pSource->Release();
            devices_resolutions[iDevice].nResolutions = 0;
            continue;
        }

        pConfig->GetNumberOfCapabilities(&iCount, &iSize);
        devices_resolutions[iDevice].SetNResolutions(iCount);

        for (int i = 0; i < iCount; i++)
        {
            AM_MEDIA_TYPE *pmt;
            if (pConfig->GetStreamCaps(i, &pmt, reinterpret_cast<BYTE*>(&caps)) == S_OK)
            {
                VIDEOINFOHEADER *pVih =
                    reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
                devices_resolutions[iDevice].x[i] = caps.InputSize.cx;
                devices_resolutions[iDevice].y[i] = caps.InputSize.cy;
                devices_resolutions[iDevice].color_space[i] = pmt->subtype;
                devices_resolutions[iDevice].compression[i] = pVih->bmiHeader.biCompression;
                DeleteMediaType(pmt);
            }
        }
        pSource->Release();
        pConfig->Release();
        pSource = 0;
    }
}
Example 4: SetCaptureBufferSize
void CCaptureDevice::SetCaptureBufferSize(void)
{
    IPin *pCapturePin = GetPin();
    if (pCapturePin)
    {
        DWORD dwBytesPerSec = 0;
        AM_MEDIA_TYPE *pmt = NULL;
        IAMStreamConfig *pCfg = NULL;
        HRESULT hr = pCapturePin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
        if (hr == S_OK)
        {
            hr = pCfg->GetFormat(&pmt);
            if (hr == S_OK)
            {
                // Force the capture format to 8-bit mono at 11025 Hz
                WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
                dwBytesPerSec = pWF->nAvgBytesPerSec;
                pWF->nChannels = 1;
                pWF->wBitsPerSample = 8;
                pWF->nSamplesPerSec = 11025;
                pWF->nAvgBytesPerSec = pWF->nSamplesPerSec * pWF->nChannels * pWF->wBitsPerSample / 8;
                pWF->nBlockAlign = 1;
                /*
                info.cbSize = sizeof(WAVEFORMATEX);
                info.wFormatTag = 1;
                info.nChannels = 2;
                info.nSamplesPerSec = 44100;
                //info.nSamplesPerSec = 22050;
                //info.nSamplesPerSec = 11025;
                info.wBitsPerSample = 16;
                info.nAvgBytesPerSec = info.nSamplesPerSec * info.nChannels * info.wBitsPerSample / 8;
                info.nBlockAlign = 4;
                */
                pCfg->SetFormat(pmt);
                DeleteMediaType(pmt);
            }
            pCfg->Release();
        }

        /* if (dwBytesPerSec)
        {
            IAMBufferNegotiation *pNeg = NULL;
            hr = pCapturePin->QueryInterface(IID_IAMBufferNegotiation,
                                             (void **)&pNeg);
            if (SUCCEEDED(hr))
            {
                ALLOCATOR_PROPERTIES AllocProp;
                AllocProp.cbAlign = -1;   // -1 means no preference.
                AllocProp.cbBuffer = dwBytesPerSec * dwLatencyInMilliseconds / 1000;
                AllocProp.cbPrefix = -1;
                AllocProp.cBuffers = -1;
                hr = pNeg->SuggestAllocatorProperties(&AllocProp);
                pNeg->Release();
            }
        } */
    }
}
Example 5: GetPin
IAMStreamConfig * CCaptureDevice::GetStreamConfig(void)
{
    IAMStreamConfig *pConfig = NULL;
    if (m_pBaseFilter)
    {
        // Get the capture output pin first
        IPin *pCapture = GetPin();
        if (pCapture)
        {
            pCapture->QueryInterface(IID_IAMStreamConfig, (void **)&pConfig);
        }
        if (pConfig)
        {
            // Release() drops the reference that QueryInterface added, so the
            // caller receives a non-owning pointer that remains usable only
            // while the filter keeps the pin (and interface) alive.
            pConfig->Release();
        }
    }
    return pConfig;
}
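Example 5 releases the interface before returning it, so the caller gets a pointer it does not own; patterns like this are one reason many codebases wrap DirectShow interfaces in smart pointers instead. Below is a minimal sketch using ATL's CComQIPtr, assuming the ATL headers are available; PrintCapabilityCount is a hypothetical helper, not part of any project above.

#include <atlbase.h>   // CComQIPtr (ATL smart pointer)
#include <dshow.h>
#include <cstdio>

HRESULT PrintCapabilityCount(IPin *pPin)
{
    // The CComQIPtr constructor performs the QueryInterface; its destructor
    // calls Release() automatically on every return path.
    CComQIPtr<IAMStreamConfig> pConfig(pPin);
    if (!pConfig)
        return E_NOINTERFACE;

    int count = 0, size = 0;
    HRESULT hr = pConfig->GetNumberOfCapabilities(&count, &size);
    if (SUCCEEDED(hr))
        printf("%d stream capabilities\n", count);

    return hr;   // pConfig releases IAMStreamConfig here
}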
Example 6: SetAudioFormat
/// Set the audio format
BOOL CAudioCapture::SetAudioFormat(ENUM_FREQUENCY_TYPE enFrequency,
    ENUM_CHANNEL_TYPE enChannel, ENUM_SAMPLE_TYPE enSample)
{
    if (NULL != m_pCaptureFilter)
    {
        BOOL bResult = FALSE;
        do
        {
            IPin* pOutPin = GetOutputPin(m_pCaptureFilter, (uint16_t)0);
            if (NULL != pOutPin)
            {
                IAMBufferNegotiation *pNeg = NULL;
                IAMStreamConfig *pCfg = NULL;

                // Get buffer negotiation interface
                HRESULT hr = pOutPin->QueryInterface(IID_IAMBufferNegotiation, (void **)&pNeg);
                if (FAILED(hr))
                {
                    pOutPin->Release();
                    break;
                }

                // Find number of bytes in one second
                long lBytesPerSecond = (long)(enSample * enFrequency * enChannel);

                // Buffer size tuned for the FAAC encoder (1024 samples per frame)
                long lBufferSize = 1024 * enSample * enChannel;

                // Set the buffer size based on the selected settings
                ALLOCATOR_PROPERTIES prop = {0};
                prop.cbBuffer = lBufferSize;
                prop.cBuffers = 6;
                prop.cbAlign = enSample * enChannel;
                hr = pNeg->SuggestAllocatorProperties(&prop);
                pNeg->Release();

                // Now set the actual format of the audio data
                hr = pOutPin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
                if (FAILED(hr))
                {
                    pOutPin->Release();
                    break;
                }

                // Read the current media type/format
                AM_MEDIA_TYPE *pmt = NULL;
                hr = pCfg->GetFormat(&pmt);
                if (SUCCEEDED(hr))
                {
                    // Fill in values for the new format
                    WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
                    pWF->nChannels = (WORD) enChannel;
                    pWF->nSamplesPerSec = enFrequency;
                    pWF->nAvgBytesPerSec = lBytesPerSecond;
                    pWF->wBitsPerSample = (WORD) (enSample * 8);
                    pWF->nBlockAlign = (WORD) (enSample * enChannel);

                    // Set the new format on the output pin
                    hr = pCfg->SetFormat(pmt);
                    UtilDeleteMediaType(pmt);
                }

                // Release interfaces
                pCfg->Release();
                pOutPin->Release();
                bResult = TRUE;
            }
        } while (FALSE);
        return bResult;
    }
    else
    {
        m_enFrequency = enFrequency;
        m_enChannel = enChannel;
        m_enSample = enSample;
        return TRUE;
    }
}
Example 7: setCaptureOutputFormat
void DirectShowGrabber::setCaptureOutputFormat() {
    IAMStreamConfig *pConfig;
    int iCount;
    int iSize;
    VIDEOINFOHEADER *pVih;
    VIDEO_STREAM_CONFIG_CAPS scc;
    AM_MEDIA_TYPE *pmtConfig;
    int formatSet;
    HRESULT hr;

    // Reference: http://msdn.microsoft.com/library/default.asp?url=/library/en-us/directshow/htm/configurethevideooutputformat.asp

    debug_msg("DirectShowGrabber::setCaptureOutputFormat(): enter...\n");

    formatSet = 0;
    pConfig = NULL;
    hr = pBuild_->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                pCaptureFilter_, IID_IAMStreamConfig, (void**)&pConfig);
    if (FAILED(hr)) {
        Grabber::status_ = -1;
        return;
    }
    debug_msg("DirectShowGrabber::setCaptureOutputFormat(): IAMStreamConfig interface acquired\n");

    iCount = iSize = 0;
    hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
    // Check the size to make sure we pass in the correct structure.
    // (The alternative output for iSize would be AUDIO_STREAM_CONFIG_CAPS.)
    if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS)) {
        for (int iFormat = 0; iFormat < iCount; iFormat++) {
            hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE *)&scc);
            //showErrorMessage(hr);
            if (SUCCEEDED(hr)) {
                if ((pmtConfig->majortype == MEDIATYPE_Video) &&
                    (pmtConfig->subtype == MEDIASUBTYPE_RGB24) &&
                    (pmtConfig->formattype == FORMAT_VideoInfo) &&
                    (pmtConfig->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
                    (pmtConfig->pbFormat != NULL)) {
                    pVih = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
                    pVih->bmiHeader.biWidth = 320;
                    pVih->bmiHeader.biHeight = 240;
                    pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader);

                    debug_msg("Windows GDI BITMAPINFOHEADER follows:\n");
                    debug_msg("biWidth=         %d\n", pVih->bmiHeader.biWidth);
                    debug_msg("biHeight=        %d\n", pVih->bmiHeader.biHeight);
                    debug_msg("biSize=          %d\n", pVih->bmiHeader.biSize);
                    debug_msg("biPlanes=        %d\n", pVih->bmiHeader.biPlanes);
                    debug_msg("biBitCount=      %d\n", pVih->bmiHeader.biBitCount);
                    debug_msg("biCompression=   %d\n", pVih->bmiHeader.biCompression);
                    debug_msg("biSizeImage=     %d\n", pVih->bmiHeader.biSizeImage);
                    debug_msg("biXPelsPerMeter= %d\n", pVih->bmiHeader.biXPelsPerMeter);
                    debug_msg("biYPelsPerMeter= %d\n", pVih->bmiHeader.biYPelsPerMeter);
                    debug_msg("biClrUsed=       %d\n", pVih->bmiHeader.biClrUsed);
                    debug_msg("biClrImportant=  %d\n", pVih->bmiHeader.biClrImportant);

                    hr = pConfig->SetFormat(pmtConfig);
                    //showErrorMessage(hr);

                    // XXX: leak. need to deal with this - msp
                    //DeleteMediaType(pmtConfig);

                    formatSet = 1;
                    break;
                }
            }
        }
    }
    pConfig->Release();

    if (formatSet)
        debug_msg("DirectShowGrabber::setCaptureOutputFormat: format set\n");
    else
        debug_msg("DirectShowGrabber::setCaptureOutputFormat: format not set\n");
}
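The "XXX: leak" note above concerns the AM_MEDIA_TYPE returned by GetStreamCaps, which the caller must free. The DeleteMediaType helper used in Examples 3 and 4 ships with the DirectShow base classes rather than the core SDK headers; where the base classes are not linked in, the Microsoft-documented equivalent looks like this:

// Free the format block and any embedded pUnk of an AM_MEDIA_TYPE.
void FreeMediaType(AM_MEDIA_TYPE &mt)
{
    if (mt.cbFormat != 0)
    {
        CoTaskMemFree((PVOID)mt.pbFormat);
        mt.cbFormat = 0;
        mt.pbFormat = NULL;
    }
    if (mt.pUnk != NULL)
    {
        mt.pUnk->Release();
        mt.pUnk = NULL;
    }
}

// Also free the structure itself (for types returned by GetFormat/GetStreamCaps).
void DeleteMediaType(AM_MEDIA_TYPE *pmt)
{
    if (pmt != NULL)
    {
        FreeMediaType(*pmt);
        CoTaskMemFree(pmt);
    }
}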
Example 8: CaptureVideo
HRESULT CaptureVideo()
{
    HRESULT hr;
    IBaseFilter *pSrcFilter = NULL;

    // Get DirectShow interfaces
    hr = GetInterfaces();
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to get video interfaces! hr=0x%x"), hr);
        return hr;
    }

    // Attach the filter graph to the capture graph
    hr = g_pCapture->SetFiltergraph(g_pGraph);
    if (FAILED(hr))
    {
        Msg(TEXT("Failed to set capture filter graph! hr=0x%x"), hr);
        return hr;
    }

    // Use the system device enumerator and class enumerator to find
    // a video capture/preview device, such as a desktop USB video camera.
    hr = FindCaptureDevice(&pSrcFilter);
    if (FAILED(hr))
    {
        // Don't display a message because FindCaptureDevice will handle it
        return hr;
    }

    // Add Capture filter to our graph.
    hr = g_pGraph->AddFilter(pSrcFilter, L"Video Capture");
    if (FAILED(hr))
    {
        Msg(TEXT("Couldn't add the capture filter to the graph! hr=0x%x\r\n\r\n")
            TEXT("If you have a working video capture device, please make sure\r\n")
            TEXT("that it is connected and is not being used by another application.\r\n\r\n")
            TEXT("The sample will now close."), hr);
        pSrcFilter->Release();
        return hr;
    }

    // Copied code
    //========================================
    IAMStreamConfig *pSC = NULL;

    // Prefer an interleaved (DV-style) stream; fall back to plain video.
    hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW,
                                   &MEDIATYPE_Interleaved,
                                   pSrcFilter, IID_IAMStreamConfig, (void **)&pSC);
    if (FAILED(hr))
        hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW,
                                       &MEDIATYPE_Video, pSrcFilter,
                                       IID_IAMStreamConfig, (void **)&pSC);
    if (!pSC) {
        return hr;
    }

    int iCount = 0, iSize = 0;
    hr = pSC->GetNumberOfCapabilities(&iCount, &iSize);

    // Check the size to make sure we pass in the correct structure.
    if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        // Use the video capabilities structure.
        for (int iFormat = 0; iFormat < iCount; iFormat++)
        {
            VIDEO_STREAM_CONFIG_CAPS scc;
            AM_MEDIA_TYPE *pmtConfig;
            hr = pSC->GetFormat(&pmtConfig);

            VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
            double fps = 30;
            pvi->AvgTimePerFrame = (LONGLONG)(10000000 / fps);
            // Set the dimensions before recomputing the image size.
            pvi->bmiHeader.biWidth = 1920;
            pvi->bmiHeader.biHeight = 1080;
            pvi->bmiHeader.biSizeImage = DIBSIZE(pvi->bmiHeader);
            hr = pSC->SetFormat(pmtConfig);

            //hr = pSC->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
            //if (SUCCEEDED(hr))
            //{
            //    /* Examine the format, and possibly use it. */
            //    if (pmtConfig->formattype == FORMAT_VideoInfo) {
            //        long width  = HEADER(pmtConfig->pbFormat)->biWidth;
            //        long height = HEADER(pmtConfig->pbFormat)->biHeight;
//......... some code omitted here .........
Example 9: main
int main(int argc, char* argv[])
{
    ICaptureGraphBuilder2 *pCaptureGraphBuilder = NULL;
    IGraphBuilder *pGraphBuilder = NULL;
    IBaseFilter *pSource = NULL;
    IBaseFilter *pMux = NULL;
    IBaseFilter *pVideoCompressor = NULL;
    IBaseFilter *pAudioCompressor = NULL;

    IAMStreamConfig *pAMStreamConfig = NULL;
    IAMVideoCompression *pAMVideoCompression = NULL;

    IMediaControl *pControl = NULL;
    IMediaSeeking *pSeek = NULL;
    IMediaEvent *pEvent = NULL;

    HRESULT hr;
    DWORD pdwRegister = 0;

    CoInitialize(NULL);

    // Create the capture graph builder.
    CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
                     IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder);

    // Make the rendering section of the graph.
    pCaptureGraphBuilder->SetOutputFileName(
        &MEDIASUBTYPE_Avi,    // File type.
        L"C:\\STDIUE1.avi",   // File name.
        &pMux,                // Pointer to the multiplexer.
        NULL);                // Pointer to the file writer.

    // Load the source file.
    pCaptureGraphBuilder->GetFiltergraph(&pGraphBuilder);
    pGraphBuilder->AddSourceFilter(L"C:\\Program Files\\Microsoft Money\\Media\\STDIUE1.avi", L"Source Filter", &pSource);

    // Add the compressor filter.
    CoCreateInstance(CLSID_AVICo, NULL, CLSCTX_INPROC,
                     IID_IBaseFilter, (void **)&pVideoCompressor);
    pGraphBuilder->AddFilter(pVideoCompressor, L"Video Compressor");

    // Render the video stream, through the compressor.
    pCaptureGraphBuilder->RenderStream(
        NULL,               // Output pin category
        NULL,               // Media type
        pSource,            // Source filter
        pVideoCompressor,   // Compressor filter
        pMux);              // Sink filter (the AVI Mux)

    /* CoCreateInstance(CLSID_GSM, NULL, CLSCTX_INPROC,
                        IID_IBaseFilter, (void **)&pAudioCompressor);
       pGraphBuilder->AddFilter(pAudioCompressor, L"Audio Compressor"); */

    // Render the audio stream.
    pCaptureGraphBuilder->RenderStream(
        NULL,
        NULL,
        pSource,
        pAudioCompressor,
        pMux);

    // Compress at a 100 kbit/second data rate.
    AM_MEDIA_TYPE *pmt;
    pCaptureGraphBuilder->FindInterface(NULL, NULL, pVideoCompressor, IID_IAMStreamConfig, (void **)&pAMStreamConfig);

    pAMStreamConfig->GetFormat(&pmt);
    if (pmt->formattype == FORMAT_VideoInfo)
    {
        ((VIDEOINFOHEADER *)(pmt->pbFormat))->dwBitRate = 100000;
        pAMStreamConfig->SetFormat(pmt);
    }

    // Request key frames every four frames.
    pAMStreamConfig->QueryInterface(IID_IAMVideoCompression, (void **)&pAMVideoCompression);
    pAMVideoCompression->put_KeyFrameRate(4);
    pAMVideoCompression->Release();
    pAMStreamConfig->Release();

    // Run the graph.
    pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pControl);
    pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&pEvent);
    hr = pMux->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);

    pControl->Run();
    printf("Recompressing... \n");

    long evCode;
    if (SUCCEEDED(hr))
    {
        REFERENCE_TIME rtTotal, rtNow = 0;
        pSeek->GetDuration(&rtTotal);
        while ((pEvent->WaitForCompletion(1000, &evCode)) == E_ABORT)
        {
//......... some code omitted here .........
Example 10: if
std::vector<CameraConfig> videoInputCamera::getCameraConfigs(int dev_id) {
    std::vector<CameraConfig> cfg_list;

    int count = getDeviceCount();
    if (count == 0) return cfg_list;

    comInit();

    HRESULT hr;
    ICaptureGraphBuilder2 *lpCaptureGraphBuilder;
    IGraphBuilder *lpGraphBuilder;
    IBaseFilter *lpInputFilter;
    IAMStreamConfig *lpStreamConfig;

    char nDeviceName[255];
    WCHAR wDeviceName[255];

    for (int cam_id = 0; cam_id < count; cam_id++) {
        if ((dev_id >= 0) && (dev_id != cam_id)) continue;

        hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&lpCaptureGraphBuilder);
        if (FAILED(hr))   // FAILED is a macro that tests the return value
        {
            printf("ERROR - Could not create the Filter Graph Manager\n");
            comUnInit();
            return cfg_list;
        }

        // Create the Filter Graph Manager.
        hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void**)&lpGraphBuilder);
        if (FAILED(hr))
        {
            printf("ERROR - Could not add the graph builder!\n");
            lpCaptureGraphBuilder->Release();
            comUnInit();
            return cfg_list;
        }

        hr = lpCaptureGraphBuilder->SetFiltergraph(lpGraphBuilder);
        if (FAILED(hr))
        {
            printf("ERROR - Could not set filtergraph\n");
            lpGraphBuilder->Release();
            lpCaptureGraphBuilder->Release();
            comUnInit();
            return cfg_list;
        }

        memset(wDeviceName, 0, sizeof(WCHAR) * 255);
        memset(nDeviceName, 0, sizeof(char) * 255);

        hr = getDevice(&lpInputFilter, cam_id, wDeviceName, nDeviceName);
        if (SUCCEEDED(hr)) {
            hr = lpGraphBuilder->AddFilter(lpInputFilter, wDeviceName);
        } else {
            printf("ERROR - Could not find specified video device\n");
            lpGraphBuilder->Release();
            lpCaptureGraphBuilder->Release();
            comUnInit();
            return cfg_list;
        }

        hr = lpCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, lpInputFilter, IID_IAMStreamConfig, (void **)&lpStreamConfig);
        if (FAILED(hr)) {
            printf("ERROR: Couldn't config the stream!\n");
            lpInputFilter->Release();
            lpGraphBuilder->Release();
            lpCaptureGraphBuilder->Release();
            comUnInit();
            return cfg_list;
        }

        CameraConfig cam_cfg;
        CameraTool::initCameraConfig(&cam_cfg);

        cam_cfg.driver = DRIVER_DEFAULT;
        cam_cfg.device = cam_id;
        sprintf(cam_cfg.name, "%s", nDeviceName);

        int iCount = 0;
        int iSize = 0;
        hr = lpStreamConfig->GetNumberOfCapabilities(&iCount, &iSize);

        std::vector<CameraConfig> fmt_list;

        if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
        {
            GUID lastFormat = MEDIASUBTYPE_None;
            for (int iFormat = 0; iFormat < iCount; iFormat += 2)
            {
                VIDEO_STREAM_CONFIG_CAPS scc;
                AM_MEDIA_TYPE *pmtConfig;
                hr = lpStreamConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);

                if (SUCCEEDED(hr)) {
                    if (pmtConfig->subtype != lastFormat) {
                        if (fmt_list.size() > 0) {
                            std::sort(fmt_list.begin(), fmt_list.end());
                            cfg_list.insert(cfg_list.end(), fmt_list.begin(), fmt_list.end());
                            fmt_list.clear();
//......... some code omitted here .........
Example 11: setFormat
bool MIPDirectShowCapture::setFormat(int w, int h, real_t rate)
{
    HRESULT hr;

    IAMStreamConfig *pConfig = 0;
    hr = m_pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, 0, m_pCaptDevice, IID_IAMStreamConfig, (void**)&pConfig);
    if (HR_FAILED(hr))
    {
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECONFIG);
        return false;
    }

    int count = 0;
    int s = 0;
    hr = pConfig->GetNumberOfCapabilities(&count, &s);
    if (HR_FAILED(hr))
    {
        pConfig->Release();
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTGETDEVICECAPS);
        return false;
    }

    if (s != sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        pConfig->Release();
        setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_INVALIDCAPS);
        return false;
    }

    for (int i = 0; i < count; i++)
    {
        VIDEO_STREAM_CONFIG_CAPS caps;
        AM_MEDIA_TYPE *pMediaType;

        hr = pConfig->GetStreamCaps(i, &pMediaType, (BYTE*)&caps);
        if (HR_SUCCEEDED(hr))
        {
            if ((pMediaType->majortype == MEDIATYPE_Video) &&
                (pMediaType->subtype == m_selectedGuid) &&
                (pMediaType->formattype == FORMAT_VideoInfo) &&
                (pMediaType->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
                (pMediaType->pbFormat != 0))
            {
                VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pMediaType->pbFormat;

                pVih->bmiHeader.biWidth = w;
                pVih->bmiHeader.biHeight = h;
                pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader);
                pVih->AvgTimePerFrame = (REFERENCE_TIME)(10000000.0 / rate);

                hr = pConfig->SetFormat(pMediaType);
                if (HR_SUCCEEDED(hr))
                {
                    CoTaskMemFree(pMediaType->pbFormat);
                    pConfig->Release();
                    return true;
                }
            }
            if (pMediaType->pbFormat != 0)
                CoTaskMemFree(pMediaType->pbFormat);
        }
    }

    pConfig->Release();
    setErrorString(MIPDIRECTSHOWCAPTURE_ERRSTR_CANTSETCAPS);
    return false;
}
Example 12: gst_caps_ref
static GstCaps *
gst_dshowaudiosrc_get_caps (GstBaseSrc * basesrc)
{
  HRESULT hres = S_OK;
  IBindCtx *lpbc = NULL;
  IMoniker *audiom = NULL;
  DWORD dwEaten;
  GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (basesrc);
  gunichar2 *unidevice = NULL;

  if (src->device) {
    g_free (src->device);
    src->device = NULL;
  }

  src->device =
      gst_dshow_getdevice_from_devicename (&CLSID_AudioInputDeviceCategory,
      &src->device_name);
  if (!src->device) {
    GST_ERROR ("No audio device found.");
    return NULL;
  }

  unidevice =
      g_utf8_to_utf16 (src->device, strlen (src->device), NULL, NULL, NULL);

  if (!src->audio_cap_filter) {
    hres = CreateBindCtx (0, &lpbc);
    if (SUCCEEDED (hres)) {
      hres =
          MkParseDisplayName (lpbc, (LPCOLESTR) unidevice, &dwEaten, &audiom);
      if (SUCCEEDED (hres)) {
        hres = audiom->BindToObject (lpbc, NULL, IID_IBaseFilter,
            (LPVOID *) & src->audio_cap_filter);
        audiom->Release ();
      }
      lpbc->Release ();
    }
  }

  if (src->audio_cap_filter && !src->caps) {
    /* get the capture pins supported types */
    IPin *capture_pin = NULL;
    IEnumPins *enumpins = NULL;
    HRESULT hres;

    hres = src->audio_cap_filter->EnumPins (&enumpins);
    if (SUCCEEDED (hres)) {
      while (enumpins->Next (1, &capture_pin, NULL) == S_OK) {
        IKsPropertySet *pKs = NULL;
        hres =
            capture_pin->QueryInterface (IID_IKsPropertySet, (LPVOID *) & pKs);
        if (SUCCEEDED (hres) && pKs) {
          DWORD cbReturned;
          GUID pin_category;
          RPC_STATUS rpcstatus;

          hres =
              pKs->Get (AMPROPSETID_Pin,
              AMPROPERTY_PIN_CATEGORY, NULL, 0, &pin_category, sizeof (GUID),
              &cbReturned);

          /* we only want capture pins */
          if (UuidCompare (&pin_category, (UUID *) & PIN_CATEGORY_CAPTURE,
                  &rpcstatus) == 0) {
            IAMStreamConfig *streamcaps = NULL;

            if (SUCCEEDED (capture_pin->QueryInterface (IID_IAMStreamConfig,
                        (LPVOID *) & streamcaps))) {
              src->caps =
                  gst_dshowaudiosrc_getcaps_from_streamcaps (src, capture_pin,
                  streamcaps);
              streamcaps->Release ();
            }
          }

          pKs->Release ();
        }

        capture_pin->Release ();
      }
      enumpins->Release ();
    }
  }

  if (unidevice) {
    g_free (unidevice);
  }

  if (src->caps) {
    return gst_caps_ref (src->caps);
  }

  return NULL;
}
Example 13: getDevFilter
QVector<VideoMode> DirectShow::getDeviceModes(QString devName)
{
    QVector<VideoMode> modes;

    IBaseFilter* devFilter = getDevFilter(devName);
    if (!devFilter)
        return modes;

    // The outer loop tries to find a valid output pin
    GUID category;
    DWORD r2;
    IEnumPins *pins = nullptr;
    IPin *pin;
    if (devFilter->EnumPins(&pins) != S_OK)
        return modes;

    while (pins->Next(1, &pin, nullptr) == S_OK)
    {
        IKsPropertySet *p = nullptr;
        PIN_INFO info;

        pin->QueryPinInfo(&info);
        info.pFilter->Release();
        if (info.dir != PINDIR_OUTPUT)
            goto next;
        if (pin->QueryInterface(IID_IKsPropertySet, (void**)&p) != S_OK)
            goto next;
        if (p->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY,
                   nullptr, 0, &category, sizeof(GUID), &r2) != S_OK)
            goto next;
        if (!IsEqualGUID(category, PIN_CATEGORY_CAPTURE))
            goto next;

        // Now we can list the video modes for the current pin
        // Prepare for another wall of spaghetti DIRECT SHOW QUALITY code
        {
            IAMStreamConfig *config = nullptr;
            VIDEO_STREAM_CONFIG_CAPS *vcaps = nullptr;
            int size, n;
            if (pin->QueryInterface(IID_IAMStreamConfig, (void**)&config) != S_OK)
                goto next;
            if (config->GetNumberOfCapabilities(&n, &size) != S_OK)
                goto pinend;

            assert(size == sizeof(VIDEO_STREAM_CONFIG_CAPS));
            vcaps = new VIDEO_STREAM_CONFIG_CAPS;

            for (int i = 0; i < n; ++i)
            {
                AM_MEDIA_TYPE* type = nullptr;
                if (config->GetStreamCaps(i, &type, (BYTE*)vcaps) != S_OK)
                    goto nextformat;

                if (!IsEqualGUID(type->formattype, FORMAT_VideoInfo)
                    && !IsEqualGUID(type->formattype, FORMAT_VideoInfo2))
                    goto nextformat;

                VideoMode mode;
                mode.width = vcaps->MaxOutputSize.cx;
                mode.height = vcaps->MaxOutputSize.cy;
                mode.FPS = 1e7 / vcaps->MinFrameInterval;
                if (!modes.contains(mode))
                    modes.append(std::move(mode));

nextformat:
                if (type)   // GetStreamCaps may have failed, leaving type null
                {
                    if (type->pbFormat)
                        CoTaskMemFree(type->pbFormat);
                    CoTaskMemFree(type);
                }
            }
pinend:
            config->Release();
            delete vcaps;
        }
next:
        if (p)
            p->Release();
        pin->Release();
    }

    return modes;
}
Example 14: IAMStreamConfig
static GstCaps *
gst_dshowvideosrc_getcaps_from_streamcaps (GstDshowVideoSrc * src, IPin * pin)
{
  GstCaps *caps = NULL;
  HRESULT hres = S_OK;
  int icount = 0;
  int isize = 0;
  VIDEO_STREAM_CONFIG_CAPS vscc;
  int i = 0;
  IAMStreamConfig *streamcaps = NULL;

  hres = pin->QueryInterface (IID_IAMStreamConfig, (LPVOID *) & streamcaps);
  if (FAILED (hres)) {
    GST_ERROR ("Failed to retrieve IAMStreamConfig (error=0x%x)", hres);
    return NULL;
  }

  streamcaps->GetNumberOfCapabilities (&icount, &isize);

  if (isize != sizeof (vscc)) {
    streamcaps->Release ();
    return NULL;
  }

  caps = gst_caps_new_empty ();

  for (i = 0; i < icount; i++) {
    GstCapturePinMediaType *pin_mediatype =
        gst_dshow_new_pin_mediatype_from_streamcaps (pin, i, streamcaps);

    if (pin_mediatype) {
      GstCaps *mediacaps = NULL;
      GstVideoFormat video_format =
          gst_dshow_guid_to_gst_video_format (pin_mediatype->mediatype);

      if (video_format != GST_VIDEO_FORMAT_UNKNOWN) {
        mediacaps = gst_dshow_new_video_caps (video_format, NULL,
            pin_mediatype);
      } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
              MEDIASUBTYPE_dvsd, FORMAT_VideoInfo)) {
        mediacaps =
            gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
            "video/x-dv, systemstream=FALSE", pin_mediatype);
      } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
              MEDIASUBTYPE_dvsd, FORMAT_DvInfo)) {
        mediacaps =
            gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
            "video/x-dv, systemstream=TRUE", pin_mediatype);

        pin_mediatype->granularityWidth = 0;
        pin_mediatype->granularityHeight = 0;
      } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
              MEDIASUBTYPE_MJPG, FORMAT_VideoInfo)) {
        mediacaps = gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
            "image/jpeg", pin_mediatype);
      }

      if (mediacaps) {
        src->pins_mediatypes =
            g_list_append (src->pins_mediatypes, pin_mediatype);
        gst_caps_append (caps, mediacaps);
      } else {
        /* failed to convert dshow caps */
        gst_dshow_free_pin_mediatype (pin_mediatype);
      }
    }
  }

  streamcaps->Release ();

  if (caps && gst_caps_is_empty (caps)) {
    gst_caps_unref (caps);
    caps = NULL;
  }

  return caps;
}
Example 15: init
//......... some code omitted here .........
    // To take each frame for texture generation
    hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
                          IID_ISampleGrabber, (void **)&pGrabber);
    if (FAILED(hr)) return hr;
    hr = pGrabber->QueryInterface(IID_IBaseFilter, (void **)&pGrabberBase);

    // We have to set the 24-bit RGB desire here
    // so that the proper conversion filters
    // are added automatically.
    AM_MEDIA_TYPE desiredType;
    memset(&desiredType, 0, sizeof(desiredType));
    desiredType.majortype = MEDIATYPE_Video;
    desiredType.subtype = MEDIASUBTYPE_RGB24;
    desiredType.formattype = FORMAT_VideoInfo;

    pGrabber->SetMediaType(&desiredType);
    pGrabber->SetBufferSamples(TRUE);

    // Add to graph
    pGraph->AddFilter(pGrabberBase, L"Grabber");

    /* Null render filter */
    hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&pNullRender);
    if (FAILED(hr)) return hr;
    pGraph->AddFilter(pNullRender, L"Render");

    // Connect the graph
    hr = ConnectFilters(pGraph, pCap, pGrabberBase);
    if (FAILED(hr)) return hr;
    hr = ConnectFilters(pGraph, pGrabberBase, pNullRender);

    // Set output format of capture:
    IAMStreamConfig *pConfig = NULL;
    hr = pBuild->FindInterface(
        &PIN_CATEGORY_CAPTURE,   // Capture pin.
        0,                       // Any media type.
        pCap,                    // Pointer to the capture filter.
        IID_IAMStreamConfig, (void**)&pConfig);
    if (FAILED(hr)) return hr;

    AM_MEDIA_TYPE *pmtConfig;
    hr = pConfig->GetFormat(&pmtConfig);
    if (FAILED(hr)) return hr;

    // Try to find a good video format
    int iCount = 0, iSize = 0;
    hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
    // Check the size to make sure we pass in the correct structure.
    if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        // Use the video capabilities structure.
        for (int iFormat = 0; iFormat < iCount; iFormat++)
        {
            VIDEO_STREAM_CONFIG_CAPS scc;
            AM_MEDIA_TYPE *pmtCaps;   // per-capability media type
            hr = pConfig->GetStreamCaps(iFormat, &pmtCaps, (BYTE*)&scc);
            if (SUCCEEDED(hr))
            {
                VIDEOINFOHEADER *hdr = (VIDEOINFOHEADER *)pmtCaps->pbFormat;
                if (hdr->bmiHeader.biWidth == CAM_WIDTH &&
                    hdr->bmiHeader.biHeight == CAM_HEIGHT &&
                    hdr->bmiHeader.biBitCount == 24)
                {
                    pConfig->SetFormat(pmtCaps);
                }
            }
        }
    }
    pConfig->Release();

    // Set camera stuff
    IAMCameraControl *pCamControl = NULL;
    hr = pCap->QueryInterface(IID_IAMCameraControl, (void **)&pCamControl);
    if (FAILED(hr)) return hr;

    // Get the range and default value.
    long Min, Max, Step, Default, Flags;
    // For getting: long Val;
    hr = pCamControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr)) pCamControl->Set(CameraControl_Focus, 0, CameraControl_Flags_Manual);
#if 0
    hr = pCamControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr)) pCamControl->Set(CameraControl_Exposure, -4, CameraControl_Flags_Manual);
#endif
    pCamControl->Release();

    IAMVideoProcAmp *pProcAmp = 0;
    hr = pCap->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
    if (FAILED(hr)) return hr;
#if 0
    hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Brightness, 30, VideoProcAmp_Flags_Manual);
    hr = pProcAmp->GetRange(VideoProcAmp_Gain, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Gain, 30, VideoProcAmp_Flags_Manual);
    hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_WhiteBalance, 4500, VideoProcAmp_Flags_Manual);
    hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);
    hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
    if (SUCCEEDED(hr)) pProcAmp->Set(VideoProcAmp_Contrast, 6, VideoProcAmp_Flags_Manual);
#endif
    pProcAmp->Release();

    hr = pMediaControl->Run();
    return hr;
}