本文整理汇总了C++中IGraphBuilder::QueryInterface方法的典型用法代码示例。如果您正苦于以下问题:C++ IGraphBuilder::QueryInterface方法的具体用法?C++ IGraphBuilder::QueryInterface怎么用?C++ IGraphBuilder::QueryInterface使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类IGraphBuilder的用法示例。
在下文中一共展示了IGraphBuilder::QueryInterface方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: main
void main()
{
int option;
IGraphBuilder *pGraph = NULL; //Creem l'apuntador al graf de filtres
IMediaControl *pControl = NULL; //creem l'apuntador a un controlador per ayurar i iniciar el graf
IMediaEvent *pEvent = NULL; // apunta a l'objecte necessari per obtenir events del filter graph manager
//IBaseFilter *pGrabberF = NULL;
//ISampleGrabber *pGrabber = NULL;
HRESULT hr = CoInitialize(NULL); // Inicialitzem la llibreria COM
if ( FAILED(hr) ){
printf("ERROR - Could not initialize COM library");
return;
}
// Create the filter graph manager and query for interfaces.
hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&pGraph);
if (FAILED(hr))
{
printf("ERROR - Could not create the Filter Graph Manager.");
return;
}
hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pControl);
hr = pGraph->QueryInterface(IID_IMediaEvent, (void **)&pEvent);
// Build the graph. IMPORTANT: Change this string to a file on your system.
cout<<"introduce 1:chicken 2:futbol 3: video futbol audio chicken: \n";
cin>>option;
switch(option)
{
case 1: hr = pGraph->RenderFile(L"C:\\Users\\Victor\\Downloads\\chicken.wmv", NULL);
break;
case 2: hr = pGraph->RenderFile(L"C:\\Users\\Victor\\Downloads\\futbol.mpg", NULL);
break;
case 3: // Create the Sample Grabber filter.
break;
}
if (SUCCEEDED(hr))
{
// Run the graph.
hr = pControl->Run();
if (SUCCEEDED(hr))
{
// Wait for completion.
long evCode;
pEvent->WaitForCompletion(INFINITE, &evCode);
// Note: Do not use INFINITE in a real application, because it
// can block indefinitely.
}
}
pControl->Release();
pEvent->Release();
pGraph->Release();
CoUninitialize();
}
示例2: play_movie
// Renders a hard-coded AVI into a child video window hosted by `hwnd`,
// runs the graph to completion, then tears everything down.
// FIX: the original ignored every HRESULT and would dereference NULL
// interface pointers on any failure; cleanup also called Release() on
// pointers that might never have been obtained.
void play_movie( HWND hwnd )
{
    IGraphBuilder *pGraph = NULL;
    IMediaControl *pMediaControl = NULL;
    IMediaEvent *pEvent = NULL;
    IBasicVideo *pBasic = NULL;
    IVideoWindow *pVidWin = NULL;
    RECT grc;
    long width, height;

    HRESULT hr = CoInitialize(NULL);
    if (FAILED(hr))
        return;

    // Create the filter graph manager and query for interfaces.
    hr = CoCreateInstance(
        CLSID_FilterGraph,
        NULL,
        CLSCTX_INPROC_SERVER,
        IID_IGraphBuilder,
        (void **)&pGraph);
    if (SUCCEEDED(hr)) hr = pGraph->QueryInterface(IID_IVideoWindow, (void **)&pVidWin);
    if (SUCCEEDED(hr)) hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pMediaControl);
    if (SUCCEEDED(hr)) hr = pGraph->QueryInterface(IID_IMediaEvent, (void **)&pEvent);
    if (SUCCEEDED(hr)) hr = pGraph->QueryInterface(IID_IBasicVideo, (void**)&pBasic );

    if (SUCCEEDED(hr))
    {
        // Build the graph. IMPORTANT: Change string to a file on your system.
        pGraph->RenderFile(L"e:\\alpha\\running.avi", NULL);

        pBasic->GetVideoSize( &width, &height );
        printf( "video frames are %d x %d\n", width, height );

        // Host the video as a child of the caller's window.
        pVidWin->put_Owner((OAHWND)hwnd);
        pVidWin->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS);
        GetClientRect( hwnd, &grc );
        pVidWin->SetWindowPosition(10, 10, width, height);
        printf( "window is %d x %d\n", grc.right, grc.bottom );

        // Run the graph and wait until playback finishes.
        pMediaControl->Run();
        long evCode;
        pEvent->WaitForCompletion(INFINITE, &evCode);

        // Detach the video window from hwnd before the graph goes away.
        pVidWin->put_Visible(OAFALSE);
        pVidWin->put_Owner(NULL);
    }

    // Clean up; guard each pointer because an earlier failure leaves
    // the later interfaces NULL.
    if (pBasic) pBasic->Release();
    if (pVidWin) pVidWin->Release();
    if (pMediaControl) pMediaControl->Release();
    if (pEvent) pEvent->Release();
    if (pGraph) pGraph->Release();
    CoUninitialize();
}
示例3: playNormalVideo
void playNormalVideo()
{
IGraphBuilder *pGraphBuilder;
IMediaControl *pMediaControl;
IMediaEvent *pMediaEvent;
long eventCode;
CoInitialize(NULL);
CoCreateInstance(CLSID_FilterGraph,
NULL,
CLSCTX_INPROC,
IID_IGraphBuilder,
(LPVOID *)&pGraphBuilder);
pGraphBuilder->QueryInterface(IID_IMediaControl,
(LPVOID *)&pMediaControl);
pGraphBuilder->QueryInterface(IID_IMediaEvent,
(LPVOID *)&pMediaEvent);
pMediaControl->RenderFile(L"Rscreen_shortFinger.avi");
pMediaControl->Run();
FILTER_STATE fs;
HRESULT hr = pMediaControl->GetState(100, (OAFilterState*)&fs);
// The first argument is timeout value.
// If you change the "-1" part into "2",
// WaitForCompletion will timeout after 2 seconds.
pMediaEvent->WaitForCompletion(-1, &eventCode);
switch (eventCode) {
case 0:
printf("timeout\n");
break;
case EC_COMPLETE:
printf("complete\n");
break;
case EC_ERRORABORT:
printf("errorabort\n");
break;
case EC_USERABORT:
printf("userabort\n");
break;
}
hr = pMediaControl->GetState(100, (OAFilterState*)&fs);
pMediaControl->Run();
hr = pMediaControl->GetState(100, (OAFilterState*)&fs);
pMediaEvent->WaitForCompletion(-1, &eventCode);
pMediaControl->Release();
pGraphBuilder->Release();
CoUninitialize();
}
示例4: CoInitialize
// Constructor: initializes COM, creates the filter graph manager and
// queries the media-control, video-window and media-event interfaces,
// storing them in the corresponding members. On any failure it calls
// Cleanup() and leaves the object in a safe, all-NULL state.
CHogVideo::CHogVideo()
{
    m_bCOMInitialized = false;
    m_pGraph = NULL;
    m_pMediaControl = NULL;
    m_pEvent = NULL;
    m_pVideoWindow = NULL;

    // FIX: HRESULT is an integer status code; the original initialized it
    // with the pointer constant NULL.
    HRESULT hr = S_OK;

    // Initialise COM.
    hr = CoInitialize(NULL);
    if (FAILED(hr))
    {
        Cleanup();
        return;
    }
    else
        m_bCOMInitialized = true;

    // Create the filter graph manager and query for interfaces.
    hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
        IID_IGraphBuilder, (void **)&m_pGraph);
    if (FAILED(hr))
    {
        Cleanup();
        return;
    }

    // m_pGraph is stored in a member whose declared type evidently needs
    // this cast before the IGraphBuilder methods can be called.
    IGraphBuilder *pGraph = (IGraphBuilder*)m_pGraph;

    hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&m_pMediaControl);
    if (FAILED(hr))
    {
        Cleanup();
        return;
    }
    hr = pGraph->QueryInterface(IID_IVideoWindow, (void**)&m_pVideoWindow);
    if (FAILED(hr))
    {
        Cleanup();
        return;
    }
    hr = pGraph->QueryInterface(IID_IMediaEvent, (void **)&m_pEvent);
    if (FAILED(hr))
    {
        Cleanup();
        return;
    }
}
示例5: MAX
/*
* Class: sage_DShowMediaPlayer
* Method: setGraphVolume0
* Signature: (JF)F
*/
// JNI native: sets the graph's audio volume. `vol` is clamped to [0,1],
// mapped onto DirectShow's logarithmic 1/100-dB scale (-10000 = silence,
// 0 = full), and the clamped value is returned to Java.
JNIEXPORT jfloat JNICALL Java_sage_DShowMediaPlayer_setGraphVolume0
  (JNIEnv *env, jobject jo, jlong dataPtr, jfloat vol)
{
    if (!dataPtr) return 0;
    CPlayerData* playData = (CPlayerData*) dataPtr;
    IGraphBuilder* graph = playData->GetGraph();

    // Clamp the requested volume into [0, 1] before doing anything else.
    vol = MAX(0, MIN(1.0f, vol));
    if (!graph)
        return vol;

    IBasicAudio* audio = NULL;
    if (SUCCEEDED(graph->QueryInterface(IID_IBasicAudio, (void**)&audio)))
    {
        // Map [0,1] to [-10000, 0] with a log curve so perceived loudness
        // tracks the slider roughly linearly.
        long dshowVol = -10000;
        if (vol != 0)
            dshowVol = (long)(10000*log((vol*2999.0) + 1)/log(3000.0)) - 10000;
        audio->put_Volume(dshowVol);
        SAFE_RELEASE(audio);
    }
    return vol;
}
示例6: PlayMovie
// Renders the file at `path` into the member window and blocks until
// playback completes.
// FIX: the original checked no HRESULTs, so a failed CoCreateInstance,
// QueryInterface or RenderFile led to NULL-pointer dereferences; it also
// hand-rolled the wide-string buffer with new[]/delete[].
void Video::PlayMovie(string path)
{
    IGraphBuilder *pGraph = NULL;
    IMediaControl *pControl = NULL;
    IMediaEvent *pEvent = NULL;
    IVideoWindow *pVideo = NULL;

    // Initialize the COM library.
    HRESULT hr = CoInitialize(NULL);
    if (FAILED(hr))
        return;

    // Create the filter graph manager.
    hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&pGraph);
    if (FAILED(hr))
    {
        CoUninitialize();
        return;
    }

    // Convert the ANSI path to a wide string for RenderFile.
    int slength = (int)path.length() + 1;
    int len = MultiByteToWideChar(CP_ACP, 0, path.c_str(), slength, 0, 0);
    std::wstring r(len, L'\0');
    MultiByteToWideChar(CP_ACP, 0, path.c_str(), slength, &r[0], len);

    // Build the graph and obtain the control interfaces.
    hr = pGraph->RenderFile(r.c_str(), NULL);
    if (SUCCEEDED(hr)) hr = pGraph->QueryInterface(IID_IVideoWindow, (void **) &pVideo);
    if (SUCCEEDED(hr)) hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pControl);
    if (SUCCEEDED(hr)) hr = pGraph->QueryInterface(IID_IMediaEvent, (void **)&pEvent);

    if (SUCCEEDED(hr))
    {
        // Host the video inside our window at the top-left corner.
        pVideo->put_Owner((OAHWND)window);
        pVideo->put_WindowStyle( WS_CHILD );
        pVideo->put_Left(0);
        pVideo->put_Top(0);

        // Run and wait until playback finishes.
        pControl->Run();
        long evCode;
        pEvent->WaitForCompletion(-1, &evCode);
    }

    // Release controls; guard each pointer since earlier failures may
    // have left some of them NULL.
    if (pControl) pControl->Release();
    if (pEvent) pEvent->Release();
    if (pVideo) pVideo->Release();
    pGraph->Release();
    CoUninitialize();
}
示例7: InitCaptureGraphBuilder
// Creates a DirectShow capture graph: a Capture Graph Builder plus the
// Filter Graph Manager it drives, handing both back through the two
// out-parameters. Also fills in the class members pMediaControl and
// pMediaEvent from the newly created graph.
//
// Returns S_OK on success (caller must Release() *ppGraph and *ppBuild),
// E_POINTER for NULL out-parameters, or the failing HRESULT otherwise.
//
// NOTE(review): if either QueryInterface below fails AFTER the
// out-parameters were assigned, a failure HRESULT is returned while the
// caller already holds the graph/builder pointers — confirm callers
// Release() them on failure paths too.
HRESULT VideoTexture::InitCaptureGraphBuilder(
IGraphBuilder **ppGraph, // Receives the pointer.
ICaptureGraphBuilder2 **ppBuild // Receives the pointer.
)
{
// Validate out-parameters before touching them.
if (!ppGraph || !ppBuild)
{
return E_POINTER;
}
IGraphBuilder *pGraph = NULL;
ICaptureGraphBuilder2 *pBuild = NULL;
// Create the Capture Graph Builder.
HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL,
CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void**)&pBuild );
if (SUCCEEDED(hr))
{
// Create the Filter Graph Manager.
hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,
IID_IGraphBuilder, (void**)&pGraph);
if (SUCCEEDED(hr))
{
// Initialize the Capture Graph Builder.
pBuild->SetFiltergraph(pGraph);
// Return both interface pointers to the caller.
*ppBuild = pBuild;
*ppGraph = pGraph; // The caller must release both interfaces.
// media control and so on
hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pMediaControl);
if (FAILED(hr)) return hr;
hr = pGraph->QueryInterface (IID_IMediaEvent, (void **)&pMediaEvent);
if (FAILED(hr)) return hr;
return S_OK;
}
else
{
// Filter graph creation failed: drop the builder we already made.
pBuild->Release();
}
}
return hr; // Failed
}
示例8:
/*
* Class: sage_DShowMediaPlayer
* Method: setNotificationWindow0
* Signature: (JJ)V
*/
// JNI native: routes the graph's events to `notifyHwnd` as WM_DVD_EVENT
// window messages via IMediaEventEx::SetNotifyWindow.
JNIEXPORT void JNICALL Java_sage_DShowMediaPlayer_setNotificationWindow0
  (JNIEnv *env, jobject jo, jlong dataPtr, jlong notifyHwnd)
{
    if (!dataPtr) return;
    CPlayerData* playerData = (CPlayerData*) dataPtr;
    IGraphBuilder* pGraph = playerData->GetGraph();
    if (!pGraph) return;

    IMediaEventEx *pME = NULL;
    // FIX: the original ignored this HRESULT and then dereferenced pME
    // unconditionally — a crash whenever QueryInterface failed.
    HRESULT hr = pGraph->QueryInterface(IID_IMediaEventEx, (void**)&pME);
    if (SUCCEEDED(hr))
    {
        hr = pME->SetNotifyWindow((OAHWND)notifyHwnd, WM_DVD_EVENT, 0);
        HTESTPRINT(hr);
        SAFE_RELEASE(pME);
    }
}
示例9: setVideoHWND
/*
* Class: sage_DShowMediaPlayer
* Method: setVideoHWND0
* Signature: (JJ)V
*/
// JNI native: parents the graph's video window inside `vhwnd` and forces
// stretched aspect-ratio mode on any overlay mixer input pin, since the
// player performs its own aspect-ratio control.
JNIEXPORT void JNICALL Java_sage_DShowMediaPlayer_setVideoHWND0
  (JNIEnv *env, jobject jo, jlong dataPtr, jlong vhwnd)
{
    // FIX: guard NULL dataPtr/graph, matching the other
    // sage_DShowMediaPlayer natives (setGraphVolume0, setNotificationWindow0);
    // the original dereferenced both unconditionally.
    if (!dataPtr) return;
    CPlayerData* playData = (CPlayerData*) dataPtr;
    IGraphBuilder* pGraph = playData->GetGraph();
    if (!pGraph) return;

    IVideoWindow* pVW = NULL;
    HRESULT hr = pGraph->QueryInterface(IID_IVideoWindow, (void**)&pVW);
    if (SUCCEEDED(hr))
    {
        slog((env, "DShowPlayer setVideoHWND(%d)\r\n", (int) vhwnd));
        // Embed the video window in the supplied HWND.
        pVW->put_AutoShow(OAFALSE);
        pVW->put_Owner((OAHWND)vhwnd);
        pVW->put_MessageDrain((OAHWND)vhwnd);
        pVW->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS | WS_CLIPCHILDREN);
        pVW->put_Visible(OATRUE);

        // We do all of our own aspect ratio control, so don't let DShow do any for us
        // by setting the aspect ratio mode on the video rendering filter's pin
        IEnumFilters *pEnum = NULL;
        hr = pGraph->EnumFilters(&pEnum);
        if (SUCCEEDED(hr))
        {
            IBaseFilter *currFilt = NULL;
            while (pEnum->Next(1, &currFilt, NULL) == S_OK)
            {
                IPin *overlayPin = NULL;
                hr = currFilt->FindPin(L"Input0", &overlayPin);
                if (SUCCEEDED(hr))
                {
                    // Right pin name, let's see if it's overlay
                    IMixerPinConfig *pOverlayMix = NULL;
                    hr = overlayPin->QueryInterface(IID_IMixerPinConfig, (void**)&pOverlayMix);
                    if (SUCCEEDED(hr))
                    {
                        pOverlayMix->SetAspectRatioMode(AM_ARMODE_STRETCHED);
                        SAFE_RELEASE(pOverlayMix);
                    }
                    SAFE_RELEASE(overlayPin);
                }
                SAFE_RELEASE(currFilt);
            }
            SAFE_RELEASE(pEnum);
            hr = S_OK;
        }
        SAFE_RELEASE(pVW);
    }
    HTESTPRINT(hr);
}
示例10: texture
// Constructor: builds a DirectShow graph that renders `path` through a
// custom textureGrabber filter so frames land in an OpenGL texture.
// Sets `rendered` to true only when the graph built and the grabber saw
// a nonzero frame width.
video::video(std::wstring path): texture(1,1,false,GL_LINEAR,GL_REPEAT), rendered(false), isPlaying(false) {
printf("Opening video \"%ls\"\n", path.c_str());
IGraphBuilder * graph;
IBaseFilter * base;
__int64 clipLength;
CoInitialize(0);
// NOTE(review): no HRESULT from CoCreateInstance/QueryInterface is
// checked here — a failure would leave graph/mediaControl NULL and crash
// below; confirm this sample-style omission is acceptable.
CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC, IID_IGraphBuilder, (void**)&graph);
HRESULT hr = S_OK;
graph->QueryInterface(IID_IMediaControl, (void**)&mediaControl);
graph->QueryInterface(IID_IMediaSeeking, (void**)&mediaSeeking);
// Insert our grabber filter before rendering so the graph builder wires
// decoded frames into it.
grabber = new textureGrabber(0, &hr);
grabber->AddRef();
grabber->QueryInterface(IID_IBaseFilter, (void**)&base);
graph->AddFilter(base, L"peisikVideoSystem OpenGL texture renderer");
hr = graph->RenderFile(path.c_str(), 0);
grabber->setTexture(&texture);
// Releasing `graph` here is safe: mediaControl/mediaSeeking each hold
// their own COM reference, which keeps the underlying object alive.
graph->Release();
base->Release();
if(SUCCEEDED(hr) && grabber->width)
{
printf("Video succesfully rendered\n");
// Duration is reported in 100-ns media-time units; convert to seconds.
mediaSeeking->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME);
mediaSeeking->GetDuration(&clipLength);
length = (long double)(clipLength)/(long double)10000000.0;
// Rewind to the start so the first Play begins at position 0.
__int64 position = 0;
mediaSeeking->SetPositions(&position, AM_SEEKING_AbsolutePositioning, &position, AM_SEEKING_NoPositioning);
rendered = true;
}
else
printf("Couldn't find a working video graph\n");
}
示例11: eHandler
// Constructor: opens the first video-capture device (webcam) found on the
// system, auto-builds a full render graph for it, then hooks the final
// renderer's IMemInputPin::Receive so frame data can be intercepted by
// our own `Receive` callback via DsHook.
// NOTE(review): this is deliberately check-free sample-style code — any
// missing webcam or failed COM call will crash; also the intermediate
// enumerators/filters obtained here are not Released. Confirm that is
// acceptable for this usage.
Camera::Camera(bool Show,bool Start) : eHandler(this),_realData(false),_UpdateWindow(Show),_LastData(0),_CurData(0) {
DWORD no;
IGraphBuilder *graph = 0;
ctrl = 0;
ICreateDevEnum *devs = 0;
IEnumMoniker *cams = 0;
IMoniker *mon = 0;
IBaseFilter *cam = 0;
IEnumPins *pins = 0;
IPin *pin = 0;
IEnumFilters *fil = 0;
IBaseFilter *rnd = 0;
IMemInputPin *mem = 0;
curCamera = this;
_isOn = Start;
// Filter graph manager + its media control (stored in member `ctrl`).
CoCreateInstance( CLSID_FilterGraph, 0, CLSCTX_INPROC,IID_IGraphBuilder, (void **)&graph );
graph->QueryInterface( IID_IMediaControl, (void **)&ctrl );
// Enumerate video input devices and take the first one.
CoCreateInstance (CLSID_SystemDeviceEnum, 0, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &devs);
devs->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &cams, 0);
cams->Next (1,&mon,0); // get first found capture device (webcam)
mon->BindToObject(0,0,IID_IBaseFilter, (void**)&cam);
graph->AddFilter(cam, L"Capture Source"); // add web cam to graph as source
cam->EnumPins(&pins); // we need output pin to autogenerate rest of the graph
pins->Next(1,&pin, 0); // via graph->Render
graph->Render(pin); // graph builder now builds whole filter chain including MJPG decompression on some webcams
graph->EnumFilters(&fil); // from all newly added filters
fil->Next(1,&rnd,0); // we find last one (renderer)
rnd->EnumPins(&pins); // because data we are intersted in are pumped to renderers input pin
pins->Next(1,&pin, 0); // via Receive member of IMemInputPin interface
pin->QueryInterface(IID_IMemInputPin,(void**)&mem);
// Patch vtable slot 6 (Receive) to our static callback to grab frames.
DsHook(mem,6,Receive); // so we redirect it to our own proc to grab image data
if (Start) this->Start();
}
示例12: video_add
// Creates a DirectShow graph for `fname`, attaches its video window to
// the application's window, registers it in enigma::videoStructs and
// returns its index (or -1 on failure).
// FIX: the original leaked the heap-allocated VideoStruct on both error
// paths, never Released pVidWin, ignored the IVideoWindow QueryInterface
// result before dereferencing it, and left COM initialized on failure.
int video_add(string fname) {
    // Initialize the COM library.
    HRESULT hr = CoInitialize(NULL);
    if (FAILED(hr))
    {
        MessageBox(NULL, "Failed to initialize COM library.", "ERROR", MB_ICONERROR | MB_OK);
        return -1;
    }

    // Create the filter graph manager and query for interfaces.
    IGraphBuilder *pGraph = NULL;
    hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
        IID_IGraphBuilder, (void **)&pGraph);
    if (FAILED(hr))
    {
        MessageBox(NULL, "Failed to create the Filter Graph Manager.", "ERROR", MB_ICONERROR | MB_OK);
        CoUninitialize();
        return -1;
    }

    // Build the graph.
    hr = pGraph->RenderFile(std::wstring(fname.begin(), fname.end()).c_str(), NULL);

    // Attach the video window to the application's window.
    IVideoWindow *pVidWin = NULL;
    if (SUCCEEDED(pGraph->QueryInterface(IID_IVideoWindow, (void **)&pVidWin)))
    {
        pVidWin->put_Owner((OAHWND)enigma::hWnd);
        pVidWin->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS);
        pVidWin->Release();  // the graph keeps the window alive; we only configured it
    }

    // Allocate the bookkeeping struct only after setup succeeded, so the
    // failure paths above cannot leak it.
    enigma::VideoStruct* videoStruct = new enigma::VideoStruct();
    videoStruct->pGraph = pGraph;
    enigma::videoStructs.push_back(videoStruct);
    return enigma::videoStructs.size() - 1;
}
示例13: main
// A very simple program to capture a webcam & audio to a file using DirectShow
//
int main(int argc, char* argv[])
{
ICaptureGraphBuilder2 *pCaptureGraph = NULL; // Capture graph builder object
IGraphBuilder *pGraph = NULL; // Graph builder object
IMediaControl *pControl = NULL; // Media control object
IFileSinkFilter *pSink = NULL; // File sink object
IBaseFilter *pAudioInputFilter = NULL; // Audio Capture filter
IBaseFilter *pVideoInputFilter = NULL; // Video Capture filter
IBaseFilter *pASFWriter = NULL; // WM ASF File config interface
// Initialize the COM library.
HRESULT hr = CoInitialize(NULL);
if (FAILED(hr))
{
// We’ll send our error messages to the console.
printf("ERROR - Could not initialize COM library");
return hr;
}
// Create the filter graph manager and query for interfaces.
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
IID_ICaptureGraphBuilder2, (void **)&pCaptureGraph);
if (FAILED(hr)) // FAILED is a macro that tests the return value
{
printf("ERROR - Could not create the Filter Graph Manager.");
return hr;
}
// Use a method of the capture graph builder
// To create an output path for the stream
hr = pCaptureGraph->SetOutputFileName(&MEDIASUBTYPE_Asf,
L"C:\\MyWebcam.ASF", &pASFWriter, &pSink);
// Now configure the ASF Writer
// Present the property pages for this filter
hr = ShowFilterPropertyPages(pASFWriter);
// Now get the filter graph manager
// That's part of the capture graph builder
hr = pCaptureGraph->GetFiltergraph(&pGraph);
// Using QueryInterface on the graph builder,
// Get the Media Control object.
hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pControl);
if (FAILED(hr))
{
printf("ERROR - Could not create the Media Control object.");
pGraph->Release(); // Clean up after ourselves.
CoUninitialize(); // And uninitalize COM
return hr;
}
// Get an AudioCapture filter.
// But there are several to choose from
// So we need to enumerate them, and pick one.
// Then add the audio capture filter to the filter graph.
hr = GetAudioInputFilter(&pAudioInputFilter, L"Logitech");
if (SUCCEEDED(hr)) {
hr = pGraph->AddFilter(pAudioInputFilter, L"Webcam Audio Capture");
}
// Now create the video input filter from the webcam
hr = GetVideoInputFilter(&pVideoInputFilter, L"Logitech");
if (SUCCEEDED(hr)) {
hr = pGraph->AddFilter(pVideoInputFilter, L"Webcam Video Capture");
}
// Add a video renderer
//IBaseFilter *pVideoRenderer = NULL;
//hr = AddFilterByCLSID(pGraph, CLSID_VideoRenderer, L"Video Renderer", &pVideoRenderer);
// Use another method of the capture graph builder
// To provide a render path for video preview
IBaseFilter *pIntermediate = NULL;
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
pVideoInputFilter, NULL, NULL);
// Now add the video capture to the output file
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
pVideoInputFilter, NULL, pASFWriter);
// And do the same for the audio
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio,
pAudioInputFilter, NULL, pASFWriter);
if (SUCCEEDED(hr))
{
// Run the graph.
hr = pControl->Run();
if (SUCCEEDED(hr))
{
// Wait patiently for completion of the recording
wprintf(L"Started recording...press Enter to stop recording.\n");
// Wait for completion.
char ch;
ch = getchar(); // We wait for keyboard input
}
//.........这里部分代码省略.........
示例14: sizeof
//.........这里部分代码省略.........
for (int iFormat = 0; iFormat < iCount; iFormat++)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
if (SUCCEEDED(hr))
{
/* Examine the format, and possibly use it. */
if ((pmtConfig->majortype == MEDIATYPE_Video) &&
(pmtConfig->subtype == MEDIASUBTYPE_RGB24) &&
(pmtConfig->formattype == FORMAT_VideoInfo) &&
(pmtConfig->cbFormat >= sizeof (VIDEOINFOHEADER)) &&
(pmtConfig->pbFormat != NULL))
{
VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
// pVih contains the detailed format information.
LONG lWidth = pVih->bmiHeader.biWidth;
LONG lHeight = pVih->bmiHeader.biHeight;
if( lWidth == 1280 )
// if (iFormat == 26)
{ //2 = '1280x720YUV' YUV, 22 = '1280x800YUV', 26 = '1280x720RGB'
hr = pConfig->SetFormat(pmtConfig);
}
}
// Delete the media type when you are done.
DeleteMediaType(pmtConfig);
}
}
}
// Query the capture filter for the IAMCameraControl interface.
IAMCameraControl *pCameraControl = 0;
hr = pDeviceFilter->QueryInterface(IID_IAMCameraControl, (void**)&pCameraControl);
if (FAILED(hr))
{
// The device does not support IAMCameraControl
}
else
{
long Min, Max, Step, Default, Flags, Val;
// Get the range and default values
hr = pCameraControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags);
hr = pCameraControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags);
if (SUCCEEDED(hr))
{
hr = pCameraControl->Set(CameraControl_Exposure, -11, CameraControl_Flags_Manual ); // Min = -11, Max = 1, Step = 1
hr = pCameraControl->Set(CameraControl_Focus, 12, CameraControl_Flags_Manual );
}
}
// Query the capture filter for the IAMVideoProcAmp interface.
IAMVideoProcAmp *pProcAmp = 0;
hr = pDeviceFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
if (FAILED(hr))
{
// The device does not support IAMVideoProcAmp
}
else
{
long Min, Max, Step, Default, Flags, Val;
// Get the range and default values
hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
示例15: main
int main(int argc, char* argv[])
{
IGraphBuilder *pGraph = NULL;
ICaptureGraphBuilder2 *pBuilder = NULL;
IBaseFilter *pSrc = NULL;
IBaseFilter *ppf = NULL;
IFileSinkFilter *pSink = NULL;
IMediaControl *pMC = NULL;
HRESULT hr;
CoInitialize (NULL);
// Create the filter graph.
CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC,
IID_IGraphBuilder, (void **)&pGraph);
// Create the capture graph builder.
CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
IID_ICaptureGraphBuilder2, (void **)&pBuilder);
pBuilder->SetFiltergraph(pGraph);
pSrc=GetAudioDevice ();
// add the first audio filter in the list
pGraph->AddFilter(pSrc, L"Video Capture");
/* pBuilder->SetOutputFileName(
&MEDIASUBTYPE_Avi,
L"C:\\Example.avi",
&ppf,
&pSink);*/
// pBuilder->AllocCapFile (L"C:\\temp.avi", _MAX_PATH);
pBuilder->RenderStream(
&PIN_CATEGORY_CAPTURE, // Pin category
&MEDIATYPE_Audio, // Media type
pSrc, // Capture filter
NULL, // Compression filter (optional)
ppf // Multiplexer or renderer filter
);
REFERENCE_TIME rtStart = 20000000,
rtStop = 50000000;
/* pBuilder->ControlStream(
&PIN_CATEGORY_CAPTURE,
&MEDIATYPE_Audio,
pSrc, // Source filter
&rtStart, // Start time
&rtStop, // Stop time
0, // Start cookie
0 // Stop cookie
);*/
pGraph->QueryInterface (IID_IMediaControl, (void **) &pMC);
pMC->Run ();
MessageBox (NULL, "Stop Recording", NULL, NULL);
pMC->Stop ();
/* CProgress *pProg = new CProgress(TEXT(""), NULL, &hr);
IAMCopyCaptureFileProgress *pIProg = NULL;
hr = pProg->QueryInterface(IID_IAMCopyCaptureFileProgress,
(void **)&pIProg);
//pBuilder->CopyCaptureFile (L"C:\\temp.avi", L"C:\\final.avi", TRUE, pIProg);*/
CoUninitialize ();
return 0;
}