本文整理汇总了C++中IBaseFilter类的典型用法代码示例。如果您正苦于以下问题:C++ IBaseFilter类的具体用法?C++ IBaseFilter怎么用?C++ IBaseFilter使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了IBaseFilter类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: PTRACE
void CVisualPage::OnCamSetupButton()
{
CComboBox * box = (CComboBox*)(GetDlgItem(IDC_RECORDING_COMBO));
int i = box->GetCurSel();
int n = box->GetLBTextLen(i);
CString s;
box->GetLBText(i, s.GetBuffer(n));
PString setupDeviceName = s;
s.ReleaseBuffer();
if (setupDeviceName.IsEmpty()) return;
if (setupDeviceName.Find("fake") == 0) return;
if (setupDeviceName.Find("monitor") == 0) return;
if (setupDeviceName.Find("zmonitor") == 0) return;
PTRACE(4,"PVidDirectShow\tCurrent device: " << setupDeviceName);
HRESULT hr;
IBaseFilter * pFilter = NULL;
IMoniker *pMoniker =NULL;
ICreateDevEnum *pDevEnum =NULL;
IEnumMoniker *pClassEnum = NULL;
ULONG cFetched;
::CoInitialize(NULL);
// Create the system device enumerator
hr = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &pDevEnum);
if (FAILED(hr)) { ::CoUninitialize(); return; }
// Create an enumerator for the video capture devices
hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
if (FAILED(hr)) { ::CoUninitialize(); return; }
if (pClassEnum == NULL) { ::CoUninitialize(); return; }
PTRACE(4,"PVidDirectShow\tEntering device enumeration loop...");
while (1)
{ // Get the next device
hr = pClassEnum->Next(1, &pMoniker, &cFetched);
if (hr != S_OK) { PTRACE(4, "PVidDirectShow\tGetInputDeviceNames() No more video capture device"); break; }
// Get the property bag
IPropertyBag *pPropBag;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag));
if (FAILED(hr))
{ PTRACE(4,"PVidDerectShow\tBindToStorage failed, continue");
pMoniker->Release();
continue;
}
// Find the description or friendly name.
VARIANT DeviceName;
DeviceName.vt = VT_BSTR;
hr = pPropBag->Read(L"Description", &DeviceName, NULL);
if (FAILED(hr)) hr = pPropBag->Read(L"FriendlyName", &DeviceName, NULL);
if (SUCCEEDED(hr))
{ char *pDeviceName = BSTR_to_ANSI(DeviceName.bstrVal);
if (pDeviceName)
{ PTRACE(4, "PVidDirectShow\tGetInputDeviceNames() Found this capture device '"<< pDeviceName <<"'");
if(PString(pDeviceName) == setupDeviceName)
{
PTRACE(4, "PVidDirectShow\tCamera Setup: device found");
pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**) &pFilter);
ISpecifyPropertyPages *p_spec; CAUUID cauuid;
HRESULT hr = pFilter->QueryInterface( IID_ISpecifyPropertyPages, (void **)&p_spec );
if( !FAILED(hr) )
if( SUCCEEDED(p_spec->GetPages( &cauuid )) )
{ if( cauuid.cElems > 0 )
{ HWND hwnd_desktop = ::GetDesktopWindow();
OleCreatePropertyFrame( hwnd_desktop, 30, 30, NULL, 1, (LPUNKNOWN *)(&pFilter), cauuid.cElems, cauuid.pElems, 0, 0, NULL );
CoTaskMemFree( cauuid.pElems );
}
p_spec->Release();
}
}
free(pDeviceName);
}
}
pPropBag->Release();
pMoniker->Release();
}
::CoUninitialize();
}
示例2: Enumerate
// Enumerate all WDM video capture devices, print each device's friendly
// name, and for IEEE-1394 (DV) devices print the IAMExtDevice properties
// (port, 64-bit external device ID, device version).
// Non-1394 devices are skipped via the "next" label (not visible in this
// truncated listing). Returns an HRESULT on setup failure.
HRESULT Enumerate()
{
HRESULT hr;
IBaseFilter *pSrc = NULL;
CComPtr <IMoniker> pMoniker =NULL;
ULONG cFetched;
char str_tmp[MAX_PATH];
printf("Enumerating WDM device drivers...\n\n");
// Create the system device enumerator
CComPtr <ICreateDevEnum> pDevEnum =NULL;
hr = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
IID_ICreateDevEnum, (void ** ) &pDevEnum);
if (FAILED(hr))
{
printf("Couldn't create system enumerator!\n");
return(hr);
}
// Create an enumerator for the video capture devices
CComPtr <IEnumMoniker> pClassEnum = NULL;
hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
if (FAILED(hr))
{
printf("Couldn't create system enumerator!\n");
return(hr);
}
// If there are no enumerators for the requested type, then
// CreateClassEnumerator will succeed, but pClassEnum will be NULL.
if (pClassEnum == NULL)
{
printf("No video capture hardware detected!\n");
return(E_FAIL);
}
// Walk every video capture moniker in the category.
while(S_OK == (pClassEnum->Next (1, &pMoniker, &cFetched)))
{
CComPtr<IPropertyBag> pProp;
// NOTE(review): BindToStorage result is not checked before pProp->Read below.
pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pProp);
VARIANT varName;
VariantInit(&varName);
hr = pProp->Read(L"FriendlyName", &varName, 0);
if(SUCCEEDED(hr))
{
strcpy(str_tmp,_bstr_t(varName.bstrVal));
printf("--Device name: %s\n",str_tmp);
// Bind Moniker to a filter object
hr = pMoniker->BindToObject(0,0,IID_IBaseFilter, (void**)&pSrc);
if(FAILED(hr))
{
printf(" Error: could not bind to filter object.\n");
goto next;
}
// -----------------------------------------------------------------------
// Query the external-device (IEEE 1394 / DV camcorder) control interface.
IAMExtDevice *pExtDev = NULL;
hr = pSrc->QueryInterface(IID_IAMExtDevice, (void**)&pExtDev);
if(SUCCEEDED(hr))
{
printf(" IAMExtDevice properties:\n");
long l;
hr = pExtDev->get_DevicePort(&l);
if(SUCCEEDED(hr))
{
// NOTE(review): format string has no conversion specifier for 'l';
// the extra argument is ignored and the value is only used by the switch.
printf(" DevicePort: ",l);
switch(l)
{
case(DEV_PORT_1394): printf("IEEE 1394 Bus");
break;
// Skip non-1394 devices entirely.
// NOTE(review): "%i" with a long argument - should be "%li".
default : printf("(%i) non IEEE 1394\n",l);
pExtDev->Release();
goto next;
break;
};
printf("\n");
}
LPOLESTR ole_str = NULL;
hr = pExtDev->get_ExternalDeviceID(&ole_str);
if(SUCCEEDED(hr))
{
// The first 8 bytes of the returned buffer hold the 64-bit EUI-64 id.
unsigned __int64 msdv_id = *((unsigned __int64*) ole_str);
printf(" ExternalDeviceID: %s (hexadecimal)\n",_ui64toa(msdv_id,str_tmp,16));
printf(" >> Unique 64-bit identifier, as defined by IEEE 1394.\n");
CoTaskMemFree(ole_str);
}
hr = pExtDev->get_ExternalDeviceVersion(&ole_str);
if(SUCCEEDED(hr))
{
strcpy(str_tmp,_bstr_t(ole_str));
printf(" ExternalDeviceVersion: %s\n",str_tmp);
CoTaskMemFree(ole_str);
}
//......... rest of the function omitted in this listing .........
示例3: glEnable
// use cameraID 1 for first and so on
// Initialise an OpenGL texture and build a DirectShow capture graph that
// delivers RGB24 frames from the chosen camera into a Sample Grabber
// (terminated by a Null Renderer). cameraID is 1-based; returns S_FALSE
// for cameraID <= 0, otherwise an HRESULT from graph construction.
// (Listing truncated below.)
HRESULT VideoTexture::init(int cameraID)
{
if (cameraID <= 0) return S_FALSE;
glEnable(GL_TEXTURE_2D);
// Texture -> This will be put into the camera module
glGenTextures(1, textures); // Create The Texture
// Typical Texture Generation Using Data From The Bitmap
for (int i = 0; i < 1; i++)
{
//glActiveTexture(GL_TEXTURE0 + i);
glBindTexture(GL_TEXTURE_2D, textures[i]);
// Generate The Texture (640x480... make changeable!)
//glTexImage2D(GL_TEXTURE_2D, 0, 3, 640, 480, 0, GL_RGB, GL_UNSIGNED_BYTE, ...THe data111!!!);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); // Linear Filtering
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); // Linear Filtering
// Enable Texture Mapping
glTexImage2D(GL_TEXTURE_2D, 0, 3, TEXTURE_WIDTH, TEXTURE_HEIGHT, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
}
// Video stuff:
// Create captue graph builder:
HRESULT hr = InitCaptureGraphBuilder(&pGraph, &pBuild);
if (FAILED(hr)) return hr;
IEnumMoniker *enumerator;
hr = EnumerateDevices(CLSID_VideoInputDeviceCategory, &enumerator);
//DisplayDeviceInformation(enumerator);
// Take the first camera:
IMoniker *pMoniker = NULL;
// NOTE(review): Next() results are unchecked; an out-of-range cameraID
// leaves pMoniker NULL and the BindToObject below would dereference it.
for (int i = 0; i < cameraID; i++)
{
enumerator->Next(1, &pMoniker, NULL);
}
IBaseFilter *pCap = NULL;
hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pCap);
if (SUCCEEDED(hr))
{
hr = pGraph->AddFilter(pCap, L"Capture Filter");
if (FAILED(hr)) return hr;
}
else return hr;
// Create the Sample Grabber which we will use
// To take each frame for texture generation
hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
IID_ISampleGrabber, (void **)&pGrabber);
if (FAILED(hr)) return hr;
// NOTE(review): this QueryInterface result is not checked before use.
hr = pGrabber->QueryInterface(IID_IBaseFilter, (void **)&pGrabberBase);
// We have to set the 24-bit RGB desire here
// So that the proper conversion filters
// Are added automatically.
AM_MEDIA_TYPE desiredType;
memset(&desiredType, 0, sizeof(desiredType));
desiredType.majortype = MEDIATYPE_Video;
desiredType.subtype = MEDIASUBTYPE_RGB24;
desiredType.formattype = FORMAT_VideoInfo;
pGrabber->SetMediaType(&desiredType);
pGrabber->SetBufferSamples(TRUE);
// add to Graph
pGraph->AddFilter(pGrabberBase, L"Grabber");
/* Null render filter */
hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&pNullRender);
if(FAILED(hr)) return hr;
pGraph->AddFilter(pNullRender, L"Render");
// Connect the graph
hr = ConnectFilters(pGraph, pCap, pGrabberBase);
if(FAILED(hr)) return hr;
hr = ConnectFilters(pGraph, pGrabberBase, pNullRender);
// Set output format of capture:
IAMStreamConfig *pConfig = NULL;
hr = pBuild->FindInterface(
&PIN_CATEGORY_CAPTURE, // Capture pin.
0, // Any media type.
pCap, // Pointer to the capture filter.
IID_IAMStreamConfig, (void**)&pConfig);
if (FAILED(hr)) return hr;
AM_MEDIA_TYPE *pmtConfig;
hr = pConfig->GetFormat(&pmtConfig);
if (FAILED(hr)) return hr;
// Try and find a good video format
int iCount = 0, iSize = 0;
hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
// Check the size to make sure we pass in the correct structure.
if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
{
// Use the video capabilities structure.
for (int iFormat = 0; iFormat < iCount; iFormat++)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
if (SUCCEEDED(hr))
{
VIDEOINFOHEADER *hdr = (VIDEOINFOHEADER *)pmtConfig->pbFormat;
//......... rest of the function omitted in this listing .........
示例4: main
// A very simple program to capture a webcam & audio to a file using DirectShow
//
// Builds a capture graph writing webcam video and audio (first "Logitech"
// devices found) to C:\MyWebcam.ASF through the WM ASF writer, shows the
// writer's property pages, renders a preview stream, then runs the graph
// until the user presses Enter. (Listing truncated below.)
int main(int argc, char* argv[])
{
ICaptureGraphBuilder2 *pCaptureGraph = NULL; // Capture graph builder object
IGraphBuilder *pGraph = NULL; // Graph builder object
IMediaControl *pControl = NULL; // Media control object
IFileSinkFilter *pSink = NULL; // File sink object
IBaseFilter *pAudioInputFilter = NULL; // Audio Capture filter
IBaseFilter *pVideoInputFilter = NULL; // Video Capture filter
IBaseFilter *pASFWriter = NULL; // WM ASF File config interface
// Initialize the COM library.
HRESULT hr = CoInitialize(NULL);
if (FAILED(hr))
{
// We’ll send our error messages to the console.
printf("ERROR - Could not initialize COM library");
return hr;
}
// Create the filter graph manager and query for interfaces.
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
IID_ICaptureGraphBuilder2, (void **)&pCaptureGraph);
if (FAILED(hr)) // FAILED is a macro that tests the return value
{
printf("ERROR - Could not create the Filter Graph Manager.");
return hr;
}
// Use a method of the capture graph builder
// To create an output path for the stream
// (creates the ASF writer filter and its file sink in one call)
hr = pCaptureGraph->SetOutputFileName(&MEDIASUBTYPE_Asf,
L"C:\\MyWebcam.ASF", &pASFWriter, &pSink);
// Now configure the ASF Writer
// Present the property pages for this filter
hr = ShowFilterPropertyPages(pASFWriter);
// Now get the filter graph manager
// That's part of the capture graph builder
hr = pCaptureGraph->GetFiltergraph(&pGraph);
// Using QueryInterface on the graph builder,
// Get the Media Control object.
hr = pGraph->QueryInterface(IID_IMediaControl, (void **)&pControl);
if (FAILED(hr))
{
printf("ERROR - Could not create the Media Control object.");
pGraph->Release(); // Clean up after ourselves.
CoUninitialize(); // And uninitalize COM
return hr;
}
// Get an AudioCapture filter.
// But there are several to choose from
// So we need to enumerate them, and pick one.
// Then add the audio capture filter to the filter graph.
hr = GetAudioInputFilter(&pAudioInputFilter, L"Logitech");
if (SUCCEEDED(hr)) {
hr = pGraph->AddFilter(pAudioInputFilter, L"Webcam Audio Capture");
}
// Now create the video input filter from the webcam
hr = GetVideoInputFilter(&pVideoInputFilter, L"Logitech");
if (SUCCEEDED(hr)) {
hr = pGraph->AddFilter(pVideoInputFilter, L"Webcam Video Capture");
}
// Add a video renderer
//IBaseFilter *pVideoRenderer = NULL;
//hr = AddFilterByCLSID(pGraph, CLSID_VideoRenderer, L"Video Renderer", &pVideoRenderer);
// Use another method of the capture graph builder
// To provide a render path for video preview
// NOTE(review): pIntermediate is declared but never used in the visible code.
IBaseFilter *pIntermediate = NULL;
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
pVideoInputFilter, NULL, NULL);
// Now add the video capture to the output file
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
pVideoInputFilter, NULL, pASFWriter);
// And do the same for the audio
hr = pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio,
pAudioInputFilter, NULL, pASFWriter);
if (SUCCEEDED(hr))
{
// Run the graph.
hr = pControl->Run();
if (SUCCEEDED(hr))
{
// Wait patiently for completion of the recording
wprintf(L"Started recording...press Enter to stop recording.\n");
// Wait for completion.
char ch;
ch = getchar(); // We wait for keyboard input
}
//......... rest of the function omitted in this listing .........
示例5: LoadParam
// Load the playlist file at m_pszPath: build a filter graph for it, locate
// the filter exposing IAMPlayList, and insert every item's source URL into
// the track list. Returns true immediately if the list is already loaded
// (m_pList set) or the playlist is transient; otherwise returns true on
// success. (Listing truncated below.)
bool CPlaylist::Load()
{
IGraphBuilder * pGraph = NULL;
IAMPlayList * pPlaylist = NULL;
HRESULT hr;
bool bResult;
if (NULL != m_pList || true == m_bTransient)
{
return true;
}
//
// Make sure that this is one of our playlist read the last played element
//
// NOTE(review): bResult is assigned here but never examined in the visible code.
bResult = LoadParam();
hr = CoCreateInstance(CLSID_FilterGraph,
NULL,
CLSCTX_INPROC_SERVER,
IID_IGraphBuilder,
(void**) &pGraph);
if (SUCCEEDED(hr))
{
// Render the playlist file so its parser filter is added to the graph.
hr = pGraph->RenderFile(m_pszPath, NULL);
}
if (SUCCEEDED(hr))
{
// Scan the graph's filters for one that implements IAMPlayList.
IEnumFilters * pEnum = NULL;
IBaseFilter * pFilter = NULL;
hr = pGraph->EnumFilters(&pEnum);
if (pEnum)
{
while (!pPlaylist && pEnum->Next(1, &pFilter, NULL) == S_OK)
{
hr = pFilter->QueryInterface(IID_IAMPlayList, (void**)&pPlaylist);
pFilter->Release();
}
if (!pPlaylist)
{
hr = E_NOINTERFACE;
}
pEnum->Release();
}
}
if (SUCCEEDED(hr))
{
// Copy each playlist item's source URL into our own track list.
DWORD dwCount;
IAMPlayListItem * pItem = NULL;
if(pPlaylist)
hr = pPlaylist->GetItemCount(&dwCount);
else
hr = E_FAIL;
if (SUCCEEDED(hr))
{
for (DWORD i = 0; i < dwCount; i++)
{
hr = pPlaylist->GetItem(i, &pItem);
if (SUCCEEDED(hr))
{
BSTR pszSource = NULL;
hr = pItem->GetSourceURL(0, &pszSource);
if (SUCCEEDED(hr))
{
InsertTrack(i, pszSource);
}
pItem->Release();
}
}
}
}
if (pPlaylist)
{
pPlaylist->Release();
}
if (pGraph)
{
pGraph->Release();
}
if (SUCCEEDED(hr))
{
return true;
}
else
//......... rest of the function omitted in this listing .........
示例6: gst_dshowvideodec_create_graph_and_filters
/* Build the DirectShow decoding graph for this element: create the filter
 * graph manager, a FakeSrc source, the decoder filter matching the class's
 * media types, and a VideoFakeSink, then add all three to the graph.
 * Returns TRUE on success, FALSE after emitting a GStreamer error.
 * (The error-cleanup label is truncated in this listing.) */
static gboolean
gst_dshowvideodec_create_graph_and_filters (GstDshowVideoDec * vdec)
{
HRESULT hres = S_FALSE;
GstDshowVideoDecClass *klass =
(GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
IBaseFilter *srcfilter = NULL;
IBaseFilter *sinkfilter = NULL;
gboolean ret = FALSE;
/* create the filter graph manager object */
hres = CoCreateInstance (CLSID_FilterGraph, NULL, CLSCTX_INPROC,
IID_IFilterGraph, (LPVOID *) & vdec->filtergraph);
if (hres != S_OK || !vdec->filtergraph) {
GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't create an instance "
"of the directshow graph manager (error=%d)", hres), (NULL));
goto error;
}
/* NOTE(review): queries IID_IMediaFilter but the error text below says
 * "IMediacontrol" - the message is misleading. */
hres = vdec->filtergraph->QueryInterface(IID_IMediaFilter,
(void **) &vdec->mediafilter);
if (hres != S_OK || !vdec->mediafilter) {
GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
("Can't get IMediacontrol interface "
"from the graph manager (error=%d)", hres), (NULL));
goto error;
}
/* create fake src filter */
vdec->fakesrc = new FakeSrc();
/* Created with a refcount of zero, so increment that */
vdec->fakesrc->AddRef();
hres = vdec->fakesrc->QueryInterface(IID_IBaseFilter,
(void **) &srcfilter);
if (FAILED (hres)) {
GST_WARNING_OBJECT (vdec, "Failed to QI fakesrc to IBaseFilter");
goto error;
}
/* search a decoder filter and create it */
vdec->decfilter = gst_dshow_find_filter (
klass->entry->input_majortype,
klass->entry->input_subtype,
klass->entry->output_majortype,
klass->entry->output_subtype,
klass->entry->preferred_filters);
if (vdec->decfilter == NULL) {
GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't create an instance "
"of the decoder filter"), (NULL));
goto error;
}
/* create fake sink filter */
vdec->fakesink = new VideoFakeSink(vdec);
/* Created with a refcount of zero, so increment that */
vdec->fakesink->AddRef();
hres = vdec->fakesink->QueryInterface(IID_IBaseFilter,
(void **) &sinkfilter);
if (FAILED (hres)) {
GST_WARNING_OBJECT (vdec, "Failed to QI fakesink to IBaseFilter");
goto error;
}
/* add filters to the graph */
hres = vdec->filtergraph->AddFilter (srcfilter, L"src");
if (hres != S_OK) {
GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add fakesrc filter "
"to the graph (error=%d)", hres), (NULL));
goto error;
}
hres = vdec->filtergraph->AddFilter(vdec->decfilter, L"decoder");
if (hres != S_OK) {
GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add decoder filter "
"to the graph (error=%d)", hres), (NULL));
goto error;
}
hres = vdec->filtergraph->AddFilter(sinkfilter, L"sink");
if (hres != S_OK) {
GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add fakesink filter "
"to the graph (error=%d)", hres), (NULL));
goto error;
}
vdec->setup = TRUE;
ret = TRUE;
done:
/* The graph holds its own references; drop the local QI references. */
if (srcfilter)
srcfilter->Release();
if (sinkfilter)
sinkfilter->Release();
return ret;
error:
if (vdec->fakesrc) {
/* ......... rest of the function omitted in this listing ......... */
示例7: InitializeWindowlessVMR
// Create the VMR-9 renderer, add it to the graph (global pGB), switch it to
// windowless mode, wire up the windowless-control (global pWC) and
// alpha-bitmap (global pBMP) interfaces, and return the AddRef'd filter via
// *ppVmr9 (caller releases). Returns the failing HRESULT on error.
// NOTE: pGB, pWC, pBMP, ghApp, Msg and the JIF macro are defined elsewhere
// in this file; JIF is assumed to bail out on FAILED(hr) - TODO confirm.
HRESULT InitializeWindowlessVMR(IBaseFilter **ppVmr9)
{
    IBaseFilter* pVmr = NULL;

    if (!ppVmr9)
        return E_POINTER;
    *ppVmr9 = NULL;

    // Create the VMR and add it to the filter graph.
    HRESULT hr = CoCreateInstance(CLSID_VideoMixingRenderer9, NULL,
                                  CLSCTX_INPROC, IID_IBaseFilter, (void**)&pVmr);
    if (SUCCEEDED(hr))
    {
        hr = pGB->AddFilter(pVmr, L"Video Mixing Renderer 9");
        if (SUCCEEDED(hr))
        {
            // Set the rendering mode and number of streams
            SmartPtr <IVMRFilterConfig9> pConfig;
            JIF(pVmr->QueryInterface(IID_IVMRFilterConfig9, (void**)&pConfig));
            JIF(pConfig->SetRenderingMode(VMR9Mode_Windowless));

            hr = pVmr->QueryInterface(IID_IVMRWindowlessControl9, (void**)&pWC);
            if( SUCCEEDED(hr))
            {
                hr = pWC->SetVideoClippingWindow(ghApp);
                hr = pWC->SetBorderColor(RGB(0,0,0));
            }
#ifndef BILINEAR_FILTERING
            // Request point filtering (instead of bilinear filtering)
            // to improve the text quality. In general, if you are
            // not scaling the app Image, you should use point filtering.
            // This is very important if you are doing source color keying.
            IVMRMixerControl9 *pMix;
            hr = pVmr->QueryInterface(IID_IVMRMixerControl9, (void**)&pMix);
            if( SUCCEEDED(hr))
            {
                DWORD dwPrefs=0;
                hr = pMix->GetMixingPrefs(&dwPrefs);
                if (SUCCEEDED(hr))
                {
                    dwPrefs |= MixerPref_PointFiltering;
                    dwPrefs &= ~(MixerPref_BiLinearFiltering);
                    hr = pMix->SetMixingPrefs(dwPrefs);
                }
                pMix->Release();
            }
#endif
            // Get alpha-blended bitmap interface
            hr = pVmr->QueryInterface(IID_IVMRMixerBitmap9, (void**)&pBMP);

            // Don't release the pVmr interface because we are copying it into
            // the caller's ppVmr9 pointer
            *ppVmr9 = pVmr;
        }
        else
        {
            Msg(TEXT("Failed to add VMR to graph! hr=0x%x\r\n"), hr);
            // BUGFIX: the original still stored pVmr into *ppVmr9 here even
            // though AddFilter failed, leaking the reference to a filter the
            // caller (seeing a failed hr) would never use. Release it and
            // leave *ppVmr9 NULL instead.
            pVmr->Release();
        }
    }
    else
        Msg(TEXT("Failed to create VMR! hr=0x%x\r\n"), hr);

    return hr;
}
示例8: EXECUTE_ASSERT
//
// OnReceiveMessage
//
// Derived class window message handler. Handles background erase, close,
// app activation, hit-testing, cursor hiding and display-mode changes;
// everything else is deferred to CBaseWindow::OnReceiveMessage.
//
LRESULT CVideoText::OnReceiveMessage(HWND hwnd, // Window handle
UINT uMsg, // Message ID
WPARAM wParam, // First parameter
LPARAM lParam) // Other parameter
{
RECT ClientRect;
// Blank out the window background with a solid black brush
if (uMsg == WM_ERASEBKGND) {
EXECUTE_ASSERT(GetClientRect(m_hwnd,&ClientRect));
HBRUSH hBrush = CreateSolidBrush(RGB(0,0,0));
EXECUTE_ASSERT(FillRect(m_hdc,&ClientRect,hBrush));
EXECUTE_ASSERT(DeleteObject(hBrush));
return (LRESULT) 0;
}
// Handle WM_CLOSE by aborting the playback
if (uMsg == WM_CLOSE) {
m_pRenderer->NotifyEvent(EC_USERABORT,0,0);
DoShowWindow(SW_HIDE);
return CBaseWindow::OnClose();
}
// We pass on WM_ACTIVATEAPP messages to the filtergraph so that the
// IVideoWindow plug in distributor can switch us out of fullscreen
// mode where appropriate. These messages may also be used by the
// resource manager to keep track of which renderer has the focus
if (uMsg == WM_ACTIVATEAPP) {
NOTE1("Notification of EC_ACTIVATE (%d)",(BOOL) wParam);
IBaseFilter *pFilter = NULL;
HRESULT hr = m_pRenderer->QueryInterface(IID_IBaseFilter,(void **) &pFilter);
m_pRenderer->NotifyEvent(EC_ACTIVATE,wParam,(LPARAM) pFilter);
// BUGFIX: the original released unconditionally; if the QueryInterface
// above failed, pFilter was NULL and Release() crashed.
if (SUCCEEDED(hr) && pFilter != NULL) {
pFilter->Release();
}
return (LRESULT) 0;
}
// Treat clicks on text as requests to move window
if (uMsg == WM_NCHITTEST) {
LRESULT Result = DefWindowProc(hwnd,uMsg,wParam,lParam);
if (Result == HTCLIENT) {
Result = HTCAPTION;
}
return Result;
}
// The base class that implements IVideoWindow looks after a flag
// that says whether or not the cursor should be hidden. If so we
// hide the cursor and return (LRESULT) 1. Otherwise we pass to
// the DefWindowProc to show the cursor as normal. This is used
// when our window is made fullscreen to imitate the Modex filter
if (uMsg == WM_SETCURSOR) {
if (IsCursorHidden() == TRUE) {
SetCursor(NULL);
return (LRESULT) 1;
}
}
// When we detect a display change we send an EC_DISPLAY_CHANGED
// message along with our input pin. The filtergraph will stop
// everyone and reconnect our input pin. When being reconnected
// we can then accept the media type that matches the new display
// mode since we may no longer be able to draw the current format
if (uMsg == WM_DISPLAYCHANGE) {
m_pRenderer->m_Display.RefreshDisplayType(NULL);
m_pRenderer->OnDisplayChange();
NOTE("Sent EC_DISPLAY_CHANGED event");
return (LRESULT) 0;
}
return CBaseWindow::OnReceiveMessage(hwnd,uMsg,wParam,lParam);
} // OnReceiveMessage
示例9: CoInitializeEx
// Open multimedia stream.
// Open the given media file as a DirectShow multimedia stream rendering to
// the supplied DirectDraw surface, with audio routed through the default
// renderer. After the graph is built, the WMAudio decoder's volume is set
// from the "MusicGain" registry value. Returns FALSE on any failure.
// (Listing truncated below.)
BOOL CDShow::Open(ZString& pFileName, IDirectDraw7 *pDD)
{
// Multimedia stream pointer
IAMMultiMediaStream *pAMStream;
IGraphBuilder *pGb; // 7/10 #110
IEnumFilters *pEfs;
IBasicAudio *pBa;
//7/29/09 we can now do stuff while the video plays
CoInitializeEx(NULL,COINIT_MULTITHREADED);
// Create Multimedia stream object
if (FAILED(CoCreateInstance(CLSID_AMMultiMediaStream, NULL, CLSCTX_INPROC_SERVER,
IID_IAMMultiMediaStream, (void **)&pAMStream)))
{
// Return FALSE to let caller know we failed.
return FALSE;
}
// Initialize Multimedia stream object
if (FAILED(pAMStream->Initialize(STREAMTYPE_READ, 0, NULL)))
{
// There are alot of possiblities to fail.....
return FALSE;
}
// Add primary video stream.
if (FAILED((pAMStream->AddMediaStream(pDD, &MSPID_PrimaryVideo, 0, NULL))))
{
return FALSE;
}
// Add default sound render to primary video stream,
// so sound will be played back automatically.
if (FAILED(pAMStream->AddMediaStream(NULL, &MSPID_PrimaryAudio, AMMSF_ADDDEFAULTRENDERER, NULL)))
{
// Return FALSE to let caller know we failed.
return FALSE;
}
// Convert filename to UNICODE.
// Notice the safe way to get the actual size of a string.
WCHAR wPath[MAX_PATH];
MultiByteToWideChar(CP_ACP, 0, pFileName, -1, wPath, sizeof(wPath)/sizeof(wPath[0]));
// Build the filter graph for our multimedia stream.
if (FAILED((pAMStream->OpenFile(wPath, 0))))
{
// Return FALSE to let caller know we failed.
return FALSE;
}
//7/10 #110
// Walk the graph's filters to find the WMAudio decoder and apply the
// registry-configured music volume to it.
// NOTE(review): this outer FilterInfo is shadowed by the one declared
// inside the loop and is never used.
FILTER_INFO FilterInfo;
pAMStream->GetFilterGraph(&pGb);
pGb->EnumFilters(&pEfs);
IBaseFilter *pFilter;
unsigned long cFetched;
while(pEfs->Next(1, &pFilter, &cFetched) == S_OK) {
FILTER_INFO FilterInfo;
pFilter->QueryFilterInfo(&FilterInfo);
char szName[MAX_FILTER_NAME];
long cch = WideCharToMultiByte(CP_ACP,0,FilterInfo.achName,MAX_FILTER_NAME,szName,MAX_FILTER_NAME,0,0);
// NOTE(review): pFilter (and FilterInfo.pGraph) are only released inside
// this cch > 0 branch - filters whose name conversion fails are leaked.
if (cch > 0) {
if (!strcmp("WMAudio Decoder DMO",szName)) {
// set the volume to music level
FilterInfo.pGraph->QueryInterface(IID_IBasicAudio,(void**)&pBa);
HKEY hKey;
DWORD dwResult = 0;
if (ERROR_SUCCESS == ::RegOpenKeyEx(HKEY_LOCAL_MACHINE, ALLEGIANCE_REGISTRY_KEY_ROOT,0, KEY_READ, &hKey)) {
DWORD dwSize = sizeof(dwResult);
DWORD dwType = REG_DWORD;
::RegQueryValueEx(hKey, "MusicGain", NULL, &dwType, (BYTE*)&dwResult, &dwSize);
::RegCloseKey(hKey);
if (dwType != REG_DWORD)
dwResult = 0;
}
// Convert the gain (in positive units) to IBasicAudio's scale of
// hundredths of a decibel attenuation; clamp below -50 dB to mute.
long vol = (dwResult * -1) * 100;
if (vol < -5000) {
vol = -10000;
}
pBa->put_Volume(vol);
pBa->Release();
}
if (FilterInfo.pGraph != NULL)
FilterInfo.pGraph->Release();
pFilter->Release();
}
}
pEfs->Release();
pGb->Release();
// Assign member to temperary stream pointer.
m_pMMStream = pAMStream;
// Add a reference to the file
//......... rest of the function omitted in this listing .........
示例10: FindRenderer
// Search the filter graph for the renderer handling the given major media
// type: the first filter with exactly one input pin and no output pins
// whose input connection's major type matches *mediatype.
//
//   pGB       - graph to search (required)
//   mediatype - major type to match, e.g. &MEDIATYPE_Video (required)
//   ppFilter  - receives the matched filter (still holding the reference
//               from the enumerator; caller must Release), or NULL
//
// Returns the HRESULT of the last graph operation; *ppFilter is non-NULL
// only when a renderer was found.
HRESULT FindRenderer(IGraphBuilder *pGB, const GUID *mediatype, IBaseFilter **ppFilter)
{
    HRESULT hr;
    IEnumFilters *pEnum = NULL;
    IBaseFilter *pFilter = NULL;
    IPin *pPin;
    ULONG ulFetched, ulInPins, ulOutPins;
    BOOL bFound=FALSE;

    // Verify graph builder interface
    if (!pGB)
        return E_NOINTERFACE;

    // Verify that a media type was passed
    if (!mediatype)
        return E_POINTER;

    // BUGFIX: the original NULL-checked ppFilter here but then dereferenced
    // it unconditionally when a match was found below.
    if (!ppFilter)
        return E_POINTER;

    // Clear the filter pointer in case there is no match
    *ppFilter = NULL;

    // Get filter enumerator
    hr = pGB->EnumFilters(&pEnum);
    if (FAILED(hr))
        return hr;
    pEnum->Reset();

    // Enumerate all filters in the graph
    while(!bFound && (pEnum->Next(1, &pFilter, &ulFetched) == S_OK))
    {
#ifdef DEBUG
        // Read filter name for debugging purposes
        FILTER_INFO FilterInfo;
        TCHAR szName[256];
        hr = pFilter->QueryFilterInfo(&FilterInfo);
        if (SUCCEEDED(hr))
        {
            // Show filter name in debugger
#ifdef UNICODE
            lstrcpy(szName, FilterInfo.achName);
#else
            WideCharToMultiByte(CP_ACP, 0, FilterInfo.achName, -1, szName, 256, 0, 0);
#endif
            FilterInfo.pGraph->Release();
        }
#endif
        // Find a filter with one input and no output pins
        hr = CountFilterPins(pFilter, &ulInPins, &ulOutPins);
        if (FAILED(hr))
        {
            pFilter->Release();   // BUGFIX: filter was leaked on this exit path
            break;
        }

        if ((ulInPins == 1) && (ulOutPins == 0))
        {
            // Get the first pin on the filter
            pPin = GetInPin(pFilter, 0);
            if (!pPin)            // BUGFIX: was dereferenced without a NULL check
            {
                pFilter->Release();
                break;
            }

            // Read this pin's major media type
            AM_MEDIA_TYPE type= {0};
            hr = pPin->ConnectionMediaType(&type);
            if (FAILED(hr))
            {
                pFilter->Release();   // BUGFIX: filter was leaked on this exit path
                break;
            }

            // Is this pin's media type the requested type?
            // If so, then this is the renderer for which we are searching.
            // Copy the interface pointer and return.
            if (type.majortype == *mediatype)
            {
                // Found our filter
                *ppFilter = pFilter;
                bFound = TRUE;
            }
            // This is not the renderer, so release the interface.
            else
                pFilter->Release();

            // Delete memory allocated by ConnectionMediaType()
            FreeMediaType(type);
        }
        else
        {
            // No match, so release the interface
            pFilter->Release();
        }
    }

    pEnum->Release();
    return hr;
}
示例11: init_dshow
//---------------------------------------------------------------------------------------
// Initialise all the DirectShow structures we need
//
// Builds a playback graph for FileName, optionally rendering through a
// windowless VMR into game_window, hooks graph events to the window, and
// starts playback, spinning until the graph reports State_Running.
// Returns 1 on success, 0 on failure. Uses file-scope globals pGB, pMC,
// pMS, pME, pWC and game_window. (Listing truncated below.)
//---------------------------------------------------------------------------------------
int init_dshow(wchar_t *FileName, int render_in_own_window)
{
HRESULT err;
FILTER_STATE state = State_Stopped;
// Create a new graph
err = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&pGB);
if (err != S_OK)
return 0;
// Get access to the video controls
err = pGB->QueryInterface(IID_IMediaControl, (void **)&pMC);
if (err != S_OK)
return 0;
err = pGB->QueryInterface(IID_IMediaSeeking, (void **)&pMS);
if (err != S_OK)
return 0;
err = pGB->QueryInterface(IID_IMediaEventEx, (void **)&pME);
if (err != S_OK)
return 0;
if (render_in_own_window)
{
IBaseFilter *pVMR;
IVMRFilterConfig *pFC;
long lWidth, lHeight;
RECT rcSrc, rcDest;
err = CoCreateInstance(CLSID_VideoMixingRenderer, NULL, CLSCTX_INPROC, IID_IBaseFilter, (void**)&pVMR);
if (err != S_OK)
return 0;
// Add the VMR to the filter graph.
err = pGB->AddFilter(pVMR, L"VMR");
if (err != S_OK)
return 0;
// Set the rendering mode.
err = pVMR->QueryInterface(IID_IVMRFilterConfig, (void**)&pFC);
if (err != S_OK)
return 0;
err = pFC->SetRenderingMode(VMRMode_Windowless);
if (err != S_OK)
return 0;
pFC->Release();
// Set the window.
err = pVMR->QueryInterface(IID_IVMRWindowlessControl, (void**)&pWC);
if (err != S_OK)
return 0;
err = pWC->SetVideoClippingWindow(game_window);
if (err != S_OK)
return 0;
pVMR->Release();
// Find the native video size.
err = pWC->GetNativeVideoSize(&lWidth, &lHeight, NULL, NULL);
// Set the source rectangle.
// NOTE(review): only the top-left quarter of the source video is shown.
SetRect(&rcSrc, 0, 0, lWidth/2, lHeight/2);
// Get the window client area.
GetClientRect(game_window, &rcDest);
// Set the destination rectangle.
SetRect(&rcDest, 0, 0, rcDest.right, rcDest.bottom);
// Set the video position.
err = pWC->SetVideoPosition(&rcSrc, &rcDest);
}
// Add the source file
err = pGB->RenderFile(FileName, NULL);
if (err != S_OK)
return 0;
// Have the graph signal event via window callbacks for performance
err = pME->SetNotifyWindow((OAHWND)game_window, WM_GRAPHNOTIFY0, 0);
if (err != S_OK)
return 0;
// Prefer frame-based seeking; fall back to reference time if unsupported.
err = pMS->SetTimeFormat(&TIME_FORMAT_FRAME);
if(err != S_OK)
{
err = pMS->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME);
}
err = pMC->Run();
// NOTE(review): busy-wait with no timeout; hangs if the graph never
// reaches State_Running.
do
{
err = pMC->GetState(0, (long *)&state);
} while (state != State_Running);
//......... rest of the function omitted in this listing .........
示例12: memset
/////////////////////////////////////////////////////////
// really open the file ! (OS dependent)
//
// Build the DirectShow graph for the given movie file: add it as a source
// filter, then insert a Sample Grabber configured for continuous, buffered
// RGB24 video frames so pixels can later be pulled with GetCurrentBuffer().
// Reports failures via error() and returns early. (Listing truncated below.)
/////////////////////////////////////////////////////////
void pix_movieDS::realOpen(char *filename)
{
WCHAR WideFileName[MAXPDSTRING];
HRESULT RetVal;
AM_MEDIA_TYPE MediaType;
BOOL bFrameTime = TRUE;
GUID Guid;
// Convert c-string to Wide string.
// NOTE(review): assumes 2-byte WCHARs (MAXPDSTRING * 2 bytes zeroed).
memset(&WideFileName, 0, MAXPDSTRING * 2);
if (0 == MultiByteToWideChar(CP_ACP, 0, filename, strlen(filename), WideFileName,
MAXPDSTRING))
{
error("Unable to load %s", filename);
return;
}
// Add a file source filter to the filter graph.
RetVal = FilterGraph->AddSourceFilter(WideFileName, L"SOURCE", &VideoFilter);
if (RetVal != S_OK || NULL == VideoFilter)
{
error("Unable to render %s", filename);
return;
}
// Create an instance of the sample grabber filter. The filter allows frames to be
// buffered from a video source.
RetVal = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
IID_IBaseFilter, (void**)&SampleFilter);
if (RetVal != S_OK || NULL == SampleFilter)
{
error("Unable to create SampleFilter interface %d", RetVal);
return;
}
// Add sample grabber filter to the filter graph.
RetVal = FilterGraph->AddFilter(SampleFilter, L"Sample Grabber");
if (RetVal != S_OK)
{
error("Unable to add SampleFilter %d", RetVal);
return;
}
// Find an interface to the SampleGrabber from the SampleGrabber filter. The
// SampleGrabber allows frames to be grabbed from the filter. SetBufferSamples(TRUE)
// tells the SampleGrabber to buffer the frames. SetOneShot(FALSE) tells the
// SampleGrabber to continuously grab frames. has GetCurrentBuffer() method
RetVal = SampleFilter->QueryInterface(IID_ISampleGrabber, (void **)&SampleGrabber);
if (RetVal != S_OK || NULL == SampleGrabber)
{
error("Unable to create SampleGrabber interface %d", RetVal);
return;
}
// Set the media type that the SampleGrabber wants.
// MEDIATYPE_Video selects only video and not interleaved audio and video
// MEDIASUBTYPE_RGB24 is the colorspace and format to deliver frames
// MediaType.formattype is GUID_NULLsince it is handled later to get file info
memset(&MediaType, 0, sizeof(AM_MEDIA_TYPE));
MediaType.majortype = MEDIATYPE_Video;
MediaType.subtype = MEDIASUBTYPE_RGB24;
MediaType.formattype = GUID_NULL;
// NOTE(review): this SetMediaType result is not checked, unlike the calls below.
RetVal = SampleGrabber->SetMediaType(&MediaType);
// Set the SampleGrabber to return continuous frames
RetVal = SampleGrabber->SetOneShot(FALSE);
if (RetVal != S_OK)
{
error("Unable to setup sample grabber %d", RetVal);
return;
}
// Set the SampleGrabber to copy the data to a buffer. This only set to FALSE when a
// callback is used.
RetVal = SampleGrabber->SetBufferSamples(TRUE);
if (RetVal != S_OK)
{
error("Unable to setup sample grabber %d", RetVal);
return;
}
// Create the Null Renderer interface. The Null Renderer is used to disable rendering of a
//......... rest of the function omitted in this listing .........
示例13: getDeviceCount
// Enumerates the capture formats offered by DirectShow video input devices and
// collects them as CameraConfig records.
//   dev_id -- if >= 0, only that device index is probed; if negative, every
//             device reported by getDeviceCount() is probed.
// Returns the collected list; an empty list on COM/graph failure or when no
// device is present.
// NOTE: the tail of this function is omitted by the source page, so the
// per-format handling and the cleanup/return path are not visible here.
std::vector<CameraConfig> videoInputCamera::getCameraConfigs(int dev_id) {
std::vector<CameraConfig> cfg_list;
int count = getDeviceCount();
if (count==0) return cfg_list;
comInit(); // per-thread COM initialization, paired with comUnInit() on every exit path
HRESULT hr;
ICaptureGraphBuilder2 *lpCaptureGraphBuilder;
IGraphBuilder *lpGraphBuilder;
IBaseFilter *lpInputFilter;
IAMStreamConfig *lpStreamConfig;
char nDeviceName[255];
WCHAR wDeviceName[255];
for (int cam_id=0;cam_id<count;cam_id++) {
// When a specific device was requested, skip every other index.
if ((dev_id>=0) && (dev_id!=cam_id)) continue;
// Capture graph builder: used below to locate the stream-config interface
// on the capture filter's output pin.
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&lpCaptureGraphBuilder);
if (FAILED(hr)) // FAILED is a macro that tests the return value
{
printf("ERROR - Could not create the Filter Graph Manager\n");
comUnInit();
return cfg_list;
}
// Create the Filter Graph Manager.
hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&lpGraphBuilder);
if (FAILED(hr))
{
printf("ERROR - Could not add the graph builder!\n");
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
// Attach the filter graph to the capture graph builder.
hr = lpCaptureGraphBuilder->SetFiltergraph(lpGraphBuilder);
if (FAILED(hr))
{
printf("ERROR - Could not set filtergraph\n");
lpGraphBuilder->Release();
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
memset(wDeviceName, 0, sizeof(WCHAR) * 255);
memset(nDeviceName, 0, sizeof(char) * 255);
// Bind the cam_id-th capture device to a filter and fetch its name in both
// wide and narrow form (getDevice is a project helper not visible here).
hr = getDevice(&lpInputFilter, cam_id, wDeviceName, nDeviceName);
if (SUCCEEDED(hr)){
hr = lpGraphBuilder->AddFilter(lpInputFilter, wDeviceName);
}else{
printf("ERROR - Could not find specified video device\n");
lpGraphBuilder->Release();
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
// Locate IAMStreamConfig on the capture pin; it exposes the device's
// supported format/resolution capabilities.
hr = lpCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, lpInputFilter, IID_IAMStreamConfig, (void **)&lpStreamConfig);
if(FAILED(hr)){
printf("ERROR: Couldn't config the stream!\n");
lpInputFilter->Release();
lpGraphBuilder->Release();
lpCaptureGraphBuilder->Release();
comUnInit();
return cfg_list;
}
CameraConfig cam_cfg;
CameraTool::initCameraConfig(&cam_cfg);
cam_cfg.driver = DRIVER_DEFAULT;
cam_cfg.device = cam_id;
// NOTE(review): sprintf into the fixed-size cam_cfg.name has no bounds
// check; a very long device name would overflow -- consider snprintf.
sprintf(cam_cfg.name, "%s", nDeviceName);
int iCount = 0;
int iSize = 0;
hr = lpStreamConfig->GetNumberOfCapabilities(&iCount, &iSize);
std::vector<CameraConfig> fmt_list;
// Only proceed when the caps structure is the video-specific variant.
if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
{
GUID lastFormat = MEDIASUBTYPE_None;
// NOTE(review): iFormat advances by 2, so only every other capability
// entry is inspected -- confirm this is intentional (e.g. duplicate caps).
for (int iFormat = 0; iFormat < iCount; iFormat+=2)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
hr = lpStreamConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
if (SUCCEEDED(hr)){
// On a subtype change, flush the formats gathered so far (sorted)
// into the result list before starting a new group.
if ( pmtConfig->subtype != lastFormat) {
if (fmt_list.size()>0) {
std::sort(fmt_list.begin(), fmt_list.end());
cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() );
fmt_list.clear();
//......... (rest of this sample omitted by the source page) .........
示例14: main
// Console demo: enumerate DirectShow video capture devices, prompt the user
// to pick one with 'y', then build a capture graph for the chosen device.
// NOTE: the tail of main() is omitted by the source page, so the actual
// preview/run logic and COM cleanup are not visible here.
int main()
{
// for playing
IGraphBuilder *pGraphBuilder;
ICaptureGraphBuilder2 *pCaptureGraphBuilder2;
IMediaControl *pMediaControl;
IBaseFilter *pDeviceFilter = NULL;
// to select a video input device
ICreateDevEnum *pCreateDevEnum = NULL;
IEnumMoniker *pEnumMoniker = NULL;
IMoniker *pMoniker = NULL;
ULONG nFetched = 0;
// initialize COM
CoInitialize(NULL);
//
// selecting a device
//
// Create CreateDevEnum to list device
CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
// Create EnumMoniker to list VideoInputDevice
pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
&pEnumMoniker, 0);
if (pEnumMoniker == NULL) {
// this will be shown if there is no capture device
printf("no device\n");
return 0;
}
// reset EnumMoniker
pEnumMoniker->Reset();
// get each Moniker
while (pEnumMoniker->Next(1, &pMoniker, &nFetched) == S_OK)
{
IPropertyBag *pPropertyBag;
TCHAR devname[256];
// bind to IPropertyBag
pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
(void **)&pPropertyBag);
VARIANT var;
// get FriendlyName
var.vt = VT_BSTR;
pPropertyBag->Read(L"FriendlyName", &var, 0);
// Convert the wide-character friendly name to the local ANSI code page
// so it can be printed on the console.
WideCharToMultiByte(CP_ACP, 0,
var.bstrVal, -1, devname, sizeof(devname), 0, 0);
VariantClear(&var);
printf("%s\r\n", devname);
printf(" select this device ? [y] or [n]\r\n");
int ch = getchar();
// you can start playing by 'y' + return key
// if you press the other key, it will not be played.
if (ch == 'y')
{
// Bind Monkier to Filter
pMoniker->BindToObject(0, 0, IID_IBaseFilter,
(void**)&pDeviceFilter );
}
else
{
// consume the trailing newline left in stdin for non-'y' input
getchar();
}
// release
pMoniker->Release();
pPropertyBag->Release();
if (pDeviceFilter != NULL)
{
// go out of loop if getchar() returns 'y'
break;
}
}
if (pDeviceFilter != NULL) {
//
// PLAY
//
// create FilterGraph
CoCreateInstance(CLSID_FilterGraph,
NULL,
CLSCTX_INPROC,
IID_IGraphBuilder,
(LPVOID *)&pGraphBuilder);
// create CaptureGraphBuilder2
CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
IID_ICaptureGraphBuilder2,
(LPVOID *)&pCaptureGraphBuilder2);
//......... (rest of this sample omitted by the source page) .........
示例15: MultiByteToWideChar
//......... (beginning of this sample omitted by the source page) .........
// NOTE: this fragment starts inside a function whose opening -- including the
// '/*' that pairs with the '}*/' below -- is in the omitted part.
pGraph->EnumFilters(&iEnumFilters);
ULONG filterCount = 16;
IFilter *iFilters[16];
iEnumFilters->Next(filterCount, iFilters, &filterCount);
for( ULONG j = 0; j < filterCount; ++j )
{
FILTER_INFO filterInfo;
iFilters[j]->QueryFilterInfo(&filterInfo);
filterInfo.pGraph->Release();
iFilters[j]->Release();
}
iEnumFilters->Release();
}*/
// Hide the cursor and keep the video window invisible / non-auto-showing
// while it is being configured.
iVideoWindow->HideCursor(OATRUE);
iVideoWindow->put_Visible( OAFALSE );
iVideoWindow->put_AutoShow( OAFALSE );
LONG windowStyle;
iVideoWindow->get_WindowStyle( &windowStyle);
// Strip all frame/caption/scrollbar decorations so the video renders as a
// bare, borderless surface.
windowStyle &= ~WS_BORDER & ~WS_CAPTION & ~WS_SIZEBOX & ~WS_THICKFRAME &
~WS_HSCROLL & ~WS_VSCROLL & ~WS_VISIBLE;
iVideoWindow->put_WindowStyle( windowStyle);
}
else
iVideoWindow = NULL;
// Start the graph running via IMediaControl.
if( (hr = pGraph->QueryInterface(IID_IMediaControl, (void **) &pMC)) == 0)
{
pMC->Run(); // sometimes it returns 1, but still ok
state = PLAYING;
pMC->Release();
}
if( iVideoWindow )
{
// Re-apply the borderless style after Run(); presumably the renderer can
// alter the window once the graph starts -- TODO confirm.
iVideoWindow->put_Visible( OAFALSE );
LONG windowStyle;
iVideoWindow->get_WindowStyle( &windowStyle);
windowStyle &= ~WS_BORDER & ~WS_CAPTION & ~WS_SIZEBOX & ~WS_THICKFRAME &
~WS_HSCROLL & ~WS_VSCROLL & ~WS_VISIBLE;
iVideoWindow->put_WindowStyle( windowStyle);
LONG maxWidth;
LONG maxHeight;
// NOTE(review): when a video window exists, this is the last assignment to
// hr before the error check at the bottom, so that check reports the
// result of GetMaxIdealImageSize, not of Run().
hr=iVideoWindow->GetMaxIdealImageSize( &maxWidth, &maxHeight);
#ifdef FULL_SCREEN_VIDEO
#else
iVideoWindow->put_BorderColor( RGB(0,0,0) );
iVideoWindow->put_WindowState(SW_MAXIMIZE);
// Look up the renderer filter to query the native video size, then center
// (and optionally 2x-zoom) the picture inside the maximized window.
IBaseFilter *iFilter;
if( pGraph->FindFilterByName((const WCHAR *)L"Video Renderer", &iFilter) == 0)
{
IBasicVideo *iBasicVideo;
if( iFilter->QueryInterface(IID_IBasicVideo, (void **)&iBasicVideo) == 0)
{
LONG screenWidth;
LONG screenHeight;
LONG videoWidth;
LONG videoHeight;
if( iVideoWindow->get_Width(&screenWidth) == 0 &&
iVideoWindow->get_Height(&screenHeight) == 0 &&
iBasicVideo->GetVideoSize(&videoWidth, &videoHeight) == 0)
{
// zoom in by 2 if possible
if( screenWidth >= videoWidth * 2 &&
screenHeight >= videoHeight * 2)
{
videoWidth *= 2;
videoHeight *= 2;
}
// center the video client area
iBasicVideo->SetDestinationPosition(
(screenWidth-videoWidth)/2, (screenHeight-videoHeight)/2,
videoWidth, videoHeight);
}
iBasicVideo->Release();
}
iFilter->Release();
}
#endif
iVideoWindow->HideCursor(OATRUE);
iVideoWindow->SetWindowForeground(OATRUE);
}
// Drop our IVideoWindow reference now that configuration is done.
if(iVideoWindow)
{
iVideoWindow->Release();
iVideoWindow = NULL;
}
}
// Report the last recorded HRESULT unless the caller asked to skip failures.
if( hr && !skip_on_fail_flag)
err.run("video.play error %d", hr );
}