本文整理汇总了C#中ICaptureGraphBuilder2.FindInterface方法的典型用法代码示例。如果您正苦于以下问题:C# ICaptureGraphBuilder2.FindInterface方法的具体用法?C# ICaptureGraphBuilder2.FindInterface怎么用?C# ICaptureGraphBuilder2.FindInterface使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类ICaptureGraphBuilder2
的用法示例。
在下文中一共展示了ICaptureGraphBuilder2.FindInterface方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: GetInterfaces
/// <summary>
/// Creates the DirectShow capture graph builder and filter graph COM
/// objects, then queries the capture filter for the video-proc-amp and
/// camera-control interfaces, storing each in the corresponding field.
/// </summary>
/// <exception cref="Exception">
/// Thrown when any COM object or interface cannot be obtained; the
/// original failure is preserved as the inner exception.
/// </exception>
void GetInterfaces()
{
    Type comType = null;
    object comObj = null;
    try
    {
        // Instantiate the Capture Graph Builder COM object.
        Guid clsid = typeof(CaptureGraphBuilder2).GUID;
        comType = Type.GetTypeFromCLSID(clsid);
        comObj = Activator.CreateInstance(comType);
        m_bCapGraph = (ICaptureGraphBuilder2)comObj;

        // Instantiate the Filter Graph COM object.
        Guid clsfg = typeof(FilterGraph).GUID;
        comType = Type.GetTypeFromCLSID(clsfg);
        comObj = Activator.CreateInstance(comType);
        m_bGraph = (IGraphBuilder)comObj;

        // Look up the video configuration (proc-amp) interface on the
        // capture pin of the capture filter.
        DsGuid cat = PinCategory.Capture;
        DsGuid type = MediaType.Interleaved;
        Guid iid = typeof(IAMVideoProcAmp).GUID;
        m_bCapGraph.FindInterface(cat, type, capFilter, iid, out comObj);
        m_iVidConfig = (IAMVideoProcAmp)comObj;

        // Look up the camera control interface the same way.
        cat = PinCategory.Capture;
        type = MediaType.Interleaved;
        iid = typeof(IAMCameraControl).GUID;
        m_bCapGraph.FindInterface(cat, type, capFilter, iid, out comObj);
        m_iCamConfig = (IAMCameraControl)comObj;
    }
    catch (Exception ee)
    {
        // Release the last COM object we were working on before failing.
        if (comObj != null)
            Marshal.ReleaseComObject(comObj);
        // BUGFIX: pass the original exception as InnerException instead of
        // flattening it into the message, so the stack trace survives.
        throw new Exception("Could not get interfaces\r\n" + ee.Message, ee);
    }
}
示例2: ShowCapPinDialog
/// <summary>
/// Shows the property-page dialog for the capture pin of the given
/// filter, looking up IAMStreamConfig via the capture graph builder.
/// </summary>
/// <param name="bld">Capture graph builder used to locate the pin interface.</param>
/// <param name="flt">The capture filter whose pin is configured.</param>
/// <param name="hwnd">Owner window handle for the property frame.</param>
/// <returns>True if the dialog was shown; false on any failure.</returns>
public static bool ShowCapPinDialog(ICaptureGraphBuilder2 bld, IBaseFilter flt, IntPtr hwnd)
{
    object ppint = null;
    ISpecifyPropertyPages pages = null;
    bool flag;
    DsCAUUID pPages = new DsCAUUID();
    try
    {
        // Try the interleaved media type first (DV devices), then
        // fall back to plain video.
        Guid capture = PinCategory.Capture;
        Guid interleaved = MediaType.Interleaved;
        Guid gUID = typeof(IAMStreamConfig).GUID;
        if (bld.FindInterface(ref capture, ref interleaved, flt, ref gUID, out ppint) != 0)
        {
            interleaved = MediaType.Video;
            if (bld.FindInterface(ref capture, ref interleaved, flt, ref gUID, out ppint) != 0)
            {
                return false;
            }
        }
        pages = ppint as ISpecifyPropertyPages;
        if (pages == null)
        {
            return false;
        }
        int num = pages.GetPages(out pPages);
        // BUGFIX: bail out when GetPages fails; otherwise pPages may hold
        // garbage that gets handed to OleCreatePropertyFrame.
        if (num != 0)
        {
            return false;
        }
        num = OleCreatePropertyFrame(hwnd, 30, 30, null, 1, ref ppint, pPages.cElems, pPages.pElems, 0, 0, IntPtr.Zero);
        flag = true;
    }
    catch (Exception exception)
    {
        Trace.WriteLine("!Ds.NET: ShowCapPinDialog " + exception.Message);
        flag = false;
    }
    finally
    {
        // Free the page-GUID array and release the pin interface.
        if (pPages.pElems != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(pPages.pElems);
        }
        pages = null;
        if (ppint != null)
        {
            Marshal.ReleaseComObject(ppint);
        }
        ppint = null;
    }
    return flag;
}
示例3: ShowCapPinDialog
/// <summary>
/// Shows the property-page dialog for the capture pin of the given
/// filter (IAMStreamConfig), trying the interleaved media type first
/// and falling back to video.
/// </summary>
/// <param name="bld">Capture graph builder used to locate the pin interface.</param>
/// <param name="flt">The capture filter whose pin is configured.</param>
/// <param name="hwnd">Owner window handle for the property frame.</param>
/// <returns>True if the dialog was shown; false on any failure.</returns>
public static bool ShowCapPinDialog( ICaptureGraphBuilder2 bld, IBaseFilter flt, IntPtr hwnd )
{
    int hr;
    object comObj = null;
    ISpecifyPropertyPages spec = null;
    DsCAUUID cauuid = new DsCAUUID();
    try
    {
        DsGuid cat = PinCategory.Capture;
        DsGuid type = MediaType.Interleaved;
        DsGuid iid = typeof(IAMStreamConfig).GUID;
        hr = bld.FindInterface( cat, type, flt, iid, out comObj );
        if( hr != 0 )
        {
            // Not a DV-style device; retry with the plain video type.
            type = MediaType.Video;
            hr = bld.FindInterface( cat, type, flt, iid, out comObj );
            if( hr != 0 )
                return false;
        }
        spec = comObj as ISpecifyPropertyPages;
        if( spec == null )
            return false;
        hr = spec.GetPages( out cauuid );
        // BUGFIX: abort when GetPages fails; otherwise cauuid may contain
        // garbage that is handed to OleCreatePropertyFrame.
        if( hr != 0 )
            return false;
        hr = OleCreatePropertyFrame( hwnd, 30, 30, null, 1,
            ref comObj, cauuid.cElems, cauuid.pElems, 0, 0, IntPtr.Zero );
        return true;
    }
    catch( Exception ee )
    {
        Trace.WriteLine( "!Ds.NET: ShowCapPinDialog " + ee.Message );
        return false;
    }
    finally
    {
        // Free the page-GUID array and release the pin interface.
        if( cauuid.pElems != IntPtr.Zero )
            Marshal.FreeCoTaskMem( cauuid.pElems );
        spec = null;
        if( comObj != null )
            Marshal.ReleaseComObject( comObj );
        comObj = null;
    }
}
示例4: SetVideoCaptureParameters
/// <summary>
/// Sets the capture parameters for the video capture device
/// </summary>
private void SetVideoCaptureParameters(ICaptureGraphBuilder2 capGraph, IBaseFilter captureFilter)
{
    // Locate the stream configuration interface on the capture pin.
    object configObj;
    int result = capGraph.FindInterface(PinCategory.Capture,
                                        MediaType.Video,
                                        captureFilter,
                                        typeof(IAMStreamConfig).GUID,
                                        out configObj);
    DsError.ThrowExceptionForHR(result);

    var streamConfig = configObj as IAMStreamConfig;
    if (streamConfig == null)
    {
        // QueryInterface did not yield a stream configuration interface.
        throw new Exception("Failed to get IAMStreamConfig");
    }

    // Read the current media type of the video out pin.
    AMMediaType mediaType;
    result = streamConfig.GetFormat(out mediaType);
    DsError.ThrowExceptionForHR(result);

    // Marshal the format block into a managed VIDEOINFOHEADER so the
    // frame rate and frame size can be edited, then write it back.
    var header = new VideoInfoHeader();
    Marshal.PtrToStructure(mediaType.formatPtr, header);
    header.AvgTimePerFrame = DSHOW_ONE_SECOND_UNIT / FPS;
    header.BmiHeader.Width = DesiredWidth;
    header.BmiHeader.Height = DesiredHeight;
    Marshal.StructureToPtr(header, mediaType.formatPtr, false);

    // Apply the new format; release the media type before the HRESULT
    // check so the unmanaged block is freed even when SetFormat fails.
    result = streamConfig.SetFormat(mediaType);
    DsUtils.FreeAMMediaType(mediaType);
    DsError.ThrowExceptionForHR(result);
}
示例5: createGraph
// --------------------- Private Methods -----------------------
/// <summary>
/// Create a new filter graph and add filters (devices, compressors,
/// misc), but leave the filters unconnected. Call renderGraph()
/// to connect the filters.
/// </summary>
protected void createGraph()
{
Guid cat;
Guid med;
int hr;
// Ensure required properties are set
if ( videoDevice == null && audioDevice == null )
throw new ArgumentException( "The video and/or audio device have not been set. Please set one or both to valid capture devices.\n" );
// Skip if we are already created
if ( (int)graphState < (int)GraphState.Created )
{
// Garbage collect, ensure that previous filters are released
GC.Collect();
// Make a new filter graph
#if DSHOWNET
// Make a new filter graph
graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));
// Get the Capture Graph Builder
Guid clsid = Clsid.CaptureGraphBuilder2;
Guid riid = typeof(ICaptureGraphBuilder2).GUID;
captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);
#else
FilterGraph graph = new FilterGraph();
graphBuilder = (IGraphBuilder)graph;
// Get the Capture Graph Builder
captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
#endif
// Link the CaptureGraphBuilder to the filter graph
hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
if (hr < 0) Marshal.ThrowExceptionForHR(hr);
// Add the graph to the Running Object Table so it can be
// viewed with GraphEdit
#if DEBUG
#if DSHOWNET
DsROT.AddGraphToRot(graphBuilder, out rotCookie);
#else
rotCookie = new DsROTEntry(graphBuilder);
#endif
#endif
// Get the video device and add it to the filter graph
if ( VideoDevice != null )
{
videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( VideoDevice.MonikerString );
hr = graphBuilder.AddFilter( videoDeviceFilter, "Video Capture Device" );
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
}
// Get the audio device and add it to the filter graph
if ( AudioDevice != null )
{
audioDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( AudioDevice.MonikerString );
hr = graphBuilder.AddFilter( audioDeviceFilter, "Audio Capture Device" );
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
}
// Get the video compressor and add it to the filter graph
if ( VideoCompressor != null )
{
videoCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( VideoCompressor.MonikerString );
hr = graphBuilder.AddFilter( videoCompressorFilter, "Video Compressor" );
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
}
// Get the audio compressor and add it to the filter graph
if ( AudioCompressor != null )
{
audioCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( AudioCompressor.MonikerString );
hr = graphBuilder.AddFilter( audioCompressorFilter, "Audio Compressor" );
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
}
// Retrieve the stream control interface for the video device
// FindInterface will also add any required filters
// (WDM devices in particular may need additional
// upstream filters to function).
// Try looking for an interleaved media type
object o;
cat = PinCategory.Capture;
med = MediaType.Interleaved;
Guid iid = typeof(IAMStreamConfig).GUID;
#if DSHOWNET
hr = captureGraphBuilder.FindInterface(
ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
//.........这里部分代码省略.........
示例6: SetConfigParms
// Set the framerate and video size on the capture pin.
// A value of 0 (or less) for any parameter keeps the device default.
private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iFrameRate, int iWidth, int iHeight)
{
    int hr;
    object o;
    AMMediaType media;
    // Find the stream config interface on the video capture pin.
    hr = capGraph.FindInterface(
        PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o );
    IAMStreamConfig videoStreamConfig = o as IAMStreamConfig;
    if (videoStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }
    // Get the existing format block.
    hr = videoStreamConfig.GetFormat( out media);
    DsError.ThrowExceptionForHR( hr );
    // Copy out the VIDEOINFOHEADER so it can be edited.
    VideoInfoHeader v = new VideoInfoHeader();
    Marshal.PtrToStructure( media.formatPtr, v );
    // If overriding the framerate, set the frame rate.
    if (iFrameRate > 0)
    {
        v.AvgTimePerFrame = 10000000 / iFrameRate; // 100-ns units per frame
    }
    // If overriding the width, set the width.
    if (iWidth > 0)
    {
        v.BmiHeader.Width = iWidth;
    }
    // If overriding the height, set the height.
    if (iHeight > 0)
    {
        v.BmiHeader.Height = iHeight;
    }
    // Copy the media structure back and apply the new format.
    Marshal.StructureToPtr( v, media.formatPtr, false );
    hr = videoStreamConfig.SetFormat( media );
    // BUGFIX: free the AMMediaType BEFORE the HRESULT check; the original
    // threw first, leaking the unmanaged format block when SetFormat failed.
    DsUtils.FreeAMMediaType(media);
    media = null;
    DsError.ThrowExceptionForHR( hr );
}
示例7: InitResolution
/// <summary>
/// Selects the stream capability whose frame size is closest to the
/// requested target resolution and applies it to the capture pin.
/// </summary>
/// <param name="capGraph">Capture graph builder used to locate IAMStreamConfig.</param>
/// <param name="capFilter">The capture filter to configure.</param>
/// <param name="targetWidth">Desired frame width in pixels.</param>
/// <param name="targetHeight">Desired frame height in pixels.</param>
void InitResolution(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int targetWidth, int targetHeight)
{
    object o;
    // BUGFIX: check the FindInterface result instead of dereferencing a
    // potentially null IAMStreamConfig below.
    int hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);
    DsError.ThrowExceptionForHR(hr);
    IAMStreamConfig videoStreamConfig = o as IAMStreamConfig;
    if (videoStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }
    int iC = 0, iS = 0;
    videoStreamConfig.GetNumberOfCapabilities(out iC, out iS);
    IntPtr ptr = Marshal.AllocCoTaskMem(iS);
    AMMediaType media = null;
    try
    {
        // Scan all capabilities for the one closest to the target size.
        int bestDWidth = 999999;
        int bestDHeight = 999999;
        int streamID = 0;
        for (int i = 0; i < iC; i++)
        {
            videoStreamConfig.GetStreamCaps(i, out media, ptr);
            VideoInfoHeader v = new VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, v);
            int dW = Math.Abs(targetWidth - v.BmiHeader.Width);
            int dH = Math.Abs(targetHeight - v.BmiHeader.Height);
            if (dW < bestDWidth && dH < bestDHeight)
            {
                streamID = i;
                bestDWidth = dW;
                bestDHeight = dH;
            }
            // BUGFIX: each GetStreamCaps call allocates a fresh AMMediaType;
            // the original freed only the last one and leaked the rest.
            DsUtils.FreeAMMediaType(media);
            media = null;
        }
        // Re-fetch the winning capability and apply it.
        videoStreamConfig.GetStreamCaps(streamID, out media, ptr);
        hr = videoStreamConfig.SetFormat(media);
        DsUtils.FreeAMMediaType(media);
        media = null;
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // BUGFIX: free the caps buffer even when an exception is thrown.
        Marshal.FreeCoTaskMem(ptr);
    }
}
示例8: SetConfigParms
// Set the sample rate and channel count on the audio capture pin,
// forcing 16-bit PCM. A value of 0 (or less) keeps the current default.
private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iSampleRate, int iChannels)
{
    int hr;
    object o;
    AMMediaType media;
    // Find the stream config interface on the audio capture pin.
    hr = capGraph.FindInterface(
        PinCategory.Capture, MediaType.Audio, capFilter, typeof(IAMStreamConfig).GUID, out o);
    IAMStreamConfig audioStreamConfig = o as IAMStreamConfig;
    if (audioStreamConfig == null) {
        throw new Exception("Failed to get IAMStreamConfig");
    }
    // Get the existing format block.
    hr = audioStreamConfig.GetFormat(out media);
    DsError.ThrowExceptionForHR(hr);
    // Copy out the WAVEFORMATEX and force 16-bit PCM defaults.
    WaveFormatEx i = new WaveFormatEx();
    Marshal.PtrToStructure(media.formatPtr, i);
    i.wFormatTag = 0x0001; // WAVE_FORMAT_PCM
    i.wBitsPerSample = 16;
    i.nSamplesPerSec = 44100;
    i.nChannels = m_Channels;
    i.cbSize = 0;
    // If overriding the sample rate, set the sample rate.
    if (iSampleRate > 0) {
        i.nSamplesPerSec = iSampleRate;
    }
    // If overriding the channel count, set the channel count.
    if (iChannels > 0) {
        i.nChannels = (short)iChannels;
    }
    // BUGFIX: derive block align and byte rate AFTER the overrides; the
    // original computed them from the pre-override values (and hard-coded
    // nBlockAlign = 2), producing an inconsistent WAVEFORMATEX whenever a
    // non-default rate or channel count was requested.
    i.nBlockAlign = (short)(i.nChannels * (i.wBitsPerSample / 8));
    i.nAvgBytesPerSec = (i.nSamplesPerSec * i.nBlockAlign);
    // Copy the media structure back and apply the new format.
    Marshal.StructureToPtr(i, media.formatPtr, false);
    hr = audioStreamConfig.SetFormat(media);
    // BUGFIX: free the AMMediaType before the HRESULT check so it is not
    // leaked when SetFormat fails.
    DsUtils.FreeAMMediaType(media);
    media = null;
    DsError.ThrowExceptionForHR(hr);
}
示例9: findCrossbars
/// <summary>
/// Retrieve a list of crossbar filters in the graph.
/// Most hardware devices should have a maximum of 2 crossbars,
/// one for video and another for audio.
/// </summary>
protected ArrayList findCrossbars(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter)
{
    ArrayList found = new ArrayList();
    Guid lookDirection = FindDirection.UpstreamOnly;
    Guid anyMediaType = new Guid();
    Guid crossbarIid = typeof(IAMCrossbar).GUID;

    // Walk upstream from the device filter: each crossbar found becomes
    // the starting point for the next search, until nothing more turns up.
    object current = null;
    int hr = graphBuilder.FindInterface(ref lookDirection, ref anyMediaType, deviceFilter, ref crossbarIid, out current);
    while (hr == 0 && current != null)
    {
        if (current is IAMCrossbar)
        {
            found.Add(current as IAMCrossbar);
            object next = null;
            hr = graphBuilder.FindInterface(ref lookDirection, ref anyMediaType, current as IBaseFilter, ref crossbarIid, out next);
            current = next;
        }
        else
        {
            // Found something that is not a crossbar: stop searching.
            current = null;
        }
    }
    return found;
}
示例10: InitConfigParams
/// <summary>
/// Applies the configured frame rate, width and height (fields
/// mFrameRate, mWidth, mHeight; values &lt;= 0 keep the device default)
/// to the capture pin via IAMStreamConfig.
/// </summary>
/// <param name="captureGraph">Capture graph builder used to locate the interface.</param>
/// <param name="captureFilter">The capture filter to configure.</param>
private void InitConfigParams(ICaptureGraphBuilder2 captureGraph, IBaseFilter captureFilter)
{
    object obj;
    AMMediaType media;
    int hr = captureGraph.FindInterface(PinCategory.Capture, MediaType.Video, captureFilter, typeof(IAMStreamConfig).GUID, out obj);
    IAMStreamConfig videoStreamConfig = obj as IAMStreamConfig;
    if (videoStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }
    // Get the existing format block
    hr = videoStreamConfig.GetFormat(out media);
    DsError.ThrowExceptionForHR(hr);
    // Copy out the VIDEOINFOHEADER so it can be edited.
    VideoInfoHeader infoHeader = new VideoInfoHeader();
    Marshal.PtrToStructure(media.formatPtr, infoHeader);
    if (mFrameRate > 0)
    {
        infoHeader.AvgTimePerFrame = 10000000 / mFrameRate; // 100-ns units per frame
    }
    if (mWidth > 0)
    {
        infoHeader.BmiHeader.Width = mWidth;
    }
    if (mHeight > 0)
    {
        infoHeader.BmiHeader.Height = mHeight;
    }
    // Copy the media structure back and apply the new format.
    Marshal.StructureToPtr(infoHeader, media.formatPtr, false);
    hr = videoStreamConfig.SetFormat(media);
    // BUGFIX: free the AMMediaType BEFORE the HRESULT check; the original
    // threw first, leaking the unmanaged format block when SetFormat failed.
    DsUtils.FreeAMMediaType(media);
    media = null;
    DsError.ThrowExceptionForHR(hr);
}
示例11: InitTuner
// Probes the video capture device for TV-tuner related interfaces and
// wires up the corresponding fields: tuner, crossbar (via findCrossbars),
// TVAudio and analogVideoDecoder. If no tuner is found, the tuner field
// is set to null and nothing else is touched.
private void InitTuner(ICaptureGraphBuilder2 captureGraphBuilder)
{
    Object o;
    // Null category/type means "search every pin" for the tuner interface.
    var hr = captureGraphBuilder.FindInterface(null, null, videoDevice, typeof(IAMTVTuner).GUID, out o);
    if (hr >= 0)
    {
        tuner = (IAMTVTuner)o;
        //tuner.put_Mode(AMTunerModeType.TV);
        o = null;
        // Find the crossbar(s) upstream of the tuner filter.
        var list = findCrossbars(captureGraphBuilder, (IBaseFilter)tuner);
        /*hr = captureGraphBuilder.FindInterface(null, null, (IBaseFilter)Tuner, typeof(IAMCrossbar).GUID, out o);
        if (hr >= 0)
        {
        crossbar = (IAMCrossbar)o;
        InitCrossbar();
        }
        else
        crossbar = null;
        */
        if (list.Count > 0)
        {
            // Use the first crossbar found and initialize its routing.
            crossbar = (IAMCrossbar)list[0];
            InitCrossbar();
        }
        o = null;
        // Look for the TV audio interface on the same device.
        hr = captureGraphBuilder.FindInterface(null, null, videoDevice, typeof(IAMTVAudio).GUID, out o);
        if (hr >= 0)
        {
            TVAudio = (IAMTVAudio)o;
        }
        o = null;
        // Look for the analog video decoder and read its current TV format.
        hr = captureGraphBuilder.FindInterface(null, null, videoDevice, typeof(IAMAnalogVideoDecoder).GUID, out o);
        if (hr >= 0)
        {
            analogVideoDecoder = (o as IAMAnalogVideoDecoder);
            AnalogVideoStandard avs;
            // NOTE(review): the queried TV format is read but not stored —
            // presumably a probe; confirm whether avs should be kept.
            analogVideoDecoder.get_TVFormat(out avs);
        }
        o = null;
    }
    else
        tuner = null;
}
示例12: findCrossbars
// Collects every crossbar filter reachable by walking upstream from the
// given device filter; returns them (as IAMCrossbar) in an ArrayList.
protected ArrayList findCrossbars(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter)
{
    ArrayList crossbars = new ArrayList();
    Guid direction = FindDirection.UpstreamOnly;
    Guid mediaType = new Guid();
    Guid iid = typeof(IAMCrossbar).GUID;

    object candidate = null;
    int hr = graphBuilder.FindInterface(ref direction, ref mediaType, deviceFilter, ref iid, out candidate);
    while (hr == 0 && candidate != null)
    {
        IAMCrossbar bar = candidate as IAMCrossbar;
        if (bar == null)
        {
            // The hit is not a crossbar: stop searching.
            break;
        }
        crossbars.Add(bar);
        // Continue the search upstream from the crossbar just found.
        object nextCandidate = null;
        hr = graphBuilder.FindInterface(ref direction, ref mediaType, candidate as IBaseFilter, ref iid, out nextCandidate);
        candidate = nextCandidate;
    }
    return crossbars;
}
示例13: SetupTunerAndCrossbar
/// <summary>
/// When the driver settings indicate a tuner/crossbar based device,
/// routes the configured crossbar input pin to the configured output pin.
/// No-op for devices without a tuner.
/// </summary>
/// <param name="graphBuilder">Capture graph builder used to locate interfaces.</param>
/// <param name="deviceFilter">The capture device filter.</param>
public static void SetupTunerAndCrossbar(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter)
{
    if (DriverSettings.Instance.UsesTunerCrossbar)
    {
        object o;
        // The tuner interface is only probed to confirm the device has one.
        int hr = graphBuilder.FindInterface(null, null, deviceFilter, typeof(IAMTVTuner).GUID, out o);
        if (hr >= 0)
        {
            // BUGFIX: release the probed tuner interface instead of leaking
            // the RCW until the next GC.
            if (o != null)
            {
                Marshal.ReleaseComObject(o);
            }
            hr = graphBuilder.FindInterface(null, null, deviceFilter, typeof(IAMCrossbar).GUID, out o);
            if (hr >= 0)
            {
                // BUGFIX: use 'as' rather than a direct cast — the original
                // cast would throw before its own null check could ever run.
                IAMCrossbar crossbar = o as IAMCrossbar;
                if (crossbar != null)
                {
                    hr = crossbar.Route(DriverSettings.Instance.CrossbarOutputPin, DriverSettings.Instance.CrossbarInputPin);
                    DsError.ThrowExceptionForHR(hr);
                }
            }
        }
    }
}
示例14: InitializeResolution
/// <summary>
/// Initializes the resolution by scanning the capture pin's stream
/// capabilities, picking the one with the largest width and height,
/// and applying it via IAMStreamConfig.
/// </summary>
/// <param name="capGraph">The cap graph.</param>
/// <param name="capFilter">The cap filter.</param>
void InitializeResolution(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter)
{
    object obj;
    // BUGFIX: check the FindInterface result instead of dereferencing a
    // potentially null IAMStreamConfig below.
    int hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out obj);
    DsError.ThrowExceptionForHR(hr);
    IAMStreamConfig videoStreamConfig = obj as IAMStreamConfig;
    if (videoStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }
    int iCount = 0;
    int iSize = 0;
    videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize);
    IntPtr ptr = Marshal.AllocCoTaskMem(iSize);
    AMMediaType mediaType = null;
    try
    {
        // Scan all capabilities for the largest frame size.
        int maxWidth = 0;
        int maxHeight = 0;
        int streamID = 0;
        for (int i = 0; i < iCount; i++)
        {
            videoStreamConfig.GetStreamCaps(i, out mediaType, ptr);
            VideoInfoHeader videoInfo = new VideoInfoHeader();
            Marshal.PtrToStructure(mediaType.formatPtr, videoInfo);
            if (videoInfo.BmiHeader.Width > maxWidth && videoInfo.BmiHeader.Height > maxHeight)
            {
                streamID = i;
                maxWidth = videoInfo.BmiHeader.Width;
                maxHeight = videoInfo.BmiHeader.Height;
            }
            // BUGFIX: every GetStreamCaps call allocates a fresh AMMediaType;
            // the original freed only the last one and leaked the rest.
            DsUtils.FreeAMMediaType(mediaType);
            mediaType = null;
        }
        // Re-fetch the winning capability and apply it.
        videoStreamConfig.GetStreamCaps(streamID, out mediaType, ptr);
        hr = videoStreamConfig.SetFormat(mediaType);
        DsUtils.FreeAMMediaType(mediaType);
        mediaType = null;
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // BUGFIX: free the caps buffer even when an exception is thrown.
        Marshal.FreeCoTaskMem(ptr);
    }
}
示例15: CreateGraph
///////////////////////////////////////////////////////////////////////////////
// Methods and Eventhandling for Background tasks //
///////////////////////////////////////////////////////////////////////////////
#region THREAD
#endregion //THREAD
///////////////////////////////////////////////////////////////////////////////
// Methods for doing main class job //
///////////////////////////////////////////////////////////////////////////////
#region PRIVATEMETHODS
/// <summary>
/// Create a new filter graph and add filters (devices, compressors,
/// misc), but leave the filters unconnected. Call renderGraph()
/// to connect the filters.
/// </summary>
/// <returns>True if successful created the graph.</returns>
protected bool CreateGraph()
{
int hr;
try
{
// Garbage collect, ensure that previous filters are released
GC.Collect();
// Get the graphbuilder object
this.graphBuilder = new FilterGraph() as IFilterGraph2;
// Get a ICaptureGraphBuilder2 to help build the graph
this.captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
// Link the CaptureGraphBuilder to the filter graph
hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
DsError.ThrowExceptionForHR(hr);
#if DEBUG
this.rotCookie = new DsROTEntry(this.graphBuilder);
#endif
// Add the Video input device to the graph
//// Add the video device
//IBaseFilter videoSourceFilter;
//hr = this.graphBuilder.AddSourceFilterForMoniker(this.captureDeviceProperties.VideoInputDevice.DirectshowDevice.Mon, null, "Video Source", out videoSourceFilter);
//DsError.ThrowExceptionForHR(hr);
//this.VideoDeviceFilter = videoSourceFilter;
this.VideoDeviceFilter = DirectShowUtils.CreateFilter(
FilterCategory.VideoInputDevice,
this.captureDeviceProperties.VideoInputDevice.Name);
if (this.VideoDeviceFilter != null)
{
hr = graphBuilder.AddFilter(this.VideoDeviceFilter, "Video Source");
DsError.ThrowExceptionForHR(hr);
}
// Add the Audio input device to the graph
this.AudioDeviceFilter = DirectShowUtils.CreateFilter(
FilterCategory.AudioInputDevice,
this.captureDeviceProperties.AudioInputDevice);
if (this.AudioDeviceFilter != null)
{
hr = graphBuilder.AddFilter(this.AudioDeviceFilter, "Audio Source");
DsError.ThrowExceptionForHR(hr);
}
// Retrieve the stream control interface for the video device
// FindInterface will also add any required filters
// (WDM devices in particular may need additional
// upstream filters to function).
DsGuid cat;
DsGuid med;
// Try looking for an interleaved media type
object o;
cat = PinCategory.Capture;
med = MediaType.Interleaved;
Guid iid = typeof(IAMStreamConfig).GUID;
hr = captureGraphBuilder.FindInterface(cat, med, this.VideoDeviceFilter, iid, out o);
if (hr != 0)
{
// If not found, try looking for a video media type
med = MediaType.Video;
hr = captureGraphBuilder.FindInterface(
cat, med, this.VideoDeviceFilter, iid, out o);
if (hr != 0)
o = null;
}
videoStreamConfig = o as IAMStreamConfig;
this.SetConfigParms(
videoStreamConfig,
this.captureDeviceProperties.FrameRate,
this.captureDeviceProperties.VideoSize.Width,
this.captureDeviceProperties.VideoSize.Height);
// Render capture stream (only if necessary)
//.........这里部分代码省略.........