本文整理汇总了C#中ICaptureGraphBuilder2.SetFiltergraph方法的典型用法代码示例。如果您正苦于以下问题:C# ICaptureGraphBuilder2.SetFiltergraph方法的具体用法?C# ICaptureGraphBuilder2.SetFiltergraph怎么用?C# ICaptureGraphBuilder2.SetFiltergraph使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类ICaptureGraphBuilder2
的用法示例。
在下文中一共展示了ICaptureGraphBuilder2.SetFiltergraph方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: BuildGraph
/// <summary>
/// Builds the DirectShow capture graph for this analog (HDPVR) TV card:
/// creates the filter graph and capture graph builder, adds the crossbar,
/// capture, encoder and TsWriter filters, then persists the detected
/// crossbar/capture topology to the card configuration.
/// </summary>
public override void BuildGraph()
{
    if (_cardId == 0)
    {
        // First use of this card: resolve the card id, then load and
        // immediately persist its configuration.
        GetPreloadBitAndCardId();
        _configuration = Configuration.readConfiguration(_cardId, _name, _devicePath);
        Configuration.writeConfiguration(_configuration);
    }
    _lastSignalUpdate = DateTime.MinValue;
    _tunerLocked = false;
    Log.Log.WriteFile("HDPVR: build graph");
    try
    {
        if (_graphState != GraphState.Idle)
        {
            // Building twice is a programming error; refuse rather than leak the old graph.
            Log.Log.WriteFile("HDPVR: graph already built!");
            throw new TvException("Graph already built");
        }
        _graphBuilder = (IFilterGraph2)new FilterGraph();
        // Register the graph in the Running Object Table so it can be inspected with GraphEdit.
        _rotEntry = new DsROTEntry(_graphBuilder);
        _capBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
        // NOTE(review): the HRESULT of SetFiltergraph is ignored here — confirm
        // a failure would surface in the AddXxx calls below.
        _capBuilder.SetFiltergraph(_graphBuilder);
        // Filter insertion order matters: crossbar -> capture -> encoder -> TsWriter.
        AddCrossBarFilter();
        AddCaptureFilter();
        AddEncoderFilter();
        AddTsWriterFilterToGraph();
        _qualityControl = QualityControlFactory.createQualityControl(_configuration, _filterEncoder, _filterCapture,
                                                                     null, null);
        if (_qualityControl == null)
        {
            // Not fatal: card simply has no bitrate/quality control support.
            Log.Log.WriteFile("HDPVR: No quality control support found");
        }
        _graphState = GraphState.Created;
        // Persist the resolved graph topology so later runs can reuse it.
        _configuration.Graph.Crossbar.Name = _crossBarDevice.Name;
        _configuration.Graph.Crossbar.VideoPinMap = _videoPinMap;
        _configuration.Graph.Crossbar.AudioPinMap = _audioPinMap;
        _configuration.Graph.Crossbar.VideoPinRelatedAudioMap = _videoPinRelatedAudioMap;
        _configuration.Graph.Crossbar.VideoOut = _videoOutPinIndex;
        _configuration.Graph.Crossbar.AudioOut = _audioOutPinIndex;
        _configuration.Graph.Capture.Name = _captureDevice.Name;
        // -1 means "not fixed": use the device defaults for rate and size.
        _configuration.Graph.Capture.FrameRate = -1d;
        _configuration.Graph.Capture.ImageHeight = -1;
        _configuration.Graph.Capture.ImageWidth = -1;
        Configuration.writeConfiguration(_configuration);
    }
    catch (Exception ex)
    {
        Log.Log.Write(ex);
        // Release any partially built graph before reporting failure.
        Dispose();
        _graphState = GraphState.Idle;
        throw;  // bare rethrow preserves the original stack trace
    }
}
示例2: createGraph
// --------------------- Private Methods -----------------------
/// <summary>
/// Create a new filter graph and add filters (devices, compressors,
/// misc), but leave the filters unconnected. Call renderGraph()
/// to connect the filters.
/// </summary>
protected void createGraph()
{
    Guid cat;
    Guid med;
    int hr;
    Type comType = null;
    object comObj = null;
    // Ensure required properties are set
    if ( videoDevice == null && audioDevice == null )
        throw new ArgumentException( "The video and/or audio device have not been set. Please set one or both to valid capture devices.\n" );
    // Skip if we are already created
    if ( (int)graphState < (int)GraphState.Created )
    {
        // Garbage collect, ensure that previous filters are released
        GC.Collect();
        // Make a new filter graph
        graphBuilder = (IGraphBuilder) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.FilterGraph, true ) );
        // Get the Capture Graph Builder via the DsBugWO workaround helper.
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (ICaptureGraphBuilder2) DsBugWO.CreateDsInstance( ref clsid, ref riid );
        // sampGrabber, ISampleGrabber to capture frames
        comType=Type.GetTypeFromCLSID( Clsid.SampleGrabber, true );
        if(comType==null)
            throw new NotImplementedException (@"DirectShow SampleGrabber not installed/registered");
        comObj=Activator.CreateInstance( comType );
        sampGrabber = (ISampleGrabber) comObj; comObj = null;
        // Link the CaptureGraphBuilder to the filter graph
        hr = captureGraphBuilder.SetFiltergraph( graphBuilder );
        // NOTE(review): HRESULT checks are commented out throughout this method —
        // COM failures here are silently ignored.
        //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        // Begin set up of SampGrabber: request 24-bit RGB video frames.
        AMMediaType media = new AMMediaType();
        media.majorType= MediaType.Video;
        media.subType = MediaSubType.RGB24;
        media.formatType = FormatType.VideoInfo;
        hr = sampGrabber.SetMediaType( media );
        //if( hr<0 ) Marshal.ThrowExceptionForHR( hr );
        // Finish set up of SampGrabber.
        // Add the graph to the Running Object Table so it can be
        // viewed with GraphEdit
        #if DEBUG
        DsROT.AddGraphToRot( graphBuilder, out rotCookie );
        #endif
        // Get the video device and add it to the filter graph
        if ( VideoDevice != null )
        {
            videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( VideoDevice.MonikerString );
            hr = graphBuilder.AddFilter( videoDeviceFilter, "Video Capture Device" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
            // Add SampGrabber Filter (only when a video device is present).
            mediaEvt = (IMediaEventEx) graphBuilder;
            baseGrabFlt = (IBaseFilter) sampGrabber;
            hr = graphBuilder.AddFilter( baseGrabFlt, "DS.NET Grabber" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }
        // Get the audio device and add it to the filter graph
        if ( AudioDevice != null )
        {
            audioDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( AudioDevice.MonikerString );
            hr = graphBuilder.AddFilter( audioDeviceFilter, "Audio Capture Device" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }
        // Get the video compressor and add it to the filter graph
        if ( VideoCompressor != null )
        {
            videoCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( VideoCompressor.MonikerString );
            hr = graphBuilder.AddFilter( videoCompressorFilter, "Video Compressor" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }
        // Get the audio compressor and add it to the filter graph
        if ( AudioCompressor != null )
        {
            audioCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( AudioCompressor.MonikerString );
            hr = graphBuilder.AddFilter( audioCompressorFilter, "Audio Compressor" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }
        // Retrieve the stream control interface for the video device
        // FindInterface will also add any required filters
        //......... remainder of this example omitted by the source page .........
示例3: Caps
/// <summary>
/// Returns the <see cref="CameraInfo"/> for the given <see cref="DsDevice"/>.
/// Builds a throw-away capture graph to enumerate the device's supported
/// frame sizes and frame rates via IAMStreamConfig.
/// </summary>
/// <param name="dev">A <see cref="DsDevice"/> to parse name and capabilities for.</param>
/// <returns>The <see cref="CameraInfo"/> for the given device, or null if the
/// device cannot be added, rendered, or queried.</returns>
private CameraInfo Caps(DsDevice dev)
{
    var camerainfo = new CameraInfo();
    try
    {
        // Get the graphbuilder object
        m_graphBuilder = (IFilterGraph2) new FilterGraph();
        // Get the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
        // Add the video device
        int hr = m_graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
        //DsError.ThrowExceptionForHR(hr);
        if (hr != 0)
        {
            // Device could not be added (e.g. unplugged or in use).
            return null;
        }
        hr = capGraph.SetFiltergraph(m_graphBuilder);
        DsError.ThrowExceptionForHR(hr);
        hr = m_graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device");
        DsError.ThrowExceptionForHR(hr);
        object o = null;
        DsGuid cat = PinCategory.Capture;
        DsGuid type = MediaType.Interleaved;
        DsGuid iid = typeof (IAMStreamConfig).GUID;
        // Check if Video capture filter is in use
        hr = capGraph.RenderStream(cat, MediaType.Video, capFilter, null, null);
        if (hr != 0)
        {
            return null;
        }
        //hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Interleaved, capFilter, typeof(IAMStreamConfig).GUID, out o);
        //if (hr != 0)
        //{
        hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter,
                                    typeof (IAMStreamConfig).GUID, out o);
        DsError.ThrowExceptionForHR(hr);
        //}
        var videoStreamConfig = o as IAMStreamConfig;
        int iCount = 0;
        int iSize = 0;
        try
        {
            if (videoStreamConfig != null) videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize);
        }
        catch (Exception ex)
        {
            // Some drivers throw here; treat as "no capabilities available".
            //ErrorLogger.ProcessException(ex, false);
            return null;
        }
        // Unmanaged scratch buffer for GetStreamCaps; pscc is a class field
        // (presumably freed elsewhere — verify it is released in cleanup).
        pscc = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof (VideoStreamConfigCaps)));
        camerainfo.Name = dev.Name;
        camerainfo.DirectshowDevice = dev;
        for (int i = 0; i < iCount; i++)
        {
            VideoStreamConfigCaps scc;
            try
            {
                AMMediaType curMedType;
                if (videoStreamConfig != null) hr = videoStreamConfig.GetStreamCaps(i, out curMedType, pscc);
                Marshal.ThrowExceptionForHR(hr);
                scc = (VideoStreamConfigCaps) Marshal.PtrToStructure(pscc, typeof (VideoStreamConfigCaps));
                var CSF = new CamSizeFPS();
                // Frame interval is in 100ns units; 10,000,000 / interval = fps.
                CSF.FPS = (int) (10000000/scc.MinFrameInterval);
                CSF.Height = scc.InputSize.Height;
                CSF.Width = scc.InputSize.Width;
                // De-duplicate and filter out unusable modes.
                if (!InSizeFpsList(camerainfo.SupportedSizesAndFPS, CSF))
                    if (ParametersOK(CSF))
                        camerainfo.SupportedSizesAndFPS.Add(CSF);
            }
            catch (Exception ex)
            {
                // A single bad capability entry is skipped, not fatal.
                //ErrorLogger.ProcessException(ex, false);
            }
        }
    }
    finally
    {
        //......... remainder of this example omitted by the source page .........
示例4: ApplyVideoInput
/// <summary>
/// Tears down any existing graph and builds a new capture graph for the
/// current VideoInput filter: FilterGraph + CaptureGraphBuilder2 + a
/// SampleGrabber (callback mode) rendered from the input, then starts it.
/// </summary>
private void ApplyVideoInput()
{
    int iRet;
    Dispose();
    /*Frame = new byte[(width * height) * PixelSize];
    CapturedFrame = new byte[(width * height) * PixelSize];
    PreviewFrame = new byte[(width / PreviewDivider * height / PreviewDivider) * PixelSize];*/
    if (VideoInput == null)
    {
        // Nothing to capture from.
        return;
    }
    //Original Code
    GraphBuilder = (IGraphBuilder)new FilterGraph();
    CaptureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    MediaControl = (IMediaControl)GraphBuilder;
    iRet = CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
    if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetFiltergraph");
    SampleGrabber = new SampleGrabber() as ISampleGrabber;
    iRet = GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
    if (iRet != 0) Console.WriteLine("TheKing--> Error Found AddFilter 1");
    SetResolution(width, height);
    iRet = GraphBuilder.AddFilter(VideoInput, "Camera");
    if (iRet != 0) Console.WriteLine("TheKing--> Error Found AddFilter 2");
    // Buffer samples and deliver each frame via callback (whichMethodToCallback = 1).
    iRet = SampleGrabber.SetBufferSamples(true);
    if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetBufferSamples");
    iRet = SampleGrabber.SetOneShot(false);
    if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetOneShot");
    iRet = SampleGrabber.SetCallback(this, 1);
    if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetCallback");
    iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, SampleGrabber as IBaseFilter);
    if (iRet < 0)
    {
        Console.WriteLine("TheKing--> Error Found in CaptureGraphBuilder.RenderStream, iRet = " + iRet+", Initialization TryNumber = " + counter);
        // NOTE(review): single retry guarded by `counter == 1`, but counter is not
        // incremented anywhere visible here — confirm the guard actually limits recursion.
        if(counter == 1)
            ApplyVideoInput();
    }
    //GraphBuilder.Connect()
    //iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, null);
    //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 1");
    //iRet = CaptureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, VideoInput, null, SampleGrabber as IBaseFilter);
    //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 2, iRet = " + iRet);
    if (UpdateThread != null)
    {
        // NOTE(review): Thread.Abort is deprecated/unsafe — consider a cooperative stop flag.
        UpdateThread.Abort();
    }
    //UpdateThread = new Thread(UpdateBuffer);
    //UpdateThread.Start();
    MediaControl.Run();
    // NOTE(review): releasing VideoInput while the running graph still references it —
    // verify the graph holds its own reference and that a retry path never reuses it.
    Marshal.ReleaseComObject(VideoInput);
}
示例5: InitializeFgm
/// <summary>
/// Creates the filter graph manager, registers it in the Running Object
/// Table (for GraphEdit), and attaches a capture graph builder to it.
/// </summary>
private void InitializeFgm()
{
    fgm = new FilgraphManagerClass();
    iGB = (IGraphBuilder)fgm;
    // Publish the graph in the ROT so it can be inspected with GraphEdit.
    rotID = FilterGraph.AddToRot(iGB);
    iCGB2 = CaptureGraphBuilder2Class.CreateInstance();
    // NOTE(review): HRESULT of SetFiltergraph is ignored here.
    iCGB2.SetFiltergraph(iGB);
}
示例6: Init
/// <summary>
/// Creates the filter graph and its control/event interfaces, attaches the
/// capture graph builder, and prepares the deinterlace layout list.
/// </summary>
private void Init()
{
    graphBuilder = (IGraphBuilder)new FilterGraph();
    //Create the media control for controlling the graph
    mediaControl = (IMediaControl)graphBuilder;
    mediaEvent = (IMediaEvent)graphBuilder;
    // Default volume (0-100 scale used by this class).
    volume = 100;
    captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    // initialize the Capture Graph Builder
    int hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder);
    DsError.ThrowExceptionForHR(hr);
    // Register the known deinterlacer layouts in preference order.
    DeinterlaceLayoutList = new DeinterlaceList();
    DeinterlaceLayoutList.Add(new DeInterlaceAlparyLayout(this));
    DeinterlaceLayoutList.Add(new DeInterlaceDscalerLayout(this));
    DeinterlaceLayoutList.Add(new DeInterlaceFFDShowLayout(this));
    // Default display aspect ratio: 4:3.
    aspectRatio = 4.0f / 3.0f;
}
示例7: Setup
/// <summary>
/// Builds the capture graph. When <paramref name="output_file"/> is empty the
/// stream is rendered capture -> sample grabber -> null renderer (live grab only);
/// otherwise it is muxed into an AVI file at the given path.
/// </summary>
/// <param name="output_file">Output file path, or null/empty for no file output.</param>
public virtual void Setup(string output_file)
{
    this.Dispose();
    try
    {
        CxDSCameraParam param = this.Param;
        // Filter graph manager (CoCreateInstance via CLSID).
        GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));
        #region フィルタ追加.
        // Video capture source filter.
        IBaseFilter capture = CreateVideoCapture(param);
        if (capture == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(capture, "CaptureFilter");
        IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        this.CaptureFilter = capture;
        this.CaptureOutPin = capture_out;
        // Sample grabber filter for frame access.
        IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
        if (grabber == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(grabber, "SampleGrabber");
        this.SampleGrabber = (ISampleGrabber)grabber;
        #endregion
        #region キャプチャビルダー:
        {
            int hr = 0;
            CaptureBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_CaptureGraphBuilder2)));
            hr = CaptureBuilder.SetFiltergraph(GraphBuilder);
            if (string.IsNullOrEmpty(output_file))
            {
                // No file output: terminate the chain with a null renderer.
                IBaseFilter renderer = null;
                renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
                if (renderer == null)
                    throw new System.IO.IOException();
                this.GraphBuilder.AddFilter(renderer, "Renderer");
                this.Renderer = renderer;
                #if true
                // Use ICaptureGraphBuilder2.RenderStream instead of IGraphBuilder.Connect.
                // fig) [capture]-out->-in-[sample grabber]-out->-in-[null render]
                hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, renderer);
                #else
                // Get the pins.
                IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
                IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);
                // Connect the pins manually.
                GraphBuilder.Connect(capture_out, grabber_in);
                GraphBuilder.Connect(grabber_out, renderer_in);
                // Keep the pins.
                //SampleGrabberInPin = grabber_in;
                //SampleGrabberOutPin = grabber_out;
                //RendererInPin = renderer_in;
                #endif
            }
            else
            {
                // File output: let the builder create an AVI mux + file writer.
                IBaseFilter mux = null;
                IFileSinkFilter sync = null;
                hr = CaptureBuilder.SetOutputFileName(new Guid(GUID.MEDIASUBTYPE_Avi), output_file, ref mux, ref sync);
                hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, mux);
                this.Mux = mux;
                this.Sync = sync;
            }
        }
        #endregion
        #region 保管: フレームサイズ.
        // Store the negotiated frame size (biWidth/biHeight may be negative
        // for top-down bitmaps, hence Math.Abs).
        VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
        this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
        this.SampleGrabberCB.FrameSize = new Size(
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
        );
        #endregion
        #region 保管: デバイス名称.
        // Store the device name, if available.
        try
        {
            if (string.IsNullOrEmpty(param.FilterInfo.Name) == false)
            {
                this.DeviceName = param.FilterInfo.Name;
            }
            else
            //......... remainder of this example omitted by the source page .........
示例8: SetupGraph
/// <summary>
/// Connects to the property changed events of the camera settings.
/// </summary>
//private void Initialize()
//{
//  //Settings.Instance.Camera.OnCameraControlPropertyChanged += OnCameraControlPropertyChanged;
//  //Settings.Instance.Camera.OnVideoProcAmpPropertyChanged += OnVideoProcAmpPropertyChanged;
//  //Settings.Instance.Camera.OnVideoControlFlagsChanged += OnVideoControlFlagsChanged;
//  //stopwatch = new Stopwatch();
//}
/// <summary>
/// Build the capture graph for grabber.
/// </summary>
/// <param name="dev">The index of the new capture device.</param>
/// <param name="frameRate">The framerate to use.</param>
/// <param name="width">The width to use.</param>
/// <param name="height">The height to use.</param>
/// <returns>True, if successful, otherwise false.</returns>
private bool SetupGraph(DsDevice dev, int frameRate, int width, int height)
{
    int hr;
    fps = frameRate; // Not measured, only to expose FPS externally
    cameraControl = null;
    capFilter = null;
    // Get the graphbuilder object
    graphBuilder = (IFilterGraph2)new FilterGraph();
    mediaControl = graphBuilder as IMediaControl;
    try
    {
        // Create the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
        // Create the SampleGrabber interface
        sampGrabber = (ISampleGrabber)new SampleGrabber();
        // Start building the graph
        hr = capGraph.SetFiltergraph(graphBuilder);
        // NOTE(review): error logging is disabled; hr is not checked here.
        //if (hr != 0)
        //  ErrorLogger.WriteLine("Error in capGraph.SetFiltergraph. Could not build graph. Message: " +
        //                        DsError.GetErrorText(hr));
        #if DEBUG
        // Debug builds expose the graph in the ROT for GraphEdit.
        this.rotEntry = new DsROTEntry(this.graphBuilder);
        #endif
        this.capFilter = CreateFilter(
            FilterCategory.VideoInputDevice,
            dev.Name);
        if (this.capFilter != null)
        {
            hr = graphBuilder.AddFilter(this.capFilter, "Video Source");
            DsError.ThrowExceptionForHR(hr);
        }
        //// Add the video device
        //hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
        //if (hr != 0)
        //  ErrorLogger.WriteLine(
        //    "Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " +
        //    DsError.GetErrorText(hr));
        var baseGrabFlt = (IBaseFilter)sampGrabber;
        ConfigureSampleGrabber(sampGrabber);
        // Add the frame grabber to the graph
        hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        //if (hr != 0)
        //  ErrorLogger.WriteLine("Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " +
        //                        DsError.GetErrorText(hr));
        // turn on the infrared leds ONLY FOR THE GENIUS WEBCAM
        /*
        if (!defaultMode)
        {
          m_icc = capFilter as IAMCameraControl;
          CameraControlFlags CamFlags = new CameraControlFlags();
          int pMin, pMax, pStep, pDefault;
          hr = m_icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pStep, out pDefault, out CamFlags);
          m_icc.Set(CameraControlProperty.Focus, pMax, CameraControlFlags.None);
        }
        */
        //IBaseFilter smartTee = new SmartTee() as IBaseFilter;
        //// Add the smart tee filter to the graph
        //hr = this.graphBuilder.AddFilter(smartTee, "Smart Tee");
        //Marshal.ThrowExceptionForHR(hr);
        // Connect the video source output to the smart tee
        //hr = capGraph.RenderStream(null, null, capFilter, null, smartTee);
        // Render capture pin -> sample grabber (direct connection, no tee).
        hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);
        //......... remainder of this example omitted by the source page .........
示例9: BuildGraph
/// <summary>
/// Build the graph: creates the filter graph manager and the capture graph
/// builder, links them, and caches the graph's IMediaControl interface.
/// </summary>
protected virtual void BuildGraph()
{
    captureBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    graphBuilder = (IFilterGraph2)new FilterGraph();
    // Fail fast if the builder cannot attach to the graph: the original code
    // discarded this HRESULT, silently producing an unusable graph on failure.
    int hr = captureBuilder.SetFiltergraph(graphBuilder);
    System.Runtime.InteropServices.Marshal.ThrowExceptionForHR(hr);
    mediaControl = graphBuilder as IMediaControl;
}
示例10: SetupGraph
/// <summary> build the capture graph. </summary>
///
/// Builds: capture device -> ASF writer (WMV output to szOutputFileName).
/// NOTE(review): the original comment here was mojibake (broken Shift-JIS);
/// it appears to say capGraph is a field so ShowCapPinDialog can reference it — confirm.
private void SetupGraph(DsDevice dev, string szOutputFileName)
{
    int hr;
    IBaseFilter capFilter = null;
    IBaseFilter asfWriter = null;
    //ICaptureGraphBuilder2 capGraph = null;
    // Capture graph builder (provides helper methods for video capture/editing).
    // Get the graphbuilder object
    m_FilterGraph = (IFilterGraph2)new FilterGraph();
    #if DEBUG
    // Debug builds expose the graph in the ROT for GraphEdit.
    m_rot = new DsROTEntry( m_FilterGraph );
    #endif
    try
    {
        // Get the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
        // Start building the graph
        hr = capGraph.SetFiltergraph( m_FilterGraph );
        Marshal.ThrowExceptionForHR( hr );
        // Add the capture device to the graph
        hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
        Marshal.ThrowExceptionForHR( hr );
        // Configure the ASF writer for the output file, then render into it.
        asfWriter = ConfigAsf(capGraph, szOutputFileName);
        hr = capGraph.RenderStream(null, null, capFilter, null, asfWriter);
        Marshal.ThrowExceptionForHR( hr );
        m_mediaCtrl = m_FilterGraph as IMediaControl;
    }
    finally
    {
        // Release local COM references; the graph holds its own references
        // to the filters it now contains.
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
            capFilter = null;
        }
        if (asfWriter != null)
        {
            Marshal.ReleaseComObject(asfWriter);
            asfWriter = null;
        }
        if (capGraph != null)
        {
            Marshal.ReleaseComObject(capGraph);
            capGraph = null;
        }
    }
}
示例11: createGraph
/// <summary>
/// Create a new filter graph and add filters (devices, compressors,
/// misc), but leave the filters unconnected. Call renderGraph()
/// to connect the filters.
/// </summary>
private void createGraph()
{
    DsGuid cat;
    DsGuid med;
    // Ensure required properties are set
    if (VideoDevice == null && AudioDevice == null)
        throw new ArgumentException("The video and/or audio device have not been set. Please set one or both to valid capture devices.\n");
    // Skip if we are already created
    if ((int)_graphState < (int)GraphState.Created)
    {
        // Garbage collect, ensure that previous filters are released
        GC.Collect();
        // Make a new filter graph
        //_graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));
        _graphBuilder = (IGraphBuilder)new FilterGraph();
        // Get the Capture Graph Builder
        //Guid clsid = Clsid.CaptureGraphBuilder2;
        //Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        //_captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);
        _captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
        // Link the CaptureGraphBuilder to the filter graph
        Marshal.ThrowExceptionForHR(_captureGraphBuilder.SetFiltergraph(_graphBuilder));
        // Add the graph to the Running Object Table so it can be
        // viewed with GraphEdit
        #if DEBUG
        //DsROT.AddGraphToRot(_graphBuilder, out _rotCookie);
        #endif
        // Get the video device and add it to the filter graph
        if (VideoDevice != null)
        {
            _videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);
            Marshal.ThrowExceptionForHR(_graphBuilder.AddFilter(_videoDeviceFilter, "Video Capture Device"));
        }
        // Get the audio device and add it to the filter graph
        if (AudioDevice != null)
        {
            _audioDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(AudioDevice.MonikerString);
            Marshal.ThrowExceptionForHR(_graphBuilder.AddFilter(_audioDeviceFilter, "Audio Capture Device"));
        }
        // Get the video compressor and add it to the filter graph
        if (VideoCompressor != null)
        {
            _videoCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(VideoCompressor.MonikerString);
            Marshal.ThrowExceptionForHR(_graphBuilder.AddFilter(_videoCompressorFilter, "Video Compressor"));
        }
        // Get the audio compressor and add it to the filter graph
        if (AudioCompressor != null)
        {
            _audioCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(AudioCompressor.MonikerString);
            Marshal.ThrowExceptionForHR(_graphBuilder.AddFilter(_audioCompressorFilter, "Audio Compressor"));
        }
        // Retrieve the stream control interface for the video device
        // FindInterface will also add any required filters
        // (WDM devices in particular may need additional
        // upstream filters to function).
        // Try looking for an interleaved media type
        object o;
        cat = DsGuid.FromGuid(PinCategory.Capture);
        med = DsGuid.FromGuid(MediaType.Interleaved);
        Guid iid = typeof(IAMStreamConfig).GUID;
        int hr = _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out o);
        //int hr = _captureGraphBuilder.FindInterface(ref cat, ref med, _videoDeviceFilter, ref iid, out o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
            hr = _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out o);
            //hr = _captureGraphBuilder.FindInterface(ref cat, ref med, _videoDeviceFilter, ref iid, out o);
            if (hr != 0)
                o = null;
        }
        // May be null if the device exposes no IAMStreamConfig; callers must cope.
        _videoStreamConfig = o as IAMStreamConfig;
        // Retrieve the stream control interface for the audio device
        o = null;
        cat = DsGuid.FromGuid(PinCategory.Capture);
        med = DsGuid.FromGuid(MediaType.Audio);
        iid = typeof(IAMStreamConfig).GUID;
        hr = _captureGraphBuilder.FindInterface(cat, med, _audioDeviceFilter, iid, out o);
        // hr = _captureGraphBuilder.FindInterface(ref cat, ref med, _audioDeviceFilter, ref iid, out o);
        if (hr != 0)
            //......... remainder of this example omitted by the source page .........
示例12: InitializeCapture
/// <summary>
/// Builds the preview capture graph: video input -> SampleGrabber (ARGB32,
/// callback mode) rendered from the device's preview pin.
/// </summary>
private void InitializeCapture()
{
    graphBuilder = (IGraphBuilder)new FilterGraph();
    mediaControl = (IMediaControl)graphBuilder;
    captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
    DsError.ThrowExceptionForHR(hr);
    IBaseFilter videoInput = GetVideoInputObject();
    if (null != videoInput)
    {
        SetConfigurations(videoInput);
        sampleGrabber = new SampleGrabber() as ISampleGrabber;
        hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Render");
        DsError.ThrowExceptionForHR(hr);
        hr = graphBuilder.AddFilter(videoInput, "Camera");
        DsError.ThrowExceptionForHR(hr);
        // Request 32-bit ARGB video frames from the grabber.
        AMMediaType type = new AMMediaType() { majorType = MediaType.Video, subType = MediaSubType.ARGB32, formatType = FormatType.VideoInfo };
        hr = sampleGrabber.SetMediaType(type);
        DsError.ThrowExceptionForHR(hr);
        DsUtils.FreeAMMediaType(type);
        // Callback mode: don't buffer, stream continuously.
        // NOTE(review): HRESULTs of the four calls below are not checked.
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetOneShot(false);
        // NOTE(review): called before the grabber is connected (RenderStream is below)
        // and the result is discarded — this call looks ineffective; confirm intent.
        sampleGrabber.GetConnectedMediaType(new AMMediaType());
        sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
        hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, videoInput, null, sampleGrabber as IBaseFilter);
        DsError.ThrowExceptionForHR(hr);
        // The graph now holds its own reference to the input filter.
        Marshal.ReleaseComObject(videoInput);
    }
}
示例13: InitializeFgm
/// <summary>
/// Creates and initializes the Fgm, retrieves interfaces from it, starts the monitoring
/// thread etc. the first time. Stops it subsequent times.
/// </summary>
private void InitializeFgm()
{
    if (fgm == null)
    {
        // First call: build the graph manager and attach the capture builder.
        fgm = new FilgraphManagerClass();
        iGB = (IGraphBuilder)fgm;
        iCGB2 = MDShow.CaptureGraphBuilder2Class.CreateInstance();
        // NOTE(review): HRESULT of SetFiltergraph is ignored here.
        iCGB2.SetFiltergraph(iGB);
        // Publish the graph in the ROT for GraphEdit debugging.
        rotID = FilterGraph.AddToRot(iGB);
        // Initialize class that will monitor events on the fgm
        fgmEventMonitor = new FgmEventMonitor((IMediaEvent)fgm);
        fgmEventMonitor.FgmEvent += new FgmEventMonitor.FgmEventHandler(FgmEvent);
    }
    else
    {
        // Subsequent calls: just stop the existing graph.
        fgm.Stop();
    }
}
示例14: CreateGraph
/// <summary>
/// Builds the audio capture graph: capture device -> SampleGrabber (PCM,
/// buffered, callback to a DSAudioSampler built from the device's native
/// WaveFormatEx). Errors are reported via a message box (original behavior).
/// </summary>
public void CreateGraph()
{
    try
    {
        int result = 0;
        // Create the filter graph manager.
        graphBuilder = new FilterGraph() as IFilterGraph2;
        // Create the capture graph builder and attach it to the graph.
        captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
        result = captureGraphBuilder.SetFiltergraph(graphBuilder);
        DsError.ThrowExceptionForHR(result);
        // Create the source filter by binding the capture device's moniker.
        captureFilter = null;
        result = graphBuilder.AddSourceFilterForMoniker(
            _capDevice.Mon, null, _capDevice.Name, out captureFilter);
        DsError.ThrowExceptionForHR(result);
        // Create the sample grabber and get its IBaseFilter face.
        sampleGrabber = new SampleGrabber() as ISampleGrabber;
        IBaseFilter grabFilter = sampleGrabber as IBaseFilter;
        // Ask the grabber for PCM audio.
        AMMediaType amMediaType = new AMMediaType();
        amMediaType.majorType = MediaType.Audio;
        amMediaType.subType = MediaSubType.PCM;
        amMediaType.formatPtr = IntPtr.Zero;
        result = sampleGrabber.SetMediaType(amMediaType);
        DsError.ThrowExceptionForHR(result);
        DsUtils.FreeAMMediaType(amMediaType);
        // Continuous capture with buffered samples.
        // BUG FIX: the original discarded SetOneShot's return value and then
        // re-checked the stale HRESULT from SetMediaType above.
        result = sampleGrabber.SetOneShot(false);
        DsError.ThrowExceptionForHR(result);
        result = sampleGrabber.SetBufferSamples(true);
        DsError.ThrowExceptionForHR(result);
        // Read the device's actual capture format so the sampler matches it.
        object o;
        result = captureGraphBuilder.FindInterface(
            DsGuid.FromGuid(PinCategory.Capture),
            DsGuid.FromGuid(MediaType.Audio),
            captureFilter,
            typeof(IAMStreamConfig).GUID, out o);
        DsError.ThrowExceptionForHR(result);
        IAMStreamConfig config = o as IAMStreamConfig;
        AMMediaType media;
        result = config.GetFormat(out media);
        DsError.ThrowExceptionForHR(result);
        WaveFormatEx wf = new WaveFormatEx();
        Marshal.PtrToStructure(media.formatPtr, wf);
        CaptureOption opt = new CaptureOption(wf);
        _sampler = new DSAudioSampler(opt);
        DsUtils.FreeAMMediaType(media);
        Marshal.ReleaseComObject(config);
        // Register the sampler as the grabber's buffer callback (whichMethodToCallback = 1).
        result = sampleGrabber.SetCallback(_sampler, 1);
        DsError.ThrowExceptionForHR(result);
        // Add the grabber filter to the graph.
        result = graphBuilder.AddFilter(grabFilter, "Audio Grab Filter");
        DsError.ThrowExceptionForHR(result);
        // Connect capture filter -> sample grabber.
        result = captureGraphBuilder.RenderStream(
            DsGuid.FromGuid(PinCategory.Capture),
            DsGuid.FromGuid(MediaType.Audio),
            captureFilter, null, grabFilter);
        DsError.ThrowExceptionForHR(result);
    }
    catch (Exception ex)
    {
        // Original behavior preserved: surface the error to the user and swallow it.
        System.Windows.MessageBox.Show(ex.Message);
    }
}
示例15: createGraph
// --------------------- Private Methods -----------------------
/// <summary>
/// Create a new filter graph and add filters (devices, compressors,
/// misc), but leave the filters unconnected. Call renderGraph()
/// to connect the filters.
/// </summary>
///
protected void createGraph()
{
    Guid cat;
    Guid med;
    int hr;
    Type comType = null;
    object comObj = null;
    // Ensure required properties are set
    if (videoDevice == null && audioDevice == null)
        throw new ArgumentException("The video and/or audio device have not been set. Please set one or both to valid capture devices.\n");
    // Skip if we are already created
    if ((int)graphState < (int)GraphState.Created)
    {
        // Garbage collect, ensure that previous filters are released
        GC.Collect();
        // Make a new filter graph
        graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));
        // Get the Capture Graph Builder via the DsBugWO workaround helper.
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);
        // Link the CaptureGraphBuilder to the filter graph
        hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
        if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        comType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (comType == null)
            throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
        comObj = Activator.CreateInstance(comType);
        sampGrabber = (ISampleGrabber)comObj; comObj = null;
        baseGrabFlt = (IBaseFilter)sampGrabber;
        // Add the graph to the Running Object Table so it can be
        // viewed with GraphEdit
        #if DEBUG
        DsROT.AddGraphToRot(graphBuilder, out rotCookie);
        #endif
        AMMediaType media = new AMMediaType();
        // Get the video device and add it to the filter graph
        if (VideoDevice != null)
        {
            videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);
            hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device");
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
            // Console.WriteLine("MediaEnineCheck ==> Inside StartVideoCapture.cs before MediaSudType");
            // Request 24-bit RGB frames from the grabber.
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB24; //Rajib
            media.formatType = FormatType.VideoInfo; // ???
            hr = sampGrabber.SetMediaType(media);
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
            hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }
        // Get the audio device and add it to the filter graph
        if (AudioDevice != null)
        {
            audioDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(AudioDevice.MonikerString);
            hr = graphBuilder.AddFilter(audioDeviceFilter, "Audio Capture Device");
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }
        // Get the video compressor and add it to the filter graph
        if (VideoCompressor != null)
        {
            videoCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(VideoCompressor.MonikerString);
            hr = graphBuilder.AddFilter(videoCompressorFilter, "Video Compressor");
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }
        // Get the audio compressor and add it to the filter graph
        if (AudioCompressor != null)
        {
            audioCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(AudioCompressor.MonikerString);
            hr = graphBuilder.AddFilter(audioCompressorFilter, "Audio Compressor");
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }
        // Retrieve the stream control interface for the video device
        // FindInterface will also add any required filters
        // (WDM devices in particular may need additional
        // upstream filters to function).
        //......... remainder of this example omitted by the source page .........