

C# ICaptureGraphBuilder2.RenderStream Method Code Examples

This article collects typical usage examples of the ICaptureGraphBuilder2.RenderStream method in C#. If you are wondering how to call ICaptureGraphBuilder2.RenderStream, what it does, or what a working invocation looks like, the selected examples below should help. You can also explore further usage examples of the containing interface, ICaptureGraphBuilder2.


The sections below show 13 code examples of the ICaptureGraphBuilder2.RenderStream method, sorted by popularity by default.
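Before the examples, here is a minimal, hedged sketch of the typical RenderStream call pattern, written against the DirectShowLib.NET binding that most of the examples below use. The device index, the class name, and the choice of the preview pin are illustrative assumptions rather than details taken from any of the examples:

// Minimal sketch (assumes the DirectShowLib.NET binding; device index 0 and the
// preview-pin choice are placeholders, not taken from the examples below).
using System;
using DirectShowLib;

class RenderStreamSketch
{
    static void Main()
    {
        // Filter graph manager and capture graph builder.
        IGraphBuilder graph = (IGraphBuilder)new FilterGraph();
        ICaptureGraphBuilder2 builder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
        int hr = builder.SetFiltergraph(graph);
        DsError.ThrowExceptionForHR(hr);

        // Add the first video input device as the source filter.
        DsDevice[] devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
        IBaseFilter source;
        hr = ((IFilterGraph2)graph).AddSourceFilterForMoniker(devices[0].Mon, null, devices[0].Name, out source);
        DsError.ThrowExceptionForHR(hr);

        // RenderStream(pin category, media type, source, intermediate filter, renderer).
        // Passing null for the last two arguments lets the builder insert a default renderer.
        hr = builder.RenderStream(PinCategory.Preview, MediaType.Video, source, null, null);
        DsError.ThrowExceptionForHR(hr);

        // Run the graph until Enter is pressed, then stop it.
        ((IMediaControl)graph).Run();
        Console.ReadLine();
        ((IMediaControl)graph).Stop();
    }
}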

Example 1: Player_Connect


//......... part of the code omitted here .........

                    var mt = new AM_MEDIA_TYPE();
                    mt.majortype = new Guid(GUID.MEDIATYPE_Audio);
                    mt.subtype = new Guid(GUID.MEDIASUBTYPE_PCM);
                    mt.formattype = new Guid(GUID.FORMAT_WaveFormatEx);
                    grabber.SetMediaType(mt);
                    grabber.SetBufferSamples(false);			// Disable sample buffer copies.
                    grabber.SetOneShot(false);					// Disable one-shot mode.
                    //grabber.SetCallback(AudioGrabberCB, 0);	// 0: call the SampleCB method.
                    grabber.SetCallback(AudioGrabberCB, 1);		// 1: call the BufferCB method.
                }
            }
            #endregion

            #region Video output: create the renderer.
            {
                VideoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
                if (VideoRenderer == null)
                    throw new System.IO.IOException("Failed to create a VideoRenderer.");
                Graph.AddFilter(VideoRenderer, "VideoRenderer");
            }
            #endregion

            #region Audio output: create the renderer.
            {
                AudioRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
                if (AudioRenderer == null)
                    throw new System.IO.IOException("Failed to create a AudioRenderer.");
                Graph.AddFilter(AudioRenderer, "AudioRenderer");
            }
            #endregion

            #region Connect the filters:
            if (filename.EndsWith(".avi", StringComparison.InvariantCultureIgnoreCase))
            {
                #region Initialization for AVI files:
                unsafe
                {
                    HRESULT hr;

                    // Add the AVI splitter:
                    Splitter = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_AviSplitter);
                    if (Splitter == null)
                        throw new System.IO.IOException("Failed to create a Splitter.");
                    Graph.AddFilter(Splitter, "Splitter");

                    // Connect the filters: (AVI splitter)
                    hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, IntPtr.Zero, VideoSource, null, Splitter);
                    if (hr < HRESULT.S_OK)
                        throw new CxDSException(hr);

                    // Connect the filters: (video input)
                    var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
                    hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), Splitter, VideoGrabber, VideoRenderer);
                    if (hr < HRESULT.S_OK)
                        throw new CxDSException(hr);

                    // Connect the filters: (audio input)  * Note: some files have no audio.
                    try
                    {
                        var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
                        hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), Splitter, AudioGrabber, AudioRenderer);
                    }
                    catch (System.Exception ex)
                    {
                        Debug.WriteLine(ex.StackTrace);
                    }
                }
                #endregion
            }
            else if (
                filename.EndsWith(".asf", StringComparison.InvariantCultureIgnoreCase) ||
                filename.EndsWith(".wmv", StringComparison.InvariantCultureIgnoreCase))
            {
                #region Initialization for WMV files:
                unsafe
                {
                    HRESULT hr;

                    // Connect the filters: (video input)
                    var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
                    hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), VideoSource, VideoGrabber, VideoRenderer);
                    if (hr < HRESULT.S_OK)
                        throw new CxDSException(hr);

                    // Connect the filters: (audio input)
                    var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
                    hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), VideoSource, AudioGrabber, AudioRenderer);
                    if (hr < HRESULT.S_OK)
                        throw new CxDSException(hr);
                }
                #endregion
            }
            #endregion

            // For synchronization: register the sample grabber callback event:
            VideoGrabberCB.Enable = true;
            VideoGrabberCB.Notify += VideoGrabberCB_Notify;
            VideoInfoHeader = Axi.GetVideoInfo((ISampleGrabber)VideoGrabber);
        }
Author: cogorou, Project: DSLab, Lines: 101, Source file: MainForm.cs

Example 2: CreateGraph

        public void CreateGraph()
        {
            try
            {
                int result = 0;

                // Create the filter graph manager
                graphBuilder = new FilterGraph() as IFilterGraph2;

                // Create the capture graph builder
                captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;

                // Attach captureGraphBuilder (the capture graph builder) to graphBuilder (the filter graph manager).
                result = captureGraphBuilder.SetFiltergraph(graphBuilder);
                DsError.ThrowExceptionForHR(result);

                // Create the source filter:
                // bind the capture device to the source filter.
                captureFilter = null;
                result = graphBuilder.AddSourceFilterForMoniker(
                    _capDevice.Mon, null, _capDevice.Name, out captureFilter);
                DsError.ThrowExceptionForHR(result);

                // Create the sample grabber
                sampleGrabber = new SampleGrabber() as ISampleGrabber;

                // Treat the grabber as a base filter
                IBaseFilter grabFilter = sampleGrabber as IBaseFilter;

                // Set the audio format to capture
                AMMediaType amMediaType = new AMMediaType();
                amMediaType.majorType = MediaType.Audio;
                amMediaType.subType = MediaSubType.PCM;
                amMediaType.formatPtr = IntPtr.Zero;
                result = sampleGrabber.SetMediaType(amMediaType);
                DsError.ThrowExceptionForHR(result);
                DsUtils.FreeAMMediaType(amMediaType);

                // Register the callback
                result = sampleGrabber.SetOneShot(false);
                DsError.ThrowExceptionForHR(result);

                result = sampleGrabber.SetBufferSamples(true);
                DsError.ThrowExceptionForHR(result);

                // Get the format that will be captured
                object o;
                result = captureGraphBuilder.FindInterface(
                    DsGuid.FromGuid(PinCategory.Capture),
                    DsGuid.FromGuid(MediaType.Audio),
                    captureFilter,
                    typeof(IAMStreamConfig).GUID, out o);
                DsError.ThrowExceptionForHR(result);
                IAMStreamConfig config = o as IAMStreamConfig;
                AMMediaType media;
                result = config.GetFormat(out media);
                DsError.ThrowExceptionForHR(result);

                WaveFormatEx wf = new WaveFormatEx();
                Marshal.PtrToStructure(media.formatPtr, wf);

                CaptureOption opt = new CaptureOption(wf);
                _sampler = new DSAudioSampler(opt);

                DsUtils.FreeAMMediaType(media);
                Marshal.ReleaseComObject(config);

                result = sampleGrabber.SetCallback(_sampler, 1);
                DsError.ThrowExceptionForHR(result);

                //Add grabFilter (the grabber filter) to graphBuilder (the filter graph manager).
                result = graphBuilder.AddFilter(grabFilter, "Audio Grab Filter");
                DsError.ThrowExceptionForHR(result);

                //Connect the capture filter to the sample grabber filter
                result = captureGraphBuilder.RenderStream(
                    DsGuid.FromGuid(PinCategory.Capture),
                    DsGuid.FromGuid(MediaType.Audio),
                    captureFilter, null, grabFilter);
                DsError.ThrowExceptionForHR(result);
            }
            catch (Exception ex)
            {
                System.Windows.MessageBox.Show(ex.Message);
            }
        }
Author: hirekoke, Project: BeatDancer, Lines: 86, Source file: Capture.cs
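Example 2 builds the graph but never starts it. As several of the later examples show, capturing begins once IMediaControl.Run is called on the same filter graph object. A small, hedged follow-up sketch that reuses the graphBuilder field from the example (DirectShowLib assumed):

                // Hedged follow-up to Example 2: the FilterGraph object also implements
                // IMediaControl, so a simple cast is enough to start and stop the graph.
                IMediaControl mediaControl = (IMediaControl)graphBuilder;
                int hr = mediaControl.Run();      // start the graph; the grabber callback now receives samples
                DsError.ThrowExceptionForHR(hr);

                // ... later, when capture should end:
                hr = mediaControl.Stop();
                DsError.ThrowExceptionForHR(hr);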

Example 3: Setup

        /// <summary>
        /// Build the graph.
        /// </summary>
        /// <param name="output_file">Output file</param>
        public virtual void Setup(string output_file)
        {
            this.Dispose();

            try
            {
                CxDSCameraParam param = this.Param;

                // Graph builder.
                // CoCreateInstance
                GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));

                #region Add filters.
                // Video input filter.
                IBaseFilter capture = CreateVideoCapture(param);
                if (capture == null)
                    throw new System.IO.IOException();
                this.GraphBuilder.AddFilter(capture, "CaptureFilter");
                IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                this.CaptureFilter = capture;
                this.CaptureOutPin = capture_out;

                // Sample grabber.
                IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
                if (grabber == null)
                    throw new System.IO.IOException();
                this.GraphBuilder.AddFilter(grabber, "SampleGrabber");
                this.SampleGrabber = (ISampleGrabber)grabber;
                #endregion

                #region Capture builder:
                {
                    int hr = 0;
                    CaptureBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_CaptureGraphBuilder2)));
                    hr = CaptureBuilder.SetFiltergraph(GraphBuilder);

                    if (string.IsNullOrEmpty(output_file))
                    {
                        // Renderer.
                        IBaseFilter renderer = null;
                        renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
                        if (renderer == null)
                            throw new System.IO.IOException();
                        this.GraphBuilder.AddFilter(renderer, "Renderer");
                        this.Renderer = renderer;

            #if true
                        // Use ICaptureGraphBuilder2.RenderStream instead of IGraphBuilder.Connect.
                        // fig) [capture]-out->-in-[sample grabber]-out->-in-[null render]
                        hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, renderer);

            #else
                        // Get the pins.
                        IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
                        IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                        IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);

                        // Connect the pins.
                        GraphBuilder.Connect(capture_out, grabber_in);
                        GraphBuilder.Connect(grabber_out, renderer_in);

                        // Keep references to the pins.
                        //SampleGrabberInPin = grabber_in;
                        //SampleGrabberOutPin = grabber_out;
                        //RendererInPin = renderer_in;
            #endif
                    }
                    else
                    {
                        IBaseFilter mux = null;
                        IFileSinkFilter sync = null;
                        hr = CaptureBuilder.SetOutputFileName(new Guid(GUID.MEDIASUBTYPE_Avi), output_file, ref mux, ref sync);
                        hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, mux);
                        this.Mux = mux;
                        this.Sync = sync;
                    }
                }
                #endregion

                #region Store: frame size.
                VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
                this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
                this.SampleGrabberCB.FrameSize = new Size(
                    System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
                    System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
                    );
                #endregion

                #region Store: device name.
                try
                {
                    if (string.IsNullOrEmpty(param.FilterInfo.Name) == false)
                    {
                        this.DeviceName = param.FilterInfo.Name;
                    }
                    else
//......... part of the code omitted here .........
Author: cogorou, Project: DSLab, Lines: 101, Source file: CxDSCamera.cs

Example 4: Camera_Connect

        /// <summary>
        /// Connect the camera.
        /// </summary>
        /// <param name="filterInfo"></param>
        /// <param name="pinno"></param>
        /// <param name="frameSize"></param>
        private void Camera_Connect(CxFilterInfo filterInfo, int pinno, Size frameSize)
        {
            #region Create the graph builder:
            {
                Graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
                if (Graph == null)
                    throw new System.IO.IOException("Failed to create a GraphBuilder.");

                Builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
                if (Builder == null)
                    throw new System.IO.IOException("Failed to create a GraphBuilder.");
                Builder.SetFiltergraph(Graph);
            }
            #endregion

            #region Video input: create the source filter.
            {
                VideoSource = Axi.CreateFilter(GUID.CLSID_VideoInputDeviceCategory, filterInfo.CLSID, filterInfo.Index);
                if (VideoSource == null)
                    throw new System.IO.IOException("Failed to create a VideoSource.");
                Graph.AddFilter(VideoSource, "VideoSource");

                // Set the frame size.
                // Note: this must be done before the pins are connected.
                IPin pin = Axi.FindPin(VideoSource, pinno, PIN_DIRECTION.PINDIR_OUTPUT);
                Axi.SetFormatSize(pin, frameSize.Width, frameSize.Height);
            }
            #endregion

            #region Video capture: create the sample grabber.
            {
                VideoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
                if (VideoGrabber == null)
                    throw new System.IO.IOException("Failed to create a VideoGrabber.");
                Graph.AddFilter(VideoGrabber, "VideoGrabber");

                // Configure the input format of the sample grabber filter.
                // Specify the required media type with SetMediaType.
                //   http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
                // * Not every member of the AM_MEDIA_TYPE structure has to be set.
                // * By default the sample grabber has no preferred media type.
                // * To make sure the sample grabber connects to the correct filter, call this method before building the filter graph.
                // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
                // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
                {
                    var grabber = (ISampleGrabber)VideoGrabber;

                    var mt = new AM_MEDIA_TYPE();
                    mt.majortype = new Guid(GUID.MEDIATYPE_Video);
                    mt.subtype = new Guid(GUID.MEDIASUBTYPE_RGB24);
                    mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
                    grabber.SetMediaType(mt);
                    grabber.SetBufferSamples(false);			// Disable sample buffer copies.
                    grabber.SetOneShot(false);					// Disable one-shot mode.
                    //grabber.SetCallback(VideoGrabberCB, 0);	// 0: call the SampleCB method.
                    grabber.SetCallback(VideoGrabberCB, 1);		// 1: call the BufferCB method.
                }
            }
            #endregion

            #region Video output: create the renderer.
            {
                VideoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
                if (VideoRenderer == null)
                    throw new System.IO.IOException("Failed to create a VideoRenderer.");
                Graph.AddFilter(VideoRenderer, "VideoRenderer");
            }
            #endregion

            #region Connect the filters:
            unsafe
            {
                var mediatype = new Guid(GUID.MEDIATYPE_Video);
                var hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype), VideoSource, VideoGrabber, VideoRenderer);
                if (hr < HRESULT.S_OK)
                    throw new CxDSException(hr);
            }
            #endregion

            // For synchronization: register the sample grabber callback event:
            VideoGrabberCB.Enable = true;
            VideoGrabberCB.Notify += VideoGrabberCB_Notify;
            VideoInfoHeader = Axi.GetVideoInfo((ISampleGrabber)VideoGrabber);

            // Extract the camera control interface.
            CameraControl = Axi.GetInterface<IAMCameraControl>(this.Graph);
        }
Author: cogorou, Project: DSLab, Lines: 93, Source file: MainForm.cs
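Example 4 (like Example 1) passes the media type as a raw Guid pointer inside an unsafe block because the DSLab wrapper declares those RenderStream parameters as IntPtr. For comparison, with the DirectShowLib binding used by most of the other examples the same connection needs no unsafe code. A hedged equivalent sketch (field names reused from the example; treating them as their DirectShowLib counterparts is the assumption here):

            // Hedged DirectShowLib equivalent of the unsafe connection in Example 4.
            // A null pin category lets the builder use any matching output pin on VideoSource.
            int hr = Builder.RenderStream(null, MediaType.Video, VideoSource, VideoGrabber, VideoRenderer);
            DsError.ThrowExceptionForHR(hr);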

Example 5: Caps

        /// <summary>
        /// Returns the <see cref="CameraInfo"/> for the given <see cref="DsDevice"/>.
        /// </summary>
        /// <param name="dev">A <see cref="DsDevice"/> to parse name and capabilities for.</param>
        /// <returns>The <see cref="CameraInfo"/> for the given device.</returns>
        private CameraInfo Caps(DsDevice dev)
        {
            var camerainfo = new CameraInfo();

            try
            {
                // Get the graphbuilder object
                m_graphBuilder = (IFilterGraph2) new FilterGraph();

                // Get the ICaptureGraphBuilder2
                capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                // Add the video device
                int hr = m_graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
                //DsError.ThrowExceptionForHR(hr);

                if (hr != 0)
                {
                    return null;
                }

                hr = capGraph.SetFiltergraph(m_graphBuilder);
                DsError.ThrowExceptionForHR(hr);

                hr = m_graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device");
                DsError.ThrowExceptionForHR(hr);

                object o = null;
                DsGuid cat = PinCategory.Capture;
                DsGuid type = MediaType.Interleaved;
                DsGuid iid = typeof (IAMStreamConfig).GUID;

                // Check if Video capture filter is in use
                hr = capGraph.RenderStream(cat, MediaType.Video, capFilter, null, null);
                if (hr != 0)
                {
                    return null;
                }

                //hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Interleaved, capFilter, typeof(IAMStreamConfig).GUID, out o);
                //if (hr != 0)
                //{
                hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter,
                                            typeof (IAMStreamConfig).GUID, out o);
                DsError.ThrowExceptionForHR(hr);
                //}

                var videoStreamConfig = o as IAMStreamConfig;

                int iCount = 0;
                int iSize = 0;

                try
                {
                    if (videoStreamConfig != null) videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize);
                }
                catch (Exception ex)
                {
                    //ErrorLogger.ProcessException(ex, false);
                    return null;
                }

                pscc = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof (VideoStreamConfigCaps)));

                camerainfo.Name = dev.Name;
                camerainfo.DirectshowDevice = dev;

                for (int i = 0; i < iCount; i++)
                {
                    VideoStreamConfigCaps scc;

                    try
                    {
                        AMMediaType curMedType;
                        if (videoStreamConfig != null) hr = videoStreamConfig.GetStreamCaps(i, out curMedType, pscc);
                        Marshal.ThrowExceptionForHR(hr);
                        scc = (VideoStreamConfigCaps) Marshal.PtrToStructure(pscc, typeof (VideoStreamConfigCaps));

                        var CSF = new CamSizeFPS();
                        CSF.FPS = (int) (10000000/scc.MinFrameInterval);
                        CSF.Height = scc.InputSize.Height;
                        CSF.Width = scc.InputSize.Width;

                        if (!InSizeFpsList(camerainfo.SupportedSizesAndFPS, CSF))
                            if (ParametersOK(CSF))
                                camerainfo.SupportedSizesAndFPS.Add(CSF);
                    }
                    catch (Exception ex)
                    {
                        //ErrorLogger.ProcessException(ex, false);
                    }
                }
            }
            finally
            {
//......... part of the code omitted here .........
Author: jonbyte, Project: EyeSpark, Lines: 101, Source file: DirectShowDevices.cs

Example 6: RenderHelper

        protected void RenderHelper(ICaptureGraphBuilder2 graphBuilder, ISampleGrabberCB callback, string typeName,
                                    IPin pin,
                                    IBaseFilter compressor, IBaseFilter destination)
        {
            if (graphBuilder == null) throw new ArgumentNullException(GraphBuilderParameterName);
            if (pin == null) throw new ArgumentNullException(PinParameterName);
            if (destination == null) throw new ArgumentNullException(DestinationParameterName);

            int hr;
            IBaseFilter ibfSampleGrabber = null;

            try
            {
                // If no callback was provided, don't create a samplegrabber
                if (callback != null)
                {
                    var isg = (ISampleGrabber) new SampleGrabber();
                    ibfSampleGrabber = (IBaseFilter) isg;
                    _cleanup.Add(ibfSampleGrabber);

                    hr = isg.SetCallback(callback, 1);
                    DESError.ThrowExceptionForHR(hr);

                    hr = Graph.AddFilter(ibfSampleGrabber, typeName + " sample grabber");
                    DESError.ThrowExceptionForHR(hr);
                }

                // If a compressor was provided, add it to the graph and connect it up
                if (compressor != null)
                {
                    // Connect the pin.
                    hr = Graph.AddFilter(compressor, typeName + " Compressor");
                    DESError.ThrowExceptionForHR(hr);

                    FilterGraphTools.ConnectFilters(Graph, pin, ibfSampleGrabber, true);

                    FilterGraphTools.ConnectFilters(Graph, ibfSampleGrabber, compressor, true);

                    FilterGraphTools.ConnectFilters(Graph, compressor, destination, true);
                }
                else
                {
                    // Just connect the SampleGrabber (if any)
                    hr = graphBuilder.RenderStream(null, null, pin, ibfSampleGrabber, destination);
                    DESError.ThrowExceptionForHR(hr);
                }
            }
            finally
            {
                if (ibfSampleGrabber != null)
                {
                    Marshal.ReleaseComObject(ibfSampleGrabber);
                }
            }
        }
Author: naik899, Project: VideoMaker, Lines: 55, Source file: AbstractRenderer.cs

Example 7: SetupGraph

    /// <summary>
    /// Connects to the property changed events of the camera settings.
    /// </summary>
    //private void Initialize()
    //{
    //    //Settings.Instance.Camera.OnCameraControlPropertyChanged += OnCameraControlPropertyChanged;
    //    //Settings.Instance.Camera.OnVideoProcAmpPropertyChanged += OnVideoProcAmpPropertyChanged;
    //    //Settings.Instance.Camera.OnVideoControlFlagsChanged += OnVideoControlFlagsChanged;

    //    //stopwatch = new Stopwatch();
    //}

    /// <summary>
    /// Build the capture graph for grabber. 
    /// </summary>
    /// <param name="dev">The index of the new capture device.</param>
    /// <param name="frameRate">The framerate to use.</param>
    /// <param name="width">The width to use.</param>
    /// <param name="height">The height to use.</param>
    /// <returns>True, if successful, otherwise false.</returns>
    private bool SetupGraph(DsDevice dev, int frameRate, int width, int height)
    {
      int hr;
      fps = frameRate; // Not measured, only to expose FPS externally 
      cameraControl = null;
      capFilter = null;

      // Get the graphbuilder object
      graphBuilder = (IFilterGraph2)new FilterGraph();
      mediaControl = graphBuilder as IMediaControl;

      try
      {
        // Create the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

        // Create the SampleGrabber interface
        sampGrabber = (ISampleGrabber)new SampleGrabber();

        // Start building the graph
        hr = capGraph.SetFiltergraph(graphBuilder);
        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in capGraph.SetFiltergraph. Could not build graph. Message: " +
        //                          DsError.GetErrorText(hr));

#if DEBUG
        this.rotEntry = new DsROTEntry(this.graphBuilder);
#endif

        this.capFilter = CreateFilter(
       FilterCategory.VideoInputDevice,
       dev.Name);
        if (this.capFilter != null)
        {
          hr = graphBuilder.AddFilter(this.capFilter, "Video Source");
          DsError.ThrowExceptionForHR(hr);
        }

        //// Add the video device
        //hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
        //if (hr != 0)
        //    ErrorLogger.WriteLine(
        //        "Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " +
        //        DsError.GetErrorText(hr));

        var baseGrabFlt = (IBaseFilter)sampGrabber;

        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");

        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " +
        //                          DsError.GetErrorText(hr));

        // turn on the infrared leds ONLY FOR THE GENIUS WEBCAM
        /*
        if (!defaultMode)
        {
            m_icc = capFilter as IAMCameraControl;
            CameraControlFlags CamFlags = new CameraControlFlags();
            int pMin, pMax, pStep, pDefault;

            hr = m_icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pStep, out pDefault, out CamFlags);
            m_icc.Set(CameraControlProperty.Focus, pMax, CameraControlFlags.None);
        }
        */


        //IBaseFilter smartTee = new SmartTee() as IBaseFilter;

        //// Add the smart tee filter to the graph
        //hr = this.graphBuilder.AddFilter(smartTee, "Smart Tee");
        //Marshal.ThrowExceptionForHR(hr);

        // Connect the video source output to the smart tee
        //hr = capGraph.RenderStream(null, null, capFilter, null, smartTee);

        hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);
//......... part of the code omitted here .........
Author: DeSciL, Project: Ogama, Lines: 101, Source file: DirectShowCamera.cs

Example 8: AddGSSF

        private void AddGSSF(ImageHandler ih, ICaptureGraphBuilder2 icgb2, out IPin pPin)
        {
            int hr;

            // Our data source.  An error here means the GSSF2 sample hasn't
            // been registered.
            IBaseFilter ipsb = (IBaseFilter)new GenericSampleSourceFilter2();

            try
            {
                // Get the pin from the filter so we can configure it
                pPin = DsFindPin.ByDirection(ipsb, PinDirection.Output, 0);

                // Configure the pin using the provided BitmapInfo
                ConfigurePusher((IGenericSampleConfig2)pPin, ih);

                // Add the filter to the graph
                hr = m_pGraph.AddFilter(ipsb, "GenericSampleSourceFilter");
                Marshal.ThrowExceptionForHR(hr);

                // Connect the filters together, use the default renderer
                hr = icgb2.RenderStream(null, null, pPin, null, m_pEVR);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                Marshal.ReleaseComObject(ipsb);
            }
        }
Author: adambyram, Project: pimaker, Lines: 29, Source file: DShowPlayer.cs

Example 9: CreateGraph


//......... part of the code omitted here .........
          // Get the video compressor and add it to the filter graph
          // Create the filter for the selected video compressor
          this.AudioCompressorFilter = DirectShowUtils.CreateFilter(
            FilterCategory.AudioCompressorCategory,
            this.captureDeviceProperties.AudioCompressor);
          if (this.AudioCompressorFilter != null)
          {
            hr = this.graphBuilder.AddFilter(this.AudioCompressorFilter, "Audio Compressor");
            DsError.ThrowExceptionForHR(hr);
          }

          // Ensure required properties set
          if (this.captureDeviceProperties.Filename == null || this.captureDeviceProperties.Filename == string.Empty)
          {
            throw new ArgumentException("The Filename property has not been set to a file.\n");
          }

          // Render the file writer portion of graph (mux -> file)
          hr = this.captureGraphBuilder.SetOutputFileName(
            MediaSubType.Avi,
            this.captureDeviceProperties.Filename,
            out this.muxFilter,
            out this.fileWriterFilter);
          DsError.ThrowExceptionForHR(hr);

          // Render video (video -> mux)
          if (this.VideoDeviceFilter != null &&
            ((this.captureDeviceProperties.CaptureMode & CaptureMode.VideoCapture) == CaptureMode.VideoCapture))
          {
            // Try interleaved first, because if the device supports it,
            // it's the only way to get audio as well as video
            cat = PinCategory.Capture;
            med = MediaType.Interleaved;
            hr = captureGraphBuilder.RenderStream(
              cat,
              med,
              this.VideoDeviceFilter,
              this.VideoCompressorFilter,
              muxFilter);

            // If interleaved fails try video
            if (hr < 0)
            {
              med = MediaType.Video;
              hr = captureGraphBuilder.RenderStream(
                cat,
                med,
                this.VideoDeviceFilter,
                this.VideoCompressorFilter,
                muxFilter);

              if (hr == -2147220969)
              {
                throw new ArgumentException("Video device is already in use");
              }

              DsError.ThrowExceptionForHR(hr);
            }

            hr = captureGraphBuilder.RenderStream(
              null,
              null,
              this.VideoDeviceFilter,
              this.VideoCompressorFilter,
              muxFilter);
          }
Author: DeSciL, Project: Ogama, Lines: 67, Source file: DXCapture.cs

Example 10: SetupGraph

        /// <summary> build the capture graph. </summary>
        /// 
        //Made accessible so that ShowCapPinDialog can also reference and use it.
        private void SetupGraph(DsDevice dev, string szOutputFileName)
        {
            int hr;

            IBaseFilter capFilter = null;
            IBaseFilter asfWriter = null;
            //ICaptureGraphBuilder2 capGraph = null;
            //Capture graph builder, which provides the methods used for video capture and editing.

            // Get the graphbuilder object
            m_FilterGraph = (IFilterGraph2)new FilterGraph();

            #if DEBUG
            m_rot = new DsROTEntry( m_FilterGraph );
            #endif

            try
            {
                // Get the ICaptureGraphBuilder2
                capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                // Start building the graph
                hr = capGraph.SetFiltergraph( m_FilterGraph );
                Marshal.ThrowExceptionForHR( hr );

                // Add the capture device to the graph
                hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
                Marshal.ThrowExceptionForHR( hr );

                asfWriter = ConfigAsf(capGraph, szOutputFileName);

                hr = capGraph.RenderStream(null, null, capFilter, null, asfWriter);
                Marshal.ThrowExceptionForHR( hr );

                m_mediaCtrl = m_FilterGraph as IMediaControl;

            }
            finally
            {
                if (capFilter != null)
                {
                    Marshal.ReleaseComObject(capFilter);
                    capFilter = null;
                }
                if (asfWriter != null)
                {
                    Marshal.ReleaseComObject(asfWriter);
                    asfWriter = null;
                }
                if (capGraph != null)
                {
                    Marshal.ReleaseComObject(capGraph);
                    capGraph = null;
                }
            }
        }
Author: opu-yokotalab, Project: learn-capture, Lines: 59, Source file: Capture.cs

Example 11: InitializeCapture

        private void InitializeCapture()
        {
            graphBuilder = (IGraphBuilder)new FilterGraph();
            mediaControl = (IMediaControl)graphBuilder;

            captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
            hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
            DsError.ThrowExceptionForHR(hr);

            IBaseFilter videoInput = GetVideoInputObject();
            if (null != videoInput)
            {
                SetConfigurations(videoInput);

                sampleGrabber = new SampleGrabber() as ISampleGrabber;
                hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Render");
                DsError.ThrowExceptionForHR(hr);

                hr = graphBuilder.AddFilter(videoInput, "Camera");
                DsError.ThrowExceptionForHR(hr);

                AMMediaType type = new AMMediaType() { majorType = MediaType.Video, subType = MediaSubType.ARGB32, formatType = FormatType.VideoInfo };
                hr = sampleGrabber.SetMediaType(type);
                DsError.ThrowExceptionForHR(hr);
                DsUtils.FreeAMMediaType(type);

                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.GetConnectedMediaType(new AMMediaType());

                sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
                hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, videoInput, null, sampleGrabber as IBaseFilter);
                DsError.ThrowExceptionForHR(hr);

                Marshal.ReleaseComObject(videoInput);
            }
        }
Author: flair2005, Project: CameraPositioner, Lines: 37, Source file: VideoCaptureComponent.cs
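Example 11 passes the component itself as the callback (SetCallback((ISampleGrabberCB)this, 1)), so the class must implement DirectShowLib's ISampleGrabberCB interface. A hedged sketch of the two members that interface requires; with a callback type of 1 only BufferCB is invoked, so SampleCB can simply return 0 (the member bodies below are illustrative, not taken from the example):

        // Hedged sketch of the ISampleGrabberCB members the component in Example 11 must provide.
        public int SampleCB(double sampleTime, IMediaSample pSample)
        {
            return 0;                                   // not used when the callback type is 1
        }

        public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            // pBuffer points at the ARGB32 frame requested via SetMediaType in the example;
            // copy it out here, because the buffer is only valid for the duration of this call.
            return 0;
        }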

Example 12: RenderHelper

    /// <summary>
    /// Common routine used by RenderTo*  
    /// </summary>
    /// <param name="icgb">ICaptureGraphBuilder2 to use</param>
    /// <param name="pCallback">Callback to use (or null)</param>
    /// <param name="sType">string to use in creating filter graph object descriptions</param>
    /// <param name="pPin">Pin to connect from</param>
    /// <param name="ibfCompressor">Compressor to use, or null for none</param>
    /// <param name="pOutput">Endpoint (renderer or file writer) to connect to</param>
    private void RenderHelper(ICaptureGraphBuilder2 icgb, AVCallback pCallback, string sType, IPin pPin, IBaseFilter ibfCompressor, IBaseFilter pOutput)
    {
      int hr;
      IBaseFilter ibfSampleGrabber = null;

      try
      {
        // If no callback was provided, don't create a samplegrabber
        if (pCallback != null)
        {
          ISampleGrabber isg = (ISampleGrabber)new SampleGrabber();
          ibfSampleGrabber = (IBaseFilter)isg;

          hr = isg.SetCallback(pCallback, 1);
          DESError.ThrowExceptionForHR(hr);

          hr = m_pGraph.AddFilter(ibfSampleGrabber, sType + " sample grabber");
          DESError.ThrowExceptionForHR(hr);
        }

        // If a compressor was provided, add it to the graph and connect it up
        if (ibfCompressor != null)
        {
          // Connect the pin.
          hr = m_pGraph.AddFilter(ibfCompressor, sType + " Compressor");
          DESError.ThrowExceptionForHR(hr);

          hr = icgb.RenderStream(null, null, pPin, ibfSampleGrabber, ibfCompressor);
          DESError.ThrowExceptionForHR(hr);

          // Connect the pin.
          hr = icgb.RenderStream(null, null, ibfCompressor, null, pOutput);
          DESError.ThrowExceptionForHR(hr);
        }
        else
        {
          // Just connect the SampleGrabber (if any)
          hr = icgb.RenderStream(null, null, pPin, ibfSampleGrabber, pOutput);
          DESError.ThrowExceptionForHR(hr);
        }
      }
      finally
      {
        if (ibfSampleGrabber != null)
        {
          Marshal.ReleaseComObject(ibfSampleGrabber);
        }
      }
    }
Author: DeSciL, Project: Ogama, Lines: 58, Source file: DESCombine.cs

Example 13: ApplyVideoInput

        private void ApplyVideoInput()
        {
            int iRet;
            Dispose();

            /*Frame = new byte[(width * height) * PixelSize];
            CapturedFrame = new byte[(width * height) * PixelSize];
            PreviewFrame = new byte[(width / PreviewDivider * height / PreviewDivider) * PixelSize];*/

            if (VideoInput == null)
            {
                return;
            }

            //Original Code
            GraphBuilder = (IGraphBuilder)new FilterGraph();
            CaptureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
            MediaControl = (IMediaControl)GraphBuilder;
            iRet = CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetFiltergraph");

            SampleGrabber = new SampleGrabber() as ISampleGrabber;
            iRet = GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found AddFilter 1");

            SetResolution(width, height);
            iRet = GraphBuilder.AddFilter(VideoInput, "Camera");

            if (iRet != 0) Console.WriteLine("TheKing--> Error Found AddFilter 2");
            iRet = SampleGrabber.SetBufferSamples(true);
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetBufferSamples");
            iRet = SampleGrabber.SetOneShot(false);
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetOneShot");

            iRet = SampleGrabber.SetCallback(this, 1);

            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetCallback");

            iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, SampleGrabber as IBaseFilter);
            if (iRet < 0)
            {
                Console.WriteLine("TheKing--> Error Found in  CaptureGraphBuilder.RenderStream, iRet = " + iRet+", Initialization TryNumber = " + counter);
                if(counter == 1)
                    ApplyVideoInput();
            }

            //GraphBuilder.Connect()
            //iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, null);
            //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 1");

            //iRet = CaptureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, VideoInput, null, SampleGrabber as IBaseFilter);
            //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 2, iRet = " + iRet);

            if (UpdateThread != null)
            {
                UpdateThread.Abort();
            }

            //UpdateThread = new Thread(UpdateBuffer);
            //UpdateThread.Start();

            MediaControl.Run();

            Marshal.ReleaseComObject(VideoInput);
        }
Author: RajibTheKing, Project: DesktopClient, Lines: 65, Source file: DirectShowDevice.cs


Note: The ICaptureGraphBuilder2.RenderStream method examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors, and any redistribution or use should follow the license of the corresponding project. Do not reproduce without permission.