This article collects typical usage examples of the C# method IGraphBuilder.Connect. If you are wondering what IGraphBuilder.Connect does, how to call it, or want to see it used in context, the hand-picked method examples below may help. You can also explore further usage examples of IGraphBuilder, the interface that declares this method.
The following shows 15 code examples of IGraphBuilder.Connect, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
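Before the individual examples, here is a minimal sketch of the method itself. IGraphBuilder.Connect(pinOut, pinIn) asks the filter graph manager to connect an output pin to an input pin, inserting intermediate transform filters when necessary ("intelligent connect"). The sketch assumes the DirectShowLib (DirectShow.NET) bindings used throughout the examples on this page; the helper name ConnectFirstPins and its parameters are illustrative, not part of any library.
using System;
using System.Runtime.InteropServices;
using DirectShowLib;

// Minimal sketch, not production code: connect the first output pin of
// sourceFilter to the first input pin of destFilter. Both filters are
// assumed to have already been added to the graph owned by graphBuilder.
public static void ConnectFirstPins(IGraphBuilder graphBuilder, IBaseFilter sourceFilter, IBaseFilter destFilter)
{
    IPin pinOut = DsFindPin.ByDirection(sourceFilter, PinDirection.Output, 0);
    IPin pinIn = DsFindPin.ByDirection(destFilter, PinDirection.Input, 0);
    try
    {
        if (pinOut == null || pinIn == null)
            throw new InvalidOperationException("Could not find the requested pins.");
        // Connect returns an HRESULT; 0 (S_OK) means the pins were connected,
        // possibly with extra transform filters inserted by the graph manager.
        int hr = graphBuilder.Connect(pinOut, pinIn);
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // Release the pin interfaces whether or not the connection succeeded.
        if (pinOut != null) Marshal.ReleaseComObject(pinOut);
        if (pinIn != null) Marshal.ReleaseComObject(pinIn);
    }
}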
Example 1: Transcode
public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
{
try
{
if (!Supports(format)) return false;
string ext = System.IO.Path.GetExtension(info.file);
if (ext.ToLowerInvariant() != ".dvr-ms" && ext.ToLowerInvariant() != ".sbe")
{
Log.Info("DVRMS2WMV: wrong file format");
return false;
}
Log.Info("DVRMS2WMV: create graph");
graphBuilder = (IGraphBuilder)new FilterGraph();
_rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
Log.Info("DVRMS2WMV: add streambuffersource");
bufferSource = (IStreamBufferSource)new StreamBufferSource();
IBaseFilter filter = (IBaseFilter)bufferSource;
graphBuilder.AddFilter(filter, "SBE SOURCE");
Log.Info("DVRMS2WMV: load file:{0}", info.file);
IFileSourceFilter fileSource = (IFileSourceFilter)bufferSource;
int hr = fileSource.Load(info.file, null);
//add mpeg2 audio/video codecs
string strVideoCodec = "";
string strAudioCodec = "";
using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
{
strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "MPC - MPEG-2 Video Decoder (Gabest)");
strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "MPC - MPA Decoder Filter");
}
Log.Info("DVRMS2WMV: add mpeg2 video codec:{0}", strVideoCodec);
Mpeg2VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
if (Mpeg2VideoCodec == null)
{
Log.Error("DVRMS2WMV:FAILED:unable to add mpeg2 video codec");
Cleanup();
return false;
}
Log.Info("DVRMS2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
Mpeg2AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
if (Mpeg2AudioCodec == null)
{
Log.Error("DVRMS2WMV:FAILED:unable to add mpeg2 audio codec");
Cleanup();
return false;
}
Log.Info("DVRMS2WMV: connect streambufer source->mpeg audio/video decoders");
//connect output #0 of streambuffer source->mpeg2 audio codec pin 1
//connect output #1 of streambuffer source->mpeg2 video codec pin 1
IPin pinOut0, pinOut1;
IPin pinIn0, pinIn1;
pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0); //audio
pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1); //video
if (pinOut0 == null || pinOut1 == null)
{
Log.Error("DVRMS2WMV:FAILED:unable to get pins of source");
Cleanup();
return false;
}
pinIn0 = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Input, 0); //video
pinIn1 = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Input, 0); //audio
if (pinIn0 == null || pinIn1 == null)
{
Log.Error("DVRMS2WMV:FAILED:unable to get pins of mpeg2 video/audio codec");
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut0, pinIn1);
if (hr != 0)
{
Log.Error("DVRMS2WMV:FAILED:unable to connect audio pins :0x{0:X}", hr);
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut1, pinIn0);
if (hr != 0)
{
Log.Error("DVRMS2WMV:FAILED:unable to connect video pins :0x{0:X}", hr);
Cleanup();
return false;
}
string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;
Log.Info("DVRMS2WMV: start pre-run");
mediaControl = graphBuilder as IMediaControl;
mediaSeeking = bufferSource as IStreamBufferMediaSeeking;
mediaEvt = graphBuilder as IMediaEventEx;
mediaPos = graphBuilder as IMediaPosition;
//get file duration
long lTime = 5 * 60 * 60;
lTime *= 10000000;
long pStop = 0;
hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
AMSeekingSeekingFlags.NoPositioning);
if (hr == 0)
{
long lStreamPos;
mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
m_dDuration = lStreamPos;
lTime = 0;
mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
//......... part of the code omitted here .........
Example 2: ConnectPin
/// <summary>
/// helper function to connect a source pin to a destination filter's input pin
/// </summary>
/// <param name="graphBuilder">graph builder interface</param>
/// <param name="pinSource">source pin</param>
/// <param name="filterDest">destination filter</param>
/// <param name="destPinIndex">input pin index</param>
public static bool ConnectPin(IGraphBuilder graphBuilder, IPin pinSource, IBaseFilter filterDest, int destPinIndex)
{
IPin pin;
pinSource.ConnectedTo(out pin);
if (pin != null)
{
Release.ComObject("Connect Pin", pin);
return false;
}
IPin pinDest = DsFindPin.ByDirection(filterDest, PinDirection.Input, destPinIndex);
if (pinDest == null)
return false;
int hr = graphBuilder.Connect(pinSource, pinDest);
if (hr != 0)
{
Release.ComObject("Connect Pin", pinDest);
return false;
}
Release.ComObject("Connect Pin", pinDest);
return true;
}
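A hedged usage sketch for the helper above; the filter variables are illustrative and assumed to already be part of graphBuilder, and Release is the same MediaPortal helper used inside ConnectPin:
// Illustrative only: sourceFilter and muxFilter are assumed to be filters
// that have already been added to graphBuilder.
IPin sourceOut = DsFindPin.ByDirection(sourceFilter, PinDirection.Output, 0);
bool connected = ConnectPin(graphBuilder, sourceOut, muxFilter, 0);
Release.ComObject("source output pin", sourceOut);
// connected == false means the source pin was already connected, the
// destination pin could not be found, or graphBuilder.Connect failed.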
Example 3: SetupGraph
/// <summary>
/// Builds the capture graph for the grabber.
/// </summary>
/// <param name="strCapture">Name of the video capture device.</param>
/// <param name="strCompressor">Name of the video compressor filter.</param>
/// <param name="strFileName">Output file name (used when recording).</param>
/// <param name="iFrameRate">Capture frame rate.</param>
/// <param name="iWidth">Capture width in pixels.</param>
/// <param name="iHeight">Capture height in pixels.</param>
/// <param name="owner">Handle of the owner window.</param>
/// <param name="record">if set to <c>true</c>, the graph also compresses and records to file.</param>
private void SetupGraph(string strCapture, string strCompressor, string strFileName, int iFrameRate, int iWidth, int iHeight, IntPtr owner, bool record)
{
ICaptureGraphBuilder2 captureGraphBuilder = null;
ISampleGrabber sampGrabber = null;
IBaseFilter theIPinTee = null;
IBaseFilter mux = null;
IFileSinkFilter sink = null;
IBaseFilter captureDevice = null;
IBaseFilter captureCompressor = null;
IBaseFilter theRenderer = null;
int hr = 0;
try
{
//Create the filter for the selected video input
captureDevice = CreateFilter(FilterCategory.VideoInputDevice, strCapture);
//Create the filter for the selected video compressor
captureCompressor = CreateFilter(FilterCategory.VideoCompressorCategory, strCompressor);
//Create the Graph
_graphBuilder = (IGraphBuilder)new FilterGraph();
//Create the Capture Graph Builder
captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
// Attach the filter graph to the capture graph
hr = captureGraphBuilder.SetFiltergraph(this._graphBuilder);
checkHR(hr, "Error attaching filter graph to capture graph");
//Add the Video input device to the graph
hr = _graphBuilder.AddFilter(captureDevice, "QUAVS input filter");
checkHR(hr, "Error attaching video input");
//set up the capture device
SetConfigParms(captureGraphBuilder, captureDevice, iFrameRate, iWidth, iHeight);
//Add a sample grabber
sampGrabber = (ISampleGrabber)new SampleGrabber();
ConfigureSampleGrabber(sampGrabber);
hr = _graphBuilder.AddFilter((IBaseFilter)sampGrabber, "QUAVS SampleGrabber");
checkHR(hr, "Error adding sample grabber");
//connect capture device to SampleGrabber
hr = _graphBuilder.Connect(GetPin(captureDevice, "Capture"), GetPin((IBaseFilter)sampGrabber, "Input"));
checkHR(hr, "Error attaching sample grabber to capture pin");
//Add an Infinite Pin Tee
theIPinTee = (IBaseFilter)new InfTee();
hr = _graphBuilder.AddFilter(theIPinTee, "QUAVS Pin Tee");
checkHR(hr, "Error adding infinite tee pin");
//connect the SampleGrabber output to the Infinite Pin Tee
hr = _graphBuilder.Connect(GetPin((IBaseFilter)sampGrabber, "Output"), GetPin(theIPinTee, "Input"));
checkHR(hr, "Error connecting SampleGrabber to tee");
if (record)
{
//Add the Video compressor filter to the graph
hr = _graphBuilder.AddFilter(captureCompressor, "QUAVS compressor filter");
checkHR(hr, "Error adding compressor filter");
//connect tee Output1 to the compressor
hr = _graphBuilder.Connect(GetPin(theIPinTee, "Output1"), GetPin(captureCompressor, "Input"));
checkHR(hr, "Error connecting tee to compressor");
//Create the file writer part of the graph. SetOutputFileName does this for us, and returns the mux and sink
hr = captureGraphBuilder.SetOutputFileName(MediaSubType.Avi, strFileName, out mux, out sink);
checkHR(hr, "Error adding mux filter or setting output file name");
//connect the compressor output to the mux input
hr = _graphBuilder.Connect(GetPin(captureCompressor, "Output"), GetPin(mux, "Input 01"));
checkHR(hr, "Error connecting the compressor to mux");
// Get the default video renderer
theRenderer = new VideoRendererDefault() as IBaseFilter;
hr = _graphBuilder.AddFilter(theRenderer, "Renderer");
checkHR(hr, "Error adding screen renderer");
//connect tee Output2 to the screen renderer
hr = _graphBuilder.Connect(GetPin(theIPinTee, "Output2"), GetPin(theRenderer, "VMR Input0"));
checkHR(hr, "Error connecting screen renderer");
}
else
{
// Get the default video renderer
theRenderer = new VideoRendererDefault() as IBaseFilter;
hr = _graphBuilder.AddFilter(theRenderer, "Renderer");
checkHR(hr, "Error adding screen renderer");
//......... part of the code omitted here .........
Example 4: Setup
/// <summary>
/// Builds the graph.
/// </summary>
/// <param name="output_file">Output file path.</param>
public virtual void Setup(string output_file)
{
this.Dispose();
try
{
CxDSCameraParam param = this.Param;
// Graph builder.
// CoCreateInstance
GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));
#region Add filters.
// Video input (capture) filter.
IBaseFilter capture = CreateVideoCapture(param);
if (capture == null)
throw new System.IO.IOException();
this.GraphBuilder.AddFilter(capture, "CaptureFilter");
IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
this.CaptureFilter = capture;
this.CaptureOutPin = capture_out;
// Sample grabber.
IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
if (grabber == null)
throw new System.IO.IOException();
this.GraphBuilder.AddFilter(grabber, "SampleGrabber");
this.SampleGrabber = (ISampleGrabber)grabber;
#endregion
#region Capture graph builder:
{
int hr = 0;
CaptureBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_CaptureGraphBuilder2)));
hr = CaptureBuilder.SetFiltergraph(GraphBuilder);
if (string.IsNullOrEmpty(output_file))
{
// Renderer.
IBaseFilter renderer = null;
renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
if (renderer == null)
throw new System.IO.IOException();
this.GraphBuilder.AddFilter(renderer, "Renderer");
this.Renderer = renderer;
#if true
// Use ICaptureGraphBuilder2.RenderStream instead of IGraphBuilder.Connect.
// fig) [capture]-out->-in-[sample grabber]-out->-in-[null render]
hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, renderer);
#else
// Get the pins.
IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);
// Connect the pins.
GraphBuilder.Connect(capture_out, grabber_in);
GraphBuilder.Connect(grabber_out, renderer_in);
// Keep the pins.
//SampleGrabberInPin = grabber_in;
//SampleGrabberOutPin = grabber_out;
//RendererInPin = renderer_in;
#endif
}
else
{
IBaseFilter mux = null;
IFileSinkFilter sync = null;
hr = CaptureBuilder.SetOutputFileName(new Guid(GUID.MEDIASUBTYPE_Avi), output_file, ref mux, ref sync);
hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, mux);
this.Mux = mux;
this.Sync = sync;
}
}
#endregion
#region Store: frame size.
VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
this.SampleGrabberCB.FrameSize = new Size(
System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
);
#endregion
#region Store: device name.
try
{
if (string.IsNullOrEmpty(param.FilterInfo.Name) == false)
{
this.DeviceName = param.FilterInfo.Name;
}
else
//......... part of the code omitted here .........
Example 5: ConnectFilters
public static void ConnectFilters(IGraphBuilder graphBuilder, IPin sourcePin, IPin destinationPin,
bool useIntelligentConnect)
{
int hr = 0;
if (graphBuilder == null)
throw new ArgumentNullException("graphBuilder");
if (sourcePin == null)
throw new ArgumentNullException("sourcePin");
if (destinationPin == null)
throw new ArgumentNullException("destinationPin");
if (useIntelligentConnect)
{
hr = graphBuilder.Connect(sourcePin, destinationPin);
DsError.ThrowExceptionForHR(hr);
}
else
{
hr = graphBuilder.ConnectDirect(sourcePin, destinationPin, null);
DsError.ThrowExceptionForHR(hr);
}
}
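The useIntelligentConnect flag selects between the two strategies: Connect lets the graph manager insert intermediate transform filters when the pins cannot agree on a media type directly, while ConnectDirect only succeeds when the two pins are directly compatible. A hedged usage sketch follows; the pin variables are illustrative and assumed to be valid, unconnected pins on filters already in the graph:
// Sketch only: demuxVideoOut and decoderVideoIn are placeholder pins.
// Requires: using System.Runtime.InteropServices; (for COMException)
try
{
    // Try a direct connection first so no extra filters are inserted.
    ConnectFilters(graphBuilder, demuxVideoOut, decoderVideoIn, false);
}
catch (COMException) // DsError.ThrowExceptionForHR reports most DirectShow failures this way
{
    // Fall back to intelligent connect and let the graph manager
    // insert whatever transform filters are needed.
    ConnectFilters(graphBuilder, demuxVideoOut, decoderVideoIn, true);
}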
Example 6: Transcode
//......... part of the code omitted here .........
if (usingAAC == false)
{
Log.Info("TSReader2MP4: add mpeg2 audio codec:{0}", strAudioCodec);
AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
if (AudioCodec == null)
{
Log.Error("TSReader2MP4: FAILED:unable to add mpeg2 audio codec");
Cleanup();
return false;
}
}
else
{
Log.Info("TSReader2MP4: add aac audio codec:{0}", strAACAudioCodec);
AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
if (AudioCodec == null)
{
Log.Error("TSReader2MP4: FAILED:unable to add aac audio codec");
Cleanup();
return false;
}
}
Log.Info("TSReader2MP4: connect tsreader->audio/video decoders");
//connect output #0 (audio) of tsreader->audio decoder input pin 0
//connect output #1 (video) of tsreader->video decoder input pin 0
pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
if (pinIn0 == null || pinIn1 == null)
{
Log.Error("TSReader2MP4: FAILED: unable to get pins of video/audio codecs");
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut0, pinIn0);
if (hr != 0)
{
Log.Error("TSReader2MP4: FAILED: unable to connect audio pins :0x{0:X}", hr);
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut1, pinIn1);
if (hr != 0)
{
Log.Error("TSReader2MP4: FAILED: unable to connect video pins :0x{0:X}", hr);
Cleanup();
return false;
}
//add encoders, muxer & filewriter
if (!AddCodecs(graphBuilder, info)) return false;
//setup graph controls
mediaControl = graphBuilder as IMediaControl;
mediaSeeking = tsreaderSource as IMediaSeeking;
mediaEvt = graphBuilder as IMediaEventEx;
mediaPos = graphBuilder as IMediaPosition;
//get file duration
Log.Info("TSReader2MP4: Get duration of recording");
long lTime = 5 * 60 * 60;
lTime *= 10000000;
long pStop = 0;
hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
AMSeekingSeekingFlags.NoPositioning);
if (hr == 0)
{
long lStreamPos;
mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
m_dDuration = lStreamPos;
Example 7: BuildGraph
//......... part of the code omitted here .........
}
mediaTypeEnum.Reset();
Marshal.ReleaseComObject(mediaTypeEnum);
i++;
}
/* This is the windowed renderer. This is *NEEDED* in order
* for interactive menus to work with the other VMR9 in renderless mode */
var dummyRenderer = (IBaseFilter)new VideoMixingRenderer9();
var dummyRendererConfig = (IVMRFilterConfig9)dummyRenderer;
/* In order for this interactive menu trick to work, the VMR9
* must be set to Windowed. We will make sure the window is hidden later on */
hr = dummyRendererConfig.SetRenderingMode(VMR9Mode.Windowed);
DsError.ThrowExceptionForHR(hr);
hr = dummyRendererConfig.SetNumberOfStreams(1);
DsError.ThrowExceptionForHR(hr);
hr = m_graph.AddFilter(dummyRenderer, "Dummy Windowed");
DsError.ThrowExceptionForHR(hr);
if (dvdAudioPin != null)
{
/* This should render out to the default audio device. We
* could modify this code here to go out any audio
* device, such as SPDIF or another sound card */
hr = m_graph.Render(dvdAudioPin);
DsError.ThrowExceptionForHR(hr);
}
/* Get the first input pin on our dummy renderer */
m_dummyRendererPin = DsFindPin.ByConnectionStatus(dummyRenderer, /* Filter to search */
PinConnectedStatus.Unconnected,
0);
/* Get an available pin on our real renderer */
IPin rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
PinConnectedStatus.Unconnected,
0); /* Pin index */
/* Connect the pin to the renderer */
hr = m_graph.Connect(dvdVideoPin, rendererPin);
DsError.ThrowExceptionForHR(hr);
/* Get the next available pin on our real renderer */
rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
PinConnectedStatus.Unconnected,
0); /* Pin index */
/* Render the sub picture, which will connect
* the DVD navigator to the codec, not the renderer */
hr = m_graph.Render(dvdSubPicturePin);
DsError.ThrowExceptionForHR(hr);
/* These are the subtypes most likely to be our dvd subpicture */
var preferedSubpictureTypes = new[]{MediaSubType.ARGB4444,
MediaSubType.AI44,
MediaSubType.AYUV,
MediaSubType.ARGB32};
IPin dvdSubPicturePinOut = null;
/* Find what should be the subpicture pin out */
foreach (var guidType in preferedSubpictureTypes)
{
Example 8: Connect
/// <summary>Connects together two graph filters.</summary>
/// <param name="graph">The graph on which the filters exist.</param>
/// <param name="source">The source filter.</param>
/// <param name="outPinName">The name of the output pin on the source filter.</param>
/// <param name="destination">The destination filter.</param>
/// <param name="inPinName">The name of the input pin on the destination filter.</param>
protected void Connect(IGraphBuilder graph, IBaseFilter source, string outPinName,
IBaseFilter destination, string inPinName)
{
IPin outPin = source.FindPin(outPinName);
DisposalCleanup.Add(outPin);
IPin inPin = destination.FindPin(inPinName);
DisposalCleanup.Add(inPin);
graph.Connect(outPin, inPin);
}
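A brief usage sketch for this protected helper, written from inside a derived class. The filter variables and pin names are purely illustrative: the helper relies on IBaseFilter.FindPin, which matches the pin identifier (as returned by IPin::QueryId), and for many filters this equals the pin name shown in GraphEdit, but not for all.
// Illustrative only: pin names such as "Output" and "VMR Input0" vary per filter
// and should be checked with GraphEdit or by enumerating the pins.
Connect(graph, sourceFilter, "Output", transformFilter, "In");
Connect(graph, transformFilter, "Out", rendererFilter, "VMR Input0");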
Example 9: RemoveFromGraph
public static void RemoveFromGraph(IGraphBuilder graphBuilder)
{
IBaseFilter vob = null;
using (Settings xmlreader = new MPSettings())
{
string engineType = xmlreader.GetValueAsString("subtitles", "engine", "DirectVobSub");
XySubFilter = engineType.Equals("XySubFilter");
}
if (!XySubFilter)
{
DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubAutoload, out vob);
if (vob == null)
{
//Try the "normal" filter then.
DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubNormal, out vob);
}
}
if (vob == null)
{
DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.XySubFilterAutoload, out vob);
if (vob != null)
{
//remove the XySubFilter filter from the graph
graphBuilder.RemoveFilter(vob);
DirectShowUtil.ReleaseComObject(vob);
vob = null;
return;
}
//Try the XySubFilter "normal" filter then.
DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.XySubFilterNormal, out vob);
if (vob != null)
{
//remove the XySubFilter filter from the graph
graphBuilder.RemoveFilter(vob);
DirectShowUtil.ReleaseComObject(vob);
vob = null;
}
return;
}
Log.Info("VideoPlayerVMR9: DirectVobSub in graph, removing...");
// Check where video inputs are connected
IPin pinVideoIn = DsFindPin.ByDirection(vob, PinDirection.Input, 0);
IPin pinVideoOut = DsFindPin.ByDirection(vob, PinDirection.Output, 0);
//find the downstream input pin that DirectVobSub's video output pin is connected to
IPin pinVideoTo = null;
if (pinVideoOut != null)
{
pinVideoOut.ConnectedTo(out pinVideoTo);
}
//find the upstream output pin that DirectVobSub's video input pin is connected to
IPin pinVideoFrom = null;
if (pinVideoIn != null)
{
pinVideoIn.ConnectedTo(out pinVideoFrom);
}
int hr = 0;
if (pinVideoFrom != null)
{
hr = pinVideoFrom.Disconnect();
if (hr != 0)
{
Log.Error("VideoPlayerVMR9: DirectVobSub failed disconnecting source pin");
}
}
if (pinVideoTo != null)
{
hr = pinVideoTo.Disconnect();
if (hr != 0)
{
Log.Error("VideoPlayerVMR9: DirectVobSub failed disconnecting destination pin");
}
}
//remove the DirectVobSub filter from the graph
graphBuilder.RemoveFilter(vob);
DirectShowUtil.ReleaseComObject(vob);
vob = null;
//reconnect the source output pin to the vmr9/evr filter
if (pinVideoFrom != null)
{
if (pinVideoTo != null)
{
hr = graphBuilder.Connect(pinVideoFrom, pinVideoTo);
}
//hr = graphBuilder.Render(pinVideoFrom);
DirectShowUtil.ReleaseComObject(pinVideoFrom);
pinVideoFrom = null;
}
if (pinVideoTo != null)
//......... part of the code omitted here .........
Example 10: ConnectFilter
/// <summary>
/// Connects two filters to one another
/// </summary>
/// <param name="graphBuilder">current graph reference</param>
/// <param name="sourceFilter">source filter</param>
/// <param name="destinationFilter">destination filetr</param>
/// <param name="deviceName">filter name</param>
/// <returns></returns>
public static bool ConnectFilter(IGraphBuilder graphBuilder, IBaseFilter sourceFilter, IBaseFilter destinationFilter,
string deviceName)
{
//Log.Log.WriteFile("analog: ConnectFilter()");
IPin pinIn = DsFindPin.ByDirection(destinationFilter, PinDirection.Input, 0);
Log.Log.WriteFile("analog: PinDest:{0}", LogPinInfo(pinIn));
for (int i = 0; i <= 10; ++i)
{
IPin pinOut = DsFindPin.ByDirection(sourceFilter, PinDirection.Output, i);
if (pinOut == null)
return false;
Log.Log.WriteFile("analog: pinSource {0}:{1}", i, LogPinInfo(pinOut));
//Hauppauge hack - sorry, i attempted to do this right, but hauppauge drivers report incorrect values
//and it takes a very long time to reject the audio to video connection - diehard2
int hr = -1;
IPin testPin = null;
try
{
pinOut.ConnectedTo(out testPin);
}
catch (Exception ex)
{
Log.Log.WriteFile("Error while connecting a filter: ", ex);
}
if (testPin != null)
{
Release.ComObject("outPin", pinOut);
Release.ComObject("testPin", testPin);
Log.Log.WriteFile("analog: skipping pin");
continue;
}
if (deviceName.Contains("Hauppauge") &&
(LogPinInfo(pinOut).Contains("Audio") || LogPinInfo(pinIn).Contains("Audio")))
{
if (LogPinInfo(pinOut).Contains("Audio") && LogPinInfo(pinIn).Contains("Audio"))
hr = graphBuilder.Connect(pinOut, pinIn);
}
else
hr = graphBuilder.Connect(pinOut, pinIn);
if (hr == 0)
{
Log.Log.WriteFile("analog: pins connected");
Release.ComObject("pinIn", pinIn);
Release.ComObject("pinOut", pinOut);
return true;
}
Release.ComObject("pinOut", pinOut);
Release.ComObject("testPin", testPin);
}
Release.ComObject("pinIn", pinIn);
return false;
}
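A hedged usage sketch for the helper above. The filter variables are illustrative and assumed to already be part of graphBuilder; tunerDeviceName stands for whatever name the analog capture device reported, so the Hauppauge-specific pin filtering can kick in when appropriate. Log.Log.WriteFile is the same TV library logger used inside the helper.
// Sketch only: captureFilter and encoderFilter are placeholder filters
// that have already been added to graphBuilder.
bool connected = ConnectFilter(graphBuilder, captureFilter, encoderFilter, tunerDeviceName);
if (!connected)
{
    Log.Log.WriteFile("analog: unable to connect capture filter to encoder");
}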
Example 11: Init
/// <summary>
/// Worker thread that captures the images
/// </summary>
private void Init()
{
try
{
log.Trace("Start worker thread");
// Create the main graph
_graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;
// Create the webcam source
_sourceObject = FilterInfo.CreateFilter(_monikerString);
// Create the grabber
_grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
_grabberObject = _grabber as IBaseFilter;
// Add the source and grabber to the main graph
_graph.AddFilter(_sourceObject, "source");
_graph.AddFilter(_grabberObject, "grabber");
using (AMMediaType mediaType = new AMMediaType())
{
mediaType.MajorType = MediaTypes.Video;
mediaType.SubType = MediaSubTypes.RGB32;
_grabber.SetMediaType(mediaType);
if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
{
if (_grabber.GetConnectedMediaType(mediaType) == 0)
{
// During startup, this code can be too fast, so try at least 3 times
int retryCount = 0;
bool succeeded = false;
while ((retryCount < 3) && !succeeded)
{
// Tried again
retryCount++;
try
{
// Retrieve the grabber information
VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
_capGrabber.Width = header.BmiHeader.Width;
_capGrabber.Height = header.BmiHeader.Height;
// Succeeded
succeeded = true;
}
catch
{
// Trace
log.InfoFormat("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);
// Sleep
Thread.Sleep(50);
}
}
}
}
_graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
_grabber.SetBufferSamples(false);
_grabber.SetOneShot(false);
_grabber.SetCallback(_capGrabber, 1);
log.Trace("_grabber set up");
// Get the video window
IVideoWindow wnd = (IVideoWindow)_graph;
wnd.put_AutoShow(false);
wnd = null;
// Create the control and run
_control = (IMediaControl)_graph;
_control.Run();
log.Trace("control runs");
// Wait for the stop signal
//while (!_stopSignal.WaitOne(0, true))
//{
// Thread.Sleep(10);
//}
}
}catch (Exception ex)
{
// Trace
log.Debug(ex);
Release();
}
}
Example 12: Transcode
//......... part of the code omitted here .........
if (usingAAC == false)
{
Log.Info("TSReader2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
if (AudioCodec == null)
{
Log.Error("TSReader2WMV: FAILED:unable to add mpeg2 audio codec");
Cleanup();
return false;
}
}
else
{
Log.Info("TSReader2WMV: add aac audio codec:{0}", strAACAudioCodec);
AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
if (AudioCodec == null)
{
Log.Error("TSReader2WMV: FAILED:unable to add aac audio codec");
Cleanup();
return false;
}
}
Log.Info("TSReader2WMV: connect tsreader->audio/video decoders");
//connect output #0 (audio) of tsreader->audio decoder input pin 0
//connect output #1 (video) of tsreader->video decoder input pin 0
pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
if (pinIn0 == null || pinIn1 == null)
{
Log.Error("TSReader2WMV: FAILED: unable to get pins of video/audio codecs");
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut0, pinIn0);
if (hr != 0)
{
Log.Error("TSReader2WMV: FAILED: unable to connect audio pins :0x{0:X}", hr);
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut1, pinIn1);
if (hr != 0)
{
Log.Error("TSReader2WMV: FAILED: unable to connect video pins :0x{0:X}", hr);
Cleanup();
return false;
}
string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;
Log.Info("TSReader2WMV: start pre-run");
mediaControl = graphBuilder as IMediaControl;
mediaSeeking = tsreaderSource as IMediaSeeking;
mediaEvt = graphBuilder as IMediaEventEx;
mediaPos = graphBuilder as IMediaPosition;
//get file duration
long lTime = 5 * 60 * 60;
lTime *= 10000000;
long pStop = 0;
hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
AMSeekingSeekingFlags.NoPositioning);
if (hr == 0)
{
long lStreamPos;
mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
m_dDuration = lStreamPos;
lTime = 0;
Example 13: AddCodecs
//......... part of the code omitted here .........
Log.Error("DVRMS2DIVX:FAILED:Unable to create AviMux");
Cleanup();
return false;
}
hr = graphBuilder.AddFilter(aviMuxer, "AviMux");
if (hr != 0)
{
Log.Error("DVRMS2DIVX:FAILED:Add AviMux to filtergraph :0x{0:X}", hr);
Cleanup();
return false;
}
//connect output of mpeg2 codec to xvid codec
Log.Info("DVRMS2DIVX: connect mpeg2 video codec->divx codec");
IPin pinOut, pinIn;
pinIn = DsFindPin.ByDirection(divxCodec, PinDirection.Input, 0);
if (pinIn == null)
{
Log.Error("DVRMS2DIVX:FAILED:cannot get input pin of divx codec:0x{0:X}", hr);
Cleanup();
return false;
}
pinOut = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Output, 0);
if (pinOut == null)
{
Log.Error("DVRMS2DIVX:FAILED:cannot get output pin of mpeg2 video codec :0x{0:X}", hr);
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut, pinIn);
if (hr != 0)
{
Log.Error("DVRMS2DIVX:FAILED:unable to connect mpeg2 video codec->divx:0x{0:X}", hr);
Cleanup();
return false;
}
//connect output of mpeg2 audio codec to mp3 codec
Log.Info("DVRMS2DIVX: connect mpeg2 audio codec->mp3 codec");
pinIn = DsFindPin.ByDirection(mp3Codec, PinDirection.Input, 0);
if (pinIn == null)
{
Log.Error("DVRMS2DIVX:FAILED:cannot get input pin of mp3 codec:0x{0:X}", hr);
Cleanup();
return false;
}
pinOut = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Output, 0);
if (pinOut == null)
{
Log.Error("DVRMS2DIVX:FAILED:cannot get output pin of mpeg2 audio codec :0x{0:X}", hr);
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut, pinIn);
if (hr != 0)
{
Log.Error("DVRMS2DIVX:FAILED:unable to connect mpeg2 audio codec->mpeg3:0x{0:X}", hr);
Cleanup();
return false;
}
Example 14: AddCodecs
//......... part of the code omitted here .........
string monikermp4Muxer =
@"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{990D1978-E48D-43AF-B12D-24A7456EC89F}";
mp4Muxer = Marshal.BindToMoniker(monikermp4Muxer) as IBaseFilter;
if (mp4Muxer == null)
{
Log.Error("TSReader2MP4: FAILED: Unable to create MP4Mux");
Cleanup();
return false;
}
hr = graphBuilder.AddFilter(mp4Muxer, "MP4Mux");
if (hr != 0)
{
Log.Error("TSReader2MP4: FAILED: Add MP4Mux to filtergraph :0x{0:X}", hr);
Cleanup();
return false;
}
//connect output of audio codec to aac encoder
IPin pinOut, pinIn;
Log.Info("TSReader2MP4: connect audio codec->aac encoder");
pinIn = DsFindPin.ByDirection(aacEncoder, PinDirection.Input, 0);
if (pinIn == null)
{
Log.Error("TSReader2MP4: FAILED: cannot get input pin of aac encoder:0x{0:X}", hr);
Cleanup();
return false;
}
pinOut = DsFindPin.ByDirection(AudioCodec, PinDirection.Output, 0);
if (pinOut == null)
{
Log.Error("TSReader2MP4: FAILED: cannot get output pin of audio codec :0x{0:X}", hr);
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut, pinIn);
if (hr != 0)
{
Log.Error("TSReader2MP4: FAILED: unable to connect audio codec->aac encoder: 0x{0:X}", hr);
Cleanup();
return false;
}
//connect output of video codec to h264 encoder
Log.Info("TSReader2MP4: connect video codec->h264 encoder");
pinIn = DsFindPin.ByDirection(h264Encoder, PinDirection.Input, 0);
if (pinIn == null)
{
Log.Error("TSReader2MP4: FAILED: cannot get input pin of h264 encoder:0x{0:X}", hr);
Cleanup();
return false;
}
pinOut = DsFindPin.ByDirection(VideoCodec, PinDirection.Output, 0);
if (pinOut == null)
{
Log.Error("TSReader2MP4: FAILED: cannot get output pin of video codec :0x{0:X}", hr);
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut, pinIn);
if (hr != 0)
{
Log.Error("TSReader2MP4: FAILED: unable to connect video codec->h264 encoder :0x{0:X}", hr);
Cleanup();
return false;
}
//connect output of aac encoder to pin#0 of mp4mux
Log.Info("TSReader2MP4: connect aac encoder->mp4mux");
pinOut = DsFindPin.ByDirection(aacEncoder, PinDirection.Output, 0);
Example 15: Transcode
//......... part of the code omitted here .........
Log.Info("DVRMS2DIVX: add MPA mpeg2 audio codec:{0}", strAudioCodec);
Mpeg2AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
if (Mpeg2AudioCodec == null)
{
Log.Error("DVRMS2DIVX:FAILED:unable to add MPV mpeg2 audio codec");
Cleanup();
return false;
}
//connect output #0 of streambuffer source->mpeg2 audio codec pin 1
//connect output #1 of streambuffer source->mpeg2 video codec pin 1
Log.Info("DVRMS2DIVX: connect streambufer source->mpeg audio/video decoders");
IPin pinOut0, pinOut1;
IPin pinIn0, pinIn1;
pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0); //audio
pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1); //video
if (pinOut0 == null || pinOut1 == null)
{
Log.Error("DVRMS2DIVX:FAILED:unable to get pins of source");
Cleanup();
return false;
}
pinIn0 = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Input, 0); //video
pinIn1 = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Input, 0); //audio
if (pinIn0 == null || pinIn1 == null)
{
Log.Error("DVRMS2DIVX:FAILED:unable to get pins of mpeg2 video/audio codec");
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut0, pinIn1);
if (hr != 0)
{
Log.Error("DVRMS2DIVX:FAILED:unable to connect audio pins :0x{0:X}", hr);
Cleanup();
return false;
}
hr = graphBuilder.Connect(pinOut1, pinIn0);
if (hr != 0)
{
Log.Error("DVRMS2DIVX:FAILED:unable to connect video pins :0x{0:X}", hr);
Cleanup();
return false;
}
if (!AddCodecs(graphBuilder, info)) return false;
// hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
// if (hr!=0)
// Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);
mediaControl = graphBuilder as IMediaControl;
mediaSeeking = bufferSource as IStreamBufferMediaSeeking;
mediaEvt = graphBuilder as IMediaEventEx;
mediaPos = graphBuilder as IMediaPosition;
//get file duration
Log.Info("DVRMS2DIVX: Get duration of movie");
long lTime = 5 * 60 * 60;
lTime *= 10000000;
long pStop = 0;
hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
AMSeekingSeekingFlags.NoPositioning);