This article collects typical usage examples of the IVideoWindow.put_Owner method in C#. If you are wondering what IVideoWindow.put_Owner does, how to call it, or what real-world code using it looks like, the hand-picked examples below should help. You can also explore further usage examples of the containing class, IVideoWindow.
A total of 15 code examples of the IVideoWindow.put_Owner method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
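Before the project examples, here is a minimal, self-contained sketch of the usual put_Owner lifecycle: parent the video window to a host control, give it child-window styles, size it, and detach it with put_Owner(IntPtr.Zero) before releasing the graph. This sketch is not taken from any of the examples below; it assumes the DirectShowLib types used throughout this page (FilterGraph, IVideoWindow, WindowStyle, OABool, DsError), and the class name, file path, and hostControl parameter are placeholders chosen for illustration.

using System;
using System.Runtime.InteropServices;
using System.Windows.Forms;
using DirectShowLib;

static class PutOwnerSketch
{
    public static IGraphBuilder Attach(string fileName, Control hostControl)
    {
        // Build a playback graph for the file.
        IGraphBuilder graph = (IGraphBuilder)new FilterGraph();
        DsError.ThrowExceptionForHR(graph.RenderFile(fileName, null));

        // Parent the renderer's window to our control and size it to the client area.
        IVideoWindow videoWindow = (IVideoWindow)graph;
        DsError.ThrowExceptionForHR(videoWindow.put_Owner(hostControl.Handle));
        DsError.ThrowExceptionForHR(videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings));
        DsError.ThrowExceptionForHR(videoWindow.SetWindowPosition(0, 0, hostControl.ClientSize.Width, hostControl.ClientSize.Height));
        DsError.ThrowExceptionForHR(videoWindow.put_Visible(OABool.True));

        ((IMediaControl)graph).Run();
        return graph;
    }

    public static void Detach(IGraphBuilder graph)
    {
        ((IMediaControl)graph).Stop();

        // Hide and detach the video window *before* releasing the graph, so DirectShow
        // stops drawing into (and posting messages to) a window that is about to be destroyed.
        IVideoWindow videoWindow = (IVideoWindow)graph;
        videoWindow.put_Visible(OABool.False);
        videoWindow.put_Owner(IntPtr.Zero);

        Marshal.ReleaseComObject(graph);
    }
}

As the cleanup-oriented examples below (1, 7 and 15) show, the put_Owner(IntPtr.Zero) call on teardown is the essential counterpart of the initial put_Owner(handle) call.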
Example 1: CloseInterfaces
/// <summary> do cleanup and release DirectShow. </summary>
protected void CloseInterfaces()
{
if (_graphBuilder == null)
{
return;
}
Log.Debug("BDPlayer: Cleanup DShow graph {0}", GUIGraphicsContext.InVmr9Render);
try
{
BDOSDRenderer.StopRendering();
if (VMR9Util.g_vmr9 != null)
{
VMR9Util.g_vmr9.Vmr9MediaCtrl(_mediaCtrl);
VMR9Util.g_vmr9.Enable(false);
}
#region Cleanup
if (VideoCodec != null)
{
DirectShowUtil.FinalReleaseComObject(VideoCodec);
VideoCodec = null;
Log.Info("BDPlayer: Cleanup VideoCodec");
}
if (AudioCodec != null)
{
DirectShowUtil.FinalReleaseComObject(AudioCodec);
AudioCodec = null;
Log.Info("BDPlayer: Cleanup AudioCodec");
}
if (_audioRendererFilter != null)
{
DirectShowUtil.FinalReleaseComObject(_audioRendererFilter);
_audioRendererFilter = null;
Log.Info("BDPlayer: Cleanup AudioRenderer");
}
//Test to ReleaseComObject from PostProcessFilter list objects.
if (PostProcessFilterVideo.Count > 0)
{
foreach (var ppFilter in PostProcessFilterVideo)
{
if (ppFilter.Value != null)
{
DirectShowUtil.RemoveFilter(_graphBuilder, ppFilter.Value as IBaseFilter);
DirectShowUtil.FinalReleaseComObject(ppFilter.Value);
}
}
PostProcessFilterVideo.Clear();
Log.Info("BDPlayer: Cleanup PostProcessVideo");
}
//Test to ReleaseComObject from PostProcessFilter list objects.
if (PostProcessFilterAudio.Count > 0)
{
foreach (var ppFilter in PostProcessFilterAudio)
{
if (ppFilter.Value != null)
{
DirectShowUtil.RemoveFilter(_graphBuilder, ppFilter.Value as IBaseFilter);
DirectShowUtil.FinalReleaseComObject(ppFilter.Value);
}
}
PostProcessFilterAudio.Clear();
Log.Info("BDPlayer: Cleanup PostProcessAudio");
}
if (_interfaceBDReader != null)
{
DirectShowUtil.FinalReleaseComObject(_interfaceBDReader);
_interfaceBDReader = null;
}
if (VMR9Util.g_vmr9 != null && VMR9Util.g_vmr9._vmr9Filter != null)
{
//MadvrInterface.EnableExclusiveMode(false, VMR9Util.g_vmr9._vmr9Filter);
//DirectShowUtil.DisconnectAllPins(_graphBuilder, VMR9Util.g_vmr9._vmr9Filter);
Log.Info("BDPlayer: Cleanup VMR9");
}
#endregion
_videoWin = _graphBuilder as IVideoWindow;
if (_videoWin != null && GUIGraphicsContext.VideoRenderer != GUIGraphicsContext.VideoRendererType.madVR)
{
_videoWin.put_Owner(IntPtr.Zero);
_videoWin.put_Visible(OABool.False);
}
if (_mediaEvt != null)
{
_mediaEvt.SetNotifyWindow(IntPtr.Zero, WM_GRAPHNOTIFY, IntPtr.Zero);
_mediaEvt = null;
}
if (_graphBuilder != null)
//......... part of the code omitted here .........
Example 2: renderGraph
//......... part of the code omitted here .........
// Render the file writer portion of graph (mux -> file)
Guid mediaSubType = MediaSubType.Avi;
hr = captureGraphBuilder.SetOutputFileName( ref mediaSubType, Filename, out muxFilter, out fileWriterFilter );
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
if ( VideoDevice != null )
{
// Try interleaved first, because if the device supports it,
// it's the only way to get audio as well as video
cat = PinCategory.Capture;
med = MediaType.Interleaved;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter);
if( hr < 0 )
{
med = MediaType.Video;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter);
//if ( hr == -2147220969 ) throw new DeviceInUseException( "Video device", hr );
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
}
}
// Render audio (audio -> mux)
if ( AudioDevice != null )
{
cat = PinCategory.Capture;
med = MediaType.Audio;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, audioDeviceFilter, audioCompressorFilter, muxFilter );
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
}
isCaptureRendered = true;
didSomething = true;
}
// Render preview stream and launch the baseGrabFlt to capture frames
// ===================================================================================
if ( wantPreviewRendered && renderStream && !isPreviewRendered )
{
// Render preview (video.PinPreview -> baseGrabFlt -> renderer)
// At this point intelligent connect is used, because my webcams don't have a preview pin and
// a capture pin, so Smart Tee filter will be used. I have tested it using GraphEdit.
// I can type hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, null, baseGrabFlt);
// because baseGrabFlt is a transform filter, like videoCompressorFilter.
cat = PinCategory.Preview;
med = MediaType.Video;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, baseGrabFlt, null );
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
// Get the IVideoWindow interface
videoWindow = (IVideoWindow) graphBuilder;
// Set the video window to be a child of the main window
hr = videoWindow.put_Owner( previewWindow.Handle );
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
// Set video window style
hr = videoWindow.put_WindowStyle( WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
// Position video window in client rect of owner window
previewWindow.Resize += new EventHandler( onPreviewWindowResize );
onPreviewWindowResize( this, null );
// Make the video window visible, now that it is properly positioned
hr = videoWindow.put_Visible( DsHlp.OATRUE );
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
hr = mediaEvt.SetNotifyWindow( this.Handle, WM_GRAPHNOTIFY, IntPtr.Zero );
//if( hr < 0 )
// Marshal.ThrowExceptionForHR( hr );
isPreviewRendered = true;
didSomething = true;
// Begin Configuration of SampGrabber <<<<<<----------------------------------------------------
AMMediaType media = new AMMediaType();
hr = sampGrabber.GetConnectedMediaType( media );
//if( hr < 0 )
// Marshal.ThrowExceptionForHR( hr );
if( (media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero) )
throw new NotSupportedException( "Unknown Grabber Media Format" );
videoInfoHeader = (VideoInfoHeader) Marshal.PtrToStructure( media.formatPtr, typeof(VideoInfoHeader) );
Marshal.FreeCoTaskMem( media.formatPtr ); media.formatPtr = IntPtr.Zero;
hr = sampGrabber.SetBufferSamples( false );
if( hr == 0 )
hr = sampGrabber.SetOneShot( false );
if( hr == 0 )
hr = sampGrabber.SetCallback( null, 0 );
//if( hr < 0 )
// Marshal.ThrowExceptionForHR( hr );
// Finish Configuration of SampGrabber <<<<<<----------------------------------------------------
}
if ( didSomething )
graphState = GraphState.Rendered;
}
Example 3: loadVideo
private void loadVideo(String videoPath)
{
videoFilepath = videoPath;
videoFileName.Text = getDisplayVideoName();
if (graph != null)
{
graph = null;
}
if (mediaControl != null)
{
// Stop media playback
this.mediaControl.Stop();
mediaControl = null;
}
if (videoWindow != null)
{
videoWindow.put_Owner(IntPtr.Zero);
videoWindow = null;
}
if (mediaSeeking != null)
{
mediaSeeking = null;
}
if (basicAudio != null)
{
basicAudio = null;
}
GC.Collect();
/* if (mediaPosition != null)
{
mediaPosition = null;
}*/
graph = (IGraphBuilder)new FilterGraph();
mediaControl = (IMediaControl)graph;
//mediaPosition = (IMediaPosition)graph;
videoWindow = (IVideoWindow)graph;
mediaSeeking = (IMediaSeeking)graph;
basicAudio = (IBasicAudio)graph;
AviSplitter spliter = new AviSplitter();
graph.AddFilter((IBaseFilter)spliter, null);
graph.RenderFile(videoPath, null);
graph.SetDefaultSyncSource();
/*
* AMSeekingSeekingCapabilities cap = AMSeekingSeekingCapabilities.CanGetCurrentPos;
if (mediaSeeking.CheckCapabilities(ref cap) > 0)
{
this.consoleErreur.AppendText("Impossible de recuperer la position de la frame");
}
* */
videoWindow.put_Owner(videoPanel.Handle);
videoWindow.put_MessageDrain(videoPanel.Handle);
videoWindow.put_WindowStyle(WindowStyle.Child);
videoWindow.put_WindowStyleEx(WindowStyleEx.ControlParent);
videoWindow.put_Left(0);
videoWindow.put_Top(0);
videoWindow.put_Width(videoPanel.Width);
videoWindow.put_Height(videoPanel.Height);
//positionTrackbar.Enabled = true;
speedTrackBar.Enabled = true;
mediaSeeking.SetTimeFormat(TimeFormat.Frame);
double rate;
mediaSeeking.GetRate(out rate);
rateText.Text = rate.ToString();
speedTrackBar.Value = (int)(speedTrackBar.Maximum * rate / 2);
trackBar1.Value = trackBar1.Maximum / 2;
this.basicAudio.put_Volume(-5000 + 5000 * trackBar1.Value / trackBar1.Maximum);
//mediaPosition.put_Rate(0.5);
running = false;
frameChanged = false;
}
Example 4: ConfigureVideoWindow
/// <summary>
/// Configure the video window
/// </summary>
/// <param name="videoWindow">Interface of the video renderer</param>
/// <param name="previewControl">Preview Control to draw into</param>
private void ConfigureVideoWindow(IVideoWindow videoWindow, Control previewControl)
{
int hr;
if (previewControl == null)
{
return;
}
// Set the output window
hr = videoWindow.put_Owner(ThreadSafe.GetHandle(previewControl));
if (hr >= 0) // If there is video
{
// Set the window style
hr = videoWindow.put_WindowStyle((WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings));
DsError.ThrowExceptionForHR(hr);
// Make the window visible
hr = videoWindow.put_Visible(OABool.True);
DsError.ThrowExceptionForHR(hr);
// Position the playing location
Rectangle rc = ThreadSafe.GetClientRectangle(previewControl);
hr = videoWindow.SetWindowPosition(0, 0, rc.Right, rc.Bottom);
DsError.ThrowExceptionForHR(hr);
}
}
Example 5: ConfigVideo
private void ConfigVideo(IVideoWindow ivw, Control hControl)
{
int hr;
hr = ivw.put_Owner(hControl.Handle);
DsError.ThrowExceptionForHR(hr);
hr = ivw.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings);
DsError.ThrowExceptionForHR( hr );
// Yes, make it visible
hr = ivw.put_Visible( OABool.True );
DsError.ThrowExceptionForHR( hr );
// Move to upper left corner
Rectangle rc = hControl.ClientRectangle;
hr = ivw.SetWindowPosition( 0, 0, rc.Right, rc.Bottom );
DsError.ThrowExceptionForHR( hr );
}
Example 6: InitVideoWindow
private void InitVideoWindow()
{
if (Render == null)
return;
videoWindow = (IVideoWindow)graphBuilder;
//Set the owner of the videoWindow to an IntPtr of some sort (the Handle of any control - could be a form / button etc.)
var hr = videoWindow.put_Owner(Render.Handle);
DsError.ThrowExceptionForHR(hr);
//Set the style of the video window
hr = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings);
DsError.ThrowExceptionForHR(hr);
// Position video window in client rect of main application window
//hr = videoWindow.SetWindowPosition(0, 0, Render.Width, Render.Height);
Resize();
DsError.ThrowExceptionForHR(hr);
videoWindow.put_Visible(OABool.True);
Render.SizeChanged -= new EventHandler(onResize);
Render.SizeChanged += new EventHandler(onResize);
}
Example 7: Cleanup
private void Cleanup()
{
if (graphBuilder == null)
{
return;
}
int hr;
Log.Info("RTSPPlayer:cleanup DShow graph");
try
{
if (VMR9Util.g_vmr9 != null)
{
VMR9Util.g_vmr9.Vmr9MediaCtrl(_mediaCtrl);
VMR9Util.g_vmr9.Enable(false);
}
if (mediaEvt != null)
{
hr = mediaEvt.SetNotifyWindow(IntPtr.Zero, WM_GRAPHNOTIFY, IntPtr.Zero);
}
videoWin = graphBuilder as IVideoWindow;
if (videoWin != null && GUIGraphicsContext.VideoRenderer != GUIGraphicsContext.VideoRendererType.madVR)
{
videoWin.put_Owner(IntPtr.Zero);
videoWin.put_Visible(OABool.False);
}
_mediaCtrl = null;
mediaEvt = null;
_mediaSeeking = null;
mediaPos = null;
basicAudio = null;
basicVideo = null;
videoWin = null;
SubEngine.GetInstance().FreeSubtitles();
if (graphBuilder != null)
{
DirectShowUtil.RemoveFilters(graphBuilder);
if (_rotEntry != null)
{
_rotEntry.SafeDispose();
_rotEntry = null;
}
DirectShowUtil.FinalReleaseComObject(graphBuilder);
graphBuilder = null;
}
if (VMR9Util.g_vmr9 != null)
{
VMR9Util.g_vmr9.SafeDispose();
VMR9Util.g_vmr9 = null;
}
GUIGraphicsContext.form.Invalidate(true);
_state = PlayState.Init;
if (_mpegDemux != null)
{
Log.Info("cleanup mpegdemux");
DirectShowUtil.FinalReleaseComObject(_mpegDemux);
_mpegDemux = null;
}
if (_rtspSource != null)
{
Log.Info("cleanup _rtspSource");
DirectShowUtil.FinalReleaseComObject(_rtspSource);
_rtspSource = null;
}
if (_subtitleFilter != null)
{
DirectShowUtil.FinalReleaseComObject(_subtitleFilter);
_subtitleFilter = null;
if (this.dvbSubRenderer != null)
{
this.dvbSubRenderer.SetPlayer(null);
}
this.dvbSubRenderer = null;
}
if (vobSub != null)
{
Log.Info("cleanup vobSub");
DirectShowUtil.FinalReleaseComObject(vobSub);
vobSub = null;
}
}
catch (Exception ex)
{
Log.Error("RTSPPlayer: Exception while cleanuping DShow graph - {0} {1}", ex.Message, ex.StackTrace);
}
//switch back to directx windowed mode
Log.Info("RTSPPlayer: Disabling DX9 exclusive mode");
GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 0, 0, null);
GUIWindowManager.SendMessage(msg);
}
Example 8: Play
//......... part of the code omitted here .........
mpeg2ProgramStream.unkPtr = IntPtr.Zero;
mpeg2ProgramStream.sampleSize = 0;
mpeg2ProgramStream.temporalCompression = false;
mpeg2ProgramStream.fixedSizeSamples = true;
mpeg2ProgramStream.formatType = FormatType.None;
mpeg2ProgramStream.formatSize = 0;
mpeg2ProgramStream.formatPtr = IntPtr.Zero;
hr = interfaceFile.Load(fileName, mpeg2ProgramStream);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9:Failed to load file");
return false;
}
#region connect tsfilesource->demux
Log.WriteFile("connect tsfilesource->demux");
Log.WriteFile("TSStreamBufferPlayer9:connect tsfilesource->mpeg2 demux");
IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_tsFileSource, PinDirection.Output, 0);
if (pinTsOut == null)
{
Log.WriteFile("TSStreamBufferPlayer9:failed to find output pin of tsfilesource");
return false;
}
IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
if (pinDemuxIn == null)
{
Log.WriteFile("TSStreamBufferPlayer9:failed to find output pin of tsfilesource");
return false;
}
hr = _graphBuilder.Connect(pinTsOut, pinDemuxIn);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9:failed to connect tsfilesource->mpeg2 demux:{0:X}", hr);
return false;
}
Release.ComObject(pinTsOut);
Release.ComObject(pinDemuxIn);
#endregion
#region map demux pids
Log.WriteFile("map mpeg2 pids");
IMPEG2StreamIdMap pStreamId = (IMPEG2StreamIdMap)_pinVideo;
hr = pStreamId.MapStreamId(0xe0, MPEG2Program.ElementaryStream, 0, 0);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9: failed to map pid 0xe0->video pin");
return false;
}
pStreamId = (IMPEG2StreamIdMap)_pinAudio;
hr = pStreamId.MapStreamId(0xc0, MPEG2Program.ElementaryStream, 0, 0);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9: failed to map pid 0xc0->audio pin");
return false;
}
#endregion
#region render demux audio/video pins
Log.WriteFile("render pins");
hr = _graphBuilder.Render(_pinAudio);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9:failed to render video output pin:{0:X}", hr);
}
hr = _graphBuilder.Render(_pinVideo);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9:failed to render audio output pin:{0:X}", hr);
}
#endregion
#endregion
_videoWin = _graphBuilder as IVideoWindow;
if (_videoWin != null)
{
_videoWin.put_Visible(OABool.True);
_videoWin.put_Owner(form.Handle);
_videoWin.put_WindowStyle(
(WindowStyle)((int)WindowStyle.Child + (int)WindowStyle.ClipSiblings + (int)WindowStyle.ClipChildren));
_videoWin.put_MessageDrain(form.Handle);
_videoWin.SetWindowPosition(190, 250, 150, 150);
}
Log.WriteFile("run graph");
_mediaCtrl = (IMediaControl)_graphBuilder;
hr = _mediaCtrl.Run();
Log.WriteFile("TSStreamBufferPlayer9:running:{0:X}", hr);
return true;
}
Example 9: ConfigureVideoWindow
// Configure the video window
private void ConfigureVideoWindow(IVideoWindow videoWindow, GraphicsDevice hWin)
{
int hr;
// Set the output window
hr = videoWindow.put_Owner( hWin.Adapter.MonitorHandle ); //CHANGE
DsError.ThrowExceptionForHR( hr );
// Set the window style
hr = videoWindow.put_WindowStyle( (WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings) );
DsError.ThrowExceptionForHR( hr );
// Make the window visible
hr = videoWindow.put_Visible( OABool.True );
DsError.ThrowExceptionForHR( hr );
// Position the playing location
hr = videoWindow.SetWindowPosition( 0, 0, 1920, 600 );
DsError.ThrowExceptionForHR( hr );
}
Example 10: Play
//......... part of the code omitted here .........
{
hr = filterState.IsFilterReadyToConnectPins(out ready);
System.Threading.Thread.Sleep(25);
}
}
if (hr != 0)
{
ErrorOrSplitter = DirectShowLib.DsError.GetErrorText(hr);
DirectShowUtil.ReleaseComObject(sourceFilter, 2000);
return false;
}
OnlineVideos.MediaPortal1.Player.OnlineVideosPlayer.AddPreferredFilters(_graphBuilder, sourceFilter);
// try to connect the filters
int numConnected = 0;
IEnumPins pinEnum;
hr = sourceFilter.EnumPins(out pinEnum);
if ((hr == 0) && (pinEnum != null))
{
pinEnum.Reset();
IPin[] pins = new IPin[1];
int iFetched;
int iPinNo = 0;
do
{
iPinNo++;
hr = pinEnum.Next(1, pins, out iFetched);
if (hr == 0)
{
if (iFetched == 1 && pins[0] != null)
{
PinDirection pinDir;
pins[0].QueryDirection(out pinDir);
if (pinDir == PinDirection.Output)
{
hr = _graphBuilder.Render(pins[0]);
if (hr == 0)
{
numConnected++;
IPin connectedPin;
if (pins[0].ConnectedTo(out connectedPin) == 0 && connectedPin != null)
{
PinInfo connectedPinInfo;
connectedPin.QueryPinInfo(out connectedPinInfo);
FilterInfo connectedFilterInfo;
connectedPinInfo.filter.QueryFilterInfo(out connectedFilterInfo);
DirectShowUtil.ReleaseComObject(connectedPin, 2000);
IBaseFilter connectedFilter;
if (connectedFilterInfo.pGraph.FindFilterByName(connectedFilterInfo.achName, out connectedFilter) == 0 && connectedFilter != null)
{
var codecInfo = GetCodecInfo(connectedFilter, connectedFilterInfo.achName);
if (codecInfo != null)
{
if (string.IsNullOrEmpty(ErrorOrSplitter)) ErrorOrSplitter = codecInfo.ToString();
else ErrorOrSplitter += ", " + codecInfo.ToString();
}
DirectShowUtil.ReleaseComObject(connectedFilter);
}
}
}
}
DirectShowUtil.ReleaseComObject(pins[0], 2000);
}
}
} while (iFetched == 1);
}
DirectShowUtil.ReleaseComObject(pinEnum, 2000);
if (numConnected > 0)
{
_videoWin = _graphBuilder as IVideoWindow;
if (_videoWin != null)
{
_videoWin.put_Owner(_parentControl.Handle);
_videoWin.put_WindowStyle((WindowStyle)((int)WindowStyle.Child + (int)WindowStyle.ClipSiblings + (int)WindowStyle.ClipChildren));
_videoWin.SetWindowPosition(_parentControl.ClientRectangle.X, _parentControl.ClientRectangle.Y, _parentControl.ClientRectangle.Width, _parentControl.ClientRectangle.Height);
_videoWin.put_Visible(OABool.True);
}
_mediaCtrl = (IMediaControl)_graphBuilder;
hr = _mediaCtrl.Run();
mediaEvents = (IMediaEventEx)_graphBuilder;
// Have the graph signal event via window callbacks for performance
mediaEvents.SetNotifyWindow(_parentControl.FindForm().Handle, WMGraphNotify, IntPtr.Zero);
_parentControl.SizeChanged += _parentControl_SizeChanged;
return true;
}
else
{
ErrorOrSplitter = string.Format("Could not render output pins of {0}", sourceFilterName);
DirectShowUtil.ReleaseComObject(sourceFilter, 2000);
Stop();
return false;
}
}
Example 11: openVid
public void openVid(string fileName)
{
if (!File.Exists(fileName))
{
errorMsg("El archivo '" + fileName + "' no existe.");
videoPanel.Visible = false;
isVideoLoaded = false;
drawPositions();
return;
}
if (VideoBoxType == PreviewType.AviSynth)
{
avsClip = null;
//butPause.Enabled = true;
//butPlayR.Enabled = true;
//butPlay.Enabled = true;
//butStop.Enabled = true;
videoPictureBox.Visible = false;
//closeVidDShow(); // added
}
if (mediaControl != null)
{
mediaControl.Stop();
videoWindow.put_Visible(DirectShowLib.OABool.False);
videoWindow.put_Owner(IntPtr.Zero);
}
// Dshow :~~
graphBuilder = (IGraphBuilder)new FilterGraph();
graphBuilder.RenderFile(fileName, null);
mediaControl = (IMediaControl)graphBuilder;
// mediaEventEx = (IMediaEventEx)this.graphBuilder;
mediaSeeking = (IMediaSeeking)graphBuilder;
mediaPosition = (IMediaPosition)graphBuilder;
basicVideo = graphBuilder as IBasicVideo;
videoWindow = graphBuilder as IVideoWindow;
VideoBoxType = PreviewType.DirectShow;
// extract video information
int x, y; double atpf;
basicVideo.GetVideoSize(out x, out y);
if (x == 0 || y == 0)
{
errorMsg("No se puede abrir un vídeo sin dimensiones.");
videoPanel.Visible = false;
isVideoLoaded = false;
drawPositions();
return;
}
if (videoInfo == null) videoInfo = new VideoInfo(fileName);
videoInfo.Resolution = new Size(x, y);
basicVideo.get_AvgTimePerFrame(out atpf);
videoInfo.FrameRate = Math.Round(1 / atpf, 3);
//labelResFPS.Text = x.ToString() + "x" + y.ToString() + " @ " + videoInfo.FrameRate.ToString() + " fps";
textResX.Text = x.ToString();
textResY.Text = y.ToString();
textFPS.Text = videoInfo.FrameRate.ToString();
if (File.Exists(Application.StartupPath+"\\MediaInfo.dll") && File.Exists(Application.StartupPath+"\\MediaInfoWrapper.dll"))
{
treeView1.Enabled = true;
try
{
RetrieveMediaFileInfo(fileName);
}
catch { treeView1.Enabled = false; }
}
else treeView1.Enabled = false;
if (x != 0)
{
vidScaleFactor.Enabled = true;
try
{
vidScaleFactor.Text = getFromConfigFile("mainW_Zoom");
}
catch
{
vidScaleFactor.Text = "50%";
};
double p = double.Parse(vidScaleFactor.Text.Substring(0, vidScaleFactor.Text.IndexOf('%')));
p = p / 100;
int new_x = (int)(x * p);
int new_y = (int)(y * p);
videoWindow.put_Height(new_y);
videoWindow.put_Width(new_x);
videoWindow.put_Owner(videoPanel.Handle);
//......... part of the code omitted here .........
Example 12: translateW_Load
private void translateW_Load(object sender, EventArgs e)
{
//this.MaximumSize = this.Size;
//this.MinimumSize = this.Size;
toolStripStatusLabel2.Text = "Cargando el Asistente de Traducción...";
// load the script
autoComplete = new ArrayList();
al = mW.al;
gridCont.RowCount = al.Count;
bool hasAutoComplete = (mW.script.GetHeader().GetHeaderValue("AutoComplete") != string.Empty);
for (int i = 0; i < al.Count; i++)
{
lineaASS lass = (lineaASS)al[i];
gridCont[0, i].Value = lass.personaje;
if (!autoComplete.Contains(lass.personaje) && !hasAutoComplete)
if (lass.personaje.Trim()!="")
autoComplete.Add(lass.personaje);
gridCont[1, i].Value = lass.texto;
}
if (hasAutoComplete) InsertAutoCompleteFromScript();
labelLineaActual.Text = "1 de " + (al.Count) + " (0%)";
textPersonaje.Text = gridCont[0, 0].Value.ToString();
textOrig.Text = gridCont[1, 0].Value.ToString();
// load the video
graphBuilder = (IGraphBuilder)new FilterGraph();
graphBuilder.RenderFile(videoInfo.FileName, null);
mediaControl = (IMediaControl)graphBuilder;
// mediaEventEx = (IMediaEventEx)this.graphBuilder;
mediaSeeking = (IMediaSeeking)graphBuilder;
mediaPosition = (IMediaPosition)graphBuilder;
basicVideo = graphBuilder as IBasicVideo;
basicAudio = graphBuilder as IBasicAudio;
videoWindow = graphBuilder as IVideoWindow;
try
{
int x, y; double atpf;
basicVideo.GetVideoSize(out x, out y);
basicVideo.get_AvgTimePerFrame(out atpf);
videoInfo.FrameRate = Math.Round(1 / atpf, 3);
int new_x = videoPanel.Width;
int new_y = (new_x * y) / x;
videoWindow.put_Height(new_y);
videoWindow.put_Width(new_x);
videoWindow.put_Owner(videoPanel.Handle);
videoPanel.Size = new System.Drawing.Size(new_x, new_y);
videoWindow.SetWindowPosition(0, 0, videoPanel.Width, videoPanel.Height);
videoWindow.put_WindowStyle(WindowStyle.Child);
videoWindow.put_Visible(DirectShowLib.OABool.True);
mediaSeeking.SetTimeFormat(DirectShowLib.TimeFormat.Frame);
mediaControl.Run();
}
catch { mW.errorMsg("Imposible cargar el vídeo. Debe haber algún problema con el mismo, y el asistente será muy inestable"); }
// enable timers & handlers
timer1.Tick += new EventHandler(timer1_Tick);
timer1.Enabled = true;
timer2.Tick += new EventHandler(timer2_Tick);
AutoSaveTimer.Tick += new EventHandler(timer3_Tick);
AutoSaveTimer.Enabled = true;
gridCont.CellClick += new DataGridViewCellEventHandler(gridCont_CellClick);
textPersonaje.TextChanged += new EventHandler(textPersonaje_TextChanged);
textTradu.TextChanged += new EventHandler(textTradu_TextChanged);
textTradu.KeyUp += new KeyEventHandler(textBox1_KeyUp);
textTradu.KeyDown += new KeyEventHandler(textTradu_KeyDown);
textTradu.KeyPress += new KeyPressEventHandler(textTradu_KeyPress);
textPersonaje.KeyDown += new KeyEventHandler(textPersonaje_KeyDown);
textPersonaje.KeyPress += new KeyPressEventHandler(textPersonaje_KeyPress);
button8.GotFocus += new EventHandler(button8_GotFocus);
button9.GotFocus += new EventHandler(button9_GotFocus);
gridCont.DoubleClick += new EventHandler(gridCont_DoubleClick);
gridCont.SelectionChanged += new EventHandler(gridCont_SelectionChanged);
gridCont.KeyUp += new KeyEventHandler(gridCont_KeyUp);
listBox1.KeyUp += new KeyEventHandler(listBox1_KeyUp);
textToAdd.KeyPress += new KeyPressEventHandler(textToAdd_KeyPress);
progressBar1.MouseDown += new MouseEventHandler(progressBar1_MouseDown);
tiempoInicio_.TimeValidated += new TimeTextBox.OnTimeTextBoxValidated(tiempo_TimeValidated);
tiempoFin_.TimeValidated += new TimeTextBox.OnTimeTextBoxValidated(tiempo_TimeValidated);
this.Move += new EventHandler(translateW_Move);
//textTradu.ContextMenu = new ASSTextBoxRegExDefaultContextMenu(textTradu);
mediaControl.Pause();
// load from config
try
{
checkAutoComplete.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_autoC"));
checkTagSSA.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_tagSSA"));
checkComment.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_Comment"));
checkVideo.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_aud"));
//......... part of the code omitted here .........
Example 13: renderGraph
/// <summary>
/// Connects the filters of a previously created graph
/// (created by createGraph()). Once rendered the graph
/// is ready to be used. This method may also destroy
/// streams if we have streams we no longer want.
/// </summary>
private void renderGraph()
{
DsGuid cat;
DsGuid med;
int hr;
bool didSomething = false;
assertStopped();
// Ensure required properties set
if (_filename == null)
throw new ArgumentException("The Filename property has not been set to a file.\n");
// Stop the graph
if (_mediaControl != null)
_mediaControl.Stop();
// Create the graph if needed (group should already be created)
createGraph();
// Derender the graph if we have a capture or preview stream
// that we no longer want. We can't derender the capture and
// preview streams separately.
// Notice the second case will leave a capture stream intact
// even if we no longer want it. This allows the user that is
// not using the preview to Stop() and Start() without
// rerendering the graph.
if (!_wantPreviewRendered && _isPreviewRendered)
derenderGraph();
if (!_wantCaptureRendered && _isCaptureRendered)
if (_wantPreviewRendered)
derenderGraph();
// Render capture stream (only if necessary)
if (_wantCaptureRendered && !_isCaptureRendered)
{
// Render the file writer portion of graph (mux -> file)
Guid mediaSubType = MediaSubType.Avi;
Marshal.ThrowExceptionForHR(_captureGraphBuilder.SetOutputFileName(mediaSubType, Filename, out _muxFilter, out _fileWriterFilter));
// Render video (video -> mux)
if (VideoDevice != null)
{
// Try interleaved first, because if the device supports it,
// it's the only way to get audio as well as video
cat = DsGuid.FromGuid(PinCategory.Capture);
med = DsGuid.FromGuid(MediaType.Interleaved);
hr = _captureGraphBuilder.RenderStream(cat, med, _videoDeviceFilter, _videoCompressorFilter, _muxFilter);
//hr = _captureGraphBuilder.RenderStream(ref cat, ref med, _videoDeviceFilter, _videoCompressorFilter, _muxFilter);
if (hr < 0)
{
med = DsGuid.FromGuid(MediaType.Video);
hr = _captureGraphBuilder.RenderStream(cat, med, _videoDeviceFilter, _videoCompressorFilter, _muxFilter);
//hr = _captureGraphBuilder.RenderStream(ref cat, ref med, _videoDeviceFilter, _videoCompressorFilter, _muxFilter);
if (hr == -2147220969)
throw new DeviceInUseException("Video device", hr);
Marshal.ThrowExceptionForHR(hr);
}
}
// Render audio (audio -> mux)
if (AudioDevice != null)
{
cat = DsGuid.FromGuid(PinCategory.Capture);
med = DsGuid.FromGuid(MediaType.Audio);
Marshal.ThrowExceptionForHR(_captureGraphBuilder.RenderStream(cat, med, _audioDeviceFilter, _audioCompressorFilter, _muxFilter));
//Marshal.ThrowExceptionForHR(_captureGraphBuilder.RenderStream(ref cat, ref med, _audioDeviceFilter, _audioCompressorFilter, _muxFilter));
}
_isCaptureRendered = true;
didSomething = true;
}
// Render preview stream (only if necessary)
if (_wantPreviewRendered && !_isPreviewRendered)
{
// Render preview (video -> renderer)
cat = DsGuid.FromGuid(PinCategory.Preview);
med = DsGuid.FromGuid(MediaType.Video);
Marshal.ThrowExceptionForHR(_captureGraphBuilder.RenderStream(cat, med, _videoDeviceFilter, null, null));
// Marshal.ThrowExceptionForHR(_captureGraphBuilder.RenderStream(ref cat, ref med, _videoDeviceFilter, null, null));
// Get the IVideoWindow interface
_videoWindow = (IVideoWindow)_graphBuilder;
// Set the video window to be a child of the main window
Marshal.ThrowExceptionForHR(_videoWindow.put_Owner(_previewWindow.Handle));
// Set video window style
Marshal.ThrowExceptionForHR(_videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings));
//Marshal.ThrowExceptionForHR(_videoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS));
//......... part of the code omitted here .........
Example 14: FileLoad
// Method for loading a video file into the given panel.
public void FileLoad(string sfile, Panel vPanel)
{
CleanUp();
graphBuilder = (IGraphBuilder) new FilterGraph();
mediaControl = graphBuilder as IMediaControl;
mediaPosition = graphBuilder as IMediaPosition;
videoWindow = graphBuilder as IVideoWindow;
basicAudio = graphBuilder as IBasicAudio;
ddColor.lBrightness = 0;
ddColor.lContrast = 0;
ddColor.lGamma = 0;
ddColor.lSaturation = 0;
graphBuilder.RenderFile(sfile, null);
videoWindow.put_Owner(vPanel.Handle);
videoWindow.put_WindowStyle(WindowStyle.Child
| WindowStyle.ClipSiblings
| WindowStyle.ClipChildren);
videoWindow.SetWindowPosition(vPanel.ClientRectangle.Left,
vPanel.ClientRectangle.Top,
vPanel.ClientRectangle.Width,
vPanel.ClientRectangle.Height);
mediaControl.Run();
CurrentStatus = mStatus.Play;
mediaPosition.get_Duration(out mediaTimeSeconds);
allSeconds = (int)mediaTimeSeconds;
}
Example 15: Cleanup
private void Cleanup()
{
if (graphBuilder == null)
{
return;
}
int hr;
Log.Info("RTSPPlayer:cleanup DShow graph");
try
{
if (_mediaCtrl != null)
{
int counter = 0;
FilterState state;
hr = _mediaCtrl.Stop();
hr = _mediaCtrl.GetState(10, out state);
while (state != FilterState.Stopped || GUIGraphicsContext.InVmr9Render)
{
Thread.Sleep(100);
hr = _mediaCtrl.GetState(10, out state);
counter++;
if (counter >= 30)
{
if (state != FilterState.Stopped)
Log.Error("RTSPPlayer: graph still running");
if (GUIGraphicsContext.InVmr9Render)
Log.Error("RTSPPlayer: in renderer");
break;
}
}
_mediaCtrl = null;
}
if (Vmr9 != null)
{
Vmr9.Enable(false);
}
if (mediaEvt != null)
{
hr = mediaEvt.SetNotifyWindow(IntPtr.Zero, WM_GRAPHNOTIFY, IntPtr.Zero);
mediaEvt = null;
}
videoWin = graphBuilder as IVideoWindow;
if (videoWin != null)
{
hr = videoWin.put_Visible(OABool.False);
hr = videoWin.put_Owner(IntPtr.Zero);
videoWin = null;
}
_mediaSeeking = null;
mediaPos = null;
basicAudio = null;
basicVideo = null;
videoWin = null;
SubEngine.GetInstance().FreeSubtitles();
if (graphBuilder != null)
{
DirectShowUtil.RemoveFilters(graphBuilder);
if (_rotEntry != null)
{
_rotEntry.SafeDispose();
_rotEntry = null;
}
DirectShowUtil.ReleaseComObject(graphBuilder);
graphBuilder = null;
}
if (Vmr9 != null)
{
Vmr9.SafeDispose();
Vmr9 = null;
}
GUIGraphicsContext.form.Invalidate(true);
_state = PlayState.Init;
if (_mpegDemux != null)
{
Log.Info("cleanup mpegdemux");
while ((hr = DirectShowUtil.ReleaseComObject(_mpegDemux)) > 0)
{
;
}
_mpegDemux = null;
}
if (_rtspSource != null)
{
Log.Info("cleanup _rtspSource");
while ((hr = DirectShowUtil.ReleaseComObject(_rtspSource)) > 0)
{
;
}
_rtspSource = null;
}
if (_subtitleFilter != null)
{
//......... part of the code omitted here .........