This article collects and summarizes typical usage examples of the C# method IVideoWindow.put_WindowStyle. If you have been wondering how to call IVideoWindow.put_WindowStyle in C#, what it is for, or what real-world calls look like, the selected examples below should help. You can also explore further usage of the containing interface, IVideoWindow.
Below, 15 code examples of the IVideoWindow.put_WindowStyle method are shown, sorted by popularity by default.
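Before diving into the full examples, here is a minimal sketch of the pattern nearly all of them share: cast the graph builder to IVideoWindow, attach it to a host control with put_Owner, switch it to a borderless child window with put_WindowStyle, then size and show it. It assumes the DirectShowLib bindings (FilterGraph, IVideoWindow, WindowStyle, OABool, DsError) used by most examples below; the method name, file path parameter, and the previewPanel control are placeholders, not part of any original sample.
private void ShowVideoInPanel(string filePath, System.Windows.Forms.Control previewPanel)
{
    // Build a playback graph for the file; the graph manager exposes IVideoWindow
    // once a video renderer has been added to the graph.
    IGraphBuilder graphBuilder = (IGraphBuilder)new FilterGraph();
    int hr = graphBuilder.RenderFile(filePath, null);
    DsError.ThrowExceptionForHR(hr);

    IVideoWindow videoWindow = (IVideoWindow)graphBuilder;

    // Host the video window inside the panel as a borderless child window.
    hr = videoWindow.put_Owner(previewPanel.Handle);
    DsError.ThrowExceptionForHR(hr);
    hr = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings);
    DsError.ThrowExceptionForHR(hr);

    // Fill the panel's client area, make the window visible, and start playback.
    hr = videoWindow.SetWindowPosition(0, 0, previewPanel.ClientSize.Width, previewPanel.ClientSize.Height);
    DsError.ThrowExceptionForHR(hr);
    hr = videoWindow.put_Visible(OABool.True);
    DsError.ThrowExceptionForHR(hr);

    hr = ((IMediaControl)graphBuilder).Run();
    DsError.ThrowExceptionForHR(hr);
}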
Example 1: renderGraph
//......... part of the code is omitted here .........
// Render the file writer portion of graph (mux -> file)
Guid mediaSubType = MediaSubType.Avi;
hr = captureGraphBuilder.SetOutputFileName( ref mediaSubType, Filename, out muxFilter, out fileWriterFilter );
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
if ( VideoDevice != null )
{
// Try interleaved first, because if the device supports it,
// it's the only way to get audio as well as video
cat = PinCategory.Capture;
med = MediaType.Interleaved;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter);
if( hr < 0 )
{
med = MediaType.Video;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter);
//if ( hr == -2147220969 ) throw new DeviceInUseException( "Video device", hr );
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
}
}
// Render audio (audio -> mux)
if ( AudioDevice != null )
{
cat = PinCategory.Capture;
med = MediaType.Audio;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, audioDeviceFilter, audioCompressorFilter, muxFilter );
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
}
isCaptureRendered = true;
didSomething = true;
}
// Render preview stream and launch the baseGrabFlt to capture frames
// ===================================================================================
if ( wantPreviewRendered && renderStream && !isPreviewRendered )
{
// Render preview (video.PinPreview -> baseGrabFlt -> renderer)
// At this point intelligent connect is used, because my webcams don't have both a preview pin and
// a capture pin, so the Smart Tee filter will be used. I have tested this using GraphEdit.
// I can type hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, null, baseGrabFlt);
// because baseGrabFlt is a transform filter, like videoCompressorFilter.
cat = PinCategory.Preview;
med = MediaType.Video;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, baseGrabFlt, null );
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
// Get the IVideoWindow interface
videoWindow = (IVideoWindow) graphBuilder;
// Set the video window to be a child of the main window
hr = videoWindow.put_Owner( previewWindow.Handle );
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
// Set video window style
hr = videoWindow.put_WindowStyle( WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
// Position video window in client rect of owner window
previewWindow.Resize += new EventHandler( onPreviewWindowResize );
onPreviewWindowResize( this, null );
// Make the video window visible, now that it is properly positioned
hr = videoWindow.put_Visible( DsHlp.OATRUE );
//if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
hr = mediaEvt.SetNotifyWindow( this.Handle, WM_GRAPHNOTIFY, IntPtr.Zero );
//if( hr < 0 )
// Marshal.ThrowExceptionForHR( hr );
isPreviewRendered = true;
didSomething = true;
// Begin Configuration of SampGrabber <<<<<<----------------------------------------------------
AMMediaType media = new AMMediaType();
hr = sampGrabber.GetConnectedMediaType( media );
//if( hr < 0 )
// Marshal.ThrowExceptionForHR( hr );
if( (media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero) )
throw new NotSupportedException( "Unknown Grabber Media Format" );
videoInfoHeader = (VideoInfoHeader) Marshal.PtrToStructure( media.formatPtr, typeof(VideoInfoHeader) );
Marshal.FreeCoTaskMem( media.formatPtr ); media.formatPtr = IntPtr.Zero;
hr = sampGrabber.SetBufferSamples( false );
if( hr == 0 )
hr = sampGrabber.SetOneShot( false );
if( hr == 0 )
hr = sampGrabber.SetCallback( null, 0 );
//if( hr < 0 )
// Marshal.ThrowExceptionForHR( hr );
// Finish Configuration of SampGrabber <<<<<<----------------------------------------------------
}
if ( didSomething )
graphState = GraphState.Rendered;
}
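Example 1 deliberately disables buffering and callbacks on the sample grabber (SetBufferSamples(false), SetCallback(null, 0)). If frames are to be delivered to managed code instead, an object implementing the grabber's callback interface is passed to SetCallback. The sketch below assumes a DirectShowLib-style ISampleGrabberCB; the class name is illustrative, and the second argument value 1 selects BufferCB rather than SampleCB.
class FrameGrabberCallback : ISampleGrabberCB
{
    // Invoked when SetCallback(..., 0) is used; receives the whole media sample.
    public int SampleCB(double sampleTime, IMediaSample pSample)
    {
        return 0;
    }

    // Invoked when SetCallback(..., 1) is used; receives a raw pointer to the frame data.
    public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
    {
        // The buffer is only valid for the duration of this call, so copy it out.
        byte[] frame = new byte[bufferLen];
        Marshal.Copy(pBuffer, frame, 0, bufferLen);
        return 0;
    }
}

// Registration, replacing the SetCallback(null, 0) call above:
// hr = sampGrabber.SetCallback(new FrameGrabberCallback(), 1);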
Example 2: loadVideo
private void loadVideo(String videoPath)
{
videoFilepath = videoPath;
videoFileName.Text = getDisplayVideoName();
if (graph != null)
{
graph = null;
}
if (mediaControl != null)
{
// Stop media playback
this.mediaControl.Stop();
mediaControl = null;
}
if (videoWindow != null)
{
videoWindow.put_Owner(IntPtr.Zero);
videoWindow = null;
}
if (mediaSeeking != null)
{
mediaSeeking = null;
}
if (basicAudio != null)
{
basicAudio = null;
}
GC.Collect();
/* if (mediaPosition != null)
{
mediaPosition = null;
}*/
graph = (IGraphBuilder)new FilterGraph();
mediaControl = (IMediaControl)graph;
//mediaPosition = (IMediaPosition)graph;
videoWindow = (IVideoWindow)graph;
mediaSeeking = (IMediaSeeking)graph;
basicAudio = (IBasicAudio)graph;
AviSplitter spliter = new AviSplitter();
graph.AddFilter((IBaseFilter)spliter, null);
graph.RenderFile(videoPath, null);
graph.SetDefaultSyncSource();
/*
* AMSeekingSeekingCapabilities cap = AMSeekingSeekingCapabilities.CanGetCurrentPos;
if (mediaSeeking.CheckCapabilities(ref cap) > 0)
{
this.consoleErreur.AppendText("Impossible de recuperer la position de la frame");
}
* */
videoWindow.put_Owner(videoPanel.Handle);
videoWindow.put_MessageDrain(videoPanel.Handle);
videoWindow.put_WindowStyle(WindowStyle.Child);
videoWindow.put_WindowStyleEx(WindowStyleEx.ControlParent);
videoWindow.put_Left(0);
videoWindow.put_Top(0);
videoWindow.put_Width(videoPanel.Width);
videoWindow.put_Height(videoPanel.Height);
//positionTrackbar.Enabled = true;
speedTrackBar.Enabled = true;
mediaSeeking.SetTimeFormat(TimeFormat.Frame);
double rate;
mediaSeeking.GetRate(out rate);
rateText.Text = rate.ToString();
speedTrackBar.Value = (int)(speedTrackBar.Maximum * rate / 2);
trackBar1.Value = trackBar1.Maximum / 2;
this.basicAudio.put_Volume(-5000 + 5000 * trackBar1.Value / trackBar1.Maximum);
//mediaPosition.put_Rate(0.5);
running = false;
frameChanged = false;
}
Example 3: Play
public bool Play(string fileName, Form form)
{
_form = form;
Log.WriteFile("play:{0}", fileName);
_graphBuilder = (IFilterGraph2)new FilterGraph();
_rotEntry = new DsROTEntry(_graphBuilder);
TsReader reader = new TsReader();
_tsReader = (IBaseFilter)reader;
Log.Info("TSReaderPlayer:add TsReader to graph");
_graphBuilder.AddFilter(_tsReader, "TsReader");
#region load file in TsReader
Log.WriteFile("load file in Ts");
IFileSourceFilter interfaceFile = (IFileSourceFilter)_tsReader;
if (interfaceFile == null)
{
Log.WriteFile("TSReaderPlayer:Failed to get IFileSourceFilter");
return false;
}
int hr = interfaceFile.Load(fileName, null);
if (hr != 0)
{
Log.WriteFile("TSReaderPlayer:Failed to load file");
return false;
}
#endregion
#region render pin
Log.Info("TSReaderPlayer:render TsReader outputs");
IEnumPins enumPins;
_tsReader.EnumPins(out enumPins);
IPin[] pins = new IPin[2];
int fetched;
while (enumPins.Next(1, pins, out fetched) == 0)
{
if (fetched != 1) break;
PinDirection direction;
pins[0].QueryDirection(out direction);
if (direction == PinDirection.Input)
{
Release.ComObject(pins[0]);
continue;
}
_graphBuilder.Render(pins[0]);
Release.ComObject(pins[0]);
}
Release.ComObject(enumPins);
#endregion
_videoWin = _graphBuilder as IVideoWindow;
if (_videoWin != null)
{
_videoWin.put_Visible(OABool.True);
_videoWin.put_Owner(form.Handle);
_videoWin.put_WindowStyle(
(WindowStyle)((int)WindowStyle.Child + (int)WindowStyle.ClipSiblings + (int)WindowStyle.ClipChildren));
_videoWin.put_MessageDrain(form.Handle);
_videoWin.SetWindowPosition(form.ClientRectangle.X, form.ClientRectangle.Y, form.ClientRectangle.Width,
form.ClientRectangle.Height);
}
Log.WriteFile("run graph");
_mediaCtrl = (IMediaControl)_graphBuilder;
hr = _mediaCtrl.Run();
Log.WriteFile("TSReaderPlayer:running:{0:X}", hr);
return true;
}
Example 4: ConfigVideo
private void ConfigVideo(IVideoWindow ivw, Control hControl)
{
int hr;
hr = ivw.put_Owner(hControl.Handle);
DsError.ThrowExceptionForHR(hr);
hr = ivw.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings);
DsError.ThrowExceptionForHR( hr );
// Yes, make it visible
hr = ivw.put_Visible( OABool.True );
DsError.ThrowExceptionForHR( hr );
// Move to upper left corner
Rectangle rc = hControl.ClientRectangle;
hr = ivw.SetWindowPosition( 0, 0, rc.Right, rc.Bottom );
DsError.ThrowExceptionForHR( hr );
}
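A hypothetical caller for a helper like ConfigVideo: render a file, hand the graph's video window to a control, then run the graph. The graph-building code, method name, and the panelVideo control below are assumptions, not part of the original example.
private void PlayFileInPanel(string filePath, System.Windows.Forms.Control panelVideo)
{
    IGraphBuilder graphBuilder = (IGraphBuilder)new FilterGraph();
    int hr = graphBuilder.RenderFile(filePath, null);
    DsError.ThrowExceptionForHR(hr);

    // Attach the video window to the panel using the helper above.
    ConfigVideo((IVideoWindow)graphBuilder, panelVideo);

    hr = ((IMediaControl)graphBuilder).Run();
    DsError.ThrowExceptionForHR(hr);
}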
Example 5: ConfigureVideoWindow
/// <summary>
/// Configure the video window
/// </summary>
/// <param name="videoWindow">Interface of the video renderer</param>
/// <param name="previewControl">Preview Control to draw into</param>
private void ConfigureVideoWindow(IVideoWindow videoWindow, Control previewControl)
{
int hr;
if (previewControl == null)
{
return;
}
// Set the output window
hr = videoWindow.put_Owner(ThreadSafe.GetHandle(previewControl));
if (hr >= 0) // If there is video
{
// Set the window style
hr = videoWindow.put_WindowStyle((WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings));
DsError.ThrowExceptionForHR(hr);
// Make the window visible
hr = videoWindow.put_Visible(OABool.True);
DsError.ThrowExceptionForHR(hr);
// Position the playing location
Rectangle rc = ThreadSafe.GetClientRectangle(previewControl);
hr = videoWindow.SetWindowPosition(0, 0, rc.Right, rc.Bottom);
DsError.ThrowExceptionForHR(hr);
}
}
Example 6: renderGraph
/// <summary>
/// Connects the filters of a previously created graph
/// (created by createGraph()). Once rendered the graph
/// is ready to be used. This method may also destroy
/// streams if we have streams we no longer want.
/// </summary>
protected void renderGraph()
{
Guid cat;
Guid med;
int hr;
bool didSomething = false;
const int WS_CHILD = 0x40000000;
const int WS_CLIPCHILDREN = 0x02000000;
const int WS_CLIPSIBLINGS = 0x04000000;
assertStopped();
// Ensure required properties set
if ( filename == null )
throw new ArgumentException( "The Filename property has not been set to a file.\n" );
// Stop the graph
if ( mediaControl != null )
mediaControl.Stop();
// Create the graph if needed (group should already be created)
createGraph();
// Derender the graph if we have a capture or preview stream
// that we no longer want. We can't derender the capture and
// preview streams separately.
// Notice the second case will leave a capture stream intact
// even if we no longer want it. This allows the user that is
// not using the preview to Stop() and Start() without
// rerendering the graph.
if ( !wantPreviewRendered && isPreviewRendered )
derenderGraph();
if ( !wantCaptureRendered && isCaptureRendered )
if ( wantPreviewRendered )
derenderGraph();
// Render capture stream (only if necessary)
if ( wantCaptureRendered && !isCaptureRendered )
{
// Render the file writer portion of graph (mux -> file)
Guid mediaSubType = MediaSubType.Avi;
hr = captureGraphBuilder.SetOutputFileName( ref mediaSubType, Filename, out muxFilter, out fileWriterFilter );
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
// Render video (video -> mux)
if ( VideoDevice != null )
{
// Try interleaved first, because if the device supports it,
// it's the only way to get audio as well as video
cat = PinCategory.Capture;
med = MediaType.Interleaved;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter );
if( hr < 0 )
{
med = MediaType.Video;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter );
if ( hr == -2147220969 ) throw new DeviceInUseException( "Video device", hr );
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
}
}
// Render audio (audio -> mux)
if ( AudioDevice != null )
{
cat = PinCategory.Capture;
med = MediaType.Audio;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, audioDeviceFilter, audioCompressorFilter, muxFilter );
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
}
isCaptureRendered = true;
didSomething = true;
}
// Render preview stream (only if necessary)
if ( wantPreviewRendered && !isPreviewRendered )
{
// Render preview (video -> renderer)
cat = PinCategory.Preview;
med = MediaType.Video;
hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, null, null );
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
// Get the IVideoWindow interface
videoWindow = (IVideoWindow) graphBuilder;
// Set the video window to be a child of the main window
hr = videoWindow.put_Owner( previewWindow.Handle );
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
// Set video window style
hr = videoWindow.put_WindowStyle( WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
//......... part of the code is omitted here .........
Example 7: Play
//......... part of the code is omitted here .........
mpeg2ProgramStream.unkPtr = IntPtr.Zero;
mpeg2ProgramStream.sampleSize = 0;
mpeg2ProgramStream.temporalCompression = false;
mpeg2ProgramStream.fixedSizeSamples = true;
mpeg2ProgramStream.formatType = FormatType.None;
mpeg2ProgramStream.formatSize = 0;
mpeg2ProgramStream.formatPtr = IntPtr.Zero;
hr = interfaceFile.Load(fileName, mpeg2ProgramStream);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9:Failed to load file");
return false;
}
#region connect tsfilesource->demux
Log.WriteFile("connect tsfilesource->demux");
Log.WriteFile("TSStreamBufferPlayer9:connect tsfilesource->mpeg2 demux");
IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_tsFileSource, PinDirection.Output, 0);
if (pinTsOut == null)
{
Log.WriteFile("TSStreamBufferPlayer9:failed to find output pin of tsfilesource");
return false;
}
IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
if (pinDemuxIn == null)
{
Log.WriteFile("TSStreamBufferPlayer9:failed to find output pin of tsfilesource");
return false;
}
hr = _graphBuilder.Connect(pinTsOut, pinDemuxIn);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9:failed to connect tsfilesource->mpeg2 demux:{0:X}", hr);
return false;
}
Release.ComObject(pinTsOut);
Release.ComObject(pinDemuxIn);
#endregion
#region map demux pids
Log.WriteFile("map mpeg2 pids");
IMPEG2StreamIdMap pStreamId = (IMPEG2StreamIdMap)_pinVideo;
hr = pStreamId.MapStreamId(0xe0, MPEG2Program.ElementaryStream, 0, 0);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9: failed to map pid 0xe0->video pin");
return false;
}
pStreamId = (IMPEG2StreamIdMap)_pinAudio;
hr = pStreamId.MapStreamId(0xc0, MPEG2Program.ElementaryStream, 0, 0);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9: failed to map pid 0xc0->audio pin");
return false;
}
#endregion
#region render demux audio/video pins
Log.WriteFile("render pins");
hr = _graphBuilder.Render(_pinAudio);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9:failed to render video output pin:{0:X}", hr);
}
hr = _graphBuilder.Render(_pinVideo);
if (hr != 0)
{
Log.WriteFile("TSStreamBufferPlayer9:failed to render audio output pin:{0:X}", hr);
}
#endregion
#endregion
_videoWin = _graphBuilder as IVideoWindow;
if (_videoWin != null)
{
_videoWin.put_Visible(OABool.True);
_videoWin.put_Owner(form.Handle);
_videoWin.put_WindowStyle(
(WindowStyle)((int)WindowStyle.Child + (int)WindowStyle.ClipSiblings + (int)WindowStyle.ClipChildren));
_videoWin.put_MessageDrain(form.Handle);
_videoWin.SetWindowPosition(190, 250, 150, 150);
}
Log.WriteFile("run graph");
_mediaCtrl = (IMediaControl)_graphBuilder;
hr = _mediaCtrl.Run();
Log.WriteFile("TSStreamBufferPlayer9:running:{0:X}", hr);
return true;
}
Example 8: InitVideoWindow
private void InitVideoWindow()
{
if (Render == null)
return;
videoWindow = (IVideoWindow)graphBuilder;
//Set the owner of the videoWindow to an IntPtr of some sort (the Handle of any control - could be a form / button etc.)
var hr = videoWindow.put_Owner(Render.Handle);
DsError.ThrowExceptionForHR(hr);
//Set the style of the video window
hr = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings);
DsError.ThrowExceptionForHR(hr);
// Position video window in client rect of main application window
//hr = videoWindow.SetWindowPosition(0, 0, Render.Width, Render.Height);
Resize();
DsError.ThrowExceptionForHR(hr);
videoWindow.put_Visible(OABool.True);
Render.SizeChanged -= new EventHandler(onResize);
Render.SizeChanged += new EventHandler(onResize);
}
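Example 8 wires Render.SizeChanged to an onResize handler and also calls a Resize() helper, neither of which is shown. A plausible sketch of that handler, keeping the child video window sized to the host control's client area; the field names follow the example, but the implementation itself is an assumption.
private void onResize(object sender, EventArgs e)
{
    // Keep the child video window covering the host control's client area,
    // mirroring what the Resize() helper above presumably does.
    if (videoWindow != null && Render != null)
    {
        videoWindow.SetWindowPosition(0, 0, Render.ClientSize.Width, Render.ClientSize.Height);
    }
}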
Example 9: Play
//......... part of the code is omitted here .........
{
hr = filterState.IsFilterReadyToConnectPins(out ready);
System.Threading.Thread.Sleep(25);
}
}
if (hr != 0)
{
ErrorOrSplitter = DirectShowLib.DsError.GetErrorText(hr);
DirectShowUtil.ReleaseComObject(sourceFilter, 2000);
return false;
}
OnlineVideos.MediaPortal1.Player.OnlineVideosPlayer.AddPreferredFilters(_graphBuilder, sourceFilter);
// try to connect the filters
int numConnected = 0;
IEnumPins pinEnum;
hr = sourceFilter.EnumPins(out pinEnum);
if ((hr == 0) && (pinEnum != null))
{
pinEnum.Reset();
IPin[] pins = new IPin[1];
int iFetched;
int iPinNo = 0;
do
{
iPinNo++;
hr = pinEnum.Next(1, pins, out iFetched);
if (hr == 0)
{
if (iFetched == 1 && pins[0] != null)
{
PinDirection pinDir;
pins[0].QueryDirection(out pinDir);
if (pinDir == PinDirection.Output)
{
hr = _graphBuilder.Render(pins[0]);
if (hr == 0)
{
numConnected++;
IPin connectedPin;
if (pins[0].ConnectedTo(out connectedPin) == 0 && connectedPin != null)
{
PinInfo connectedPinInfo;
connectedPin.QueryPinInfo(out connectedPinInfo);
FilterInfo connectedFilterInfo;
connectedPinInfo.filter.QueryFilterInfo(out connectedFilterInfo);
DirectShowUtil.ReleaseComObject(connectedPin, 2000);
IBaseFilter connectedFilter;
if (connectedFilterInfo.pGraph.FindFilterByName(connectedFilterInfo.achName, out connectedFilter) == 0 && connectedFilter != null)
{
var codecInfo = GetCodecInfo(connectedFilter, connectedFilterInfo.achName);
if (codecInfo != null)
{
if (string.IsNullOrEmpty(ErrorOrSplitter)) ErrorOrSplitter = codecInfo.ToString();
else ErrorOrSplitter += ", " + codecInfo.ToString();
}
DirectShowUtil.ReleaseComObject(connectedFilter);
}
}
}
}
DirectShowUtil.ReleaseComObject(pins[0], 2000);
}
}
} while (iFetched == 1);
}
DirectShowUtil.ReleaseComObject(pinEnum, 2000);
if (numConnected > 0)
{
_videoWin = _graphBuilder as IVideoWindow;
if (_videoWin != null)
{
_videoWin.put_Owner(_parentControl.Handle);
_videoWin.put_WindowStyle((WindowStyle)((int)WindowStyle.Child + (int)WindowStyle.ClipSiblings + (int)WindowStyle.ClipChildren));
_videoWin.SetWindowPosition(_parentControl.ClientRectangle.X, _parentControl.ClientRectangle.Y, _parentControl.ClientRectangle.Width, _parentControl.ClientRectangle.Height);
_videoWin.put_Visible(OABool.True);
}
_mediaCtrl = (IMediaControl)_graphBuilder;
hr = _mediaCtrl.Run();
mediaEvents = (IMediaEventEx)_graphBuilder;
// Have the graph signal event via window callbacks for performance
mediaEvents.SetNotifyWindow(_parentControl.FindForm().Handle, WMGraphNotify, IntPtr.Zero);
_parentControl.SizeChanged += _parentControl_SizeChanged;
return true;
}
else
{
ErrorOrSplitter = string.Format("Could not render output pins of {0}", sourceFilterName);
DirectShowUtil.ReleaseComObject(sourceFilter, 2000);
Stop();
return false;
}
}
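Example 9 (like Example 1) asks the graph to post event notifications to a window via SetNotifyWindow(handle, WMGraphNotify, IntPtr.Zero), but the message handler itself is not shown. A plausible sketch of such a handler on the form that owns the handle, assuming DirectShowLib's IMediaEventEx, access to the player's mediaEvents field, and the same WMGraphNotify constant; the Stop() call is illustrative.
protected override void WndProc(ref Message m)
{
    if (m.Msg == WMGraphNotify && mediaEvents != null)
    {
        EventCode code;
        IntPtr p1, p2;
        // Drain all pending graph events; the parameters must always be freed.
        while (mediaEvents.GetEvent(out code, out p1, out p2, 0) == 0)
        {
            mediaEvents.FreeEventParams(code, p1, p2);
            if (code == EventCode.Complete)
                Stop();   // end of stream reached
        }
    }
    base.WndProc(ref m);
}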
Example 10: openVid
//......... part of the code is omitted here .........
treeView1.Enabled = true;
try
{
RetrieveMediaFileInfo(fileName);
}
catch { treeView1.Enabled = false; }
}
else treeView1.Enabled = false;
if (x != 0)
{
vidScaleFactor.Enabled = true;
try
{
vidScaleFactor.Text = getFromConfigFile("mainW_Zoom");
}
catch
{
vidScaleFactor.Text = "50%";
};
double p = double.Parse(vidScaleFactor.Text.Substring(0, vidScaleFactor.Text.IndexOf('%')));
p = p / 100;
int new_x = (int)(x * p);
int new_y = (int)(y * p);
videoWindow.put_Height(new_x);
videoWindow.put_Width(new_y);
videoWindow.put_Owner(videoPanel.Handle);
videoPanel.Size = new System.Drawing.Size(new_x, new_y);
videoWindow.SetWindowPosition(0, 0, videoPanel.Width, videoPanel.Height);
videoWindow.put_WindowStyle(WindowStyle.Child);
videoWindow.put_Visible(DirectShowLib.OABool.True);
}
else vidScaleFactor.Enabled = false;
// timer
actualizaFrames.Interval = 10;
actualizaFrames.Enabled = true;
//mediaControl.Run();
drawPositions();
framesFin.Enabled = true;
buttonAddFrameInicio.Enabled = buttonAddFrameInicio.Visible = true;
framesInicio.Enabled = true;
buttonAddFrameFin.Enabled = buttonAddFrameFin.Visible = true;
butClip.Enabled = false;
mediaSeeking.SetTimeFormat(DirectShowLib.TimeFormat.Frame);
videoInfo.FrameTotal = VideoUnitConversion.getTotal(mediaSeeking, videoInfo.FrameRate);
seekBar.Maximum = FrameTotal;
seekBar.TickFrequency = seekBar.Maximum / 10;
// VFW ( __ AVIs ONLY __ )
try
{
AVIFileWrapper.AVIFileInit();
int aviFile = 0;
IntPtr aviStream;
int res = AVIFileWrapper.AVIFileOpen(ref aviFile, fileName, 0x20, 0);
Example 11: translateW_Load
private void translateW_Load(object sender, EventArgs e)
{
//this.MaximumSize = this.Size;
//this.MinimumSize = this.Size;
toolStripStatusLabel2.Text = "Cargando el Asistente de Traducción...";
// load the script
autoComplete = new ArrayList();
al = mW.al;
gridCont.RowCount = al.Count;
bool hasAutoComplete = (mW.script.GetHeader().GetHeaderValue("AutoComplete") != string.Empty);
for (int i = 0; i < al.Count; i++)
{
lineaASS lass = (lineaASS)al[i];
gridCont[0, i].Value = lass.personaje;
if (!autoComplete.Contains(lass.personaje) && !hasAutoComplete)
if (lass.personaje.Trim()!="")
autoComplete.Add(lass.personaje);
gridCont[1, i].Value = lass.texto;
}
if (hasAutoComplete) InsertAutoCompleteFromScript();
labelLineaActual.Text = "1 de " + (al.Count) + " (0%)";
textPersonaje.Text = gridCont[0, 0].Value.ToString();
textOrig.Text = gridCont[1, 0].Value.ToString();
// load the video
graphBuilder = (IGraphBuilder)new FilterGraph();
graphBuilder.RenderFile(videoInfo.FileName, null);
mediaControl = (IMediaControl)graphBuilder;
// mediaEventEx = (IMediaEventEx)this.graphBuilder;
mediaSeeking = (IMediaSeeking)graphBuilder;
mediaPosition = (IMediaPosition)graphBuilder;
basicVideo = graphBuilder as IBasicVideo;
basicAudio = graphBuilder as IBasicAudio;
videoWindow = graphBuilder as IVideoWindow;
try
{
int x, y; double atpf;
basicVideo.GetVideoSize(out x, out y);
basicVideo.get_AvgTimePerFrame(out atpf);
videoInfo.FrameRate = Math.Round(1 / atpf, 3);
int new_x = videoPanel.Width;
int new_y = (new_x * y) / x;
videoWindow.put_Height(new_x);
videoWindow.put_Width(new_y);
videoWindow.put_Owner(videoPanel.Handle);
videoPanel.Size = new System.Drawing.Size(new_x, new_y);
videoWindow.SetWindowPosition(0, 0, videoPanel.Width, videoPanel.Height);
videoWindow.put_WindowStyle(WindowStyle.Child);
videoWindow.put_Visible(DirectShowLib.OABool.True);
mediaSeeking.SetTimeFormat(DirectShowLib.TimeFormat.Frame);
mediaControl.Run();
}
catch { mW.errorMsg("Imposible cargar el vídeo. Debe haber algún problema con el mismo, y el asistente será muy inestable"); }
// activate timers & handlers
timer1.Tick += new EventHandler(timer1_Tick);
timer1.Enabled = true;
timer2.Tick += new EventHandler(timer2_Tick);
AutoSaveTimer.Tick += new EventHandler(timer3_Tick);
AutoSaveTimer.Enabled = true;
gridCont.CellClick += new DataGridViewCellEventHandler(gridCont_CellClick);
textPersonaje.TextChanged += new EventHandler(textPersonaje_TextChanged);
textTradu.TextChanged += new EventHandler(textTradu_TextChanged);
textTradu.KeyUp += new KeyEventHandler(textBox1_KeyUp);
textTradu.KeyDown += new KeyEventHandler(textTradu_KeyDown);
textTradu.KeyPress += new KeyPressEventHandler(textTradu_KeyPress);
textPersonaje.KeyDown += new KeyEventHandler(textPersonaje_KeyDown);
textPersonaje.KeyPress += new KeyPressEventHandler(textPersonaje_KeyPress);
button8.GotFocus += new EventHandler(button8_GotFocus);
button9.GotFocus += new EventHandler(button9_GotFocus);
gridCont.DoubleClick += new EventHandler(gridCont_DoubleClick);
gridCont.SelectionChanged += new EventHandler(gridCont_SelectionChanged);
gridCont.KeyUp += new KeyEventHandler(gridCont_KeyUp);
listBox1.KeyUp += new KeyEventHandler(listBox1_KeyUp);
textToAdd.KeyPress += new KeyPressEventHandler(textToAdd_KeyPress);
progressBar1.MouseDown += new MouseEventHandler(progressBar1_MouseDown);
tiempoInicio_.TimeValidated += new TimeTextBox.OnTimeTextBoxValidated(tiempo_TimeValidated);
tiempoFin_.TimeValidated += new TimeTextBox.OnTimeTextBoxValidated(tiempo_TimeValidated);
this.Move += new EventHandler(translateW_Move);
//textTradu.ContextMenu = new ASSTextBoxRegExDefaultContextMenu(textTradu);
mediaControl.Pause();
// load from config
try
{
checkAutoComplete.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_autoC"));
checkTagSSA.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_tagSSA"));
checkComment.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_Comment"));
checkVideo.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_aud"));
//......... part of the code is omitted here .........
Example 12: renderGraph
/// <summary>
/// Connects the filters of a previously created graph
/// (created by createGraph()). Once rendered the graph
/// is ready to be used. This method may also destroy
/// streams if we have streams we no longer want.
/// </summary>
private void renderGraph()
{
DsGuid cat;
DsGuid med;
int hr;
bool didSomething = false;
assertStopped();
// Ensure required properties set
if (_filename == null)
throw new ArgumentException("The Filename property has not been set to a file.\n");
// Stop the graph
if (_mediaControl != null)
_mediaControl.Stop();
// Create the graph if needed (group should already be created)
createGraph();
// Derender the graph if we have a capture or preview stream
// that we no longer want. We can't derender the capture and
// preview streams separately.
// Notice the second case will leave a capture stream intact
// even if we no longer want it. This allows the user that is
// not using the preview to Stop() and Start() without
// rerendering the graph.
if (!_wantPreviewRendered && _isPreviewRendered)
derenderGraph();
if (!_wantCaptureRendered && _isCaptureRendered)
if (_wantPreviewRendered)
derenderGraph();
// Render capture stream (only if necessary)
if (_wantCaptureRendered && !_isCaptureRendered)
{
// Render the file writer portion of graph (mux -> file)
Guid mediaSubType = MediaSubType.Avi;
Marshal.ThrowExceptionForHR(_captureGraphBuilder.SetOutputFileName(mediaSubType, Filename, out _muxFilter, out _fileWriterFilter));
// Render video (video -> mux)
if (VideoDevice != null)
{
// Try interleaved first, because if the device supports it,
// it's the only way to get audio as well as video
cat = DsGuid.FromGuid(PinCategory.Capture);
med = DsGuid.FromGuid(MediaType.Interleaved);
hr = _captureGraphBuilder.RenderStream(cat, med, _videoDeviceFilter, _videoCompressorFilter, _muxFilter);
//hr = _captureGraphBuilder.RenderStream(ref cat, ref med, _videoDeviceFilter, _videoCompressorFilter, _muxFilter);
if (hr < 0)
{
med = DsGuid.FromGuid(MediaType.Video);
hr = _captureGraphBuilder.RenderStream(cat, med, _videoDeviceFilter, _videoCompressorFilter, _muxFilter);
//hr = _captureGraphBuilder.RenderStream(ref cat, ref med, _videoDeviceFilter, _videoCompressorFilter, _muxFilter);
if (hr == -2147220969)
throw new DeviceInUseException("Video device", hr);
Marshal.ThrowExceptionForHR(hr);
}
}
// Render audio (audio -> mux)
if (AudioDevice != null)
{
cat = DsGuid.FromGuid(PinCategory.Capture);
med = DsGuid.FromGuid(MediaType.Audio);
Marshal.ThrowExceptionForHR(_captureGraphBuilder.RenderStream(cat, med, _audioDeviceFilter, _audioCompressorFilter, _muxFilter));
//Marshal.ThrowExceptionForHR(_captureGraphBuilder.RenderStream(ref cat, ref med, _audioDeviceFilter, _audioCompressorFilter, _muxFilter));
}
_isCaptureRendered = true;
didSomething = true;
}
// Render preview stream (only if necessary)
if (_wantPreviewRendered && !_isPreviewRendered)
{
// Render preview (video -> renderer)
cat = DsGuid.FromGuid(PinCategory.Preview);
med = DsGuid.FromGuid(MediaType.Video);
Marshal.ThrowExceptionForHR(_captureGraphBuilder.RenderStream(cat, med, _videoDeviceFilter, null, null));
// Marshal.ThrowExceptionForHR(_captureGraphBuilder.RenderStream(ref cat, ref med, _videoDeviceFilter, null, null));
// Get the IVideoWindow interface
_videoWindow = (IVideoWindow)_graphBuilder;
// Set the video window to be a child of the main window
Marshal.ThrowExceptionForHR(_videoWindow.put_Owner(_previewWindow.Handle));
// Set video window style
Marshal.ThrowExceptionForHR(_videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings));
//Marshal.ThrowExceptionForHR(_videoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS));
//......... part of the code is omitted here .........
Example 13: FileLoad
// Method for loading a video file.
public void FileLoad(string sfile, Panel vPanel)
{
CleanUp();
graphBuilder = (IGraphBuilder) new FilterGraph();
mediaControl = graphBuilder as IMediaControl;
mediaPosition = graphBuilder as IMediaPosition;
videoWindow = graphBuilder as IVideoWindow;
basicAudio = graphBuilder as IBasicAudio;
ddColor.lBrightness = 0;
ddColor.lContrast = 0;
ddColor.lGamma = 0;
ddColor.lSaturation = 0;
graphBuilder.RenderFile(sfile, null);
videoWindow.put_Owner(vPanel.Handle);
videoWindow.put_WindowStyle(WindowStyle.Child
| WindowStyle.ClipSiblings
| WindowStyle.ClipChildren);
videoWindow.SetWindowPosition(vPanel.ClientRectangle.Left,
vPanel.ClientRectangle.Top,
vPanel.ClientRectangle.Width,
vPanel.ClientRectangle.Height);
mediaControl.Run();
CurrentStatus = mStatus.Play;
mediaPosition.get_Duration(out mediaTimeSeconds);
allSeconds = (int)mediaTimeSeconds;
}
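Example 13 calls CleanUp() before building a new graph, but that method is not shown. A plausible sketch, following the teardown order used in Example 2: stop playback, then hide the video window and detach its owner before the graph is released, so DirectShow stops drawing into (and posting to) the panel. Marshal.ReleaseComObject and the exact set of fields are assumptions.
private void CleanUp()
{
    if (mediaControl != null)
        mediaControl.Stop();

    if (videoWindow != null)
    {
        // Hide and detach the video window before the graph goes away.
        videoWindow.put_Visible(OABool.False);
        videoWindow.put_Owner(IntPtr.Zero);
    }

    if (graphBuilder != null)
        Marshal.ReleaseComObject(graphBuilder);

    graphBuilder = null;
    mediaControl = null;
    mediaPosition = null;
    videoWindow = null;
    basicAudio = null;
}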
Example 14: Init
public override void Init()
{
if (!isPlaying)
{
string Filename = "";
float size = 0;
double Max = 0;
int volume = 0;
graph = new FilterGraph() as IFilterGraph;
media = graph as IMediaControl;
eventEx = media as IMediaEventEx;
igb = media as IGraphBuilder;
imp = igb as IMediaPosition;
master.form.Invoke((MethodInvoker)delegate()
{
Filename = master.form.M_Filename.Text;
media.RenderFile(Filename);
size = (float)master.form.M_PrevSize.Value;
master.form.M_PrevSize.Enabled = false;
imp.get_Duration(out Max);
master.form.M_Seek.Maximum = (int)(Max);
master.form.M_Seek.Value = 0;
volume = master.form.M_Volume.Value;
span = (uint)(1000000.0f / master.form.M_CollectFPS);
});
graph.FindFilterByName("Video Renderer", out render);
if (render != null)
{
window = render as IVideoWindow;
window.put_WindowStyle(
WindowStyle.Caption | WindowStyle.Child
);
window.put_WindowStyleEx(
WindowStyleEx.ToolWindow
);
window.put_Caption("ElectronicBoard - VideoPrev -");
int Width, Height, Left, Top;
window.get_Width(out Width);
window.get_Height(out Height);
window.get_Left(out Left);
window.get_Top(out Top);
renderSize.Width = (int)(Width * size);
renderSize.Height = (int)(Height * size);
Aspect = (float)renderSize.Height / (float)renderSize.Width;
window.SetWindowPosition(Left, Top, renderSize.Width, renderSize.Height);
eventEx = media as IMediaEventEx;
eventEx.SetNotifyWindow(master.form.Handle, WM_DirectShow, IntPtr.Zero);
media.Run();
foreach (Process p in Process.GetProcesses())
{
if (p.MainWindowTitle == "ElectronicBoard - VideoPrev -")
{
renderwindow = p.MainWindowHandle;
break;
}
}
isPlaying = true;
iba = media as IBasicAudio;
iba.put_Volume(volume);
//master.form.checkBox3_CheckedChanged(null, null);
master.Start();
}
}
}
Example 15: ConfigureVideoWindow
// Configure the video window
private void ConfigureVideoWindow(IVideoWindow videoWindow, GraphicsDevice hWin)
{
int hr;
// Set the output window
hr = videoWindow.put_Owner( hWin.Adapter.MonitorHandle ); //CHANGE
DsError.ThrowExceptionForHR( hr );
// Set the window style
hr = videoWindow.put_WindowStyle( (WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings) );
DsError.ThrowExceptionForHR( hr );
// Make the window visible
hr = videoWindow.put_Visible( OABool.True );
DsError.ThrowExceptionForHR( hr );
// Position the playing location
hr = videoWindow.SetWindowPosition( 0, 0, 1920, 600 );
DsError.ThrowExceptionForHR( hr );
}