本文整理汇总了C#中DirectShowLib.AMMediaType类的典型用法代码示例。如果您正苦于以下问题:C# AMMediaType类的具体用法?C# AMMediaType怎么用?C# AMMediaType使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
AMMediaType类属于DirectShowLib命名空间,在下文中一共展示了AMMediaType类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: GetVideoMediaType
/// <summary>
/// Builds an AMMediaType describing uncompressed video of the given dimensions.
/// </summary>
/// <param name="bitCount">Bits per pixel (e.g. 24 or 32)</param>
/// <param name="width">Frame width in pixels</param>
/// <param name="height">Frame height in pixels</param>
/// <returns>The populated media type; the caller owns the formatPtr allocation</returns>
public static AMMediaType GetVideoMediaType(short bitCount, int width, int height)
{
    // NOTE(review): sample size is width * height * bytes-per-pixel with no DWORD
    // row alignment — assumes widths that need no padding; confirm against callers.
    var bmiHeader = new BitmapInfoHeader();
    bmiHeader.Size = Marshal.SizeOf(typeof(BitmapInfoHeader));
    bmiHeader.Compression = 0;
    bmiHeader.BitCount = bitCount;
    bmiHeader.Width = width;
    bmiHeader.Height = height;
    bmiHeader.Planes = 1;
    bmiHeader.ImageSize = bmiHeader.Width * bmiHeader.Height * (bmiHeader.BitCount / 8);

    var videoInfo = new VideoInfoHeader();
    videoInfo.BmiHeader = bmiHeader;

    var mediaType = new AMMediaType();
    mediaType.majorType = MediaType.Video;
    mediaType.subType = GetMediaSubTypeForBitCount(bitCount);
    mediaType.formatType = FormatType.VideoInfo;
    mediaType.fixedSizeSamples = true;
    mediaType.formatSize = Marshal.SizeOf(typeof(VideoInfoHeader));
    mediaType.sampleSize = bmiHeader.ImageSize;

    // Copy the VIDEOINFOHEADER into unmanaged memory owned by the media type.
    mediaType.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(videoInfo));
    Marshal.StructureToPtr(videoInfo, mediaType.formatPtr, false);
    return mediaType;
}
示例2: GetAudioMediaType
/// <summary>
/// Create an audio media type.
/// </summary>
/// <returns>The constructed media type</returns>
public static AMMediaType GetAudioMediaType()
{
    // Only the major type is set; sub-type and format details are left
    // for downstream format negotiation.
    return new AMMediaType { majorType = MediaType.Audio };
}
示例3: MediaGroup
/// <summary>
/// Constructor. Creates an empty DES group node, applies the supplied media
/// type and attaches the group to the timeline.
/// </summary>
/// <param name="mType">Media type of the new group; its unmanaged payload is
/// freed by this constructor once applied</param>
/// <param name="pTimeline">Timeline to use for the group</param>
/// <param name="fps">FPS for the group</param>
public MediaGroup(AMMediaType mType, IAMTimeline pTimeline, double fps)
{
    m_Length = 0;
    m_Files = new ArrayList();
    m_FPS = fps;
    m_pTimeline = pTimeline;

    // Create the root group/composition node.
    IAMTimelineObj groupObj;
    int hr = m_pTimeline.CreateEmptyNode(out groupObj, TimelineMajorType.Group);
    DESError.ThrowExceptionForHR(hr);

    m_pGroup = (IAMTimelineGroup)groupObj;

    // Apply the caller-supplied media type, then release its unmanaged payload.
    hr = m_pGroup.SetMediaType(mType);
    DESError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(mType);

    // Attach the group to the timeline.
    hr = m_pTimeline.AddGroup(groupObj);
    DESError.ThrowExceptionForHR(hr);

    // NOTE(review): groupObj is intentionally not released here; it lives on
    // as m_pGroup and is released during dispose.
}
示例4: Capture
/// <summary> Use capture with selected media caps</summary>
/// <param name="iDeviceNum">Zero-based index into the video-input device list</param>
/// <param name="media">Media type to apply when building the capture graph</param>
public Capture(int iDeviceNum, AMMediaType media)
{
    // Get the collection of video devices
    DsDevice[] capDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);

    // Fixed: the original "iDeviceNum + 1 > Length" test overflows for
    // int.MaxValue and never rejected negative indices; also throw the
    // specific exception type instead of bare Exception.
    if (iDeviceNum < 0 || iDeviceNum >= capDevices.Length)
    {
        throw new ArgumentOutOfRangeException(nameof(iDeviceNum),
            "No video capture devices found at that index!");
    }
    try
    {
        // Set up the capture graph
        SetupGraph(capDevices[iDeviceNum], media);
        // tell the callback to ignore new images
        m_PictureReady = new ManualResetEvent(false);
        m_bGotOne = true;
        m_bRunning = false;
        timer1.Interval = 1000 / 15; // 15 fps
        timer1.Tick += new EventHandler(timer1_Tick);
        timer1.Start();
    }
    catch
    {
        // Release any partially-built graph before rethrowing.
        Dispose();
        throw;
    }
}
示例5: AddStreamSourceFilter
/// <summary>
/// AddStreamSourceFilter — adds the Elecard NWSource-Plus source filter,
/// loads the given URL into it and connects it to the main infinite tee.
/// </summary>
/// <param name="url">Stream URL handed to the source filter</param>
protected override void AddStreamSourceFilter(string url)
{
    Log.Log.WriteFile("dvbip:Add NWSource-Plus");
    _filterStreamSource = FilterGraphTools.AddFilterFromClsid(_graphBuilder, typeof (ElecardNWSourcePlus).GUID,
                                                              "Elecard NWSource-Plus");

    // Describe the incoming data as an MPEG-2 transport stream with no
    // extra format block attached.
    var transportStreamType = new AMMediaType
    {
        majorType = MediaType.Stream,
        subType = MediaSubType.Mpeg2Transport,
        unkPtr = IntPtr.Zero,
        sampleSize = 0,
        temporalCompression = false,
        fixedSizeSamples = true,
        formatType = FormatType.None,
        formatSize = 0,
        formatPtr = IntPtr.Zero
    };
    ((IFileSourceFilter)_filterStreamSource).Load(url, transportStreamType);

    //connect the [stream source] -> [inf tee]
    Log.Log.WriteFile("dvb: Render [source]->[inftee]");
    int hr = _capBuilder.RenderStream(null, null, _filterStreamSource, null, _infTeeMain);
    if (hr != 0)
    {
        Log.Log.Error("dvb:Add source returns:0x{0:X}", hr);
        throw new TvException("Unable to add source filter");
    }
}
示例6: GCSBitmapInfo
/// <summary>
/// Captures a video resolution/format description.
/// </summary>
/// <param name="width">Frame width in pixels</param>
/// <param name="height">Frame height in pixels</param>
/// <param name="fps">Frame rate value</param>
/// <param name="standard">Video standard identifier</param>
/// <param name="media">Associated DirectShow media type</param>
public GCSBitmapInfo(int width, int height, long fps, string standard, AMMediaType media)
{
    this.Width = width;
    this.Height = height;
    this.Fps = fps;
    this.Standard = standard;
    this.Media = media;
}
示例7: WavFileRenderer
/// <summary>
/// Renders the timeline's audio to a WAV file and leaves the renderer
/// in the Initialized state.
/// </summary>
/// <param name="timeline">Timeline whose audio is rendered (passed to the base renderer)</param>
/// <param name="outputFile">Path of the WAV file to create</param>
/// <param name="audioCompressor">Audio compressor filter; may be null for uncompressed output</param>
/// <param name="mediaType">Media type applied to the compressor; may be null</param>
/// <param name="audioParticipants">Callbacks invoked while rendering audio</param>
public WavFileRenderer(ITimeline timeline, string outputFile, IBaseFilter audioCompressor, AMMediaType mediaType,
ICallbackParticipant[] audioParticipants)
: base(timeline)
{
RenderToWavDest(outputFile, audioCompressor, mediaType, audioParticipants);
ChangeState(RendererState.Initialized);
}
示例8: ResolutionInfo
/// <summary>
/// Extracts width, height and bit depth from a media type whose format
/// block is a VIDEOINFOHEADER.
/// </summary>
/// <param name="media">Media type whose formatPtr points at a VideoInfoHeader</param>
/// <exception cref="ArgumentNullException">media is null</exception>
/// <exception cref="ArgumentException">media carries no format block</exception>
private ResolutionInfo(AMMediaType media)
{
    // Fixed: guard against a null media type / missing format block instead
    // of letting Marshal.PtrToStructure fail on a null pointer.
    if (media == null) throw new ArgumentNullException("media");
    if (media.formatPtr == IntPtr.Zero)
        throw new ArgumentException("Media type has no format block", "media");

    var videoInfo = new VideoInfoHeader();
    Marshal.PtrToStructure(media.formatPtr, videoInfo);
    Width = videoInfo.BmiHeader.Width;
    Height = videoInfo.BmiHeader.Height;
    Bpp = videoInfo.BmiHeader.BitCount;
}
示例9: RenderToWavDest
/// <summary>
/// Builds and configures the graph that renders the first audio group
/// through an optional compressor into a WAV destination filter and on
/// to a file sink.
/// </summary>
/// <param name="outputFile">Path of the WAV file to create; must not be null</param>
/// <param name="audioCompressor">Optional compressor inserted into the chain; may be null</param>
/// <param name="mediaType">Optional format applied to the compressor; may be null</param>
/// <param name="audioParticipants">Callbacks invoked during rendering</param>
private void RenderToWavDest(
string outputFile,
IBaseFilter audioCompressor,
AMMediaType mediaType,
ICallbackParticipant[] audioParticipants)
{
// Register the compressor for deferred release alongside other graph objects.
if (audioCompressor != null) Cleanup.Add(audioCompressor);
int hr;
if (FirstAudioGroup == null)
{
throw new SplicerException(Resources.ErrorNoAudioStreamToRender);
}
if (outputFile == null)
{
throw new SplicerException(Resources.ErrorInvalidOutputFileName);
}
// Contains useful routines for creating the graph
var graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
Cleanup.Add(graphBuilder);
try
{
hr = graphBuilder.SetFiltergraph(Graph);
DESError.ThrowExceptionForHR(hr);
// Destination chain: [groups] -> [wav dest] -> [file sink].
IBaseFilter wavDestFilter = StandardFilters.RenderWavDestination(Cleanup, Graph);
IBaseFilter fileSink = StandardFilters.RenderFileDestination(Cleanup, Graph, outputFile);
try
{
RenderGroups(graphBuilder, audioCompressor, null, wavDestFilter, audioParticipants, null);
FilterGraphTools.ConnectFilters(Graph, wavDestFilter, fileSink, true);
// if supplied, apply the media type to the filter
if (mediaType != null)
{
FilterGraphTools.SetFilterFormat(mediaType, audioCompressor);
}
// Run the graph as fast as possible rather than at presentation rate.
DisableClock();
}
finally
{
// Release local COM references on all paths; Cleanup handles the rest.
if (wavDestFilter != null) Marshal.ReleaseComObject(wavDestFilter);
if (fileSink != null) Marshal.ReleaseComObject(fileSink);
}
}
finally
{
Marshal.ReleaseComObject(graphBuilder);
}
}
示例10: Dispose
/// <summary>
/// Releases the COM filter reference and frees the cached media type.
/// </summary>
/// <param name="disposing">True when called from Dispose(); false from a finalizer.
/// NOTE(review): the flag is currently ignored — both paths release everything;
/// confirm this is safe if a finalizer ever calls in, since managed fields may
/// already have been finalized on that path.</param>
private void Dispose(bool disposing)
{
if (_filter != null)
{
Marshal.ReleaseComObject(_filter);
_filter = null;
}
if (_mediaType != null)
{
// Frees the unmanaged formatPtr/unkPtr payload inside the media type.
DsUtils.FreeAMMediaType(_mediaType);
_mediaType = null;
}
}
示例11: MainForm
/// <summary>
/// Builds the filter graph with a sample grabber configured for 24-bit RGB video.
/// </summary>
public MainForm()
{
    InitializeComponent();
    graphbuilder = (IGraphBuilder)new FilterGraph();
    samplegrabber = (ISampleGrabber)new SampleGrabber();

    // Fixed: surface failures instead of silently ignoring the HRESULTs
    // (matches the DsError handling used by GetStride in this file).
    int hr = graphbuilder.AddFilter((IBaseFilter)samplegrabber, "samplegrabber");
    DsError.ThrowExceptionForHR(hr);

    mt = new AMMediaType();
    mt.majorType = MediaType.Video;
    mt.subType = MediaSubType.RGB24;
    mt.formatType = FormatType.VideoInfo;
    hr = samplegrabber.SetMediaType(mt);
    DsError.ThrowExceptionForHR(hr);
    // NOTE(review): mt is kept in a field and not freed here; confirm it is
    // released via DsUtils.FreeAMMediaType when the form shuts down.
    PrintSeconds();
}
示例12: Group
/// <summary>
/// Constructor
/// </summary>
/// <param name="timeline">Timeline to use for the group</param>
/// <param name="type">The type of group this is</param>
/// <param name="mediaType">Media type of the new group</param>
/// <param name="name">Name of the group</param>
/// <param name="fps">Fps for the group</param>
public Group(ITimeline timeline, GroupType type, AMMediaType mediaType, string name, double fps)
    : base(timeline, name, -1)
{
    // Guard clauses: validate before touching the timeline.
    if (timeline == null) throw new ArgumentNullException("timeline");
    if (mediaType == null) throw new ArgumentNullException("mediaType");
    if (fps <= 0) throw new SplicerException(Resources.ErrorFramesPerSecondMustBeGreaterThenZero);

    _timeline = timeline;
    _type = type;
    _fps = fps;

    // Create the underlying DES group and expose its composition interface.
    _group = TimelineBuilder.InsertGroup(_timeline.DesTimeline, mediaType, name);
    TimelineComposition = (IAMTimelineComp)_group;
}
示例13: ConfigSampleGrabber
/// <summary>
/// Configures a sample grabber to accept 24-bit RGB video frames.
/// </summary>
/// <param name="sb">The sample grabber to configure</param>
private static void ConfigSampleGrabber(ISampleGrabber sb)
{
    // Build the media type the grabber should accept.
    var rgb24Video = new AMMediaType();
    rgb24Video.majorType = MediaType.Video;
    rgb24Video.subType = MediaSubType.RGB24;
    rgb24Video.formatType = FormatType.VideoInfo;

    // Hand it to the ISampleGrabber interface, then free the unmanaged
    // payload — the grabber keeps its own copy.
    sb.SetMediaType(rgb24Video);
    DsUtils.FreeAMMediaType(rgb24Video);
}
示例14: CaptureForm
/// <summary>
/// Builds the capture graph: filter graph, RGB24 sample grabber with
/// callback, and the device list for the combo box.
/// </summary>
public CaptureForm()
{
    InitializeComponent();
    graph_builder = (IGraphBuilder)new FilterGraph();
    media_control = (IMediaControl)graph_builder;
    events = (IMediaEventEx)graph_builder;
    grabber = (ISampleGrabber)new SampleGrabber();

    AMMediaType media_type = new AMMediaType();
    media_type.majorType = MediaType.Video;
    media_type.subType = MediaSubType.RGB24;
    grabber.SetMediaType( media_type );
    // Fixed: free the media type once the grabber has taken its copy — the
    // original leaked the AMMediaType (compare ConfigSampleGrabber in this file).
    DsUtils.FreeAMMediaType( media_type );
    grabber.SetCallback( this, 1 );
    cbDevices.Items.AddRange( GetDevices( FilterCategory.VideoInputDevice ) );
}
示例15: GetStride
//
// retrieve the bitmap stride (the offset from one row of pixel to the next)
//
private int GetStride(int videoWidth)
{
    var media = new AMMediaType();
    try
    {
        // GetConnectedMediaType retrieves the media type of the connected pin.
        var hr = sampleGrabber.GetConnectedMediaType(media);
        DsError.ThrowExceptionForHR(hr);
        if (media.formatType != FormatType.VideoInfo || media.formatPtr == IntPtr.Zero)
        {
            throw new Exception("Format type incorrect");
        }
        var videoInfoHeader = (VideoInfoHeader) Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        // NOTE(review): stride is computed without DWORD row alignment; confirm
        // callers only use widths that need no row padding.
        return videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);
    }
    finally
    {
        // Fixed: free the media type on all paths — the original leaked it
        // when the HRESULT check or the format validation threw.
        DsUtils.FreeAMMediaType(media);
    }
}