This article collects typical usage examples of the C# System.Windows.Media.MediaStreamDescription class. If you are wondering what MediaStreamDescription is for, how to use it, or want to see it in real code, the hand-picked examples below should help.
The MediaStreamDescription class belongs to the System.Windows.Media namespace. Fifteen code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better C# examples.
Example 1: OpenMediaAsync

protected override void OpenMediaAsync ()
{
    Dictionary<MediaSourceAttributesKeys, string> media_attributes = new Dictionary<MediaSourceAttributesKeys, string> ();
    List<MediaStreamDescription> media_streams = new List<MediaStreamDescription> ();
    Dictionary<MediaStreamAttributeKeys, string> stream_attributes = new Dictionary<MediaStreamAttributeKeys, string> ();
    MediaStreamDescription media_stream = new MediaStreamDescription (MediaStreamType.Audio, stream_attributes);

    long duration = 60 * 10000; // 60 ms expressed in 100-ns ticks

    // Build a WAVEFORMATEX describing the MP3 stream from its first frame.
    WaveFormatEx wave = new WaveFormatEx ();
    Mp3Frame frame = Mp3Frame.Read (stream);
    wave.FormatTag = 85; // WAVE_FORMAT_MPEGLAYER3
    wave.AvgBytesPerSec = (uint) frame.Bitrate / 8;
    wave.BitsPerSample = 0;
    wave.BlockAlign = 1;
    wave.Channels = (ushort) frame.Channels;
    wave.SamplesPerSec = (ushort) frame.SampleRate;
    wave.Size = 12;

    media_attributes.Add (MediaSourceAttributesKeys.CanSeek, "0");
    media_attributes.Add (MediaSourceAttributesKeys.Duration, duration.ToString ());
    stream_attributes [MediaStreamAttributeKeys.CodecPrivateData] = wave.Encoded;
    media_streams.Add (media_stream);

    try {
        this.frame = frame;
        this.description = media_stream;
        ReportOpenMediaCompleted (media_attributes, media_streams);
        opened = DateTime.Now;
    } catch (Exception ex) {
        Console.WriteLine (ex);
    }
}
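For context, the matching GetSampleAsync override would report one MP3 frame per request. The following is a minimal sketch, not part of the original source: it assumes the stream, frame, and description fields from the example above, a hypothetical current_pts field tracking the presentation time, and that the stream is positioned at the frame data. It relies on the fact that an MPEG-1 Layer III frame carries 1152 samples.

protected override void GetSampleAsync (MediaStreamType mediaStreamType)
{
    // 1152 samples per MPEG-1 Layer III frame; duration in 100-ns ticks.
    long duration = 1152L * 10000000 / frame.SampleRate;
    MediaStreamSample sample = new MediaStreamSample (
        description, stream, stream.Position, frame.FrameSize,
        current_pts, duration, new Dictionary<MediaSampleAttributeKeys, string> ());
    current_pts += duration; // hypothetical field: running presentation timestamp
    ReportGetSampleCompleted (sample);
}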
Example 2: OpenMediaAsync

protected override void OpenMediaAsync()
{
    Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();

    // Pull the entire Dirac stream into memory and decode it up front.
    byte[] videoData = new byte[this.videoStream.Length];
    if (videoData.Length != this.videoStream.Read(videoData, 0, videoData.Length))
    {
        throw new IOException("Could not read in the VideoStream");
    }
    dec = new org.diracvideo.Jirac.Decoder();
    dec.Push(videoData, 0, videoData.Length);
    dec.Decode();

    mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "RGBA";
    mediaStreamAttributes[MediaStreamAttributeKeys.Height] = dec.format.height.ToString();
    mediaStreamAttributes[MediaStreamAttributeKeys.Width] = dec.format.width.ToString();
    this.streamDescription = new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);
    mediaStreamDescriptions.Add(streamDescription);

    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromMinutes(5).Ticks.ToString(CultureInfo.InvariantCulture);
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();
    this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
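Whichever variant is used, the finished MediaStreamSource is consumed the same way: hand it to a MediaElement via SetSource. A minimal sketch follows; the DiracMediaStreamSource name and its constructor argument are assumptions for illustration.

MediaElement player = new MediaElement();
// Playback begins once the source's OpenMediaAsync calls ReportOpenMediaCompleted.
player.SetSource(new DiracMediaStreamSource(videoStream)); // hypothetical ctor
player.Play();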
Example 3: OpenMediaAsync

protected override void OpenMediaAsync()
{
    // One frame every 1/30 second, expressed in 100-ns ticks.
    _frameTime = (int)TimeSpan.FromSeconds((double)1 / 30).Ticks;

    // Init
    Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    List<MediaStreamDescription> availableStreams = new List<MediaStreamDescription>();

    // Stream description
    Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    streamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "RGBA";
    streamAttributes[MediaStreamAttributeKeys.Height] = format.PixelHeight.ToString();
    streamAttributes[MediaStreamAttributeKeys.Width] = format.PixelWidth.ToString();
    MediaStreamDescription msd = new MediaStreamDescription(MediaStreamType.Video, streamAttributes);
    _videoDescription = msd;
    availableStreams.Add(_videoDescription);

    // A zero timespan means an infinite video.
    sourceAttributes[MediaSourceAttributesKeys.Duration] =
        TimeSpan.FromSeconds(0).Ticks.ToString(CultureInfo.InvariantCulture);
    sourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();

    ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
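This example stops at stream setup; delivering frames happens in GetSampleAsync. A hedged sketch of what that override might look like here, assuming a hypothetical _framePixelData MemoryStream (width * height * 4 RGBA bytes) and _currentTime counter alongside the _frameTime and _videoDescription fields above:

protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    // Wrap the current RGBA frame buffer in a sample and stamp it.
    MediaStreamSample sample = new MediaStreamSample(
        _videoDescription, _framePixelData, 0, _framePixelData.Length,
        _currentTime, new Dictionary<MediaSampleAttributeKeys, string>());
    _currentTime += _frameTime; // advance by one 1/30-second frame
    ReportGetSampleCompleted(sample);
}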
Example 4: OpenMediaAsync

protected override void OpenMediaAsync()
{
    try
    {
        this.wavParser = new WavParser(this.stream);
        this.wavParser.ParseWaveHeader();
        this.wavParser.WaveFormatEx.ValidateWaveFormat();
        this.startPosition = this.currentPosition = this.wavParser.DataPosition;

        Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
        Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
        List<MediaStreamDescription> availableStreams = new List<MediaStreamDescription>();

        streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.wavParser.WaveFormatEx.ToHexString();
        MediaStreamDescription msd = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
        this.audioDesc = msd;
        availableStreams.Add(this.audioDesc);

        sourceAttributes[MediaSourceAttributesKeys.Duration] = this.wavParser.Duration.ToString();
        ReportOpenMediaCompleted(sourceAttributes, availableStreams);
    }
    catch (Exception)
    {
        // Errors are silently swallowed here; see the alternative sketch below.
    }
}
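The empty catch above hides failures from the player entirely: the MediaElement simply stalls. A hedged alternative is to route the failure through ErrorOccurred, which the MediaStreamSource base class provides for exactly this purpose:

protected override void OpenMediaAsync()
{
    try
    {
        // ... parse the WAV header and call ReportOpenMediaCompleted, as above ...
    }
    catch (Exception e)
    {
        // Tell the MediaElement the source failed instead of stalling silently.
        ErrorOccurred("Failed to open WAV stream: " + e.Message);
    }
}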
Example 5: H264Parser

/// <summary>
/// Constructs an H.264 parser and builds the video stream description
/// from the playlist metadata.
/// </summary>
/// <param name="outputBuffer">Buffer that receives parsed samples.</param>
/// <param name="metadata">Container metadata, possibly carrying a Resolution tag.</param>
/// <param name="hlsStream">The underlying HLS stream.</param>
public H264Parser(SampleBuffer outputBuffer, IContainerMetadata metadata, HLSStream hlsStream)
    : base(outputBuffer, hlsStream)
{
    string[] resolution = null;
    string s;
    if (metadata.Attributes != null &&
        metadata.Attributes.TryGetValue(HLSPlaylistMetaKeys.Resolution, out s))
    {
        string[] components = s.Split(new char[] { 'x' });
        if (components != null && components.Length == 2)
            resolution = components;
    }
    if (resolution == null)
    {
        HLSTrace.WriteLine("Missing 'Resolution' tag in HLS MetaKeys, defaulting to the maximum supported resolution of 1280x720.");
        resolution = new string[] { "1280", "720" };
    }

    Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    streamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
    streamAttributes[MediaStreamAttributeKeys.Width] = resolution[0];
    streamAttributes[MediaStreamAttributeKeys.Height] = resolution[1];
    Description = new MediaStreamDescription(MediaStreamType.Video, streamAttributes);
}
Example 6: MediaStreamSample

public MediaStreamSample (MediaStreamDescription mediaStreamDescription, Stream stream, long offset, long count, long timestamp, long duration, IDictionary<MediaSampleAttributeKeys, string> attributes)
{
    this.media_stream_description = mediaStreamDescription;
    this.stream = stream;
    this.offset = offset;
    this.count = count;
    this.timestamp = timestamp;
    this.duration = duration;
    this.attributes = attributes;
}
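To illustrate this constructor in use, here is a sketch building a sample for a 4096-byte audio chunk. All names and values are illustrative (audioDescription and audioStream are assumed to exist in the calling class), and times are in 100-ns ticks:

MediaStreamSample sample = new MediaStreamSample(
    audioDescription,  // stream this sample belongs to
    audioStream,       // stream containing the sample bytes
    0,                 // offset of the sample within the stream
    4096,              // sample size in bytes
    0,                 // presentation timestamp
    1000000,           // duration: 100 ms
    new Dictionary<MediaSampleAttributeKeys, string>());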
Example 7: OpenMediaAsync

protected override void OpenMediaAsync()
{
    int channels = this.Asap.GetInfo().GetChannels();
    int blockSize = channels * BitsPerSample >> 3;

    // Hand-build the little-endian WAVEFORMATEX hex string: format tag 1 (PCM),
    // channels, sample rate, byte rate, block align, bits per sample, cbSize 0.
    string waveFormatHex = string.Format("0100{0:X2}00{1:X8}{2:X8}{3:X2}00{4:X2}000000",
        channels, SwapBytes(ASAP.SampleRate), SwapBytes(ASAP.SampleRate * blockSize), blockSize, BitsPerSample);

    Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = waveFormatHex;
    this.MediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);

    Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    sourceAttributes[MediaSourceAttributesKeys.CanSeek] = "True";
    // Duration is reported in 100-ns ticks; a millisecond count times 10000.
    sourceAttributes[MediaSourceAttributesKeys.Duration] = (this.Duration < 0 ? 0 : this.Duration * 10000).ToString();

    ReportOpenMediaCompleted(sourceAttributes, new MediaStreamDescription[1] { this.MediaStreamDescription });
}
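The SwapBytes helper is not shown in the snippet. A plausible implementation, which byte-reverses a 32-bit value so that the big-endian {0:X8} formatting above yields the little-endian byte order WAVEFORMATEX requires:

static int SwapBytes(int value)
{
    // Reverse byte order: AABBCCDD -> DDCCBBAA.
    uint x = (uint)value;
    return (int)((x >> 24) | ((x >> 8) & 0xFF00) | ((x << 8) & 0xFF0000) | (x << 24));
}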
Example 8: OpenMediaAsync

protected override void OpenMediaAsync()
{
    var mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    var mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    var mediaStreamDescriptions = new List<MediaStreamDescription>();

    var wfx = new MediaParsers.WaveFormatExtensible () {
        FormatTag = 1, // PCM
        Channels = parameters.Channels,
        SamplesPerSec = parameters.SamplesPerSecond,
        AverageBytesPerSecond = parameters.SamplesPerSecond * 2 * 2, // assumes 16-bit stereo
        BlockAlign = 0,
        BitsPerSample = parameters.BitsPerSample,
        Size = 0 };

    mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = wfx.ToHexString();
    this.media_desc = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
    mediaStreamDescriptions.Add(this.media_desc);

    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.track_duration.Ticks.ToString (CultureInfo.InvariantCulture);
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = true.ToString ();

    // Without this call the pipeline never learns that the source is ready.
    ReportOpenMediaCompleted (mediaSourceAttributes, mediaStreamDescriptions);
}
Example 9: OpenMediaAsync

protected override void OpenMediaAsync()
{
    startPosition = currentPosition = 0;

    Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    List<MediaStreamDescription> availableStreams = new List<MediaStreamDescription>();

    // Build the CodecPrivateData hex string field by field (little-endian WAVEFORMATEX).
    string format = "";
    format += ToLittleEndianString(string.Format("{0:X4}", 1));                       // format tag: PCM
    format += ToLittleEndianString(string.Format("{0:X4}", Constants.ChannelCount));  // channels
    format += ToLittleEndianString(string.Format("{0:X8}", Constants.SampleRate));    // samples per second
    format += ToLittleEndianString(string.Format("{0:X8}", byteRate));                // average bytes per second
    format += ToLittleEndianString(string.Format("{0:X4}", blockAlign));              // block align
    format += ToLittleEndianString(string.Format("{0:X4}", Constants.BitsPerSample)); // bits per sample
    format += ToLittleEndianString(string.Format("{0:X4}", 0));                       // cbSize: no extra data

    streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = format;
    mediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
    availableStreams.Add(mediaStreamDescription);

    sourceAttributes[MediaSourceAttributesKeys.Duration] = "0";
    sourceAttributes[MediaSourceAttributesKeys.CanSeek] = "false";

    ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
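ToLittleEndianString is likewise assumed by the snippet rather than shown. A hedged sketch that reverses a big-endian hex string one byte (two hex digits) at a time, e.g. "0000AC44" becomes "44AC0000":

// Requires: using System.Text;
private static string ToLittleEndianString(string bigEndianHex)
{
    StringBuilder builder = new StringBuilder(bigEndianHex.Length);
    // Walk the byte pairs from the end of the string to the start.
    for (int i = bigEndianHex.Length - 2; i >= 0; i -= 2)
        builder.Append(bigEndianHex, i, 2);
    return builder.ToString();
}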
Example 10: OpenMediaAsync

protected override void OpenMediaAsync()
{
    // Initialize the data structures to pass to the media pipeline via the MediaStreamSource.
    Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();

    // Pull in the entire video stream.
    byte[] videoData = new byte[this.videoStream.Length];
    if (videoData.Length != this.videoStream.Read(videoData, 0, videoData.Length))
    {
        throw new IOException("Could not read in the VideoStream");
    }

    // TODO: parse only up to the first frame; find the offset of the first
    // frame and pass it as the length parameter of Push.
    org.diracvideo.Jirac.Decoder dec = new org.diracvideo.Jirac.Decoder();
    dec.Push(videoData, 0, videoData.Length);
    dec.Decode();

    mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = dec.format.ToString();
    this.streamDescription = new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);
    mediaStreamDescriptions.Add(streamDescription);

    // A fixed five-minute duration is reported to avoid computing the real stream
    // length; this keeps this initial version of the code simple for readers.
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromMinutes(5).Ticks.ToString(CultureInfo.InvariantCulture);
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";

    // Report that the DiracMediaStreamSource has finished initializing its
    // internal state and can now deliver Dirac samples.
    this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
Example 11: OpenMediaAsync

/// <summary>
/// Open the media and create the stream and source structures.
/// </summary>
protected override void OpenMediaAsync() {
    header = WaveFormatExtensible.ReadHeader(stream);
    header.ValidateWaveFormat();
    sampleSize = (long)header.Channels * header.BitsPerSample / 8 * numSamples;
    startPosition = currentPosition = stream.Position;
    pcmDataLen = stream.Length - startPosition;
    duration = header.AudioDurationFromDataLen(pcmDataLen);

    // Init
    Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    List<MediaStreamDescription> availableStreams = new List<MediaStreamDescription>();

    // Stream description
    streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = header.ToHexString();
    MediaStreamDescription msd = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
    this.audioDesc = msd;
    availableStreams.Add(this.audioDesc);

    sourceAttributes[MediaSourceAttributesKeys.Duration] = duration.ToString();
    ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
Example 12: OpenMediaAsync

/// <summary>
/// Initialises the data structures used to pass data to the media pipeline
/// via the MediaStreamSource.
/// </summary>
protected override void OpenMediaAsync()
{
    Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes =
        new Dictionary<MediaSourceAttributesKeys, string>();
    Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes =
        new Dictionary<MediaStreamAttributeKeys, string>();
    List<MediaStreamDescription> mediaStreamDescriptions =
        new List<MediaStreamDescription>();

    CameraStreamSourceDataSingleton dataSource = CameraStreamSourceDataSingleton.Instance;
    mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "RGBA";
    mediaStreamAttributes[MediaStreamAttributeKeys.Width] = dataSource.FrameWidth.ToString();
    mediaStreamAttributes[MediaStreamAttributeKeys.Height] = dataSource.FrameHeight.ToString();
    videoStreamDescription =
        new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);
    mediaStreamDescriptions.Add(videoStreamDescription);

    // A zero timespan means an infinite video.
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] =
        TimeSpan.FromSeconds(0).Ticks.ToString(CultureInfo.InvariantCulture);
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();
    frameTime = (int)TimeSpan.FromSeconds((double)0).Ticks;

    // Report that initialization has finished; frame samples can now be delivered.
    ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

    // Tick once per second to measure the achieved frame rate.
    DispatcherTimer fpsTimer = new DispatcherTimer();
    fpsTimer.Interval = TimeSpan.FromSeconds(1);
    fpsTimer.Tick += Fps_Tick;
    fpsTimer.Start();
}
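Fps_Tick is referenced but not shown. A plausible handler, assuming a hypothetical frameCounter field that the class's GetSampleAsync increments for every frame it delivers:

private void Fps_Tick(object sender, EventArgs e)
{
    // Runs once per second: publish and reset the frame counter.
    System.Diagnostics.Debug.WriteLine("Camera source FPS: " + frameCounter);
    frameCounter = 0; // assumed field incremented per delivered frame
}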
Example 13: ReadPastId3v2TagsCallback

/// <summary>
/// Callback which handles setting up an MSS once the first MpegFrame after Id3v2 data has been read.
/// </summary>
/// <param name="mpegLayer3Frame">First MpegFrame</param>
/// <param name="mediaStreamAttributes">Empty dictionary for MediaStreamAttributes</param>
/// <param name="mediaStreamDescriptions">Empty list for MediaStreamDescriptions</param>
/// <param name="mediaSourceAttributes">Empty dictionary for MediaSourceAttributes</param>
private void ReadPastId3v2TagsCallback(
    MpegFrame mpegLayer3Frame,
    Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes,
    List<MediaStreamDescription> mediaStreamDescriptions,
    Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes)
{
    if (mpegLayer3Frame.FrameSize <= 0)
    {
        throw new InvalidOperationException("MpegFrame's FrameSize must be positive");
    }

    // Initialize the Mp3 data structures used by the media pipeline with state from the first frame.
    WaveFormatExtensible wfx = new WaveFormatExtensible();
    this.MpegLayer3WaveFormat = new MpegLayer3WaveFormat();
    this.MpegLayer3WaveFormat.WaveFormatExtensible = wfx;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.FormatTag = 85; // WAVE_FORMAT_MPEGLAYER3
    this.MpegLayer3WaveFormat.WaveFormatExtensible.Channels = (short)((mpegLayer3Frame.Channels == Channel.SingleChannel) ? 1 : 2);
    this.MpegLayer3WaveFormat.WaveFormatExtensible.SamplesPerSec = mpegLayer3Frame.SamplingRate;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond = mpegLayer3Frame.Bitrate / 8;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.BlockAlign = 1;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.BitsPerSample = 0;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.ExtraDataSize = 12;

    this.MpegLayer3WaveFormat.Id = 1;
    this.MpegLayer3WaveFormat.BitratePaddingMode = 0;
    this.MpegLayer3WaveFormat.FramesPerBlock = 1;
    this.MpegLayer3WaveFormat.BlockSize = (short)mpegLayer3Frame.FrameSize;
    this.MpegLayer3WaveFormat.CodecDelay = 0;

    mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.MpegLayer3WaveFormat.ToHexString();
    this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
    mediaStreamDescriptions.Add(this.audioStreamDescription);

    // Duration estimate: stream length over byte rate (assumes a constant bitrate).
    this.trackDuration = new TimeSpan(0, 0, (int)(this.audioStreamLength / MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond));
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.trackDuration.Ticks.ToString(CultureInfo.InvariantCulture);

    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = this.audioStream.CanSeek ? "1" : "0";

    // Report that the Mp3MediaStreamSource has finished initializing its internal
    // state and can now deliver Mp3 samples.
    this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

    this.currentFrame = mpegLayer3Frame;
    this.currentFrameStartPosition = MpegFrame.FrameHeaderSize;
}
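One caveat on the duration computed above: the integer division truncates to whole seconds, and any length-over-bitrate estimate is only exact for constant-bitrate files. A sketch of a tick-precise variant under the same CBR assumption:

// Same CBR assumption, but keeps sub-second precision by working in ticks.
long bytesPerSecond = this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond;
this.trackDuration = TimeSpan.FromTicks(
    this.audioStreamLength * TimeSpan.TicksPerSecond / bytesPerSecond);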
Example 14: OpenMediaAsync

protected override void OpenMediaAsync()
{
    // WaveFormatEx for HE-AAC carried in an ADTS stream.
    HeAacWaveFormat aacf = new HeAacWaveFormat();
    WaveFormatExtensible wfx = new WaveFormatExtensible();
    aacf.WaveFormatExtensible = wfx;
    aacf.WaveFormatExtensible.FormatTag = 0x1610; // MPEG HE-AAC
    aacf.WaveFormatExtensible.Channels = 2;
    aacf.WaveFormatExtensible.BlockAlign = 1;
    aacf.WaveFormatExtensible.BitsPerSample = 0;  // unknown, so set to 0
    aacf.WaveFormatExtensible.SamplesPerSec = 24000; // valid range: 8000 to 96000 Hz
    aacf.WaveFormatExtensible.AverageBytesPerSecond = 0; // wfx.SamplesPerSec * wfx.Channels * wfx.BitsPerSample / wfx.BlockAlign;
    aacf.WaveFormatExtensible.Size = 12;
    // Extra 3 words in WAVEFORMATEX;
    // refer to http://msdn.microsoft.com/en-us/library/windows/desktop/dd757806(v=vs.85).aspx
    aacf.wPayloadType = 0x0; // Audio Data Transport Stream (ADTS), as defined by MPEG-2.
    aacf.wAudioProfileLevelIndication = 0xFE;
    aacf.wStructType = 0;

    string codecPrivateData = aacf.ToHexString();
    Dictionary<MediaStreamAttributeKeys, string> audioStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    audioStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = codecPrivateData;
    audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, audioStreamAttributes);

    // Wait for the first video block and take SPS/PPS from its first I-frame.
    m_vbuffer.WaitForWorkItem();
    m_curVideoBlk = m_vbuffer.Dequeue().CommandParameter as MSF.VideoBlock;
    if (m_curVideoBlk == null)
        return;
    vIdx = 0;
    fNum = (int)m_curVideoBlk.VideoFrameNum;
    H264NalFormat h264f = new H264NalFormat();
    h264f.sps = m_curVideoBlk.FirstIFrameInfo.sps;
    h264f.pps = m_curVideoBlk.FirstIFrameInfo.pps;
    string s = h264f.ToHexString();

    // Video
    Dictionary<MediaStreamAttributeKeys, string> videoStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    videoStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
    videoStreamAttributes[MediaStreamAttributeKeys.Height] = "240";
    videoStreamAttributes[MediaStreamAttributeKeys.Width] = "320";
    videoStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = s; // e.g. "0000016742E00D96520283F40500000168CE388000"
    videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, videoStreamAttributes);

    // Media
    Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromSeconds(6).Ticks.ToString(CultureInfo.InvariantCulture);
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";

    List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();
#if !DEBUG
    // The emulator (debug builds) does not support HE-AAC, so the audio
    // stream is only registered in release builds.
    mediaStreamDescriptions.Add(audioStreamDescription);
#endif
    mediaStreamDescriptions.Add(videoStreamDescription);

    this.AudioBufferLength = 500;
    this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
Example 15: ParseAvcConfig

private static void ParseAvcConfig(
    MediaStreamDescription stream,
    List<MediaStreamSample> samples,
    byte[] data)
{
    // 'data' is an AVCDecoderConfigurationRecord (the payload of an MP4 'avcC'
    // box): a 5-byte header, then the SPS count in the low 5 bits of the next
    // byte, then length-prefixed SPS NAL units, then a PPS count followed by
    // length-prefixed PPS NAL units.
    System.IO.Stream ios = new System.IO.MemoryStream(data);
    ios.Seek(5, System.IO.SeekOrigin.Begin);

    int num_sps = ios.ReadByte() & 0x1f;
    for (int i = 0; i < num_sps; ++i)
    {
        // Each parameter set is preceded by a 16-bit big-endian length.
        int len_sps = (ios.ReadByte() << 8) | ios.ReadByte();
        byte[] sps = new byte[len_sps];
        ios.Read(sps, 0, len_sps);
        samples.Add(new MediaStreamSample(
            stream,
            new System.IO.MemoryStream(sps),
            0,
            len_sps,
            0,
            new Dictionary<MediaSampleAttributeKeys, string>()));
    }

    int num_pps = ios.ReadByte();
    for (int i = 0; i < num_pps; ++i)
    {
        int len_pps = (ios.ReadByte() << 8) | ios.ReadByte();
        byte[] pps = new byte[len_pps];
        ios.Read(pps, 0, len_pps);
        samples.Add(new MediaStreamSample(
            stream,
            new System.IO.MemoryStream(pps),
            0,
            len_pps,
            0,
            new Dictionary<MediaSampleAttributeKeys, string>()));
    }
}
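An illustrative call site, with all names hypothetical: the avcC payload is assumed to have been extracted from the container, and the resulting SPS/PPS samples are queued so they can be handed to the pipeline ahead of the first video frame.

List<MediaStreamSample> configSamples = new List<MediaStreamSample>();
// avcConfigBytes: the raw AVCDecoderConfigurationRecord read from the container.
ParseAvcConfig(videoStreamDescription, configSamples, avcConfigBytes);
foreach (MediaStreamSample s in configSamples)
    pendingSamples.Enqueue(s); // assumed Queue<MediaStreamSample>, drained by GetSampleAsync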