This article collects typical usage examples of the C# method AMFWriter.WriteAssociativeArray. If you are wondering what AMFWriter.WriteAssociativeArray is for, how to call it, or what it looks like in real code, the hand-picked examples below should help. You can also explore further usage examples of the containing class, AMFWriter.
Six code examples of the AMFWriter.WriteAssociativeArray method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# examples.
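Before the examples, here is a minimal sketch of the basic call pattern. Only the AMFWriter members that appear in the examples below (the constructor taking a stream, WriteAssociativeArray, ObjectEncoding.AMF0) are used; the helper method name and the key/value pairs are illustrative assumptions, not part of any example.
using System.Collections.Generic;
using System.IO;

// Hypothetical helper: serialize a few key/value pairs as an AMF0
// associative array and return the raw bytes.
static byte[] SerializeStatus(string code, string level)
{
    MemoryStream ms = new MemoryStream();
    AMFWriter writer = new AMFWriter(ms);
    Dictionary<string, object> props = new Dictionary<string, object>();
    props.Add("code", code);
    props.Add("level", level);
    writer.WriteAssociativeArray(ObjectEncoding.AMF0, props);
    return ms.ToArray();
}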
Example 1: WriteData
public void WriteData(AMFWriter writer, object data)
{
    if (data is IList)
    {
        IList list = data as IList;
        object[] array = new object[list.Count];
        list.CopyTo(array, 0);
        writer.WriteArray(ObjectEncoding.AMF0, array);
        return;
    }
#if !(SILVERLIGHT)
    IListSource listSource = data as IListSource;
    if (listSource != null)
    {
        IList list = listSource.GetList();
        object[] array = new object[list.Count];
        list.CopyTo(array, 0);
        writer.WriteArray(ObjectEncoding.AMF0, array);
        return;
    }
#endif
    if (data is IDictionary)
    {
        writer.WriteAssociativeArray(ObjectEncoding.AMF0, data as IDictionary);
        return;
    }
    if (data is Exception)
    {
        writer.WriteASO(ObjectEncoding.AMF0, new ExceptionASO(data as Exception));
        return;
    }
    if (data is IEnumerable)
    {
        List<object> tmp = new List<object>();
        foreach (object element in (data as IEnumerable))
        {
            tmp.Add(element);
        }
        writer.WriteArray(ObjectEncoding.AMF0, tmp.ToArray());
        return;
    }
    writer.WriteObject(ObjectEncoding.AMF0, data);
}
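A hypothetical call into this dispatcher (the variable names and the status values are assumptions; WriteData itself is the method above): any IDictionary, such as a Hashtable, takes the WriteAssociativeArray branch, while arrays, lists and other IEnumerable values end up in WriteArray.
// Minimal usage sketch, assuming "serializer" exposes the WriteData method above
// and "writer" is an AMFWriter positioned where the value should be written.
Hashtable status = new Hashtable();
status.Add("code", "NetStream.Play.Start");   // sample values, not from the original
status.Add("level", "status");
serializer.WriteData(writer, status);                      // IDictionary -> WriteAssociativeArray
serializer.WriteData(writer, new object[] { 1, 2, 3 });    // IList -> WriteArray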
Example 2: CreateFileMeta
//......... part of the code is omitted here .........
/// tags: Array of key value pairs representing the information present in the "ilst" atom, which is
/// the equivalent of ID3 tags for MP4 files. These tags are mostly used by iTunes.
/// </para>
/// </summary>
/// <returns>Metadata event tag.</returns>
ITag CreateFileMeta()
{
#if !SILVERLIGHT
    log.Debug("Creating onMetaData");
#endif
    // Create tag for onMetaData event
    ByteBuffer buf = ByteBuffer.Allocate(1024);
    buf.AutoExpand = true;
    AMFWriter output = new AMFWriter(buf);
    output.WriteString("onMetaData");
    Dictionary<string, object> props = new Dictionary<string, object>();
    // Duration property
    props.Add("duration", ((double)_duration / (double)_timeScale));
    props.Add("width", _width);
    props.Add("height", _height);
    // Video codec id
    props.Add("videocodecid", _videoCodecId);
    props.Add("avcprofile", _avcProfile);
    props.Add("avclevel", _avcLevel);
    props.Add("videoframerate", _fps);
    // Audio codec id - watch for mp3 instead of aac
    props.Add("audiocodecid", _audioCodecId);
    props.Add("aacaot", _audioCodecType);
    props.Add("audiosamplerate", _audioTimeScale);
    props.Add("audiochannels", _audioChannels);
    props.Add("moovposition", _moovOffset);
    //props.put("chapters", ""); //this is for f4b - books
    if (_seekPoints != null)
    {
        props.Add("seekpoints", _seekPoints);
    }
    //tags will only appear if there is an "ilst" atom in the file
    //props.put("tags", "");
    List<Dictionary<String, Object>> arr = new List<Dictionary<String, Object>>(2);
    if (_hasAudio)
    {
        Dictionary<String, Object> audioMap = new Dictionary<String, Object>(4);
        audioMap.Add("timescale", _audioTimeScale);
        audioMap.Add("language", "und");
        List<Dictionary<String, String>> desc = new List<Dictionary<String, String>>(1);
        audioMap.Add("sampledescription", desc);
        Dictionary<String, String> sampleMap = new Dictionary<String, String>(1);
        sampleMap.Add("sampletype", _audioCodecId);
        desc.Add(sampleMap);
        if (_audioSamples != null)
        {
            audioMap.Add("length_property", _audioSampleDuration * _audioSamples.Count);
            //release some memory, since we're done with the vectors
            _audioSamples.Clear();
            _audioSamples = null;
        }
        arr.Add(audioMap);
    }
    if (_hasVideo)
    {
        Dictionary<String, Object> videoMap = new Dictionary<String, Object>(3);
        videoMap.Add("timescale", _videoTimeScale);
        videoMap.Add("language", "und");
        List<Dictionary<String, String>> desc = new List<Dictionary<String, String>>(1);
        videoMap.Add("sampledescription", desc);
        Dictionary<String, String> sampleMap = new Dictionary<String, String>(1);
        sampleMap.Add("sampletype", _videoCodecId);
        desc.Add(sampleMap);
        if (_videoSamples != null)
        {
            videoMap.Add("length_property", _videoSampleDuration * _videoSamples.Count);
            //release some memory, since we're done with the vectors
            _videoSamples.Clear();
            _videoSamples = null;
        }
        arr.Add(videoMap);
    }
    props.Add("trackinfo", arr.ToArray());
    //set this based on existence of seekpoints
    props.Add("canSeekToEnd", (_seekPoints != null));
    output.WriteAssociativeArray(ObjectEncoding.AMF0, props);
    buf.Flip();
    //now that all the meta properties are done, update the duration
    _duration = (long)Math.Round(_duration * 1000d);
    ITag result = new Tag(IOConstants.TYPE_METADATA, 0, buf.Limit, buf.ToArray(), 0);
    return result;
}
Example 3: SendOnPlayStatus
private void SendOnPlayStatus(String code, int duration, long bytes)
{
    MemoryStream ms = new MemoryStream();
    AMFWriter writer = new AMFWriter(ms);
    writer.WriteString("onPlayStatus");
    Hashtable props = new Hashtable();
    props.Add("code", code);
    props.Add("level", "status");
    props.Add("duration", duration);
    props.Add("bytes", bytes);
    writer.WriteAssociativeArray(ObjectEncoding.AMF0, props);
    ByteBuffer buffer = new ByteBuffer(ms);
    IRtmpEvent evt = new Notify(buffer);
    if (_lastMessage != null)
    {
        int timestamp = _lastMessage.Timestamp;
        evt.Timestamp = timestamp;
    }
    else
    {
        evt.Timestamp = 0;
    }
    RtmpMessage msg = new RtmpMessage();
    msg.body = evt;
    DoPushMessage(msg);
}
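Note that the examples differ in the IDictionary implementation they pass: Example 3 uses a non-generic Hashtable, Examples 2, 5 and 6 use Dictionary<string, object>, and Example 1 shows that any IDictionary is accepted. A minimal variant of the same onPlayStatus payload built with a generic dictionary instead (the concrete code, duration and bytes values are placeholders, not taken from the original):
MemoryStream ms = new MemoryStream();
AMFWriter writer = new AMFWriter(ms);
writer.WriteString("onPlayStatus");
Dictionary<string, object> props = new Dictionary<string, object>();
props.Add("code", "NetStream.Play.Complete");  // placeholder status code
props.Add("level", "status");
props.Add("duration", 0);
props.Add("bytes", 0L);
writer.WriteAssociativeArray(ObjectEncoding.AMF0, props);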
Example 4: CreateFileMeta
/// <summary>
/// Create tag for metadata event.
/// </summary>
/// <returns></returns>
ITag CreateFileMeta() {
log.Debug("Creating onMetaData");
// Create tag for onMetaData event
ByteBuffer buf = ByteBuffer.Allocate(1024);
buf.AutoExpand = true;
AMFWriter output = new AMFWriter(buf);
output.WriteString("onMetaData");
Hashtable props = new Hashtable();
// Duration property
props.Add("duration", ((double)_duration / (double)_timeScale));
// Audio codec id - watch for mp3 instead of aac
props.Add("audiocodecid", _audioCodecId);
props.Add("aacaot", _audioCodecType);
props.Add("audiosamplerate", _audioTimeScale);
props.Add("audiochannels", _audioChannels);
props.Add("moovposition", _moovOffset);
//tags will only appear if there is an "ilst" atom in the file
//props.put("tags", "");
props.Add("canSeekToEnd", false);
output.WriteAssociativeArray(ObjectEncoding.AMF0, props);
buf.Flip();
//now that all the meta properties are done, update the duration
_duration = (long)Math.Round(_duration * 1000d);
ITag result = new Tag(IOConstants.TYPE_METADATA, 0, buf.Limit, buf.ToArray(), 0);
return result;
}
Example 5: WriteMetadataTag
/// <summary>
/// Write "onMetaData" tag to the file.
/// </summary>
/// <param name="duration">Duration to write in milliseconds.</param>
/// <param name="videoCodecId">Id of the video codec used while recording.</param>
/// <param name="audioCodecId">Id of the audio codec used while recording.</param>
private void WriteMetadataTag(double duration, object videoCodecId, object audioCodecId)
{
_metaPosition = _writer.BaseStream.Position;
MemoryStream ms = new MemoryStream();
AMFWriter output = new AMFWriter(ms);
output.WriteString("onMetaData");
Dictionary<string, object> props = new Dictionary<string, object>();
props.Add("duration", _duration);
if (videoCodecId != null)
{
props.Add("videocodecid", videoCodecId);
}
if (audioCodecId != null)
{
props.Add("audiocodecid", audioCodecId);
}
props.Add("canSeekToEnd", true);
output.WriteAssociativeArray(ObjectEncoding.AMF0, props);
byte[] buffer = ms.ToArray();
if (_fileMetaSize == 0)
{
_fileMetaSize = buffer.Length;
}
ITag onMetaData = new Tag(IOConstants.TYPE_METADATA, 0, buffer.Length, buffer, 0);
WriteTag(onMetaData);
}
Example 6: CreateFileMeta
/// <summary>
/// Create tag for metadata event.
/// </summary>
/// <returns></returns>
private ITag CreateFileMeta()
{
// Create tag for onMetaData event
ByteBuffer buf = ByteBuffer.Allocate(1024);
buf.AutoExpand = true;
AMFWriter output = new AMFWriter(buf);
// Duration property
output.WriteString("onMetaData");
Dictionary<string, object> props = new Dictionary<string,object>();
props.Add("duration", _duration / 1000.0);
if (_firstVideoTag != -1)
{
long old = GetCurrentPosition();
SetCurrentPosition(_firstVideoTag);
ReadTagHeader();
byte frametype = _reader.ReadByte();
// Video codec id
props.Add("videocodecid", frametype & IOConstants.MASK_VIDEO_CODEC);
SetCurrentPosition(old);
}
if (_firstAudioTag != -1)
{
long old = GetCurrentPosition();
SetCurrentPosition(_firstAudioTag);
ReadTagHeader();
byte frametype = _reader.ReadByte();
// Audio codec id
props.Add("audiocodecid", (frametype & IOConstants.MASK_SOUND_FORMAT) >> 4);
SetCurrentPosition(old);
}
props.Add("canSeekToEnd", true);
output.WriteAssociativeArray(ObjectEncoding.AMF0, props);
buf.Flip();
ITag result = new Tag(IOConstants.TYPE_METADATA, 0, buf.Limit, buf.ToArray(), 0);
return result;
}