本文整理汇总了C#中AMFWriter.WriteString方法的典型用法代码示例。如果您正苦于以下问题:C# AMFWriter.WriteString方法的具体用法?C# AMFWriter.WriteString怎么用?C# AMFWriter.WriteString使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类AMFWriter
的用法示例。
在下文中一共展示了AMFWriter.WriteString方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: WriteData
/// <summary>
/// Serializes a System.Data.SqlTypes value to AMF0 by unwrapping its CLR value.
/// Throws FluorineException when the runtime type is not a supported SqlType.
/// </summary>
public void WriteData(AMFWriter writer, object data) {
    // SqlTypes surface a database NULL through INullable; emit an AMF null for those.
    INullable nullable = data as INullable;
    if (nullable != null && nullable.IsNull) {
        writer.WriteNull();
        return;
    }
    // Unwrap each supported SqlType and delegate to the writer.
    if (data is SqlByte)
        writer.WriteData(ObjectEncoding.AMF0, ((SqlByte)data).Value);
    else if (data is SqlInt16)
        writer.WriteData(ObjectEncoding.AMF0, ((SqlInt16)data).Value);
    else if (data is SqlInt32)
        writer.WriteData(ObjectEncoding.AMF0, ((SqlInt32)data).Value);
    else if (data is SqlInt64)
        writer.WriteData(ObjectEncoding.AMF0, ((SqlInt64)data).Value);
    else if (data is SqlSingle)
        writer.WriteData(ObjectEncoding.AMF0, ((SqlSingle)data).Value);
    else if (data is SqlDouble)
        writer.WriteData(ObjectEncoding.AMF0, ((SqlDouble)data).Value);
    else if (data is SqlDecimal)
        writer.WriteData(ObjectEncoding.AMF0, ((SqlDecimal)data).Value);
    else if (data is SqlMoney)
        writer.WriteData(ObjectEncoding.AMF0, ((SqlMoney)data).Value);
    else if (data is SqlDateTime)
        writer.WriteData(ObjectEncoding.AMF0, ((SqlDateTime)data).Value);
    else if (data is SqlString)
        writer.WriteString(((SqlString)data).Value);
    else if (data is SqlGuid)
        // Guids have no native AMF form; send the canonical "D" string representation.
        writer.WriteData(ObjectEncoding.AMF0, ((SqlGuid)data).Value.ToString("D"));
    else if (data is SqlBoolean)
        writer.WriteBoolean(((SqlBoolean)data).Value);
    else {
        string msg = string.Format("Could not find serializer for type {0}", data.GetType().FullName);
        if (_log.IsErrorEnabled)
            _log.Error(msg);
        throw new FluorineException(msg);
    }
}
示例2: Serialize
/// <summary>
/// Writes this object's name and path as AMF strings, then its attribute map.
/// </summary>
public void Serialize(AMFWriter writer)
{
    writer.WriteString(Name);
    writer.WriteString(Path);
    writer.WriteData(ObjectEncoding.AMF0, _attributes);
}
示例3: CreateFileMeta
/// <summary>
/// Create tag for metadata event.
///
/// Info from http://www.kaourantin.net/2007/08/what-just-happened-to-video-on-web_20.html
/// <para>
/// duration - Obvious. But unlike for FLV files this field will always be present.
/// videocodecid - For H.264 we report 'avc1'.
/// audiocodecid - For AAC we report 'mp4a', for MP3 we report '.mp3'.
/// avcprofile - 66, 77, 88, 100, 110, 122 or 144 which corresponds to the H.264 profiles.
/// avclevel - A number between 10 and 51. Consult this list to find out more.
/// aottype - Either 0, 1 or 2. This corresponds to AAC Main, AAC LC and SBR audio types.
/// moovposition - The offset in bytes of the moov atom in a file.
/// trackinfo - An array of objects containing various infomation about all the tracks in a file
/// ex.
/// trackinfo[0].length: 7081
/// trackinfo[0].timescale: 600
/// trackinfo[0].sampledescription.sampletype: avc1
/// trackinfo[0].language: und
/// trackinfo[1].length: 525312
/// trackinfo[1].timescale: 44100
/// trackinfo[1].sampledescription.sampletype: mp4a
/// trackinfo[1].language: und
///
/// chapters - As mentioned above information about chapters in audiobooks.
/// seekpoints - As mentioned above times you can directly feed into NetStream.seek();
/// videoframerate - The frame rate of the video if a monotone frame rate is used. Most videos will have a monotone frame rate.
/// audiosamplerate - The original sampling rate of the audio track.
/// audiochannels - The original number of channels of the audio track.
/// tags - As mentioned above ID3 like tag information.
/// </para>
///
/// <para>
/// width: Display width in pixels.
/// height: Display height in pixels.
/// duration: Duration in seconds.
/// avcprofile: AVC profile number such as 55, 77, 100 etc.
/// avclevel: AVC IDC level number such as 10, 11, 20, 21 etc.
/// aacaot: AAC audio object type; 0, 1 or 2 are supported.
/// videoframerate: Frame rate of the video in this MP4.
/// seekpoints: Array that lists the available keyframes in a file as time stamps in milliseconds.
/// This is optional as the MP4 file might not contain this information. Generally speaking,
/// most MP4 files will include this by default.
/// videocodecid: Usually a string such as "avc1" or "VP6F."
/// audiocodecid: Usually a string such as ".mp3" or "mp4a."
/// progressivedownloadinfo: Object that provides information from the "pdin" atom. This is optional
/// and many files will not have this field.
/// trackinfo: Object that provides information on all the tracks in the MP4 file, including their sample description ID.
/// tags: Array of key value pairs representing the information present in the "ilst" atom, which is
/// the equivalent of ID3 tags for MP4 files. These tags are mostly used by iTunes.
/// </para>
/// </summary>
/// <returns>Metadata event tag.</returns>
ITag CreateFileMeta()
{
#if !SILVERLIGHT
log.Debug("Creating onMetaData");
#endif
// Create tag for onMetaData event
ByteBuffer buf = ByteBuffer.Allocate(1024);
buf.AutoExpand = true;
AMFWriter output = new AMFWriter(buf);
output.WriteString("onMetaData");
Dictionary<string, object> props = new Dictionary<string, object>();
// Duration property
props.Add("duration", ((double)_duration / (double)_timeScale));
props.Add("width", _width);
props.Add("height", _height);
// Video codec id
props.Add("videocodecid", _videoCodecId);
props.Add("avcprofile", _avcProfile);
props.Add("avclevel", _avcLevel);
props.Add("videoframerate", _fps);
// Audio codec id - watch for mp3 instead of aac
props.Add("audiocodecid", _audioCodecId);
props.Add("aacaot", _audioCodecType);
props.Add("audiosamplerate", _audioTimeScale);
props.Add("audiochannels", _audioChannels);
props.Add("moovposition", _moovOffset);
//props.put("chapters", ""); //this is for f4b - books
if (_seekPoints != null)
{
props.Add("seekpoints", _seekPoints);
}
//tags will only appear if there is an "ilst" atom in the file
//props.put("tags", "");
List<Dictionary<String, Object>> arr = new List<Dictionary<String, Object>>(2);
if (_hasAudio)
{
Dictionary<String, Object> audioMap = new Dictionary<String, Object>(4);
audioMap.Add("timescale", _audioTimeScale);
audioMap.Add("language", "und");
List<Dictionary<String, String>> desc = new List<Dictionary<String, String>>(1);
audioMap.Add("sampledescription", desc);
Dictionary<String, String> sampleMap = new Dictionary<String, String>(1);
//.........这里部分代码省略.........
示例4: WriteData
/// <summary>
/// Serializes a Guid as its string form (Guid.ToString() uses the "D" format).
/// A non-Guid argument throws InvalidCastException, as in the original.
/// </summary>
public void WriteData(AMFWriter writer, object data)
{
    Guid guid = (Guid)data;
    writer.WriteString(guid.ToString());
}
示例5: SendOnPlayStatus
/// <summary>
/// Pushes an AMF0-encoded "onPlayStatus" notification to the client,
/// timestamped to continue the stream's timeline.
/// </summary>
/// <param name="code">Status code to report (e.g. a play-status constant).</param>
/// <param name="duration">Duration value placed in the status info object.</param>
/// <param name="bytes">Byte count placed in the status info object.</param>
private void SendOnPlayStatus(String code, int duration, long bytes)
{
    // Encode the event name followed by the status info map.
    MemoryStream stream = new MemoryStream();
    AMFWriter writer = new AMFWriter(stream);
    writer.WriteString("onPlayStatus");
    Hashtable info = new Hashtable();
    info.Add("code", code);
    info.Add("level", "status");
    info.Add("duration", duration);
    info.Add("bytes", bytes);
    writer.WriteAssociativeArray(ObjectEncoding.AMF0, info);
    ByteBuffer payload = new ByteBuffer(stream);
    IRtmpEvent notify = new Notify(payload);
    // Reuse the previous message's timestamp so the event slots into the timeline;
    // fall back to 0 when nothing has been sent yet.
    notify.Timestamp = _lastMessage != null ? _lastMessage.Timestamp : 0;
    RtmpMessage message = new RtmpMessage();
    message.body = notify;
    DoPushMessage(message);
}
示例6: CreateFileMeta
/// <summary>
/// Builds the onMetaData tag describing this file's audio properties.
/// Side effect: converts the <c>_duration</c> field to milliseconds.
/// </summary>
/// <returns>The metadata event tag.</returns>
ITag CreateFileMeta() {
    log.Debug("Creating onMetaData");
    // Serialize the "onMetaData" event name plus a property map into an AMF0 buffer.
    ByteBuffer buffer = ByteBuffer.Allocate(1024);
    buffer.AutoExpand = true;
    AMFWriter writer = new AMFWriter(buffer);
    writer.WriteString("onMetaData");
    Hashtable properties = new Hashtable();
    // Duration in seconds, derived from the track's own time scale.
    properties.Add("duration", ((double)_duration / (double)_timeScale));
    // Audio codec id - watch for mp3 instead of aac.
    properties.Add("audiocodecid", _audioCodecId);
    properties.Add("aacaot", _audioCodecType);
    properties.Add("audiosamplerate", _audioTimeScale);
    properties.Add("audiochannels", _audioChannels);
    properties.Add("moovposition", _moovOffset);
    // Tags would only appear if there is an "ilst" atom in the file.
    //props.put("tags", "");
    properties.Add("canSeekToEnd", false);
    writer.WriteAssociativeArray(ObjectEncoding.AMF0, properties);
    buffer.Flip();
    // All metadata properties are written; switch the duration field to milliseconds.
    _duration = (long)Math.Round(_duration * 1000d);
    ITag result = new Tag(IOConstants.TYPE_METADATA, 0, buffer.Limit, buffer.ToArray(), 0);
    return result;
}
示例7: SaveObject
/// <summary>
/// Persists an object to its backing file: the payload is the object's full
/// type name (AMF string) followed by the object's own serialized state.
/// </summary>
/// <param name="obj">Object to persist.</param>
/// <returns>Always true; failures surface as exceptions.</returns>
private bool SaveObject(IPersistable obj) {
    // Resolve the target file and make sure its directory exists.
    string filename = GetObjectFilename(obj);
    FileInfo file = _scope.Context.GetResource(filename).File;
    string directory = file.DirectoryName;
    if (!Directory.Exists(directory))
        Directory.CreateDirectory(directory);
    lock (this.SyncRoot) {
        // Serialize into memory first, then write the file in one shot.
        MemoryStream stream = new MemoryStream();
        AMFWriter writer = new AMFWriter(stream);
        writer.UseLegacyCollection = false;
        writer.WriteString(obj.GetType().FullName);
        //amfSerializer.WriteData(ObjectEncoding.AMF0, obj);
        obj.Serialize(writer);
        writer.Flush();
        byte[] payload = stream.ToArray();
        stream.Close();
        using (FileStream fs = new FileStream(file.FullName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite)) {
            fs.Write(payload, 0, payload.Length);
            fs.Close();
        }
    }
    return true;
}
示例8: WriteMetadataTag
/// <summary>
/// Write "onMetaData" tag to the file.
/// </summary>
/// <param name="duration">Duration to write in milliseconds.</param>
/// <param name="videoCodecId">Id of the video codec used while recording.</param>
/// <param name="audioCodecId">Id of the audio codec used while recording.</param>
private void WriteMetadataTag(double duration, object videoCodecId, object audioCodecId)
{
    // Remember where the metadata tag starts so it can be located later.
    _metaPosition = _writer.BaseStream.Position;
    MemoryStream stream = new MemoryStream();
    AMFWriter writer = new AMFWriter(stream);
    writer.WriteString("onMetaData");
    Dictionary<string, object> properties = new Dictionary<string, object>();
    // NOTE(review): the 'duration' parameter is never used — the field _duration
    // is written instead. Confirm against callers whether that is intentional.
    properties.Add("duration", _duration);
    // Codec ids are optional: omit the key entirely when unknown.
    if (videoCodecId != null)
        properties.Add("videocodecid", videoCodecId);
    if (audioCodecId != null)
        properties.Add("audiocodecid", audioCodecId);
    properties.Add("canSeekToEnd", true);
    writer.WriteAssociativeArray(ObjectEncoding.AMF0, properties);
    byte[] body = stream.ToArray();
    // Record the size of the first metadata tag ever written.
    if (_fileMetaSize == 0)
        _fileMetaSize = body.Length;
    ITag onMetaData = new Tag(IOConstants.TYPE_METADATA, 0, body.Length, body, 0);
    WriteTag(onMetaData);
}
示例9: CreateFileMeta
/// <summary>
/// Create tag for metadata event. Codec ids are recovered by peeking at the
/// first video/audio tag headers, restoring the read position afterwards.
/// </summary>
/// <returns>The onMetaData event tag.</returns>
private ITag CreateFileMeta()
{
    // Serialize "onMetaData" plus a property map into an auto-expanding AMF0 buffer.
    ByteBuffer buffer = ByteBuffer.Allocate(1024);
    buffer.AutoExpand = true;
    AMFWriter writer = new AMFWriter(buffer);
    writer.WriteString("onMetaData");
    Dictionary<string, object> properties = new Dictionary<string, object>();
    // Duration in seconds.
    properties.Add("duration", _duration / 1000.0);
    if (_firstVideoTag != -1)
    {
        // Peek at the first video tag's flags byte for the codec id, then seek back.
        long savedPosition = GetCurrentPosition();
        SetCurrentPosition(_firstVideoTag);
        ReadTagHeader();
        byte flags = _reader.ReadByte();
        properties.Add("videocodecid", flags & IOConstants.MASK_VIDEO_CODEC);
        SetCurrentPosition(savedPosition);
    }
    if (_firstAudioTag != -1)
    {
        // Same peek for the audio codec id, stored in the high nibble.
        long savedPosition = GetCurrentPosition();
        SetCurrentPosition(_firstAudioTag);
        ReadTagHeader();
        byte flags = _reader.ReadByte();
        properties.Add("audiocodecid", (flags & IOConstants.MASK_SOUND_FORMAT) >> 4);
        SetCurrentPosition(savedPosition);
    }
    properties.Add("canSeekToEnd", true);
    writer.WriteAssociativeArray(ObjectEncoding.AMF0, properties);
    buffer.Flip();
    return new Tag(IOConstants.TYPE_METADATA, 0, buffer.Limit, buffer.ToArray(), 0);
}
示例10: WriteData
/// <summary>
/// Serializes a string value. The 'as' cast preserves the original behavior:
/// a non-string argument yields null rather than throwing.
/// </summary>
public void WriteData(AMFWriter writer, object data)
{
    string text = data as string;
    writer.WriteString(text);
}