This article collects typical usage examples of the EndianBinaryReader.ReadDouble method in C#. If you have been wondering how to use C#'s EndianBinaryReader.ReadDouble, or what it is good for, the curated method examples here may help. You can also explore further usage examples of the containing class, EndianBinaryReader.
The following presents 8 code examples of the EndianBinaryReader.ReadDouble method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code examples.
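Before the examples, a note on the reader itself: the constructor calls in Examples 4 through 8 match the EndianBinaryReader from Jon Skeet's MiscUtil library, so the minimal sketch below assumes that API (an assumption — the examples come from several different projects). ReadDouble consumes 8 bytes and converts them with whichever EndianBitConverter the reader was built with.
// Minimal sketch, assuming a MiscUtil-style EndianBinaryReader:
// 0x3FF0000000000000 is the IEEE-754 big-endian encoding of 1.0.
var bytes = new byte[] { 0x3F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };
using (var ms = new MemoryStream(bytes))
using (var reader = new EndianBinaryReader(EndianBitConverter.Big, ms))
{
    double d = reader.ReadDouble(); // 1.0 when decoded big-endian
}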
Example 1: Parse
protected override void Parse(EndianBinaryReader r)
{
#if DEBUG // note: the whole parse is compiled out of non-DEBUG builds
Action = (Actions)ReadVarInt(r);
switch (Action)
{
case Actions.SetSize:
Radius = r.ReadDouble();
break;
case Actions.LerpSize:
OldRadius = r.ReadDouble();
NewRadius = r.ReadDouble();
Speed = ReadVarLong(r);
break;
case Actions.SetCenter:
X = r.ReadDouble();
Z = r.ReadDouble();
break;
case Actions.Initialize:
X = r.ReadDouble();
Z = r.ReadDouble();
OldRadius = r.ReadDouble();
NewRadius = r.ReadDouble();
Speed = ReadVarLong(r);
U = ReadVarInt(r);
//We have 7 bytes left here, more than 2 VarInts
WarningTime = ReadVarInt(r);
WarningBlocks = ReadVarInt(r);
break;
case Actions.WarningTime:
WarningTime = ReadVarInt(r);
break;
case Actions.WarningBlocks:
WarningBlocks = ReadVarInt(r);
break;
default:
throw new NotImplementedException();
}
#endif
}
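Example 1 relies on ReadVarInt and ReadVarLong helpers that are not shown. Purely as a hypothetical reconstruction — assuming the usual protocol VarInt encoding of 7 payload bits per byte, least-significant group first, with the high bit set while more bytes follow — ReadVarInt could look like this:
private static int ReadVarInt(EndianBinaryReader r)
{
    int value = 0;
    int shift = 0;
    byte b;
    do
    {
        b = r.ReadByte();             // one encoded group per byte
        value |= (b & 0x7F) << shift; // low 7 bits carry the payload
        shift += 7;
        if (shift > 35)
            throw new InvalidDataException("VarInt too long"); // more than 5 bytes
    } while ((b & 0x80) != 0);        // high bit is the continuation flag
    return value;
}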
Example 2: Read
public void Read(Stream inStream, MesgDefinition defnMesg)
{
inStream.Position = 1; // skip the one-byte record header
EndianBinaryReader mesgReader = new EndianBinaryReader(inStream, defnMesg.IsBigEndian);
LocalNum = defnMesg.LocalMesgNum;
foreach (FieldDefinition fieldDef in defnMesg.GetFields())
{
// It's possible the field type found in the field definition may
// not agree with the type defined in the profile. The profile
// type will be preferred for decode.
Field field = GetField(fieldDef.Num);
if (field == null)
{
// We normally won't have fields attached to our skeleton message;
// as we add values we need to add the fields too, based on the
// mesg/field combo in the profile. The field must derive from the
// profile so the scale etc. is correct.
field = new Field(Profile.GetMesg(this.Num).GetField(fieldDef.Num));
if (field.Num == Fit.FieldNumInvalid)
{
// If there was no info in the profile the FieldNum will get set to invalid
// so preserve the unknown fields info while we know it
field.Num = fieldDef.Num;
field.Type = fieldDef.Type;
}
SetField(field);
}
object value;
// Strings may be arrays and are of variable length
if ((field.Type & Fit.BaseTypeNumMask) == Fit.String)
{
List<byte> utf8Bytes = new List<byte>();
byte b = 0;
for (int i=0; i<fieldDef.Size; i++)
{
b = mesgReader.ReadByte();
if (b == 0x00)
{
field.AddValue(utf8Bytes.ToArray());
utf8Bytes.Clear();
}
else
{
utf8Bytes.Add(b);
}
}
if (utf8Bytes.Count != 0)
{
field.AddValue(utf8Bytes.ToArray());
utf8Bytes.Clear();
}
}
else
{
int numElements = (int)fieldDef.Size / Fit.BaseType[field.Type & Fit.BaseTypeNumMask].size;
for (int i=0; i < numElements; i++)
{
switch (field.Type & Fit.BaseTypeNumMask)
{
case Fit.Enum:
case Fit.Byte:
case Fit.UInt8:
case Fit.UInt8z:
value = mesgReader.ReadByte();
break;
case Fit.SInt8:
value = mesgReader.ReadSByte();
break;
case Fit.SInt16:
value = mesgReader.ReadInt16();
break;
case Fit.UInt16:
case Fit.UInt16z:
value = mesgReader.ReadUInt16();
break;
case Fit.SInt32:
value = mesgReader.ReadInt32();
break;
case Fit.UInt32:
case Fit.UInt32z:
value = mesgReader.ReadUInt32();
break;
case Fit.Float32:
value = mesgReader.ReadSingle();
break;
case Fit.Float64:
value = mesgReader.ReadDouble();
break;
//......... the rest of this code is omitted .........
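A follow-up note on the string branch above: each value stored via field.AddValue is a raw UTF-8 byte array, one array per null-terminated string packed into the field. Rendering a stored value is then a plain UTF-8 decode — a sketch assuming the SDK's Field.GetValue(int) accessor:
byte[] raw = (byte[])field.GetValue(0);
string text = System.Text.Encoding.UTF8.GetString(raw);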
Example 3: read
public void read(EndianBinaryReader r)
{
MaxScore = r.ReadDouble();
MaxNotesAndChords = r.ReadDouble();
MaxNotesAndChords_Real = r.ReadDouble();
PointsPerNote = r.ReadDouble();
FirstBeatLength = r.ReadSingle();
StartTime = r.ReadSingle();
CapoFretId = r.ReadByte();
LastConversionDateTime = r.ReadBytes(32);
Part = r.ReadInt16();
SongLength = r.ReadSingle();
StringCount = r.ReadInt32();
Tuning = new Int16[StringCount];
for (int i = 0; i < StringCount; i++)
    Tuning[i] = r.ReadInt16();
Unk11_FirstNoteTime = r.ReadSingle();
Unk12_FirstNoteTime = r.ReadSingle();
MaxDifficulty = r.ReadInt32();
}
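A hedged usage sketch for Example 3 — Metadata stands in for whatever type contains the read method above, and the little-endian converter and file name are assumptions, not taken from the source:
using (var stream = File.OpenRead("song.sng")) // hypothetical path
using (var r = new EndianBinaryReader(EndianBitConverter.Little, stream)) // byte order assumed
{
    var meta = new Metadata(); // assumed containing type
    meta.read(r);
    Console.WriteLine("Strings: " + meta.StringCount + ", max score: " + meta.MaxScore);
}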
Example 4: Propagate
private static bool Propagate(FileStream fh, EndianBinaryReader reader, EndianBinaryWriter writer, Header header, long timestamp, ArchiveInfo higher, ArchiveInfo lower)
{
var aggregationType = header.AggregationType;
var xff = header.xFilesFactor;
var lowerIntervalStart = timestamp - timestamp.Mod(lower.SecondsPerPoint);
var lowerIntervalEnd = lowerIntervalStart + lower.SecondsPerPoint;
fh.Seek(higher.Offset, SeekOrigin.Begin);
var higherBaseInterval = reader.ReadInt64(); // timestamp
var higherBaseValue = reader.ReadDouble(); // value
long higherFirstOffset;
if (higherBaseInterval == 0)
{
higherFirstOffset = higher.Offset;
}
else
{
var timeDistance = lowerIntervalStart - higherBaseInterval;
var pointDistance = timeDistance / higher.SecondsPerPoint;
var byteDistance = pointDistance * PointSize;
higherFirstOffset = higher.Offset + byteDistance.Mod(higher.Size);
}
var higherPoints = lower.SecondsPerPoint / higher.SecondsPerPoint;
var higherSize = higherPoints * PointSize;
var relativeFirstOffset = higherFirstOffset - higher.Offset;
var relativeLastOffset = (relativeFirstOffset + higherSize).Mod(higher.Size);
var higherLastOffset = relativeLastOffset + higher.Offset;
fh.Seek(higherFirstOffset, SeekOrigin.Begin);
byte[] seriesBuffer;
int bytesRead = 0;
if (higherFirstOffset < higherLastOffset)
{
seriesBuffer = new byte[(int)(higherLastOffset - higherFirstOffset)];
// We don't wrap around the archive, so a single read suffices
bytesRead = fh.Read(seriesBuffer, 0, seriesBuffer.Length);
}
else
{
var higherEnd = higher.Offset + higher.Size;
var firstPart = (int)(higherEnd - higherFirstOffset);
var secondPart = (int)(higherLastOffset - higher.Offset);
seriesBuffer = new byte[firstPart + secondPart];
bytesRead += fh.Read(seriesBuffer, 0, firstPart);
fh.Seek(higher.Offset, SeekOrigin.Begin);
bytesRead += fh.Read(seriesBuffer, firstPart, secondPart);
}
var neighborValues = UnpackSeriesBuffer(seriesBuffer, bytesRead);
// Aggregate a value from neighborValues if we have enough known points
var knownValues = neighborValues.Where(x => !x.Equals(default(PointPair)) && x.Timestamp != default(long)).Select(x => x.value);
if (knownValues.Count() == 0)
{
return false;
}
var knownPercent = (double)knownValues.Count() / (double)neighborValues.Length;
Debug.WriteLine(string.Format("Calculate Aggregate xff = {0} for {1} points", knownPercent, knownValues.Count()));
if (knownPercent >= xff)
{
// we have enough data to propagate a value
var aggregateValue = Aggregate(aggregationType, knownValues);
fh.Seek(lower.Offset, SeekOrigin.Begin);
var lowerBaseInterval = reader.ReadInt64(); // timestamp
var lowerBaseValue = reader.ReadDouble(); // value
if (lowerBaseInterval == 0)
{
// First propagated update to this lower archive
fh.Seek(lower.Offset, SeekOrigin.Begin);
writer.Write(lowerIntervalStart);
writer.Write(aggregateValue);
Debug.WriteLine(string.Format("writing aggregate point ({0},{1}) to position {2} - first update", lowerIntervalStart, aggregateValue, lower.Offset));
}
else
{
// Not our first propagated update to this lower archive
var timeDistance = lowerIntervalStart - lowerBaseInterval;
var pointDistance = timeDistance / lower.SecondsPerPoint;
var byteDistance = pointDistance * PointSize;
var lowerOffset = lower.Offset + (byteDistance.Mod(lower.Size));
Debug.WriteLine(string.Format("calculating aggregate offset int = {0} base = {1} td = {2} pd = {3} bd = {4} offset = {5}", lowerIntervalStart, lowerBaseInterval, timeDistance, pointDistance, byteDistance, lowerOffset));
fh.Seek(lowerOffset, SeekOrigin.Begin);
writer.Write(lowerIntervalStart);
//......... the rest of this code is omitted .........
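Examples 4 through 7 lean on a Mod extension method for all of their wrap-around offset arithmetic. A minimal sketch, assuming it mirrors Python's modulo so the result is never negative — which matters here because C#'s % operator returns negative remainders for negative operands:
public static class LongExtensions
{
    // Python-style modulo: the result is always in [0, modulus) for a positive modulus.
    public static long Mod(this long value, long modulus)
    {
        var r = value % modulus;
        return r < 0 ? r + modulus : r;
    }
}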
Example 5: FileUpdate
private static void FileUpdate(FileStream fh, double value, long? timestamp, long? now)
{
var header = ReadHeader(fh);
now = now ?? DateTime.UtcNow.ToUnixTime();
if (!timestamp.HasValue)
{
timestamp = now.Value;
}
var diff = now - timestamp;
if (!(diff < header.MaxRetention && diff >= 0))
{
throw new TimestampNotCoveredException("Timestamp not covered by any archives in this database.");
}
List<ArchiveInfo> lowerArchives = null;
ArchiveInfo archive = new ArchiveInfo();
for (int i = 0; i < header.ArchiveList.Count; i++)
{
archive = header.ArchiveList[i];
// Find the highest-precision archive that covers timestamp
if (archive.Retention < diff)
{
continue;
}
// We'll pass on the update to these lower precision archives later
lowerArchives = header.ArchiveList.Skip(i + 1).ToList();
break;
}
using (var reader = new EndianBinaryReader(EndianBitConverter.Big, new NonClosingStreamWrapper(fh)))
using (var writer = new EndianBinaryWriter(EndianBitConverter.Big, new NonClosingStreamWrapper(fh)))
{
// First we update the highest-precision archive
var myInterval = timestamp.Value - (timestamp.Mod(archive.SecondsPerPoint));
fh.Seek(archive.Offset, SeekOrigin.Begin);
var baseInterval = reader.ReadInt64(); // timestamp
var baseValue = reader.ReadDouble(); // value
if (baseInterval == 0)
{
// this file's first update
fh.Seek(archive.Offset, SeekOrigin.Begin);
writer.Write(myInterval);
writer.Write(value);
baseInterval = myInterval;
baseValue = value;
Debug.WriteLine(string.Format("writing point ({0},{1}) to position {2} - first update", myInterval, value, archive.Offset));
}
else
{
// not our first update
var timeDistance = myInterval - baseInterval;
var pointDistance = timeDistance / archive.SecondsPerPoint;
var byteDistance = pointDistance * PointSize;
var myOffset = archive.Offset + (byteDistance.Mod(archive.Size));
Debug.WriteLine(string.Format("calculating offset int = {0} base = {1} td = {2} pd = {3} bd = {4} offset = {5}", myInterval, baseInterval, timeDistance, pointDistance, byteDistance, myOffset));
fh.Seek(myOffset, SeekOrigin.Begin);
writer.Write(myInterval);
writer.Write(value);
Debug.WriteLine(string.Format("writing point ({0},{1}) to position {2}", myInterval, value, myOffset));
}
// Now we propagate the update to lower-precision archives
var higher = archive;
foreach (var lower in lowerArchives)
{
if (!Propagate(fh, reader, writer, header, myInterval, higher, lower))
{
break;
}
higher = lower;
}
}
fh.Flush(AutoFlush);
}
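FileUpdate also calls a ToUnixTime extension on DateTime; a minimal sketch, assuming it means whole seconds since the Unix epoch:
// Declared in a static class, like Mod above.
public static long ToUnixTime(this DateTime dateTime)
{
    var epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    return (long)(dateTime - epoch).TotalSeconds;
}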
Example 6: UnpackSeriesBuffer
private static PointPair[] UnpackSeriesBuffer(byte[] seriesBuffer, int bytesRead)
{
var valueList = new PointPair[bytesRead / PointSize];
using (var seriesMemoryStream = new MemoryStream(seriesBuffer))
{
using (var seriesReader = new EndianBinaryReader(EndianBitConverter.Big, seriesMemoryStream))
{
for (int i = 0; i < valueList.Length; i++)
{
var timestamp = seriesReader.ReadInt64();
var value = seriesReader.ReadDouble();
valueList[i] = new PointPair(timestamp, value);
//Debug.WriteLine(string.Format("Reading Point ({0},{1}) from i = {2}", timestamp, value, i));
}
}
}
return valueList;
}
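UnpackSeriesBuffer pins down the on-disk point format: an 8-byte big-endian Int64 timestamp followed by an 8-byte double, i.e. PointSize is 16. A hypothetical reconstruction of the PointPair type and the constant, with the field casing copied from the call sites above:
private const int PointSize = sizeof(long) + sizeof(double); // 16 bytes per point

public struct PointPair
{
    public readonly long Timestamp;
    public readonly double value; // lower-case to match the x.value usage above
    public PointPair(long timestamp, double v) { Timestamp = timestamp; value = v; }
}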
Example 7: ArchiveFetch
/// <summary>
/// Fetch data from a single archive. Note that checks for validity of the time
/// period requested happen above this level so it's possible to wrap around the
/// archive on a read and request data older than the archive's retention
/// </summary>
private static ArchiveFetch ArchiveFetch(FileStream fh, ArchiveInfo archive, long fromTime, long untilTime)
{
Debug.WriteLine(string.Format("ArchiveFetch from {0} to {1} in archive [{2},{3}]", fromTime, untilTime, archive.SecondsPerPoint, archive.Points));
var fromInterval = (fromTime - (fromTime.Mod(archive.SecondsPerPoint))) + (int)archive.SecondsPerPoint;
var untilInterval = (untilTime - (untilTime.Mod(archive.SecondsPerPoint))) + (int)archive.SecondsPerPoint;
fh.Seek(archive.Offset, SeekOrigin.Begin);
using (var reader = new EndianBinaryReader(EndianBitConverter.Big, new NonClosingStreamWrapper(fh)))
{
var baseInterval = reader.ReadInt64(); // timestamp
var baseValue = reader.ReadDouble(); // value
if (baseInterval == 0)
{
var step = archive.SecondsPerPoint;
var points = (int)((untilInterval - fromInterval) / step);
var _timeInfo = new TimeInfo(fromInterval, untilInterval, archive.SecondsPerPoint);
var _valueList = Enumerable.Repeat(new PointPair(0, 0), points).ToList();
return new ArchiveFetch(_timeInfo, _valueList);
}
// Determine fromOffset
var timeDistance = fromInterval - baseInterval;
var pointDistance = timeDistance / archive.SecondsPerPoint;
var byteDistance = pointDistance * PointSize;
var fromOffset = (int)(archive.Offset + (byteDistance.Mod(archive.Size)));
// Determine untilOffset
timeDistance = untilInterval - baseInterval;
pointDistance = timeDistance / archive.SecondsPerPoint;
byteDistance = pointDistance * PointSize;
var untilOffset = (int)(archive.Offset + (byteDistance.Mod(archive.Size)));
// read all the points in the interval
fh.Seek(fromOffset, SeekOrigin.Begin);
byte[] seriesBuffer;
int bytesRead = 0;
if (fromOffset < untilOffset)
{
// If we don't wrap around the archive
seriesBuffer = new byte[(int)(untilOffset - fromOffset)];
bytesRead += fh.Read(seriesBuffer, 0, seriesBuffer.Length);
if (bytesRead != seriesBuffer.Length)
{
throw new CorruptWhisperFileException(string.Format("read: {0} != {1}", bytesRead, seriesBuffer.Length));
}
Debug.WriteLine(string.Format("read {0} points starting at offset {1}", (bytesRead / PointSize), fromOffset));
}
else
{
// We do wrap around the archive, so we need two reads
var archiveEnd = archive.Offset + archive.Size;
var firstPart = (int)(archiveEnd - fromOffset);
var secondPart = (int)(untilOffset - archive.Offset);
seriesBuffer = new byte[firstPart + secondPart];
bytesRead += fh.Read(seriesBuffer, 0, firstPart);
Debug.WriteLine(string.Format("read {0} points starting at offset {1}", (firstPart / PointSize), fromOffset));
fh.Seek(archive.Offset, SeekOrigin.Begin);
bytesRead += fh.Read(seriesBuffer, firstPart, secondPart);
Debug.WriteLine(string.Format("read {0} points starting at offset {1}", (secondPart / PointSize), archive.Offset));
}
var valueList = UnpackSeriesBuffer(seriesBuffer, bytesRead);
var timeInfo = new TimeInfo(fromInterval, untilInterval, archive.SecondsPerPoint);
return new ArchiveFetch(timeInfo, valueList.Where(x => !x.Equals(default(PointPair)) && x.Timestamp != default(long)).ToList());
}
}
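The interval math at the top of ArchiveFetch snaps both ends of the request up to the next point boundary. A minimal sketch of that quantization, reusing the Mod extension sketched after Example 4:
// Snap a raw timestamp to the first point boundary strictly after it.
static long NextInterval(long time, long step) => time - time.Mod(step) + step;
// e.g. with 60-second points: NextInterval(1234567890, 60) == 1234567920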
Example 8: ReadHeader
private static Header ReadHeader(FileStream fh)
{
if (cachedHeaders.ContainsKey(fh.Name))
{
return cachedHeaders[fh.Name];
}
var originalOffset = fh.Position;
fh.Seek(0, SeekOrigin.Begin);
Header header;
using (var reader = new EndianBinaryReader(EndianBitConverter.Big, new NonClosingStreamWrapper(fh)))
{
long aggregationType;
long maxRetention;
double xff;
long archiveCount;
try
{
aggregationType = reader.ReadInt64();
maxRetention = reader.ReadInt64();
xff = reader.ReadDouble();
archiveCount = reader.ReadInt64();
}
catch (Exception e)
{
throw new CorruptWhisperFileException("Unable to read header", fh.Name, e);
}
var archives = new List<ArchiveInfo>();
for (int i = 0; i < archiveCount; i++)
{
try
{
var offset = reader.ReadInt64();
var secondsPerPoint = reader.ReadInt64();
var points = reader.ReadInt64();
archives.Add(new ArchiveInfo(secondsPerPoint, points, offset));
}
catch (Exception e)
{
throw new CorruptWhisperFileException(string.Format("Unable to read archive {0} metadata", i), fh.Name, e);
}
}
header = new Header((AggregationType)aggregationType, maxRetention, xff, archives);
}
if (CacheHeaders)
{
cachedHeaders.TryAdd(fh.Name, header);
}
return header;
}
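ReadHeader pins down the file layout: four 8-byte big-endian fields (aggregation type, max retention, xFilesFactor as a double, archive count) followed by three 8-byte fields per archive. A small helper derived from that layout — the name is hypothetical:
// Byte offset at which archive data begins, given the archive count.
private static long HeaderSize(long archiveCount)
{
    const int fieldSize = 8; // Int64 and Double are both 8 bytes on disk
    return 4 * fieldSize + archiveCount * 3 * fieldSize;
}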