本文整理汇总了C#中Raven.Abstractions.Smuggler.SmugglerOptions.MatchFilters方法的典型用法代码示例。如果您正苦于以下问题:C# SmugglerOptions.MatchFilters方法的具体用法?C# SmugglerOptions.MatchFilters怎么用?C# SmugglerOptions.MatchFilters使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Raven.Abstractions.Smuggler.SmugglerOptions
的用法示例。
在下文中一共展示了SmugglerOptions.MatchFilters方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: ImportData
// Imports a smuggler export (indexes, then documents, then attachments) from
// the given stream. The stream is first assumed to be GZip-compressed; on
// InvalidDataException we rewind and retry it as plain uncompressed JSON.
// NOTE(review): the importIndexes parameter is never read in the visible code —
// index import is gated on options.OperateOnTypes instead; confirm intent.
public void ImportData(Stream stream, SmugglerOptions options, bool importIndexes = true)
{
    EnsureDatabaseExists();
    var sw = Stopwatch.StartNew();
    // Try to read the stream compressed, otherwise continue uncompressed.
    JsonTextReader jsonReader;
    try
    {
        var streamReader = new StreamReader(new GZipStream(stream, CompressionMode.Decompress));
        jsonReader = new JsonTextReader(streamReader);
        if (jsonReader.Read() == false)
            return; // empty stream - nothing to import
    }
    catch (InvalidDataException)
    {
        // Not GZip - rewind and read the raw stream instead.
        stream.Seek(0, SeekOrigin.Begin);
        var streamReader = new StreamReader(stream);
        jsonReader = new JsonTextReader(streamReader);
        if (jsonReader.Read() == false)
            return;
    }
    if (jsonReader.TokenType != JsonToken.StartObject)
        throw new InvalidDataException("StartObject was expected");
    // should read indexes now
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.PropertyName)
        throw new InvalidDataException("PropertyName was expected");
    if (Equals("Indexes", jsonReader.Value) == false)
        throw new InvalidDataException("Indexes property was expected");
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.StartArray)
        throw new InvalidDataException("StartArray was expected");
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        // Tokens must be consumed even when index import is disabled,
        // so the reader stays positioned correctly for the next section.
        var index = RavenJToken.ReadFrom(jsonReader);
        if ((options.OperateOnTypes & ItemType.Indexes) != ItemType.Indexes)
            continue;
        var indexName = index.Value<string>("name");
        // Skip built-in (Raven/) and temporary (Temp/) indexes.
        if (indexName.StartsWith("Raven/") || indexName.StartsWith("Temp/"))
            continue;
        PutIndex(indexName, index);
    }
    // should read documents now
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.PropertyName)
        throw new InvalidDataException("PropertyName was expected");
    if (Equals("Docs", jsonReader.Value) == false)
        throw new InvalidDataException("Docs property was expected");
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.StartArray)
        throw new InvalidDataException("StartArray was expected");
    var batch = new List<RavenJObject>();
    int totalCount = 0;
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var document = (RavenJObject)RavenJToken.ReadFrom(jsonReader);
        if ((options.OperateOnTypes & ItemType.Documents) != ItemType.Documents)
            continue;
        if (options.MatchFilters(document) == false)
            continue;
        totalCount += 1;
        batch.Add(document);
        // Flush in fixed-size batches of 128 documents.
        if (batch.Count >= 128)
            FlushBatch(batch);
    }
    FlushBatch(batch); // flush the final partial batch
    var attachmentCount = 0;
    // Attachments section is optional: EndObject here means the export had none.
    if (jsonReader.Read() == false || jsonReader.TokenType == JsonToken.EndObject)
        return;
    if (jsonReader.TokenType != JsonToken.PropertyName)
        throw new InvalidDataException("PropertyName was expected");
    if (Equals("Attachments", jsonReader.Value) == false)
        throw new InvalidDataException("Attachment property was expected");
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.StartArray)
        throw new InvalidDataException("StartArray was expected");
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        attachmentCount += 1;
        var item = RavenJToken.ReadFrom(jsonReader);
        if ((options.OperateOnTypes & ItemType.Attachments) != ItemType.Attachments)
            continue;
        var attachmentExportInfo =
//......... remainder of this example was omitted by the source page .........
示例2: ImportDocuments
// Reads the "Docs" section of a smuggler export from the reader and imports
// every document that survives the configured filters and transform script.
// Returns the number of documents actually put into the database.
private async Task<int> ImportDocuments(JsonTextReader jsonReader, SmugglerOptions options)
{
    var imported = 0;

    // Expect the "Docs" property name followed by the opening of its array.
    if (!jsonReader.Read())
        return imported;
    if (jsonReader.TokenType != JsonToken.PropertyName)
        throw new InvalidDataException("PropertyName was expected");
    if (!Equals("Docs", jsonReader.Value))
        throw new InvalidDataException("Docs property was expected");
    if (!jsonReader.Read())
        return imported;
    if (jsonReader.TokenType != JsonToken.StartArray)
        throw new InvalidDataException("StartArray was expected");

    // Stream documents out of the array until EndArray is reached.
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var doc = (RavenJObject)RavenJToken.ReadFrom(jsonReader);

        var roughSize = GetRoughSize(doc);
        if (roughSize > 1024 * 1024)
        {
            Console.WriteLine("Large document warning: {0:#,#.##;;0} kb - {1}",
                (double)roughSize / 1024,
                doc["@metadata"].Value<string>("@id"));
        }

        if ((options.OperateOnTypes & ItemType.Documents) != ItemType.Documents)
            continue;
        if (options.MatchFilters(doc) == false)
            continue;

        // An optional transform script may rewrite or drop (null) the document.
        if (!string.IsNullOrEmpty(options.TransformScript))
            doc = await TransformDocument(doc, options.TransformScript);
        if (doc == null)
            continue;

        await PutDocument(doc);
        imported++;
        if (imported % options.BatchSize == 0)
            ShowProgress("Read {0} documents", imported);
    }

    await PutDocument(null); // force flush
    return imported;
}
示例3: ExportDocuments
// Streams all documents with etag > lastEtag to jsonWriter, applying the
// smuggler's filters and expiration exclusion and honouring options.Limit.
// Returns the last etag seen so the caller can record a resume point; on
// failure, the resume point is carried on the thrown SmugglerExportException.
private async Task<Etag> ExportDocuments(SmugglerOptions options, JsonTextWriter jsonWriter, Etag lastEtag)
{
    var totalCount = 0;
    var lastReport = SystemTime.UtcNow;
    var reportInterval = TimeSpan.FromSeconds(2);
    var errorcount = 0; // counts documents that took > 100 ms to pull (diagnostic only)
    ShowProgress("Exporting Documents");
    while (true)
    {
        bool hasDocs = false;
        try
        {
            var maxRecords = options.Limit - totalCount;
            if (maxRecords > 0)
            {
                using (var documents = await GetDocuments(lastEtag, maxRecords))
                {
                    var watch = Stopwatch.StartNew();
                    while (await documents.MoveNextAsync())
                    {
                        hasDocs = true;
                        var document = documents.Current;
                        // Advance the resume point before filtering so a batch
                        // whose tail is entirely filtered still makes progress.
                        lastEtag = Etag.Parse(document.Value<RavenJObject>("@metadata").Value<string>("@etag"));
                        if (!options.MatchFilters(document))
                            continue;
                        if (options.ShouldExcludeExpired && options.ExcludeExpired(document))
                            continue;
                        document.WriteTo(jsonWriter);
                        totalCount++;
                        if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval)
                        {
                            ShowProgress("Exported {0} documents", totalCount);
                            lastReport = SystemTime.UtcNow;
                        }
                        if (watch.ElapsedMilliseconds > 100)
                            errorcount++;
                        // FIX: Start() is a documented no-op on a running Stopwatch,
                        // so elapsed time was cumulative and errorcount grew on every
                        // document once 100 ms total had passed. Restart() resets the
                        // watch for a fresh per-document measurement.
                        watch.Restart();
                    }
                }
                if (hasDocs)
                    continue;
                // The server can filter all the results. In this case, we need to try to go over with the next batch.
                // Note that if the ETag' server restarts number is not the same, this won't guard against an infinite loop.
                // (This code provides support for legacy RavenDB version: 1.0)
                var databaseStatistics = await GetStats();
                var lastEtagComparable = new ComparableByteArray(lastEtag);
                if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0)
                {
                    lastEtag = EtagUtil.Increment(lastEtag, maxRecords);
                    ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag);
                    continue;
                }
            }
        }
        catch (Exception e)
        {
            ShowProgress("Got Exception during smuggler export. Exception: {0}. ", e.Message);
            ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag);
            // Preserve the resume point on the wrapped exception so the caller can retry.
            throw new SmugglerExportException(e.Message, e)
            {
                LastEtag = lastEtag,
            };
        }
        ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag);
        return lastEtag;
    }
}
示例4: ImportData
// Imports a smuggler export (indexes, then documents) from the given stream,
// tracking the number of raw bytes consumed per document via a CountingStream
// so batches can be flushed by size as well as by count. The stream is first
// assumed to be GZip-compressed; on InvalidDataException we rewind and retry
// it as plain uncompressed JSON.
// NOTE(review): the importIndexes parameter is never read in the visible code —
// index import is gated on options.OperateOnTypes instead; confirm intent.
public void ImportData(Stream stream, SmugglerOptions options, bool importIndexes = true)
{
    EnsureDatabaseExists();
    Stream sizeStream;
    var sw = Stopwatch.StartNew();
    // Try to read the stream compressed, otherwise continue uncompressed.
    JsonTextReader jsonReader;
    try
    {
        // CountingStream exposes Position so we can compute per-document sizes.
        sizeStream = new CountingStream(new GZipStream(stream, CompressionMode.Decompress));
        var streamReader = new StreamReader(sizeStream);
        jsonReader = new JsonTextReader(streamReader);
        if (jsonReader.Read() == false)
            return; // empty stream - nothing to import
    }
    catch (InvalidDataException)
    {
        // Not GZip - rewind and read the raw stream instead.
        sizeStream = stream;
        stream.Seek(0, SeekOrigin.Begin);
        var streamReader = new StreamReader(stream);
        jsonReader = new JsonTextReader(streamReader);
        if (jsonReader.Read() == false)
            return;
    }
    if (jsonReader.TokenType != JsonToken.StartObject)
        throw new InvalidDataException("StartObject was expected");
    // should read indexes now
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.PropertyName)
        throw new InvalidDataException("PropertyName was expected");
    if (Equals("Indexes", jsonReader.Value) == false)
        throw new InvalidDataException("Indexes property was expected");
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.StartArray)
        throw new InvalidDataException("StartArray was expected");
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        // Tokens must be consumed even when index import is disabled,
        // so the reader stays positioned correctly for the next section.
        var index = RavenJToken.ReadFrom(jsonReader);
        if ((options.OperateOnTypes & ItemType.Indexes) != ItemType.Indexes)
            continue;
        var indexName = index.Value<string>("name");
        // Skip temporary indexes.
        if (indexName.StartsWith("Temp/"))
            continue;
        if (index.Value<RavenJObject>("definition").Value<bool>("IsCompiled"))
            continue; // can't import compiled indexes
        PutIndex(indexName, index);
    }
    // should read documents now
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.PropertyName)
        throw new InvalidDataException("PropertyName was expected");
    if (Equals("Docs", jsonReader.Value) == false)
        throw new InvalidDataException("Docs property was expected");
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.StartArray)
        throw new InvalidDataException("StartArray was expected");
    var batch = new List<RavenJObject>();
    int totalCount = 0;
    long lastFlushedAt = 0;  // stream position at the last flush
    int batchCount = 0;
    long sizeOnDisk = 0;     // uncompressed bytes accumulated since the last flush
    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var before = sizeStream.Position;
        var document = (RavenJObject)RavenJToken.ReadFrom(jsonReader);
        var size = sizeStream.Position - before;
        // Warn about documents larger than 1 MB (uncompressed).
        if (size > 1024 * 1024)
        {
            Console.WriteLine("{0:#,#.##;;0} kb - {1}",
                (double)size / 1024,
                document["@metadata"].Value<string>("@id"));
        }
        if ((options.OperateOnTypes & ItemType.Documents) != ItemType.Documents)
            continue;
        if (options.MatchFilters(document) == false)
            continue;
        totalCount += 1;
        batch.Add(document);
        sizeOnDisk = (sizeStream.Position - lastFlushedAt);
        // Flush when either the batch count or the uncompressed size limit is hit.
        if (batch.Count >= smugglerOptions.BatchSize ||
            sizeOnDisk >= MaxSizeOfUncompressedSizeToSendToDatabase)
        {
            lastFlushedAt = sizeStream.Position;
            HandleBatch(options,batch, sizeOnDisk);
            sizeOnDisk = 0;
//......... remainder of this example was omitted by the source page .........
示例5: ExportDocuments
// Streams all documents with etag > lastEtag to jsonWriter, applying the
// smuggler's filters and expiration exclusion. Returns the last etag seen
// so the caller can record a resume point for incremental exports.
private async Task<Etag> ExportDocuments(SmugglerOptions options, JsonTextWriter jsonWriter, Etag lastEtag)
{
    var totalCount = 0;
    var lastReport = SystemTime.UtcNow;
    var reportInterval = TimeSpan.FromSeconds(2);
    var errorcount = 0; // counts documents that took > 100 ms to pull (diagnostic only)
    ShowProgress("Exporting Documents");
    while (true)
    {
        using (var documents = await GetDocuments(lastEtag))
        {
            var watch = Stopwatch.StartNew();
            while (await documents.MoveNextAsync())
            {
                var document = documents.Current;
                // FIX: advance the resume point BEFORE filtering (matching the
                // corrected ordering used by the newer ExportDocuments overload),
                // so documents that are filtered out still move lastEtag forward
                // instead of being re-fetched on the next batch.
                lastEtag = Etag.Parse(document.Value<RavenJObject>("@metadata").Value<string>("@etag"));
                if (!options.MatchFilters(document))
                    continue;
                if (options.ShouldExcludeExpired && options.ExcludeExpired(document))
                    continue;
                document.WriteTo(jsonWriter);
                totalCount++;
                if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval)
                {
                    ShowProgress("Exported {0} documents", totalCount);
                    lastReport = SystemTime.UtcNow;
                }
                if (watch.ElapsedMilliseconds > 100)
                    errorcount++;
                // FIX: Start() is a documented no-op on a running Stopwatch, so
                // elapsed time was cumulative and errorcount grew on every document
                // once 100 ms total had passed. Restart() resets the watch for a
                // fresh per-document measurement.
                watch.Restart();
            }
        }
        // The server may have filtered out an entire batch; if we are still
        // behind the database's last document etag, skip ahead and retry.
        var databaseStatistics = await GetStats();
        var lastEtagComparable = new ComparableByteArray(lastEtag);
        if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0)
        {
            lastEtag = EtagUtil.Increment(lastEtag, SmugglerOptions.BatchSize);
            ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag);
            continue;
        }
        ShowProgress("Done with reading documents, total: {0}", totalCount);
        return lastEtag;
    }
}
示例6: ImportDocuments
// Imports every document in the current JSON array that survives the
// configured filters, expiration check, and optional transform script.
// Returns the number of documents put into the database.
private async Task<int> ImportDocuments(JsonTextReader jsonReader, SmugglerOptions options)
{
    // Snapshot the clock once so all expiration checks in this run agree.
    var importStartedAt = SystemTime.UtcNow;
    var imported = 0;

    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var doc = (RavenJObject)RavenJToken.ReadFrom(jsonReader);

        var roughSize = GetRoughSize(doc);
        if (roughSize > 1024 * 1024)
        {
            Console.WriteLine("Large document warning: {0:#,#.##;;0} kb - {1}",
                (double)roughSize / 1024,
                doc["@metadata"].Value<string>("@id"));
        }

        if ((options.OperateOnTypes & ItemType.Documents) != ItemType.Documents)
            continue;
        if (options.MatchFilters(doc) == false)
            continue;
        if (options.ShouldExcludeExpired && options.ExcludeExpired(doc, importStartedAt))
            continue;

        // An optional transform script may rewrite or drop (null) the document.
        if (!string.IsNullOrEmpty(options.TransformScript))
            doc = await TransformDocument(doc, options.TransformScript);
        if (doc == null)
            continue;

        PutDocument(doc, options);
        imported++;
        if (imported % options.BatchSize == 0)
            ShowProgress("Read {0} documents", imported);
    }

    PutDocument(null, options); // force flush
    return imported;
}