This article collects and summarizes typical usage examples of the ConcurrentBag.Sum method in C#. If you have been wondering what exactly ConcurrentBag.Sum does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of the containing class, ConcurrentBag.
The following shows 11 code examples of the ConcurrentBag.Sum method, sorted by popularity by default.
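Before the collected examples, here is a minimal, self-contained sketch of the pattern all of them share: worker threads add partial results to a ConcurrentBag&lt;T&gt;, and the LINQ Sum extension aggregates the bag once the parallel work has finished. The names in this sketch are illustrative only and do not come from any of the examples below.
using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Threading.Tasks;

class ConcurrentBagSumSketch
{
    static void Main()
    {
        var partialResults = new ConcurrentBag<long>();

        // ConcurrentBag<T>.Add is thread-safe, so each parallel iteration
        // can deposit its own partial result without extra locking.
        Parallel.For(0, 100, i => partialResults.Add((long)i * i));

        // Enumerable.Sum enumerates a snapshot of the bag after all workers finish.
        long total = partialResults.Sum();
        Console.WriteLine(total); // 328350
    }
}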
Example 1: TestIt
static void TestIt()
{
int testsCount = Environment.ProcessorCount;
var results = new ConcurrentBag<TestingResult>();
Parallel.For(0, testsCount, new ParallelOptions() { MaxDegreeOfParallelism = Environment.ProcessorCount},
index => {
FullChaineTester tester = new FullChaineTester();
TestingResult result = tester.TestFullChaine();
results.Add(result);
});
long realProfit = results.Sum(res => res.RealProfit);
long rndProfit = results.Sum(res => res.RndProfit);
long maxProfit = results.Sum(res => res.MaxProfit);
var nfi = (NumberFormatInfo)CultureInfo.InvariantCulture.NumberFormat.Clone();
nfi.NumberGroupSeparator = " ";
Console.WriteLine("Real: {0}. Rnd: {1}. Max {2}.",
realProfit.ToString("#,#.00", nfi),
rndProfit.ToString("#,#.00", nfi),
maxProfit.ToString("#,#.00", nfi));
double proc = (maxProfit - realProfit) / (double)maxProfit;
Console.WriteLine("Real is smaller then max on {0}. ", proc.ToString("0.##%"));
double rndProc = (maxProfit - rndProfit) / (double)maxProfit;
Console.WriteLine("Rnd is smaller then max on {0}. ", rndProc.ToString("0.##%"));
}
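A design note on Example 1: collecting whole TestingResult objects in a ConcurrentBag keeps the per-run data available for later inspection, at the cost of three separate Sum passes. If only the totals were needed, the same aggregation could be done without an intermediate collection, for example with Interlocked.Add. The following is a hedged sketch of that alternative, with placeholder values standing in for the TestingResult fields; it is not part of the original example.
using System;
using System.Threading;
using System.Threading.Tasks;

class InterlockedAggregationSketch
{
    static void Main()
    {
        long realProfit = 0, rndProfit = 0, maxProfit = 0;

        Parallel.For(0, Environment.ProcessorCount, index =>
        {
            // Placeholder values standing in for a per-run TestingResult.
            long real = index, rnd = index * 2, max = index * 3;

            // Atomic accumulation replaces ConcurrentBag.Add + Sum.
            Interlocked.Add(ref realProfit, real);
            Interlocked.Add(ref rndProfit, rnd);
            Interlocked.Add(ref maxProfit, max);
        });

        Console.WriteLine($"Real: {realProfit}. Rnd: {rndProfit}. Max: {maxProfit}.");
    }
}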
Example 2: Main
static void Main(string[] args)
{
// Get all abundant numbers
Console.WriteLine("Getting abundant numbers.");
var abNums = GetAbundantNumbers();
var results = new ConcurrentBag<int>();
Console.WriteLine("Processing sums of abundant numbers.");
Parallel.ForEach(Enumerable.Range(1, 28122), n =>
{
int mid = (int)Math.Floor(n / 2.0) + 1;
bool exp = false;
foreach (var i in abNums.Where(abNum => abNum <= mid))
{
int remainder = n - i;
if (abNums.Contains(remainder))
{
exp = true;
break;
}
}
if (!exp)
{
results.Add(n);
}
});
foreach (var r in results.OrderBy(v => v))
{
Console.WriteLine(r);
}
Console.WriteLine("Result: {0}", results.Sum());
Console.ReadKey();
}
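Example 2 is a Project Euler-style search for numbers that cannot be written as the sum of two abundant numbers; the GetAbundantNumbers() helper it calls is not included in the listing. A hedged sketch of what such a helper could look like follows — the method name matches the call site, but the implementation and the 28123 upper bound are assumptions.
using System.Collections.Generic;

static class AbundantNumbers
{
    // Hypothetical implementation; the original helper is not shown in the listing.
    // A number is abundant when the sum of its proper divisors exceeds the number itself.
    public static List<int> GetAbundantNumbers(int limit = 28123)
    {
        var result = new List<int>();
        for (int n = 12; n <= limit; n++) // 12 is the smallest abundant number
        {
            int divisorSum = 1; // 1 divides every n > 1
            for (int d = 2; d * d <= n; d++)
            {
                if (n % d == 0)
                {
                    divisorSum += d;
                    if (d != n / d) divisorSum += n / d;
                }
            }
            if (divisorSum > n) result.Add(n);
        }
        return result;
    }
}
Returning a HashSet&lt;int&gt; instead of a List&lt;int&gt; would also make the abNums.Contains(remainder) lookup in the example an O(1) check rather than a linear scan.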
Example 3: StartSimulation
private async Task StartSimulation()
{
if(Simulator.SongData==null)
{
MessageBox.Show("楽曲を選んでください");
return;
}
if (Simulator.Unit == null)
{
MessageBox.Show("ユニットを選んでください");
return;
}
if (Runs < 1 || Runs > 1000000)
{
MessageBox.Show("試行回数は1から1,000,000までである必要があります");
return;
}
Note[] pattern = null;
if (UtilizeActualPattern)
{
pattern = await new PatternProvider().GetPattern(Simulator.Song, Simulator.SongData.Difficulty, Simulator.SongData.Notes);
if (pattern == null)
{
MessageBox.Show($"{Simulator.Song.Title}({Simulator.SongData.Difficulty})の譜面データが見つかりませんでした。");
return;
}
}
SimulationCompleted = false;
var results = new ConcurrentBag<SimulationResult>();
await Task.Run(() => Parallel.For(1, Runs+1, i => results.Add(Simulator.StartSimulation(RandomFactory.Create(), i, pattern == null ? null : new Queue<Note>(pattern)))));
MaxScore = results.Max(x=>x.Score);
MaxScorePerNote = results.Max(x => x.ScorePerNote);
MinScore = results.Min(x => x.Score);
MinScorePerNote = results.Min(x => x.ScorePerNote);
AverageScore = (int)results.Average(x => x.Score);
AverageScorePerNote = (int)results.Average(x => x.ScorePerNote);
ScoreDistribution = results.GroupBy(x => (int)Math.Floor(x.Score / 10000.0)).OrderBy(x => x.Key).ToDictionary(x => x.Key, x => (double)x.Count() / results.Count);
StandardDeviation = Math.Round(Math.Sqrt(results.Sum(x => Math.Pow(x.Score - AverageScore, 2)) / results.Count));
int idx = 1;
var duration = results.First().Duration;
ActualTriggerRatio = Simulator.Unit.Slots.ToDictionary(s => $"スロット{idx++}",
s => s == null ? 0 : results.SelectMany(x => x.TriggeredSkills).Where(x => x.Who == s).Count() / (results.Count * Math.Floor((duration - 1.0) / s.Skill.Interval)));
SimulationResults = results.OrderBy(x => x.Id).Take(100).ToList();
SelectedResult = SimulationResults[0];
SimulationCompleted = true;
}
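Example 3 hands each parallel run its own Random via RandomFactory.Create(). System.Random is not thread-safe, and on older runtimes several new Random() instances created in quick succession could even share the same time-based seed, so a small factory is a common pattern. The RandomFactory used by the example is not shown; the sketch below is one plausible, assumed implementation.
using System;
using System.Threading;

static class RandomFactory
{
    private static int _seed = Environment.TickCount;

    // Hypothetical helper: every call returns a Random with a distinct seed,
    // so parallel simulation runs do not replay identical sequences.
    public static Random Create()
    {
        return new Random(Interlocked.Increment(ref _seed));
    }
}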
Example 4: RowSetFetchNextAllEnumeratorsWait
public void RowSetFetchNextAllEnumeratorsWait()
{
var pageSize = 10;
var rs = CreateStringsRowset(10, pageSize);
rs.PagingState = new byte[0];
var fetchCounter = 0;
rs.FetchNextPage = (pagingState) =>
{
fetchCounter++;
//fake a fetch
Thread.Sleep(1000);
return CreateStringsRowset(10, pageSize);
};
var counterList = new ConcurrentBag<int>();
Action iteration = () =>
{
var counter = 0;
foreach (var row in rs)
{
counter++;
//Try to synchronize so that all the threads attempt to fetch at almost the same time.
Thread.Sleep(300);
}
counterList.Add(counter);
};
//Invoke it in parallel more than 10 times
Parallel.Invoke(iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration);
//Assert that the fetch was called just 1 time
Assert.AreEqual(1, fetchCounter);
//Sum all rows dequeued from the different threads
var totalRows = counterList.Sum();
//Check that the total number of rows dequeued is the same as pageSize * number of pages.
Assert.AreEqual(pageSize * 2, totalRows);
}
Example 5: RunTestCollectionsAsync
protected override async Task<RunSummary> RunTestCollectionsAsync(IMessageBus messageBus,
CancellationTokenSource cancellationTokenSource)
{
//bit side effecty, sets up assemblyFixtureMapping before possibly letting xunit do its regular concurrency thing
var grouped = (from c in OrderTestCollections()
let cluster = ClusterFixture(c.Item1)
let testcase = new GroupedByCluster {Collection = c.Item1, TestCases = c.Item2, Cluster = cluster}
group testcase by testcase.Cluster
into g
orderby g.Count() descending
select g).ToList();
//If we are not running any integration tests we do not care about only keeping a single IClusterFixture
//active at a time, so let xunit do what it does best.
if (!TestClient.Configuration.RunIntegrationTests)
{
var result = await base.RunTestCollectionsAsync(messageBus, cancellationTokenSource);
foreach (var g in grouped) g.Key?.Dispose();
return result;
}
//threading guess
var defaultMaxConcurrency = Environment.ProcessorCount * 4;
var summaries = new ConcurrentBag<RunSummary>();
var clusterTotals = new Dictionary<string, Stopwatch>();
var clusterFilter = TestClient.Configuration.ClusterFilter;
var testFilter = TestClient.Configuration.TestFilter;
foreach (var group in grouped)
{
var type = group.Key?.GetType();
var clusterName = type?.Name.Replace("Cluster", "") ?? "UNKNOWN";
if (!string.IsNullOrWhiteSpace(clusterFilter) && clusterName.IndexOf(clusterFilter, StringComparison.OrdinalIgnoreCase) < 0)
continue;
var dop = group.Key != null && group.Key.MaxConcurrency > 0
? group.Key.MaxConcurrency
: defaultMaxConcurrency;
clusterTotals.Add(clusterName, Stopwatch.StartNew());
//We group over each cluster group and execute test classes pertaining to that cluster
//in parallel
using (group.Key ?? System.Reactive.Disposables.Disposable.Empty)
{
group.Key?.Start();
await group.ForEachAsync(dop, async g =>
{
var test = g.Collection.DisplayName.Replace("Test collection for", "");
if (!string.IsNullOrWhiteSpace(testFilter) && test.IndexOf(testFilter, StringComparison.OrdinalIgnoreCase) < 0)
return;
//display tests we execute when we filter so we get confirmation on the command line we run the tests we expect
if (!string.IsNullOrWhiteSpace(testFilter))
Console.WriteLine(" -> " + test);
try
{
var summary = await RunTestCollectionAsync(messageBus, g.Collection, g.TestCases, cancellationTokenSource);
summaries.Add(summary);
}
catch (TaskCanceledException)
{
}
});
}
clusterTotals[clusterName].Stop();
}
Console.WriteLine("--------");
Console.WriteLine("Individual cluster running times");
foreach (var kv in clusterTotals)
Console.WriteLine($"- {kv.Key}: {kv.Value.Elapsed.ToString()}");
Console.WriteLine("--------");
return new RunSummary()
{
Total = summaries.Sum(s => s.Total),
Failed = summaries.Sum(s => s.Failed),
Skipped = summaries.Sum(s => s.Skipped)
};
}
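Example 5 drives the grouped test collections through a ForEachAsync(dop, ...) extension so that at most dop collections run concurrently before the ConcurrentBag&lt;RunSummary&gt; is summed into the final RunSummary. That extension is not part of the listing; below is a hedged sketch of one common way to implement it with SemaphoreSlim — an assumption, not the test framework's actual code.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

static class EnumerableAsyncExtensions
{
    // Hypothetical helper: runs body for every item with at most dop invocations in flight.
    public static async Task ForEachAsync<T>(this IEnumerable<T> source, int dop, Func<T, Task> body)
    {
        using (var throttler = new SemaphoreSlim(dop))
        {
            var tasks = source.Select(async item =>
            {
                await throttler.WaitAsync();
                try { await body(item); }
                finally { throttler.Release(); }
            }).ToList();

            await Task.WhenAll(tasks);
        }
    }
}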
Example 6: Bound_Paging_Parallel
public void Bound_Paging_Parallel()
{
var pageSize = 25;
var totalRowLength = 300;
var table = "table" + Guid.NewGuid().ToString("N").ToLower();
Session.Execute(String.Format(TestUtils.CreateTableAllTypes, table));
for (var i = 0; i < totalRowLength; i++)
{
Session.Execute(String.Format("INSERT INTO {0} (id, text_sample) VALUES ({1}, '{2}')", table, Guid.NewGuid(), "value" + i));
}
var ps = Session.Prepare(String.Format("SELECT * FROM {0} LIMIT 10000", table));
var rs = Session.Execute(ps.Bind().SetPageSize(pageSize));
Assert.AreEqual(pageSize, rs.GetAvailableWithoutFetching());
var counterList = new ConcurrentBag<int>();
Action iterate = () =>
{
var counter = rs.Count();
counterList.Add(counter);
};
//Iterate the RowSet in parallel
Parallel.Invoke(iterate, iterate, iterate, iterate);
//Check that the sum of all rows in different threads is the same as total rows
Assert.AreEqual(totalRowLength, counterList.Sum());
}
Example 7: QueryPagingParallel
public void QueryPagingParallel()
{
var pageSize = 25;
var totalRowLength = 300;
var table = CreateSimpleTableAndInsert(totalRowLength);
var query = new SimpleStatement(String.Format("SELECT * FROM {0} LIMIT 10000", table))
.SetPageSize(pageSize);
var rs = Session.Execute(query);
Assert.AreEqual(pageSize, rs.GetAvailableWithoutFetching());
var counterList = new ConcurrentBag<int>();
Action iterate = () =>
{
var counter = rs.Count();
counterList.Add(counter);
};
//Iterate the RowSet in parallel
Parallel.Invoke(iterate, iterate, iterate, iterate);
//Check that the sum of all rows in different threads is the same as total rows
Assert.AreEqual(totalRowLength, counterList.Sum());
}
Example 8: Generate
//......... part of the code is omitted here .........
lp) == lp)
{
TraceSources.TemplateSource.TraceInformation(
"Progress: {0:P1} ({1:N0}/{2:N0})",
c / (double)totalCount,
c,
totalCount);
}
}
});
// stop timing
timer.Stop();
Stopwatch statsTimer = new Stopwatch();
// prepare stats
Dictionary<Type, WorkUnitResult[]> resultGroups =
results.GroupBy(ps => ps.WorkUnit.GetType()).ToDictionary(g => g.Key, g => g.ToArray());
var stylesheetStats =
resultGroups[typeof(StylesheetApplication)]
.GroupBy(r => ((StylesheetApplication)r.WorkUnit).StylesheetName);
foreach (var statGroup in stylesheetStats)
{
long min = statGroup.Min(ps => ps.Duration);
long max = statGroup.Max(ps => ps.Duration);
TraceSources.TemplateSource.TraceInformation("Applied stylesheet '{0}' {1:N0} times in {2:N0} ms (min: {3:N0}, mean {4:N0}, max {5:N0}, avg: {6:N0})",
statGroup.Key,
statGroup.Count(),
statGroup.Sum(ps => ps.Duration) / 1000.0,
min / 1000.0,
statGroup.Skip(statGroup.Count() / 2).Take(1).Single().Duration / 1000.0,
max / 1000.0,
statGroup.Average(ps => ps.Duration) / 1000.0);
// TODO this is quick and dirty, should be cleaned up
long[] buckets = new long[20];
int rows = 6;
/*
┌────────────────────┐ ◄ 230
│█ █│
│█ █│
│█ █│
│█ █│
│█ █│
│█__________________█│
└────────────────────┘ ◄ 0
▲ 12ms ▲ 12ms
*/
// this is a little hacky, but it will do for now
WorkUnitResult[] sortedResults = statGroup.OrderBy(r => r.Duration).ToArray();
double bucketSize = (max - min) / (double)buckets.Length;
int bucketNum = 0;
long bucketMax = 0;
foreach (WorkUnitResult result in sortedResults)
{
while ((result.Duration - min) > (bucketNum + 1) * bucketSize)
bucketNum++;
buckets[bucketNum] += 1;
bucketMax = Math.Max(buckets[bucketNum], bucketMax);
Example 9: Generate
//......... part of the code is omitted here .........
ConcurrentBag<WorkUnitResult> results = new ConcurrentBag<WorkUnitResult>();
// create context
ITemplatingContext context = new TemplatingContext(this._cache,
this._basePath,
templateData,
this._resolvers,
this._fileProvider);
// fill indices
using (TraceSources.TemplateSource.TraceActivity("Indexing input document"))
{
var customXsltContext = CreateCustomXsltContext(templateData.IgnoredVersionComponent);
foreach (var index in tmpl.Indices)
{
TraceSources.TemplateSource.TraceVerbose("Adding index {0} (match: '{1}', key: '{2}')",
index.Name,
index.MatchExpr,
index.KeyExpr);
context.DocumentIndex.AddKey(index.Name, index.MatchExpr, index.KeyExpr, customXsltContext);
}
TraceSources.TemplateSource.TraceInformation("Indexing...");
context.DocumentIndex.BuildIndexes();
}
int totalCount = work.Count;
long lastProgress = Stopwatch.GetTimestamp();
int processed = 0;
// process all units of work
ParallelOptions parallelOptions = new ParallelOptions
{
//MaxDegreeOfParallelism = 1
};
Parallel.ForEach(work,
parallelOptions,
uow =>
{
results.Add(uow.Execute(context));
int c = Interlocked.Increment(ref processed);
long lp = Interlocked.Read(ref lastProgress);
if ((Stopwatch.GetTimestamp() - lp) / (double)Stopwatch.Frequency > 5.0)
{
if (Interlocked.CompareExchange(ref lastProgress,
Stopwatch.GetTimestamp(),
lp) == lp)
{
TraceSources.TemplateSource.TraceInformation(
"Progress: {0:P1} ({1:N0}/{2:N0})",
c / (double)totalCount,
c,
totalCount);
}
}
});
// stop timing
timer.Stop();
// prepare stats
Dictionary<Type, WorkUnitResult[]> resultGroups =
results.GroupBy(ps => ps.WorkUnit.GetType()).ToDictionary(g => g.Key, g => g.ToArray());
var stylesheetStats =
resultGroups[typeof(StylesheetApplication)]
.GroupBy(r => ((StylesheetApplication)r.WorkUnit).StylesheetName);
foreach (var statGroup in stylesheetStats)
{
TraceSources.TemplateSource.TraceInformation("Applied stylesheet '{0}' {1:N0} times in {2:N0} ms (min: {3:N0}, mean {4:N0}, max {5:N0}, avg: {6:N0})",
statGroup.Key,
statGroup.Count(),
statGroup.Sum(ps => ps.Duration) / 1000.0,
statGroup.Min(ps => ps.Duration) / 1000.0,
statGroup.Skip(statGroup.Count() / 2).Take(1).Single().Duration / 1000.0,
statGroup.Max(ps => ps.Duration) / 1000.0,
statGroup.Average(ps => ps.Duration) / 1000.0);
}
var resourceStats = resultGroups[typeof(ResourceDeployment)];
foreach (var statGroup in resourceStats)
{
TraceSources.TemplateSource.TraceInformation("Deployed resource '{0}' in {1:N0} ms",
((ResourceDeployment)statGroup.WorkUnit).ResourcePath,
statGroup.Duration);
}
TraceSources.TemplateSource.TraceInformation("Documentation generated in {0:N1} seconds (processing time: {1:N1} seconds)",
timer.Elapsed.TotalSeconds,
results.Sum(ps => ps.Duration) / 1000000.0);
return new TemplateOutput(results.ToArray());
}
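A note on the statistics line shared by the Generate examples (8–10): the value labelled mean is produced by statGroup.Skip(statGroup.Count() / 2).Take(1).Single().Duration, i.e. the middle element of the group in enumeration order. That only matches the median when the group happens to be sorted by duration (the true arithmetic mean is already reported as avg). Below is a hedged sketch of a small helper that computes the median explicitly; the Stats class and the usage line are assumptions, not part of the original code.
using System;
using System.Collections.Generic;
using System.Linq;

static class Stats
{
    // Hypothetical helper: median of a sequence of durations.
    public static double Median(IEnumerable<long> values)
    {
        var sorted = values.OrderBy(v => v).ToArray();
        if (sorted.Length == 0)
            throw new InvalidOperationException("Median of an empty sequence is undefined.");

        int mid = sorted.Length / 2;
        return sorted.Length % 2 == 1
            ? sorted[mid]
            : (sorted[mid - 1] + sorted[mid]) / 2.0;
    }
}

// Possible usage against one stylesheet group, keeping the examples' /1000.0 scaling:
//   double medianMs = Stats.Median(statGroup.Select(ps => ps.Duration)) / 1000.0;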
Example 10: Generate
/// <summary>
/// Applies the loaded templates to <paramref name="templateData"/>.
/// </summary>
/// <param name="templateData">
/// Instance of <see cref="TemplateData"/> containing the various input data needed.
/// </param>
public virtual TemplateOutput Generate(TemplateData templateData)
{
Stopwatch timer = Stopwatch.StartNew();
ParsedTemplate tmpl = this.PrepareTemplate(templateData);
// collect all work that has to be done
List<UnitOfWork> work = new List<UnitOfWork>();
// resource work units
work.AddRange(this.DiscoverWork(templateData, tmpl.Resources));
// stylesheet work units
{
List<StylesheetApplication> stylesheetApplications = new List<StylesheetApplication>();
foreach (Stylesheet stylesheet in tmpl.Stylesheets)
{
stylesheetApplications.AddRange(this.DiscoverWork(templateData, stylesheet));
}
var duplicates =
stylesheetApplications.GroupBy(sa => sa.SaveAs, StringComparer.OrdinalIgnoreCase)
.Where(g => g.Count() > 1);
foreach (var group in duplicates)
{
TraceSources.TemplateSource.TraceCritical("Duplicate work unit target ({0}) generated from: {1}",
group.Key,
string.Join(", ",
group.Select(
sa => '\'' + sa.StylesheetName + '\'')));
// TODO replace this with something more specific
// throw new Exception("Critical error, continuing is not safe.");
}
work.AddRange(stylesheetApplications);
}
TraceSources.TemplateSource.TraceInformation("Generating {0:N0} documents from {1:N0} stylesheets.",
work.Count, tmpl.Stylesheets.Length);
ConcurrentBag<WorkUnitResult> results = new ConcurrentBag<WorkUnitResult>();
// create context
ITemplatingContext context = new TemplatingContext(this._basePath,
templateData,
this._resolvers,
this._fileProvider);
// process all units of work
Parallel.ForEach(work, uow => results.Add(uow.Execute(context)));
// stop timing
timer.Stop();
// prepare stats
Dictionary<Type, WorkUnitResult[]> resultGroups =
results.GroupBy(ps => ps.WorkUnit.GetType()).ToDictionary(g => g.Key, g => g.ToArray());
var stylesheetStats =
resultGroups[typeof(StylesheetApplication)]
.GroupBy(r => ((StylesheetApplication)r.WorkUnit).StylesheetName);
foreach (var statGroup in stylesheetStats)
{
TraceSources.TemplateSource.TraceInformation("Applied stylesheet '{0}' {1:N0} times in {2:N0} ms (min: {3:N0}, mean {4:N0}, max {5:N0}, avg: {6:N0})",
statGroup.Key,
statGroup.Count(),
statGroup.Sum(ps => ps.Duration) / 1000.0,
statGroup.Min(ps => ps.Duration) / 1000.0,
statGroup.Skip(statGroup.Count() / 2).Take(1).Single().Duration / 1000.0,
statGroup.Max(ps => ps.Duration) / 1000.0,
statGroup.Average(ps => ps.Duration) / 1000.0);
}
var resourceStats = resultGroups[typeof(ResourceDeployment)];
foreach (var statGroup in resourceStats)
{
TraceSources.TemplateSource.TraceInformation("Deployed resource '{0}' in {1:N0} ms",
((ResourceDeployment)statGroup.WorkUnit).ResourcePath,
statGroup.Duration);
}
TraceSources.TemplateSource.TraceInformation("Documentation generated in {0:N1} seconds (processing time: {1:N1} seconds)",
timer.Elapsed.TotalSeconds,
results.Sum(ps => ps.Duration) / 1000000.0);
return new TemplateOutput(results.ToArray());
//......... part of the code is omitted here .........
Example 11: ButtonGenerateClick
// button events
private void ButtonGenerateClick(object sender, EventArgs e)
{
// start logging
Program.Log.Write("\nGenerate Data: " + DateTime.Now.ToString("u"));
Program.Log.Write("Skip Missing Animations? " + this.checkBoxSkipMissingAnimations.Checked);
Program.Log.Write("Delete Old Data? " + this.checkBoxClearOldJson.Checked);
Program.Log.Write("Clear Favorites? " + this.checkBoxReseedFavorites.Checked);
// initialize variables
int validPoserCount;
int emptyPoserCount;
int poserPacksCount;
var removedPoserCount = 0;
var jsonOutputDirectory = this.skyrimDirectory + Resources.PoserHotkeysDataPath;
// ensure output directory exists
if (!Directory.Exists(jsonOutputDirectory))
{
Directory.CreateDirectory(jsonOutputDirectory);
}
// clear favorites...
if (this.checkBoxReseedFavorites.Checked)
{
var favoritesJson = Directory.GetFiles(jsonOutputDirectory, Resources.FavoritesJsonName);
if (favoritesJson.Length <= 0)
{
Program.Log.Write(Resources.FavoritesJsonName + " not found.");
}
else
{
removedPoserCount++;
Program.Log.Write("Deleting " + Resources.FavoritesJsonName);
File.Delete(favoritesJson.First());
}
}
// clear old data...
if (this.checkBoxClearOldJson.Checked)
{
foreach (var jsonFile in Directory.GetFiles(jsonOutputDirectory, "*.json")
.Where(jsonFile => !jsonFile.Contains(Resources.FavoritesJsonName)))
{
removedPoserCount++;
Program.Log.Write("Deleting " + jsonFile);
File.Delete(jsonFile);
}
}
// generate data...
using (var pleaseWaitMessage = new FormPleaseWaitMessage())
{
pleaseWaitMessage.Show(this);
pleaseWaitMessage.Update();
try
{
// get checked poser list
var checkedPosers = this.checkedListBoxPosers.CheckedItems.OfType<Poser>();
var checkedPosersList = checkedPosers as IList<Poser> ?? checkedPosers.ToList();
// extract all packs from all checked posers
var results = new ConcurrentBag<PopulatePacksResults>();
Parallel.ForEach(
checkedPosersList,
poser =>
{
results.Add(poser.PopulatePacks(this.checkBoxSkipMissingAnimations.Checked));
});
// log results
foreach (var result in results)
{
Program.Log.Write("\nPoser: " + result.PoserName);
Program.Log.Write("\tLoaded Packs: " + result.PacksAdded.Count);
Program.Log.Write("\tLoaded Pack Names: " + String.Join(", ", result.PacksAdded));
Program.Log.Write("\tLoaded Animations: " + result.AnimationsAdded.Count);
Program.Log.Write("\tMissing Animations: " + result.AnimationsMissing.Count);
Program.Log.Write("\tMissing Animations Names: " + String.Join(", ", result.AnimationsMissing));
}
// get summary counts
validPoserCount = results.Count(x => x.PacksAdded.Count > 0);
emptyPoserCount = results.Count(x => x.PacksAdded.Count <= 0);
poserPacksCount = results.Sum(x => x.PacksAdded.Count);
// write to disc
SeedFavoritesJson(jsonOutputDirectory);
WriteJson(checkedPosersList, jsonOutputDirectory);
Program.Log.Write("\nGeneration complete! " + DateTime.Now.ToString("u"));
}
catch (Exception exception)
{
Program.Log.Write("Fatal Error: " + exception.Message);
throw;
}
pleaseWaitMessage.Close();
}
// show summary
//......... part of the code is omitted here .........