

C# ConcurrentDictionary.GroupBy Method Code Examples

This article collects typical usage examples of the C# ConcurrentDictionary.GroupBy method. If you are unsure what ConcurrentDictionary.GroupBy does or how to call it, the selected examples below may help. You can also look further into usage examples of the ConcurrentDictionary class that the method belongs to.


The following presents 2 code examples of the ConcurrentDictionary.GroupBy method, sorted by popularity by default.
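
Before the full examples, here is a minimal, self-contained sketch (not taken from the projects below; all names are hypothetical) of what calling GroupBy on a ConcurrentDictionary does. GroupBy is a LINQ extension method, so it simply enumerates the dictionary's KeyValuePair<TKey, TValue> entries and groups them by whatever key you project from each pair:

using System;
using System.Collections.Concurrent;
using System.Linq;

class GroupByDemo
{
    static void Main()
    {
        var scores = new ConcurrentDictionary<string, int>();
        scores["alice"] = 90;
        scores["bob"] = 75;
        scores["carol"] = 90;

        // Group the KeyValuePair entries by their value; each group exposes
        // the shared value as group.Key and the matching pairs as its items.
        foreach (var group in scores.GroupBy(kvp => kvp.Value))
        {
            Console.WriteLine("Score " + group.Key + ": " +
                string.Join(", ", group.Select(kvp => kvp.Key)));
        }
    }
}

Note that GroupBy adds no extra thread-safety guarantees of its own; it just consumes the dictionary's enumerator, which may observe concurrent updates while the grouping is built.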

Example 1: GetData


//......... part of the code is omitted here .........
                                    }

                                    sw2.Stop();
                                    subresult.CreatedOn = DateTime.Now.Ticks;
                                    subresult.Metadata.Initiated = initiated;
                                    subresult.Metadata.Completed = DateTime.Now;
                                    subresult.Metadata.OperationTime = sw2.Elapsed;
                                    results[peerInfo] = CachedData[subqueryHash] = subresult;
                                }
                                catch (Exception ex)
                                {
                                    results[peerInfo] = new BermudaResult { Error = "[Failed Node] " + ex };
                                }
                            }
                        }, reducer, TaskCreationOptions.LongRunning);

                        tasks.Add(t);
                        t.Start();
                    }

                    Task.WaitAll(tasks.ToArray());

                    sw.Stop();

#if DEBUG
                    Trace.WriteLine("Join Time:" + sw.Elapsed);
#endif

                    if (results.Any(x => x.Value.Error != null)) throw new BermudaException("Some nodes failed:\r\n" + string.Join("\r\n", results.Select(x => x.Value.Error)));

                    if (results.All(x => x.Value.Data == null)) return new BermudaResult { Metadata = new BermudaNodeStatistic { Notes = "No Data" } };

                    //if the results do not all have the same type, throw an error
                    if (results.GroupBy(x => x.Value.DataType).Count() > 1) throw new BermudaException("Subresults must all return the same type");

                    var dataTypeDescriptor = results.Select(x => x.Value.DataType).FirstOrDefault(x => x != null);

                    if (dataTypeDescriptor == null) return new BermudaResult { Error = "Could not determine the merge type, none of the nodes provided type info" };

                    //use the passed combine expression to make multiple datapoint sets into one

                    var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(dataTypeDescriptor);

                    //allItems = results.Values.SelectMany(x => x.DataObject)

                    var totalJson = "[" + string.Join(",", results.Values.Where(x => !string.IsNullOrWhiteSpace(x.Data)).Select(x => x.Data.Trim('[', ']')).Where(x => !string.IsNullOrWhiteSpace(x))) + "]";

                    var allItems = LinqRuntimeTypeBuilder.DeserializeJson(totalJson, dataTypeDescriptor, true);


                    //var aaa = new JavaScriptSerializer().Deserialize<Datapoint[]>(totalJson);
                    //var ggc = aaa.GroupBy(x => new { x.Id, x.Id2 }).Count();

                    //InvokeSelectManyViaReflectionTheKilla(results.Values.Select(x => x.DataObject), dataType);

                    var mergeFunc = GetMergeFunc(merge, mapreduce, dataType, dataType);
                    if (mergeFunc != null)
                    {
                        //var dataType = "kdsajkdsa";
                        var mergeInvokeMethod = mergeFunc.GetType().GetMethod("Invoke");
                        allItems = mergeInvokeMethod.Invoke(mergeFunc, new object[] { allItems }); // MergeDatapoints(results.Values.Where(x => x.Data != null).SelectMany(x => x.Data), mergeFunc);
                    }

                    var pagingFunc = GetPagingFunc(paging, dataType);
                    if (pagingFunc != null)
                    {
Developer ID: yonglehou, Project: Bermuda, Lines of code: 67, Source file: BermudaMapReduce.cs
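
One detail from Example 1 worth isolating is the totalJson line above: each node returns its data as a JSON array string, the outer brackets are trimmed from every non-empty fragment, and the fragments are re-joined into a single array before deserialization. Below is a minimal hypothetical sketch of that string-level merge, with made-up payloads standing in for the Bermuda results:

using System;
using System.Linq;

class JsonMergeDemo
{
    static void Main()
    {
        // Hypothetical per-node payloads; in the article they come from results.Values.Select(x => x.Data).
        var nodePayloads = new[] { "[{\"Id\":1}]", "  ", "[{\"Id\":2},{\"Id\":3}]" };

        // Trim each fragment's outer brackets and re-join everything into one JSON
        // array string, skipping empty payloads, as the totalJson line in Example 1 does.
        var totalJson = "[" + string.Join(",",
            nodePayloads.Where(x => !string.IsNullOrWhiteSpace(x))
                        .Select(x => x.Trim('[', ']'))
                        .Where(x => !string.IsNullOrWhiteSpace(x))) + "]";

        Console.WriteLine(totalJson);   // [{"Id":1},{"Id":2},{"Id":3}]
    }
}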

Example 2: GetData

        public BermudaResult GetData(string domain, IEnumerable<string> blobs, string query, string mapreduce, string merge, DateTime minDate, DateTime maxDate, int remdepth, object[] parameters, string command)
        {
            var args = ParseCommand(command);

            if (remdepth > 0)
            {
                //map
                var blobInterfaces = blobs == null ? AzureInterface.Instance.ListBlobs(domain, minDate.Ticks, maxDate.Ticks) : AzureInterface.Instance.GetBlobInterfacesByNames(domain, blobs);

                var blobSetKey = GetQueryChecksum(domain, string.Join(",", blobInterfaces.Select(x => x.Name)), query, mapreduce, minDate, maxDate, parameters, null);

                //reduce 
                BermudaResult cachedDatapoints;
                if (CachedData.TryGetValue(blobSetKey, out cachedDatapoints) && (DateTime.Now.Ticks - cachedDatapoints.CreatedOn) < CacheLifetime)
                {
                    if (CacheTraceMessageLevel < 3) Trace.WriteLine("returned CACHED BLOBS DATAPOINTS results FOR ENTIRE BLOB SET [REMDEPTH:" + remdepth + "]");
                    return new BermudaResult { DataType = cachedDatapoints.DataType, Data = cachedDatapoints.Data, MetadataObject = new BermudaNodeStatistic { Notes = "Cache_Hit_1" } };
                }
                else
                {

                    var assignments = PartitionBlobs(domain, blobInterfaces, minDate, maxDate, false, true);

                    if (!assignments.Any()) throw new Exception("Specified dataset not loaded: " + domain);

                    ConcurrentDictionary<IPEndPoint, BermudaResult> results = new ConcurrentDictionary<IPEndPoint, BermudaResult>();
                    Stopwatch sw = new Stopwatch();
                    sw.Start();

                    List<Task> tasks = new List<Task>();
                    foreach (var ass in assignments)
                    {
                        Task t = new Task((assObj) =>
                        {
                            ZipMetadata assignment = assObj as ZipMetadata;
                            var initiated = DateTime.Now;
                            var blobSubsetKey = GetQueryChecksum(domain, string.Join(",", assignment.Blobs.Select(x => x.Name)), query, mapreduce, minDate, maxDate, parameters, assignment.PeerEndpoint.ToString());
                            Stopwatch sw3 = new Stopwatch();
                            sw3.Start();

                            //see if the cache contains a matching result and return it if it's not outdated
                            BermudaResult cachedDatapoints2;
                            if (CachedData.TryGetValue(blobSubsetKey, out cachedDatapoints2) && (DateTime.Now.Ticks - cachedDatapoints2.CreatedOn) < CacheLifetime)
                            {
                                if (CacheTraceMessageLevel < 2) Trace.WriteLine("returned CACHED BLOB DATAPOINT results FOR BLOB SUBSET [REMDEPTH:" + remdepth + "]");
                                results[assignment.PeerEndpoint] = new BermudaResult { DataType = cachedDatapoints2.DataType, Data = cachedDatapoints2.Data, MetadataObject = new BermudaNodeStatistic { Notes = "Cache_Hit_2" } };
                            }
                            else
                            {
                                try
                                {
                                    Stopwatch sw2 = new Stopwatch();
                                    sw2.Start();
                                    BermudaResult subresult = null;

                                    if (assignment.PeerEndpoint.Equals(Endpoint))
                                    {
                                        subresult = GetData(domain, assignment.Blobs.Select(x => x.Name), query, mapreduce, merge, minDate, maxDate, remdepth - 1, parameters, command);

                                    }
                                    else
                                    {
                                        using (var client = AzureInterface.Instance.GetServiceClient(assignment.PeerEndpoint))
                                        {
                                            subresult = client.GetData(domain, query, mapreduce, merge, minDate, maxDate, remdepth - 1, parameters, command);
                                        }
                                    }

                                    sw2.Stop();
                                    subresult.CreatedOn = DateTime.Now.Ticks;
                                    subresult.MetadataObject.Initiated = initiated;
                                    subresult.MetadataObject.Completed = DateTime.Now;
                                    subresult.MetadataObject.OperationTime = sw2.Elapsed;
                                    results[assignment.PeerEndpoint] = CachedData[blobSubsetKey] = subresult;
                                }
                                catch (Exception ex)
                                {
                                    results[assignment.PeerEndpoint] = new BermudaResult { Error = "[Failed Node] " + ex };
                                }
                            }
                        }, ass, TaskCreationOptions.LongRunning);

                        tasks.Add(t);
                        t.Start();
                    }

                    Task.WaitAll(tasks.ToArray());

                    sw.Stop();
                    Trace.WriteLine("Join Time:" + sw.Elapsed);

                    if (results.All(x => x.Value.Error != null)) throw new Exception("All nodes failed:\r\n" + string.Join("\r\n", results.Select(x => x.Value.Error)));

                    //if the results do not all have the same type, throw an error
                    if (results.GroupBy(x => x.Value.DataType).Count() > 1) throw new Exception("Subresults must all return the same type");

                    var dataTypeDescriptor = results.Select(x => x.Value.DataType).FirstOrDefault(x => x != null);

                    if (dataTypeDescriptor == null) return new BermudaResult { Error = "Could not determine the merge type, none of the nodes provided type info" };

//......... part of the code is omitted here .........
Developer ID: yonglehou, Project: Bermuda, Lines of code: 101, Source file: AzureMapReducer.cs
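
Both examples use the same ConcurrentDictionary.GroupBy pattern: after the worker tasks finish, the results dictionary (keyed by node endpoint) is grouped by each value's DataType, and more than one group means the nodes disagree and the subresults cannot be merged. The following standalone sketch distills that check; the BermudaResult class here is a simplified stand-in (an assumption, the real class is not shown in full above), while the GroupBy call mirrors the original code:

using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Net;

class BermudaResult   // simplified stand-in for the real type (assumption)
{
    public string DataType { get; set; }
}

class ConsistencyCheckDemo
{
    static void Main()
    {
        var results = new ConcurrentDictionary<IPEndPoint, BermudaResult>();
        results[new IPEndPoint(IPAddress.Loopback, 5001)] = new BermudaResult { DataType = "Datapoint" };
        results[new IPEndPoint(IPAddress.Loopback, 5002)] = new BermudaResult { DataType = "Datapoint" };

        // More than one distinct DataType means the nodes' payloads cannot be merged.
        if (results.GroupBy(x => x.Value.DataType).Count() > 1)
            throw new Exception("Subresults must all return the same type");

        // Otherwise pick the shared type descriptor, as both examples do.
        Console.WriteLine("All subresults share the DataType: " +
            results.Select(x => x.Value.DataType).FirstOrDefault(x => x != null));
    }
}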


Note: The ConcurrentDictionary.GroupBy examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective authors; copyright remains with the original authors, and distribution or use should follow each project's license. Do not reproduce without permission.