本文整理汇总了C#中System.Collections.Set.Remove方法的典型用法代码示例。如果您正苦于以下问题:C# Set.Remove方法的具体用法?C# Set.Remove怎么用?C# Set.Remove使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类System.Collections.Set
的用法示例。
在下文中一共展示了Set.Remove方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: CheckModifiedFiles
/// <summary>
/// Checks every parse database for modified files, processing databases in
/// bottom-up dependency order so that a database's references are checked
/// before the database itself (this helps resolving parsed classes).
/// </summary>
static void CheckModifiedFiles ()
{
	// Snapshot the databases under the lock; there may be several uris
	// mapping to the same db, so collect them into a set to de-duplicate.
	Set<ProjectDom> list = new Set<ProjectDom> ();
	lock (databases) {
		foreach (ProjectDom ob in databases.Values)
			list.Add (ob);
	}

	Set<ProjectDom> done = new Set<ProjectDom> ();
	while (list.Count > 0)
	{
		ProjectDom readydb = null;
		ProjectDom bestdb = null;
		int bestRefCount = int.MaxValue;

		// Look for a db whose references have all been processed already.
		foreach (ProjectDom db in list)
		{
			bool allDone = true;
			foreach (ProjectDom refdb in db.References) {
				if (!done.Contains (refdb)) {
					allDone = false;
					break;
				}
			}
			if (allDone) {
				readydb = db;
				break;
			}
			// Do NOT break here: keep scanning so bestdb ends up being the
			// candidate with the fewest references, not merely the first
			// unresolved one. (Fixes a premature break that made the
			// bestRefCount tracking dead code.)
			if (db.References.Count < bestRefCount) {
				bestdb = db;
				bestRefCount = db.References.Count;
			}
		}

		// No db has all references resolved when there are circular
		// dependencies. In that case, take the one with fewest references.
		if (readydb == null)
			readydb = bestdb;

		readydb.CheckModifiedFiles ();
		list.Remove (readydb);
		done.Add (readydb);
	}
}
示例2: ValidEnvironments
/// <summary>
/// Gets a set of ids of environments that are valid.
/// </summary>
/// <param name="cache">The FDO cache to read phonological data from.</param>
/// <returns>Set of ids of environments that are valid.</returns>
public static Set<int> ValidEnvironments(FdoCache cache)
{
	// Start from every environment defined in the phonological data...
	Set<int> validIds = new Set<int>(cache.LangProject.PhonologicalDataOA.EnvironmentsOS.HvoArray);

	// ...then drop each environment that has a problem annotation
	// pointing at it.
	const string query =
		"SELECT env.Id " +
		"FROM CmBaseAnnotation_ ann " +
		"JOIN PhEnvironment env ON ann.BeginObject = env.Id";
	foreach (int flaggedId in DbOps.ReadIntsFromCommand(cache, query, null))
		validIds.Remove(flaggedId);

	return validIds;
}
示例3: VerifyCacheRules
/// <summary>
/// Confirm the set of cache rules in the given execution context includes a set of expected ones; assertion failure if not.
/// </summary>
/// <param name="ctx">Execution context whose cache rules are inspected.</param>
/// <param name="rules">The cache rules that must all be present.</param>
void VerifyCacheRules(ExecutionContext ctx, IList rules)
{
	// Flatten every rule attached to the context down to its leaf rules.
	Set leafRules = new Set();
	foreach (CacheRule contextRule in ctx.CacheRules)
		leafRules.AddRange(contextRule.AllLeafRules);

	// Each expected rule must be found among the leaves. Remove each match
	// so a single leaf cannot satisfy two identical expectations.
	foreach (CacheRule expected in rules)
	{
		Assert.IsTrue(leafRules.Contains(expected), "Unable to find expected CacheRule (" + expected.Description + ")");
		leafRules.Remove(expected);
	}
}
示例4: ExtractFeatureVectors
//.........这里部分代码省略.........
string id;
if (featureId.TryGetValue(threeHourInterval, out id)) // if the current model uses the current 3-hour interval as a feature
{
bool covered = false;
for (int interval = startingThreeHourInterval; !covered && interval <= endingThreeHourInterval; ++interval)
if (interval % 8 == k)
covered = true;
threeHourIntervalFeatureValue.Add(IdNumericFeature[id], covered ? 1 : 0);
}
}
#endregion
#region extract feature vectors
foreach (FeatureVectorList featureVectors in base.ExtractFeatureVectors(prediction, training, sliceStart, sliceEnd))
{
if (!featureVectors.Complete)
throw new Exception("Incomplete feature vectors received from base class extractor");
Console.Out.WriteLine("Extracting " + featureId.Count + " time slice features for " + featureVectors.Count + " points.");
foreach (FeatureVector featureVector in featureVectors)
{
Point point = featureVector.DerivedFrom as Point;
if (point.Time == DateTime.MinValue)
point.Time = sliceMid;
else if ((long)(point.Time.Ticks / _timeSliceTicks) != slice)
throw new Exception("Point should not be in slice: " + point);
foreach (LAIR.MachineLearning.NumericFeature threeHourIntervalFeature in threeHourIntervalFeatureValue.Keys)
featureVector.Add(threeHourIntervalFeature, threeHourIntervalFeatureValue[threeHourIntervalFeature]);
double percentThroughPeriod = (slice % _periodTimeSlices) / (double)(_periodTimeSlices - 1);
double radians = 2 * Math.PI * percentThroughPeriod;
foreach (TimeSliceFeature feature in featureId.Keys)
if (feature == TimeSliceFeature.CosinePeriodPosition)
featureVector.Add(IdNumericFeature[featureId[feature]], Math.Cos(radians));
else if (feature == TimeSliceFeature.SinePeriodPosition)
featureVector.Add(IdNumericFeature[featureId[feature]], Math.Sin(radians));
}
if (externalFeatureExtractor == null)
lock (completeFeatureVectorLists)
{
completeFeatureVectorLists.Add(featureVectors);
emitCompleteFeatureVectorLists.Set();
}
else
foreach (FeatureVectorList externalFeatureVectors in externalFeatureExtractor.ExtractFeatures(prediction, featureVectors, training, sliceStart, sliceEnd, false))
if (externalFeatureVectors.Complete)
lock (completeFeatureVectorLists)
{
completeFeatureVectorLists.Add(externalFeatureVectors);
emitCompleteFeatureVectorLists.Set();
}
else
lock (incompleteFeatureVectorLists)
incompleteFeatureVectorLists.Add(externalFeatureVectors);
}
#endregion
}
lock (threads)
threads.Remove(Thread.CurrentThread);
emitCompleteFeatureVectorLists.Set();
}));
lock (threads) { threads.Add(t); }
t.Start(i);
}
while (emitCompleteFeatureVectorLists.WaitOne())
{
lock (completeFeatureVectorLists)
{
foreach (FeatureVectorList completeFeatureVectors in completeFeatureVectorLists)
yield return completeFeatureVectors;
completeFeatureVectorLists.Clear();
}
lock (threads)
if (threads.Count == 0)
break;
}
// emit any remaining completed vectors, which might have arrived just before the last thread was removed (breaking out of the loop above)
foreach (FeatureVectorList completeFeatureVectors in completeFeatureVectorLists)
yield return completeFeatureVectors;
completeFeatureVectorLists.Clear();
Configuration.ProcessorCount = processorCount; // reset system-wide processor count since we're done with threads here
foreach (FeatureVectorList incompleteFeatureVectors in incompleteFeatureVectorLists)
foreach (FeatureVectorList externalFeatureVectors in externalFeatureExtractor.ExtractFeatures(prediction, incompleteFeatureVectors, training, start, end, true))
yield return externalFeatureVectors;
}
示例5: Analyze
//.........这里部分代码省略.........
{
outerTopicAnalysis = new TopicAnalysis();
topicToTopicAnalysis[outerTopic] = outerTopicAnalysis;
// Response.Write("Creating info for " + outerTopic.Name + "<br>");
}
else
{
// Response.Write("Found existing info for " + outerTopic.Name + "<br>");
// Response.Write("[island = " + outerTopicAnalysis.Island + "<br>");
}
if (outerTopicAnalysis.Island != null)
islands.Add(outerTopicAnalysis.Island);
// - foreach outer topic
// islands = new set
// foreach linked topic
// increment refcount for linked topic
// if (linkedtopic is on an island)
// islands add that island
Set inNamespaceLinks = new Set();
foreach (QualifiedTopicRevision linkedTopic in linkedTopics)
{
// Only analyze in this namespace
if (linkedTopic.Namespace != _namespaceManager.Namespace)
{
// Response.Write("Skiping linked topic (" + linkedTopic.Name + ") because namespace doesn't match<br>");
continue;
}
// Only do each topic once; have we seen this one?
if (inNamespaceLinks.Contains(linkedTopic))
{
// Response.Write("Skiping linked topic (" + linkedTopic.Name + ") because seen before<br>");
continue;
}
// Skip self-references
if (linkedTopic.Equals(outerTopic))
{
continue;
}
inNamespaceLinks.Add(linkedTopic);
TopicAnalysis linkedTopicAnalysis = (TopicAnalysis)(topicToTopicAnalysis[linkedTopic]);
if (linkedTopicAnalysis == null)
{
linkedTopicAnalysis = new TopicAnalysis();
topicToTopicAnalysis[linkedTopic] = linkedTopicAnalysis;
// Response.Write("Creating info for " + linkedTopic.Name + "<br>");
}
else
{
// Response.Write("Found existing info for " + linkedTopic.Name + "<br>");
}
linkedTopicAnalysis.RefCount++;
if (linkedTopicAnalysis.Island != null)
islands.Add(linkedTopicAnalysis.Island);
}
// if (islands is empty)
// create new island
// add outer topic and all linked topics
// else if (islands size == 1)
// add all links and the outer topic to that islands
// else
// // need to merge islands
// newset = merged set of all islands
// TopicAnalysiss and replace and of the old islands with the new island
Set newIsland;
if (islands.Count == 1)
newIsland = (Set)(islands.First); // if there's only one, we can just use that one
else
{
newIsland = new Set();
ocean.Add(newIsland);
}
// Add the island and the linkedTopics
newIsland.Add(outerTopic);
outerTopicAnalysis.Island = newIsland;
foreach (QualifiedTopicRevision linkedTopic in inNamespaceLinks)
{
newIsland.Add(linkedTopic);
((TopicAnalysis)(topicToTopicAnalysis[linkedTopic])).Island = newIsland;
// Response.Write("Placing " + linkedTopic.Name + "<br>");
}
// Now merge if there was originally more than one
if (islands.Count > 1)
{
foreach (Set eachIsland in islands)
{
foreach (object o in eachIsland)
newIsland.Add(o);
ocean.Remove(eachIsland);
// Now update all the pointers from the TopicAnalysiss
foreach (QualifiedTopicRevision eachTopic in eachIsland)
((TopicAnalysis)(topicToTopicAnalysis[eachTopic])).Island = newIsland;
}
}
}
}
示例6: DumpGroups
/// <summary>
/// Dumps the given nodes into the dump context, arranging them in a nesting
/// hierarchy of groups as declared by the dump info's group node types.
/// Nodes claimed as group containers are removed from
/// <paramref name="nodes"/>; whatever remains ungrouped at the end is dumped
/// flat. NOTE(review): this mutates the caller's <paramref name="nodes"/> set
/// — confirm callers expect that.
/// </summary>
private static void DumpGroups(IGraph graph, Set<INode> nodes, DumpContext dc)
{
// Compute the nesting hierarchy (groups)
// groupNodes: group-container node -> its accumulated member nodes.
Dictionary<INode, DumpGroupNode> groupNodes = new Dictionary<INode, DumpGroupNode>();
// containedIn: grouped node -> the group node that claimed it first.
Dictionary<INode, INode> containedIn = new Dictionary<INode, INode>();
Set<INode> groupedNodes = new Set<INode>();
// (by iterating the group node types in order of dump declaration and removing the iterated nodes from the available nodes,
// the conflict resolution priorities of debug enable are taken care of)
foreach(GroupNodeType groupNodeType in dc.DumpInfo.GroupNodeTypes)
{
foreach(INode node in graph.GetCompatibleNodes(groupNodeType.NodeType))
{
// Claim this node as a group container; take it out of the flat node set.
if(nodes.Contains(node))
{
if(!groupNodes.ContainsKey(node)) groupNodes.Add(node, new DumpGroupNode()); // todo: is the if needed?
nodes.Remove(node);
}
if(dc.DumpInfo.IsExcludedNodeType(node.Type)) continue;
// Pull source nodes of incoming edges into this group, according to the
// per-edge group mode declared for this group node type.
foreach(IEdge edge in node.Incoming)
{
GroupMode grpMode = groupNodeType.GetEdgeGroupMode(edge.Type, edge.Source.Type);
if((grpMode & GroupMode.GroupIncomingNodes) == 0) continue;
if(!dc.Nodes.Contains(edge.Source)) continue;
groupNodes[node].groupedNodes.Add(edge.Source);
if(!containedIn.ContainsKey(edge.Source)) containedIn.Add(edge.Source, node); // crashes without if in case of multiple containment due to dump misspecification by user
groupedNodes.Add(edge.Source);
if((grpMode & GroupMode.Hidden) != 0) dc.ExcludedEdges.Add(edge);
}
// Mirror of the loop above for outgoing edges / their target nodes.
foreach(IEdge edge in node.Outgoing)
{
GroupMode grpMode = groupNodeType.GetEdgeGroupMode(edge.Type, edge.Target.Type);
if((grpMode & GroupMode.GroupOutgoingNodes) == 0) continue;
if(!dc.Nodes.Contains(edge.Target)) continue;
groupNodes[node].groupedNodes.Add(edge.Target);
if(!containedIn.ContainsKey(edge.Target)) containedIn.Add(edge.Target, node); // crashes without if in case of multiple containment due to dump misspecification by user
groupedNodes.Add(edge.Target);
if((grpMode & GroupMode.Hidden) != 0) dc.ExcludedEdges.Add(edge);
}
}
}
// Dump the groups (begin at the roots of the group trees)
foreach(KeyValuePair<INode, DumpGroupNode> groupNode in groupNodes)
{
// A root is a group node that is not itself contained in another group.
if(!containedIn.ContainsKey(groupNode.Key))
{
DumpGroupTree(groupNode.Key, groupNodes, dc);
DumpEdgesFromNode(groupNode.Key, dc);
}
}
// Dump the rest, which has not been grouped
// (presumably Set.Remove(Set) removes every element of groupedNodes from
// nodes, i.e. a set difference — TODO confirm against the Set API)
nodes.Remove(groupedNodes);
foreach(INode node in nodes)
{
DumpNodeAndEdges(node, dc);
}
}