本文整理汇总了Python中networkx.find_cliques函数的典型用法代码示例。如果您正苦于以下问题:Python find_cliques函数的具体用法?Python find_cliques怎么用?Python find_cliques使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了find_cliques函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: Cliques
def Cliques(G):
    """Return, for each node of ``G``, the maximal cliques containing it.

    Parameters
    ----------
    G : networkx.Graph

    Returns
    -------
    list of lists
        Entry ``i`` holds the maximal cliques (each a list of nodes) that
        contain the ``i``-th node of ``G.nodes()``.
    """
    # Enumerate all maximal cliques ONCE.  The original called
    # nx.find_cliques(G) and threw the result away, then used the
    # deprecated nx.cliques_containing_node helper (removed in newer
    # networkx), which re-ran the enumeration for every node.
    all_cliques = list(nx.find_cliques(G))
    return [[c for c in all_cliques if node in c] for node in G.nodes()]
示例2: __init__
def __init__(self, points, epsilon, labels=None, distfcn=distance.euclidean):
    """Build a Vietoris-Rips-style complex from a point cloud.

    Parameters
    ----------
    points : sequence
        Input points.
    epsilon : float
        Neighborhood radius used to connect points in the network.
    labels : sequence, optional
        One label per point; ignored (replaced by positional indices)
        when absent or of mismatched length.
    distfcn : callable
        Pairwise distance function (default: Euclidean).
    """
    self.pts = points
    # Fall back to positional labels when none (or a mismatched number)
    # are supplied; compare with ``is None`` rather than ``== None``.
    if labels is None or len(labels) != len(self.pts):
        labels = range(len(self.pts))
    self.labels = labels
    self.epsilon = epsilon
    self.distfcn = distfcn
    self.network = self.construct_network(self.pts, self.labels, self.epsilon, self.distfcn)
    # Every maximal clique of the epsilon-neighborhood graph is taken as
    # a simplex of the complex.
    self.import_simplices(map(tuple, list(nx.find_cliques(self.network))))
示例3: all_cliques
def all_cliques(graph, count):
    """Sample up to ``count`` maximal cliques of ``graph`` and record their
    community memberships.

    Parameters
    ----------
    graph : networkx.Graph
    count : int
        Maximum number of maximal cliques to draw from the generator.

    Returns
    -------
    list of (list, list)
        Pairs ``(communities, clique)`` for each sampled clique with more
        than two members, where ``communities`` lists the community of each
        clique member according to ``CD.modularity_run``.

    Side effects
    ------------
    Pickles ``(graph, partition, found)`` to
    ``cliques_within_the_graph.pkl`` before returning.
    """
    part = CD.modularity_run(graph)
    cgen = nx.find_cliques(graph)
    found = []
    # ``with`` guarantees the pickle file is closed on every path; the
    # original duplicated the dump/close/return block inside a bare
    # ``except:`` that also swallowed unrelated errors (KeyError, etc.).
    with open('cliques_within_the_graph.pkl', 'wb') as pf:
        for _ in xrange(count):
            try:
                clump = next(cgen)
            except StopIteration:
                # Generator exhausted before ``count`` cliques were drawn.
                break
            if len(clump) > 2:
                found.append(([part[n] for n in clump], clump))
        pickle.dump((graph, part, found), pf)
    return found
示例4: getCliques
def getCliques(g):
netscience_graph = g
t0 = datetime.datetime.now()
cliques = list(nx.find_cliques(netscience_graph))
print datetime.datetime.now()-t0,' elapsed time.'
print (len(cliques))
print cliques[0]
示例5: createR
def createR(self):
    """Populate the relevant-clique statistics for ``self.G``.

    Enumerates maximal cliques of size 3..6, tallies their signatures
    (via ``self.getSetSignAASeq``), builds the clique-overlap graph
    ``self.HG`` (one node per clique, edges between cliques sharing a
    member), and fills ``self.RCCvector`` / ``self.RCCvector2`` per
    signature class.

    NOTE(review): depends on ``rcd.setSignatures`` and on several
    ``self`` containers (defaultdicts/vectors) initialized elsewhere --
    confirm against the class __init__.
    NOTE(review): Python 2 only -- uses ``lambda(x)`` parenthesized
    parameters and relies on eager ``map()`` for its side effect.
    """
    clases = set()  # NOTE(review): assigned but never used in this method
    cliques = 0
    for q in nx.find_cliques(self.G):
        # Only cliques of size 3..6 are considered relevant.
        if (len(q) <3) or (len(q)>6) : continue
        cliques += 1
        tmp_list_sign = self.getSetSignAASeq(q)['list_signature']
        self.how_many_signatures[tuple(tmp_list_sign)] += 1
        # Canonical key: comma-joined, sorted signature values.
        L = ','.join(map(lambda(x):str(x),sorted(tmp_list_sign)))
        self.osisDictString[L].add(','.join(q))
        self.osisDict[L].add(tuple(q))
        # Side-effecting map(): registers each clique member under key L.
        map(lambda(i):self.osisDictElements[L].add(i),q)
        # One HG node per clique, keyed by the hash of its member tuple.
        rcname = hash(tuple(q))
        self.metainfo_node[rcname] = (set(q),tmp_list_sign)
        self.HG.add_node(rcname)
        # Link the new clique to every clique sharing at least one member.
        # NOTE(review): the loop includes ``rcname`` itself, so a self-loop
        # is added for every clique -- confirm this is intended (it inflates
        # HG.degree used below).
        for hn in self.HG.nodes():
            if self.metainfo_node[hn][0] & self.metainfo_node[rcname][0]:
                self.HG.add_edge(hn,rcname)
    classindex = 0
    # Aggregate per signature class: clique counts and total HG degree.
    for K in xrange(3,7):
        for signa in rcd.setSignatures[K]:
            self.RCCvector[classindex] = self.how_many_signatures[tuple(signa)]
            for n in self.HG.nodes():
                if self.metainfo_node[n][1] != signa: continue
                self.RCCvector2[classindex] += self.HG.degree(n)
            classindex += 1
示例6: get_percolated_cliques
def get_percolated_cliques(G, k):
percolation_graph = nx.Graph()
cliques = [frozenset(c) for c in nx.find_cliques(G) if len(c) >= k]
percolation_graph.add_nodes_from(cliques)
# First index which nodes are in which cliques
membership_dict = defaultdict(list)
for clique in cliques:
for node in clique:
membership_dict[node].append(clique)
def get_adjacent_cliques(clique, membership_dict):
adjacent_cliques = set()
for n in clique:
for adj_clique in membership_dict[n]:
if clique != adj_clique:
adjacent_cliques.add(adj_clique)
return adjacent_cliques
# For each clique, see which adjacent cliques percolate
for clique in cliques:
for adj_clique in get_adjacent_cliques(clique, membership_dict):
if len(clique.intersection(adj_clique)) >= (k - 1):
percolation_graph.add_edge(clique, adj_clique)
print 'percolation graph nodes:', percolation_graph.nodes()
print 'percolation graph edges:', percolation_graph.edges()
# Connected components of clique graph with perc edges
# are the percolated cliques
for component in nx.connected_components(percolation_graph):
yield (frozenset.union(*component))
示例7: calculate_comembership
def calculate_comembership(self, backend=False):
    """Compute the mean pairwise clique co-membership of ``self.graph``.

    Counts, for every ordered pair of nodes, the number of maximal cliques
    containing both, then averages the off-diagonal counts over the
    ``n*(n-1)`` ordered pairs of distinct nodes.

    Parameters
    ----------
    backend : bool
        When False, graphs with more than 500 nodes are rejected, since
        maximal-clique enumeration is exponential in the worst case.

    Raises
    ------
    network_big.NetworkTooBigException
        If the graph is too large and ``backend`` is False.

    Side effects
    ------------
    Stores the scalar in ``self.measures['comembership']`` and the raw
    pair counts in ``self.edgesmeasures['comembership']``.
    """
    logging.info("Calculating comembership.")
    num_nodes = len(self.graph.nodes())
    if not backend and num_nodes > 500:
        raise network_big.NetworkTooBigException(num_nodes)
    # Pair-count accumulation; dict.get replaces the original
    # try/except-KeyError dance, and the generator is consumed directly
    # instead of being materialized with list() first.
    w = {}
    for clique in find_cliques(self.graph):
        for node1 in clique:
            for node2 in clique:
                w[node1, node2] = w.get((node1, node2), 0) + 1
    # float(0) keeps the division below a true division under Python 2.
    comembership = float(0)
    for node1, node2 in w:
        if node1 != node2:
            comembership += w[node1, node2]
    # Average over ordered pairs of distinct nodes.
    comembership /= num_nodes * (num_nodes - 1)
    self.measures['comembership'] = comembership
    self.nodesmeasures['comembership'] = None
    self.edgesmeasures['comembership'] = w
示例8: plotWeightedCommunities
def plotWeightedCommunities(G, W_lim, k_clique, n_nodes):
    """Threshold ``G`` by edge weight, detect k-clique communities, and save a plot.

    Removes every edge lighter than ``W_lim``, runs clique percolation
    (``nx.k_clique_communities``), draws each community in its own color and
    writes the figure to ``comm_w_<W_lim>k<k_clique>.png``.

    NOTE(review): assumes ``G`` is a complete weighted graph on nodes
    ``0..n_nodes-1`` (G[i][j] is accessed for every pair) -- confirm callers.
    """
    # Drop every edge below the weight threshold.
    for i in range(0, n_nodes):
        for j in range(i, n_nodes):
            if i != j:
                if G[i][j]['weight'] < W_lim:
                    G.remove_edge(i, j)
    cls = nx.find_cliques(G)
    communities = list(nx.k_clique_communities(G, k_clique, cliques=cls))
    print(len(communities))
    pos = nx.graphviz_layout(G)  # positions for all nodes
    plt.figure(figsize=(12, 12))
    # Palette for up to 22 communities.  The original left this list
    # commented out while still indexing ``colors[i]`` below, so the loop
    # always died with NameError -- restored here.
    colors = ["green","yellow","red","blue","pink","orange","gray","brown","black","white","purple","green","yellow","red","blue","pink","orange","gray","brown","black","white","purple"]
    for i in range(len(communities)):
        nx.draw_networkx_nodes(G, pos, nodelist=list(communities[i]), node_color=colors[i])
    nx.draw_networkx_edges(G, pos, width=0.5)
    # labels
    nx.draw_networkx_labels(G, pos, font_size=10, font_family='sans-serif')
    plt.axis('off')
    plt.savefig("comm_w_"+str(W_lim)+"k"+str(k_clique)+".png") # save as png
    plt.close()
示例9: collapsible_patterns
def collapsible_patterns(alms, G, context, ref='pcogids', verbose=False,
        use_taxa=["Old_Burmese", "Burmese", "Written_Burmese",
            "Rangoon", "Achang_Longchuan", "Xiandao", "Lashi", "Atsi", "Bola", "Maru"]):
    """Print groups of cognate-set patterns that could be collapsed.

    For every node of ``G`` (grouped by its ``'clique'`` attribute), collects
    the aligned word of each taxon in ``use_taxa`` ('Ø' when the taxon is
    missing), builds a compatibility graph among the patterns of each group,
    and prints every clique of mutually compatible patterns.

    NOTE(review): ``use_taxa`` is a mutable default argument -- harmless here
    since it is only read, but fragile if later edits mutate it.
    NOTE(review): ``context`` and ``verbose`` are accepted but never used in
    this body.
    NOTE(review): ``g.node[n]`` is the pre-2.0 networkx attribute API --
    confirm the project pins networkx < 2.
    """
    if [x for x in use_taxa if x not in alms.taxa]:
        raise ValueError("Your list of taxa contains taxa not in the wordlist.")
    patterns = defaultdict(list)
    for node, data in G.nodes(data=True):
        concept = alms[alms.msa[ref][int(node)]['ID'][0], 'concept']
        words = []
        msa = alms.msa[ref][int(node)]
        # One entry per taxon: the gap-stripped aligned word, or 'Ø' if absent.
        for i, t in enumerate(use_taxa):
            if t in msa['taxa']:
                words += [''.join(msa['seqs'][msa['taxa'].index(t)]).replace('-','')]
            else:
                words += ['Ø']
        patterns[data['clique']] += [(node, concept, words)]
    collapsible = defaultdict(list)
    for pattern, vals in patterns.items():
        g = nx.Graph()
        for n, c, words in vals:
            collapsible[pattern, tuple(words)] += [(n, c)]
            g.add_node(n, c=c, w=words)
        # Link pattern pairs whose word rows share at least one compatible column.
        for (n1, c1, words1), (n2, c2, words2) in combinations(vals, r=2):
            if compatible_columns(words1, words2, gap='Ø') >= 1:
                g.add_edge(n1, n2)
        # Every multi-member clique is a set of collapsible patterns.
        for clique in nx.find_cliques(g):
            if len(clique) > 1:
                for n in clique:
                    print(pattern, '{0:4}'.format(n),
                            '{0:22}'.format(g.node[n]['c'][:21]),
                            ' '.join(['{0:6}'.format(x) for x in
                                g.node[n]['w']]))
                print('--')
示例10: get_ego_cliques
def get_ego_cliques(ego):
ego_cliques_dmp = join(DATA_DIR, 'cliques', 'cliques_%s.zip'%ego)
if not os.path.exists(ego_cliques_dmp):
print 'Processing cliques: nx.find_cliques, ego:', ego
G = load_ego_graph(ego)
# this can take some time...
# http://pymotw.com/2/zipfile/
with zipfile.ZipFile(ego_cliques_dmp, mode='w') as zf:
fileno = 1
ego_cliques = []
for idx, clqs in enumerate(nx.find_cliques(G)):
if idx%100000==0 and ego_cliques:
_write_cliques_file(zf, fileno, ego_cliques)
fileno += 1
ego_cliques = []
ego_cliques.append(clqs)
_write_cliques_file(zf, fileno, ego_cliques)
ego_cliques = None
if False: #ego==5881:
print 'In get_ego_cliques, skipping ego', ego
else:
print 'Loading cliques for ego:', ego
with zipfile.ZipFile(ego_cliques_dmp, mode='r') as zf:
for f in zf.namelist():
cliques_in_file = json.loads(zf.read(f))
for clique in cliques_in_file:
yield clique
示例11: find_foundations
def find_foundations(self, cache=True):
    """Return the reduced set of maximal cliques ("foundations"), memoized.

    When ``cache`` is true and a previous result exists, it is returned
    directly; otherwise the cliques are recomputed and reduced.
    """
    if cache and isinstance(self._foundations, list):
        return self._foundations
    self._foundations = self._reduce_cliques(list(nx.find_cliques(self)))
    return self._foundations
示例12: find_disjoint_sets
def find_disjoint_sets(found_sets):
    """Return the largest collection of mutually disjoint sets.

    Builds a graph whose nodes are the given sets, with an edge between
    every pair that ``is_disjoint`` reports as disjoint, then returns a
    maximum-size maximal clique -- i.e. a largest family of pairwise
    disjoint sets.

    Parameters
    ----------
    found_sets : sequence of hashable set-like objects (e.g. frozensets)

    Returns
    -------
    list
        Members of the biggest clique found; ``[]`` when ``found_sets``
        is empty.
    """
    graph = nx.Graph()
    graph.add_nodes_from(found_sets)
    # Visit each unordered pair exactly once; combinations() replaces the
    # original O(n^2) double loop with its redundant self/neighbor checks.
    for node1, node2 in combinations(graph.nodes(), 2):
        if is_disjoint(node1, node2):
            graph.add_edge(node1, node2)
    # Track the largest maximal clique seen so far.
    max_clique = []
    for clique in nx.find_cliques(graph):
        if len(clique) > len(max_clique):
            max_clique = clique
    return max_clique
示例13: nx_cliques
def nx_cliques(ppis, min_len=3, min_weight=0):
    """Return the maximal cliques of the PPI graph, as sets of nodes.

    Only cliques with at least ``min_len`` members are kept; when
    ``min_weight`` is truthy, cliques whose average edge weight is not
    above it are filtered out as well.
    """
    graph = nx.Graph()
    graph.add_weighted_edges_from([interaction[:3] for interaction in ppis])
    cliques = []
    for members in nx.find_cliques(graph):
        if len(members) >= min_len:
            cliques.append(set(members))
    if min_weight:
        cliques = [clq for clq in cliques if avg_weight(graph, clq) > min_weight]
    return cliques
示例14: find_best_clique
def find_best_clique(sim_mat, size):
    """Find a clique of at least ``size`` nodes with the lowest weight score.

    Edges (pre-sorted by weight via ``get_sorted_edges``) are added in
    increasing-threshold rounds (0.05, 0.10, ..., 1.0).  At the first
    threshold whose graph contains a maximum clique of >= ``size`` nodes,
    ties among the largest cliques are broken by the smallest
    ``max_weight_clique`` score.

    Returns
    -------
    list or None
        The best clique found, or None when even the full graph
        (threshold 1.0) has no clique of the requested size.
    """
    G = nx.Graph()
    for x in xrange(len(sim_mat)):
        G.add_node(x)
    edges = get_sorted_edges(sim_mat)
    x = 0
    # Derive each threshold from an integer step.  The original accumulated
    # ``thresh += 0.05`` in floating point, so after 20 additions the value
    # drifted above 1.0 and the final (threshold 1.0) pass could be skipped.
    for step in xrange(1, 21):
        thresh = step * 0.05
        # Admit every edge whose weight is within the current threshold.
        while x < len(edges) and edges[x][2] <= thresh:
            G.add_edge(edges[x][0], edges[x][1])
            x += 1
        # Bucket the maximal cliques by size.
        by_size = collections.defaultdict(list)
        for clique in nx.find_cliques(G):
            by_size[len(clique)].append(clique)
        biggest = max(by_size.keys())
        if biggest >= size:
            # Tie-break among the largest cliques by minimum weight score.
            best_clique = None
            best_score = 1000000
            for clique in by_size[biggest]:
                score = max_weight_clique(sim_mat, clique)
                if score < best_score:
                    best_score = score
                    best_clique = clique
            return best_clique
    # No threshold produced a large-enough clique (was an implicit None).
    return None
示例15: testConnectNodeList
def testConnectNodeList(self):
    """Connecting an explicit node ordering must yield one clique over all n*n nodes."""
    ordering = [0, 3, 6, 1, 4, 7, 2, 5, 8]
    self.g.connect_node_list(ordering)
    # Only the first maximal clique matters: it should span the whole grid.
    for maximal in nx.find_cliques(self.g._graph):
        expect = len(maximal)
        break
    self.assertEqual(expect, self.n**2)