This article collects typical usage examples of the Python function networkx.weakly_connected_components. If you have been wondering what exactly weakly_connected_components does, how to call it, or what it looks like in real code, the hand-picked examples below may help.
A total of 15 code examples of weakly_connected_components are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
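Before the project-specific examples, here is a minimal self-contained sketch of the function's basic behaviour (my own illustration, assuming NetworkX 2.x or later, where the function lazily yields sets of nodes):
import networkx as nx

# A small directed graph with two weakly connected components.
G = nx.DiGraph()
G.add_edges_from([(1, 2), (2, 3)])   # component {1, 2, 3}
G.add_edge(10, 11)                   # component {10, 11}

# The function returns a generator of node sets; materialize and sort it
# explicitly if you need the components ordered by size.
components = sorted(nx.weakly_connected_components(G), key=len, reverse=True)
print(components)          # [{1, 2, 3}, {10, 11}]
print(len(components[0]))  # size of the largest component: 3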
Example 1: _calc_counts
def _calc_counts(G, min_size, gap, scaf_only=False):
sizes = list()
for comp in nx.weakly_connected_components(G):
# skip non scaffolds.
if scaf_only == True:
if len(comp) < 2:
continue
# add contig size.
size = 0
for n in comp:
size += G.node[n]['width']
# add gap size.
if gap != False:
for p,q in G.edges(comp):
size += G[p][q]['gap']
# skip this.
if min_size != False and size < min_size:
continue
# save the size.
sizes.append(len(comp))
return sizes
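Example 1 above uses the pre-3.0 attribute syntax G.node[n], which NetworkX 3.x removed in favour of G.nodes[n]. A sketch of the same logic with current attribute access (the function name, keyword defaults and the 'width'/'gap' attribute names simply mirror the example; nothing else is taken from the original project):
import networkx as nx

def calc_counts(G, min_size=None, use_gaps=True, scaf_only=False):
    """Per-component counts, filtered like _calc_counts above (NetworkX 3.x style)."""
    sizes = []
    for comp in nx.weakly_connected_components(G):
        # skip non-scaffolds (single-node components).
        if scaf_only and len(comp) < 2:
            continue
        # add contig widths, then gap sizes on edges incident to the component.
        size = sum(G.nodes[n]['width'] for n in comp)
        if use_gaps:
            size += sum(G[p][q]['gap'] for p, q in G.edges(comp))
        if min_size is not None and size < min_size:
            continue
        sizes.append(len(comp))
    return sizes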
Example 2: comps
def comps(self):
''' generator for connected components '''
# loop over components.
for comp in nx.weakly_connected_components(self):
# create subgraph.
subg = nx.DiGraph()
# build node list.
nlist = []
for n in comp:
nlist.append( (n, self.node[n]) )
# add nodes.
subg.add_nodes_from(nlist)
# build edge list.
elist = []
for e in self.edges(comp):
elist.append( (e[0], e[1], self[e[0]][e[1]]) )
# add edges.
subg.add_edges_from(elist)
# yield the subgraph.
yield subg
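In current NetworkX releases the same per-component subgraphs can be produced more directly with G.subgraph(...) (a minimal sketch of my own, independent of the class above):
import networkx as nx

def weak_component_subgraphs(G):
    """Yield an independent DiGraph copy of each weakly connected component of G."""
    for nodes in nx.weakly_connected_components(G):
        # subgraph() returns a view; copy() detaches it, keeping node and edge data.
        yield G.subgraph(nodes).copy()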
Example 3: lesion_met_largest_weak_component
def lesion_met_largest_weak_component(G, orig_order=None):
"""
Get largest weak component size of a graph.
Parameters
----------
G : directed networkx graph
Graph to compute largest component for
orig_order : int
Define orig_order if you'd like the largest component proportion
Returns
-------
    largest weak component size : int or float
        Proportion of the largest remaining component if orig_order
        is defined; otherwise, the number of nodes in the largest component.
"""
components = sorted(nx.weakly_connected_components(G), key=len,
reverse=True)
if len(components) > 0:
largest_component = len(components[0])
else:
largest_component = 0.
# Check if original component size is defined
if orig_order is not None:
return largest_component / float(orig_order)
else:
return largest_component
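A hypothetical call of the helper above (the random graph merely stands in for a lesioned network):
import networkx as nx

G = nx.gnp_random_graph(50, 0.04, directed=True, seed=1)
print(lesion_met_largest_weak_component(G))                 # absolute size of the largest weak component
print(lesion_met_largest_weak_component(G, orig_order=50))  # the same size as a proportion of 50 nodes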
Example 4: mark_vpn
def mark_vpn(graph, vpn_macs):
components = map(frozenset, nx.weakly_connected_components(graph))
components = filter(vpn_macs.intersection, components)
nodes = reduce(lambda a, b: a | b, components, set())
for node in nodes:
for k, v in graph[node].items():
v['vpn'] = True
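A hypothetical call with made-up MAC addresses; note that the reduce() inside mark_vpn needs "from functools import reduce" when running under Python 3:
from functools import reduce  # required by mark_vpn on Python 3 when pasted into the same module
import networkx as nx

G = nx.DiGraph()
G.add_edges_from([("aa:aa", "bb:bb"), ("cc:cc", "dd:dd")])
mark_vpn(G, {"aa:aa"})        # only the component containing aa:aa is marked
print(G["aa:aa"]["bb:bb"])    # {'vpn': True}
print(G["cc:cc"]["dd:dd"])    # {}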
Example 5: report_stats
def report_stats(G, params):
    print('Nodes: %d. Edges: %d' % (G.number_of_nodes(), G.number_of_edges()))
    sccs = list(nx.strongly_connected_components(G))
    wccs = list(nx.weakly_connected_components(G))
    print('Strongly ccs: %d, Weakly ccs: %d' % (len(sccs), len(wccs)))
    sizes_sccs, sizes_wccs = ([len(c) for c in sccs], [len(c) for c in wccs])
    print('Singletons. Strongly: %d, Weakly: %d' % (sum(np.array(sizes_sccs) == 1), sum(np.array(sizes_wccs) == 1)))
    print([len(c) for c in sccs[:10]])
Example 6: singletons
def singletons(self):
"""A singleton is a weakly connected component that has only one node.
Returns:
A list with the singleton nodes.
"""
components = networkx.weakly_connected_components(self.nxgraph)
return [component[0] for component in components
if len(component) == 1]
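Note that in NetworkX 2.x and later each component is a set, which cannot be indexed with [0], so the listing above only runs on the 1.x series. A version compatible with current releases might look like this (a sketch, not the original project's code):
import networkx

def singletons(nxgraph):
    """Return the lone node of every weakly connected component of size one."""
    return [next(iter(component))
            for component in networkx.weakly_connected_components(nxgraph)
            if len(component) == 1]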
Example 7: test_weakly_connected_components
def test_weakly_connected_components(testgraph):
"""
    Test weakly connected components
"""
comps0 = nx.weakly_connected_components(testgraph[0])
comps1 = sg.components.weak(testgraph[1])
assert_components_equal(comps0, comps1)
Example 8: find_len_2_control_kernals
def find_len_2_control_kernals(deterministic_transition_graph, nodes_list, attractor_ID):
"""uses the deterministic_transition_graph and specified attractor to find
all pairs of control nodes if they exist
note: these aren't "strict" control kernels because they specify the states needed to be in the
main attractor. controlling them doesn't necessarily change what attractor you'll be in.
"""
subgraphs = [g for g in nx.weakly_connected_components(deterministic_transition_graph)]
# index_of_largest_subgraph = max(enumerate(all_subgraph_sets), key = lambda tup: len(tup[1]))[0]
all_subgraph_sets = []
all_but_attractor_subgraph = []
for sg in subgraphs:
if attractor_ID not in sg:
all_state_sets = []
for nID in sg:
all_state_sets.append(set(time_evol.decimal_to_binary(nodes_list,nID).items()))
all_subgraph_sets.append(all_state_sets)
all_but_attractor_subgraph.append(all_state_sets)
else:
all_state_sets = []
for nID in sg:
all_state_sets.append(set(time_evol.decimal_to_binary(nodes_list,nID).items()))
all_subgraph_sets.append(all_state_sets)
state_0_list = [0 for i in range(len(nodes_list))]
state_1_list = [1 for i in range(len(nodes_list))]
    possible_states = list(zip(nodes_list, state_0_list)) + list(zip(nodes_list, state_1_list))  # list of (node, state)
possible_pairs = [combo for combo in combinations(possible_states, 2)]
possible_pairs_pared = copy.deepcopy(possible_pairs)
# remove pairs where both keys are the same (eg. gF and gF)
for pair in possible_pairs:
if pair[0][0] == pair[1][0] and (pair in possible_pairs_pared):
possible_pairs_pared.remove(pair)
# remove pairs when any of the networks in the non-attractor subgraph contain that pair
# (ie. that pair can not possibly be a control kernel because it is present in the wrong attractor)
for sg in all_but_attractor_subgraph:
for state_set in sg:
for pair in possible_pairs:
if (pair[0] in state_set) and (pair[1] in state_set) and (pair in possible_pairs_pared):
possible_pairs_pared.remove(pair)
    return possible_pairs_pared  # return a list of ((node, state), (node, state)) pairs that are control kernels
Example 9: main
def main():
if len(sys.argv) != 2:
print "Error!\nCorrect usage is:\n\t"
print "python visualize_osm_test_region.py [osm_test_region_for_draw.dat]"
return
G = nx.read_gpickle(sys.argv[1])
    components = list(nx.weakly_connected_components(G))
    print("There are %d connected components." % len(components))
    H = G.subgraph(components[0])
G = H
osm_for_drawing = OSM_DRAW(G)
easting, northing = osm_for_drawing.node_list()
edge_list = osm_for_drawing.edge_list()
#map_decomposition = MapDecomposition()
#map_decomposition.primitive_decomposition(G, 10.0)
#return
fig = plt.figure(figsize=const.figsize)
ax = fig.add_subplot(111, aspect='equal')
#print edge_list
arrow_params = {'length_includes_head':True, 'shape':'full', 'head_starts_at_zero':False}
for segment in edge_list:
u = segment[1][0] - segment[0][0]
v = segment[1][1] - segment[0][1]
ax.arrow(segment[0][0], segment[0][1], u, v, width=0.5, head_width=5,\
head_length=10, overhang=0.5, **arrow_params)
#edge_collection = LineCollection(edge_list, colors='gray', linewidths=2)
#ax.add_collection(edge_collection)
# Junction nodes
for node in G.nodes():
if G.degree(node) > 2:
ax.plot(G.node[node]['data'].easting,
G.node[node]['data'].northing,
'ro')
# Connected components
#for index in range(0, len(components)):
# color = const.colors[index%7]
# print len(components[index])
# for node in components[index]:
# ax.plot(G.node[node]['data'].easting,
# G.node[node]['data'].northing,
# 'o', color=color)
# break
ax.set_xlim([const.RANGE_SW[0], const.RANGE_NE[0]])
ax.set_ylim([const.RANGE_SW[1], const.RANGE_NE[1]])
plt.show()
Example 10: main
def main():
parser = OptionParser()
parser.add_option("-m", "--osm", dest="osm_data", help="Input open street map data (typically in gpickle format)", metavar="OSM_DATA", type="string")
parser.add_option("-t", "--track_data", dest="track_data", help="Input GPS tracks", metavar="TRACK_DATA", type="string")
parser.add_option("-o", "--output_osm", dest="output_osm", help="Output file name (suggested extention: gpickle)", metavar="OUTPUT", type="string")
parser.add_option("--test_case", dest="test_case", type="int", help="Test cases: 0: region-0; 1: region-1; 2: SF-region.", default=0)
(options, args) = parser.parse_args()
if not options.osm_data:
parser.error("Input osm_data not found!")
if not options.track_data:
parser.error("Input track_data not found!")
if not options.output_osm:
parser.error("Output image not specified!")
R = const.R
if options.test_case == 0:
LOC = const.Region_0_LOC
elif options.test_case == 1:
LOC = const.Region_1_LOC
elif options.test_case == 2:
LOC = const.SF_LOC
else:
parser.error("Test case indexed %d not supported!"%options.test_case)
G = nx.read_gpickle(options.osm_data)
    components = list(nx.weakly_connected_components(G))
    H = G.subgraph(components[0])
G = H
tracks =
osm_for_drawing = OSM_DRAW(G)
edge_lists = osm_for_drawing.edge_list()
line_strings = []
for edge_list in edge_lists:
line = LineString(edge_list)
line_strings.append(line)
fig = plt.figure(figsize=(10, 10))
ax = plt.Axes(fig, [0., 0., 1., 1.], aspect='equal')
ax.set_axis_off()
fig.add_axes(ax)
ROAD_WIDTH = 7 # in meters
for line_string in line_strings:
polygon = line_string.buffer(ROAD_WIDTH)
patch = PolygonPatch(polygon, facecolor='k', edgecolor='k')
ax.add_patch(patch)
ax.set_xlim([LOC[0]-R, LOC[0]+R])
ax.set_ylim([LOC[1]-R, LOC[1]+R])
    fig.savefig(options.output_osm, dpi=100)
plt.close()
return
Example 11: trn_stats
def trn_stats(genes, trn, t_factors, version):
LOGGER.info("Computing TRN statistics")
nodes = sorted(trn.nodes_iter())
node2id = {n: i for (i, n) in enumerate(nodes)}
id2node = {i: n for (i, n) in enumerate(nodes)}
(grn, node2id) = to_simple(trn.to_grn(), return_map=True)
nodes = sorted(grn.nodes_iter())
regulating = {node for (node, deg) in grn.out_degree_iter() if deg > 0}
regulated = set(nodes) - regulating
components = sorted(nx.weakly_connected_components(grn), key=len,
reverse=True)
data = dict()
for (a, b) in itertools.product(("in", "out"), repeat=2):
data["{a}_{b}_ass".format(a=a, b=b)] = nx.degree_assortativity_coefficient(grn, x=a, y=b)
census = triadic_census(grn)
forward = census["030T"]
feedback = census["030C"]
num_cycles = sum(1 for cyc in nx.simple_cycles(grn) if len(cyc) > 2)
in_deg = [grn.in_degree(node) for node in regulated]
out_deg = [grn.out_degree(node) for node in regulating]
data["version"] = version,
data["release"] = pd.to_datetime(RELEASE[version]),
data["num_genes"] = len(genes),
data["num_tf"] = len(t_factors),
data["num_nodes"] = len(nodes),
data["num_regulating"] = len(regulating),
data["num_regulated"] = len(regulated),
data["num_links"] = grn.size(),
data["density"] = nx.density(grn),
data["num_components"] = len(components),
data["largest_component"] = len(components[0]),
data["feed_forward"] = forward,
data["feedback"] = feedback,
data["fis_out"] = trn.out_degree(TranscriptionFactor[FIS_ID, version]),
data["hns_out"] = trn.out_degree(TranscriptionFactor[HNS_ID, version]),
data["cycles"] = num_cycles,
data["regulated_in_deg"] = mean(in_deg),
data["regulating_out_deg"] = mean(out_deg),
data["hub_out_deg"] = max(out_deg)
stats = pd.DataFrame(data, index=[1])
in_deg = [grn.in_degree(node) for node in nodes]
out_deg = [grn.out_degree(node) for node in nodes]
bc = nx.betweenness_centrality(grn)
bc = [bc[node] for node in nodes]
dists = pd.DataFrame({
"version": version,
"release": [pd.to_datetime(RELEASE[version])] * len(nodes),
"node": [id2node[node].unique_id for node in nodes],
"regulated_in_degree": in_deg,
"regulating_out_degree": out_deg,
"betweenness": bc
})
return (stats, dists)
Example 12: rooted_core_interface_pairs
def rooted_core_interface_pairs(self, root, thickness=None, for_base=False,
hash_bitmask=None,
radius_list=[],
thickness_list=None,
node_filter=lambda x, y: True):
"""
Parameters
----------
root:
thickness:
args:
Returns
-------
"""
ciplist = super(self.__class__, self).rooted_core_interface_pairs(root, thickness, for_base=for_base,
hash_bitmask=hash_bitmask,
radius_list=radius_list,
thickness_list=thickness_list,
node_filter=node_filter)
# numbering shards if cip graphs not connected
for cip in ciplist:
if not nx.is_weakly_connected(cip.graph):
comps = [list(node_list) for node_list in nx.weakly_connected_components(cip.graph)]
comps.sort()
for i, nodes in enumerate(comps):
for node in nodes:
cip.graph.node[node]['shard'] = i
'''
        solve the problem of single-edge-nodes in the core;
        this may replace the need for the fix_structure step.
        this is a little hard.. may fix later.
        it isn't hard if I write this code in merge_core in ubergraphlearn
for cip in ciplist:
for n,d in cip.graph.nodes(data=True):
if 'edge' in d and 'interface' not in d:
if 'interface' in cip.graph.node[ cip.graph.successors(n)[0]]:
#problem found
'''
return ciplist
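The shard-numbering step above can be read in isolation as: if the CIP graph is not weakly connected, tag every node with the index of its component. A standalone sketch of that idea (my own, using the NetworkX 3.x accessor G.nodes[...] instead of the removed G.node[...]):
import networkx as nx

def number_shards(G):
    """Label each node with the index ('shard') of its weakly connected component."""
    if not nx.is_weakly_connected(G):
        # component order is arbitrary; sort the components first if a stable numbering matters.
        for i, nodes in enumerate(nx.weakly_connected_components(G)):
            for node in nodes:
                G.nodes[node]['shard'] = i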
Example 13: save
def save(self, filename):
# output the biggest weakly connected components
vertices = list(sorted(nx.weakly_connected_components(self.graph),
key=len, reverse=True))[0]
output_graph = nx.DiGraph()
output_graph.add_nodes_from([(node, data)
for node, data
in self.graph.nodes(data=True)
if node in vertices])
output_graph.add_edges_from([(src, dest, data)
for src, dest, data
in self.graph.edges(data=True)
if src in vertices and dest in vertices])
nx.drawing.nx_pydot.write_dot(output_graph, filename)
return self
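Picking the largest weakly connected component can also be done without sorting all of them, via max() (a small aside, not part of the original class):
import networkx as nx

G = nx.DiGraph([(1, 2), (3, 4), (4, 5)])
largest = max(nx.weakly_connected_components(G), key=len)
print(largest)  # {3, 4, 5}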
Example 14: _connected_component
def _connected_component(self):
if self.graph.is_directed():
edges_in_wcc = set()
nodes_in_wcc = set()
wcc_counter = 0
        for wcc in nx.weakly_connected_components(self.graph):
            # each weakly connected component is returned as a set of nodes
            wcc_counter += 1
            nodes_in_wcc.update(wcc)
            edges_in_wcc.update(self.graph.subgraph(wcc).edges())
    else:  # undirected graphs have no notion of weak connectivity
        cc_counter = 0
        for cc in nx.connected_components(self.graph):
            cc_counter += 1
Example 15: test_zero_d_to_molecule_graph
def test_zero_d_to_molecule_graph(self):
comp_graphs = [self.mol_structure.graph.subgraph(c) for c in
nx.weakly_connected_components(self.mol_structure.graph)]
mol_graph = zero_d_graph_to_molecule_graph(self.mol_structure,
comp_graphs[0])
self.assertEqual(mol_graph.get_connected_sites(0)[0].index, 1)
self.assertEqual(mol_graph.get_connected_sites(1)[1].index, 2)
self.assertEqual(mol_graph.molecule.num_sites, 3)
# test catching non zero dimensionality graphs
comp_graphs = [self.graphite.graph.subgraph(c) for c in
nx.weakly_connected_components(self.graphite.graph)]
self.assertRaises(ValueError, zero_d_graph_to_molecule_graph,
self.graphite, comp_graphs[0])
# test for a troublesome structure
s = loadfn(os.path.join(test_dir, "PH7CN3O3F.json.gz"))
bs = CrystalNN().get_bonded_structure(s)
comp_graphs = [bs.graph.subgraph(c) for c in
nx.weakly_connected_components(bs.graph)]
mol_graph = zero_d_graph_to_molecule_graph(bs, comp_graphs[0])
self.assertEqual(mol_graph.molecule.num_sites, 12)