Python networkx.connected_components Function Code Examples

This article collects typical usage examples of the Python function networkx.connected_components. If you are unsure what connected_components does, how to call it, or what real-world usage looks like, the curated examples below should help.


The sections below present 15 code examples of the connected_components function, sorted by popularity by default.
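
Before diving into the examples, here is a minimal usage sketch written for this article (it is not taken from any of the projects below). It assumes NetworkX 2.x or later, where connected_components returns a generator of node sets:

import networkx as nx

# Build a small undirected graph with two separate components.
G = nx.Graph()
G.add_edges_from([(1, 2), (2, 3), (4, 5)])

# connected_components yields one set of nodes per component.
components = list(nx.connected_components(G))
print(components)                          # e.g. [{1, 2, 3}, {4, 5}]
print(nx.number_connected_components(G))   # 2

# A common idiom: take the largest component by size.
largest = max(nx.connected_components(G), key=len)
print(largest)                             # {1, 2, 3}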

Example 1: connect_module_graph

def connect_module_graph(G, outdegree_list):
    """Connect disconnected modules. Note: This function cannot be used to
    connect the entire modular graph."""
    cc_tot = list(nx.connected_components(G))  # connected components of G as node sets cc_tot[0], cc_tot[1], etc.
    isolated_comp, outedge_comp, isolated_comp_count, outedge_comp_count = partition_network_components(
        cc_tot, outdegree_list
    )

    while isolated_comp_count > 0:  # while G is not connected, reduce number of components
        # pick a random node in the first isolated component
        node1 = rnd.choice(isolated_comp[0])
        # pick a node in another component whose degree > 1
        node2 = rnd.choice(outedge_comp[rnd.choice([x for x in range(outedge_comp_count)])])
        while G.degree(node2) <= 1:
            node2 = rnd.choice(outedge_comp[rnd.choice([x for x in range(outedge_comp_count)])])

        # pick neighbors of node1 and node2
        nbr1 = rnd.choice(list(G.neighbors(node1)))
        nbr2 = rnd.choice(list(G.neighbors(node2)))

        # swap connections between node1,nbr1 with connections between node2,nbr2
        #  to attempt to connect the two components
        G.remove_edges_from([(node1, nbr1), (node2, nbr2)])
        G.add_edges_from([(node1, node2), (nbr1, nbr2)])

        cc_tot = list(nx.connected_components(G))
        isolated_comp, outedge_comp, isolated_comp_count, outedge_comp_count = partition_network_components(
            cc_tot, outdegree_list
        )
Author: prathasah, Project: random-modular-network-generator, Lines: 29, Source: random_modular_generator_variable_modules.py

Example 2: _reduce_graph

    def _reduce_graph(self, graph, min0list):
        """determine how much of the graph to include in the disconnectivity graph
        """
        used_nodes = []
        # make sure we include the subgraph containing min0
        if len(min0list) > 0:
            for min0 in min0list:
                nodes = nx.node_connected_component(graph, min0)
                if len(nodes) > 2:
                    used_nodes += nodes
                else:
                    print("dgraph: too few nodes connected to", min0)  
        if len(used_nodes) == 0: 
            # use the biggest connected cluster
            cc = sorted(nx.connected_components(graph), key=len, reverse=True)
            used_nodes += cc[0]  # list is ordered by size of cluster

        if self.subgraph_size is not None:
            node_lists = nx.connected_components(graph)
            for nodes in node_lists:
                if len(nodes) >= self.subgraph_size:
                    used_nodes += nodes

        newgraph = graph.subgraph(used_nodes).copy()
        return newgraph
Author: pele-python, Project: pele, Lines: 25, Source: disconnectivity_graph.py

Example 3: runGirvanNewman

def runGirvanNewman(G, Orig_deg, m_):
    # let's find the best split of the graph
    BestQ = 0.0
    Q = 0.0
    print("runGirvanNewman")
    while True:
        CmtyGirvanNewmanStep(G)
        Q = _GirvanNewmanGetModularity(G, Orig_deg, m_)
        print("current modularity: %f" % Q)
        if nx.number_connected_components(G) >= 10 or Q >= 0.5:
            break
        if Q > BestQ:
            BestQ = Q
            Bestcomps = list(nx.connected_components(G))    # Best Split
            # print("comps:")
            # print(Bestcomps)
        if G.number_of_edges() == 0:
            break
    if BestQ > 0.0:
        # print("Best Q: %f" % BestQ)
        result_data = {}
        result_data['num_clusters'] = len(Bestcomps)
        result_data['list'] = Bestcomps
        return result_data
    else:
        # print("Best Q: %f" % BestQ)
        result_data = {}
        result_data['num_clusters'] = nx.number_connected_components(G)
        result_data['list'] = list(nx.connected_components(G))
        return result_data
Author: kuss, Project: FBSocialGraph, Lines: 30, Source: cmty.py

Example 4: fix_connectivity_of_network

def fix_connectivity_of_network(initial_g, pos, tx, threshold=80.0):
    g = initial_g
    x = [(len(c), c) for c in nx.connected_components(g)]
    maximum = max(c[0] for c in x)
    if maximum < len(pos)*threshold/100:
        return False
    while len(x) > 1:
        x.sort(key=lambda item: item[0])  # sort components by size, smallest first
        sizes = [c[0] for c in x]
        # print len(x), " components with sizes: ", sizes
        idx_c = 0
        c = x[0]
        # print "\thola: ", c[1]
        ttt = [ find_closest(pos, x, idx, idx_c) for idx in c[1] ]
        ttt.sort(key=lambda item: item[0])
        pair = ttt[0]
        # print "\t\tminimum distance : ", pair[0] - tx, ", to node:", pair[1]
        toward = pair[1]
        x0 = pos[pair[2]][0]
        y0 = pos[pair[2]][1]
        x1 = pos[toward][0]
        y1 = pos[toward][1]
        d = pair[0] - tx + 1
        px = (x1 - x0)/d
        py = (y1 - y0)/d
        # move all nodes in this component
        for idx in c[1]:
            pos[idx][0] += px
            pos[idx][1] += py
        g = build_graph(pos, tx)
        x = [(len(c), c) for c in nx.connected_components(g)]
    return True
Author: tima-overlays, Project: infocom2017, Lines: 32, Source: buildTopology.py

Example 5: conn_comps

	def conn_comps(self, g2):
		# then try to do connectedness
		num_e = len(g2.edges())
		conn_comps = sorted(networkx.connected_components(g2), key=len, reverse=True)  # largest component first
		conn_trials = 0
		while len(conn_comps) > 1:
			small_comps = conn_comps[1:]
			small_comps.reverse()   # start with smallest components, easiest to fix (?)
			for comp in small_comps:
				edges = []
				for u in comp:
					i = self.state.get_partition_of_node(u)
					nodes_i = self.state.get_nodes(i)
					# sample by edges
					for v in nodes_i:
						if u != v and g2.degree(v) >= 2:
							edges += [ (u, v, w) for w in g2.neighbors(v) \
							if (w != u and not g2.has_edge(u,w)) ]

				if len(edges) == 0:
					continue
				(u, v, w) = random.choice(edges)
				g2.remove_edge(v,w)
				assert u != w
				assert not g2.has_edge(u,w)
				g2.add_edge(u,w)
				assert g2.has_edge(u,w)
				assert num_e == len(g2.edges()), "expected %d vs. actual %d: u=%s v=%s w=%s" % (num_e, len(g2.edges()), str(u), str(v), str(w))
			conn_comps = sorted(networkx.connected_components(g2), key=len, reverse=True)
			conn_trials += 1
			if conn_trials > 50:
				print("After %d tries, could not connect graph, leaving %d components" % (conn_trials, len(conn_comps)))
				break
Author: michaelghay, Project: graph-gen, Lines: 33, Source: partitioner.py

Example 6: communities

	def communities(self, nCommunities, weight=None):
		"""
		Compute communities.

		Parameters
		----------
		nCommunities - number of communities to be returned.
			This parameter simplifies the process; the original Girvan-Newman algorithm
			does not require a pre-specified number of communities.
			Other stopping criteria, such as a threshold on betweenness centrality, can be used instead.
		
		weight (string) - If None, all edge weights are considered equal. 
			Otherwise holds the name of the edge attribute used as weight. 


		Returns
		--------
		A list of communities where each community is a list of the nodes in the community.	 
		"""
		gr = self.g
		n = nx.number_connected_components(gr)
		components = list(nx.connected_components(gr))

		while (n < nCommunities):
			gr = self.communitySplits(gr, weight=weight)
			components = list(nx.connected_components(gr))
			n = nx.number_connected_components(gr)
			if gr.number_of_edges() == 0:
				break
		return components
Author: BBischof, Project: visa_free, Lines: 30, Source: CommunityDetection.py
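
Note: on NetworkX 2.x and later, a comparable "split until k communities" behavior is available through the built-in Girvan-Newman generator. The sketch below is an illustration added for this article (it is not part of the CommunityDetection.py class above) and uses the karate club graph as a stand-in input:

import networkx as nx
from networkx.algorithms import community

G = nx.karate_club_graph()
nCommunities = 4

# girvan_newman yields successive splits; each item is a tuple of node sets.
for communities in community.girvan_newman(G):
    if len(communities) >= nCommunities:
        break
print([sorted(c) for c in communities])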

Example 7: select_clusters_for_length

def select_clusters_for_length(selected_protein_pairs, genome_graph, genome_graph_query):
	# defines the shortest length of a synteny ortholog cluster
	pairs={}
	for n in selected_protein_pairs:	
		pairs[n[0]]=n[1]


	valid_graph=nx.Graph()
	edges=list()
	for edge in genome_graph.edges():


		if edge[0] in pairs.keys() and edge[1] in pairs.keys():
			valid_graph.add_edge(edge[0],edge[1])

	for cluster in nx.connected_components(valid_graph):
		if len(cluster)>4:
			query_nodes={}
			valid_query_graph=nx.Graph()
			for node in cluster:
				
				query_nodes[pairs[node]]=node


			for query_edge in genome_graph_query.edges():
				if query_edge[0] in query_nodes.keys() and query_edge[1] in query_nodes.keys():
					
					valid_query_graph.add_edge(query_edge[0],query_edge[1])
			
			for query_cluster in nx.connected_components(valid_query_graph):
				if len(query_cluster)>4:
					for query_node in sorted(query_cluster):
						print(query_nodes[query_node] + "\t" + query_node)
Author: danielwuethrich87, Project: Synteny_ortholog_finder, Lines: 33, Source: SynOrF.py

Example 8: connect_simple_graph

def connect_simple_graph(G):
    """check if simple graph G is disconnected and connect if necessary"""

    cc = sorted((list(c) for c in nx.connected_components(G)), key=len, reverse=True)  # components of G as node lists cc[0], cc[1], ..., largest first
    component_count = len(cc)
    while component_count > 1:  # while G is not connected, reduce number of components

        # pick a random node in the largest component cc[0] that has degree > 1
        node1 = rnd.choice(cc[0])
        while G.degree(node1) == 1:
            node1 = rnd.choice(cc[0])

            # pick a node in another component
        node2 = rnd.choice(cc[1])

        # pick neighbors of node1 and node2
        nbr1 = rnd.choice(list(G.neighbors(node1)))
        nbr2 = rnd.choice(list(G.neighbors(node2)))

        # swap connections between node1,nbr1 with connections between node2,nbr2
        #  to attempt to connect the two components
        G.remove_edges_from([(node1, nbr1), (node2, nbr2)])
        G.add_edges_from([(node1, node2), (nbr1, nbr2)])

        cc = sorted((list(c) for c in nx.connected_components(G)), key=len, reverse=True)
        component_count = len(cc)
Author: prathasah, Project: random-modular-network-generator, Lines: 26, Source: random_modular_generator_variable_modules.py
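
A quick way to exercise connect_simple_graph is on a graph built from two disjoint cycles, so every component has nodes of degree > 1. The usage sketch below is an addition for this article; it assumes the function above is importable and that random is imported as rnd, as in the source file:

import random as rnd
import networkx as nx

# Two disjoint cycles: guaranteed disconnected, every node has degree 2.
G = nx.disjoint_union(nx.cycle_graph(6), nx.cycle_graph(8))
print("components before:", nx.number_connected_components(G))  # 2

connect_simple_graph(G)   # rewires one edge pair per iteration until connected
print("components after:", nx.number_connected_components(G))   # 1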

Example 9: compareGraphs

def compareGraphs(g1, g2):

    """Compare the quantitative properties of two graphs, e.g. to check the coarse graining."""

    # Nodes and edges
    print('Graph1: #(Nodes, Edges) = (' + str(len(g1.nodes())) + ', ' + str(len(g1.edges())) + ')')
    print('Graph2: #(Nodes, Edges) = (' + str(len(g2.nodes())) + ', ' + str(len(g2.edges())) + ')')

    # Connected Components
    # print('\n#CCs for graph 1: ' + str(nx.number_connected_components(g1)))
    # print('#CCs for graph 2: ' + str(nx.number_connected_components(g2)))

    plt.hist([len(i) for i in nx.connected_components(g1)])
    plt.hist([len(i) for i in nx.connected_components(g2)])
    plt.title('Cluster Size')
    plt.xlabel('Cluster Size')
    plt.ylabel('#Cluster')
    plt.show()

    # Degree Distribution
    plt.hist(nx.degree_histogram(g1))
    plt.hist(nx.degree_histogram(g2))
    plt.title('Degree Distribution')
    plt.xlabel('Degree')
    plt.ylabel('#Nodes')
    plt.show()

    # Betweenness --- this is by far the most computationally demanding.
    plt.hist(list(nx.betweenness_centrality(g1, normalized=False).values()))
    plt.hist(list(nx.betweenness_centrality(g2, normalized=False).values()))
    plt.title('Distribution of Betweenness')
    plt.xlabel('Betweenness')
    plt.ylabel('#Nodes')
    plt.show()
Author: Khev, Project: coarse_grain_networks, Lines: 35, Source: CoarseGrainLibrary.py
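
As a usage note added for this article (not from the source project): compareGraphs expects two in-memory graphs and an interactive matplotlib backend, so a minimal driver might look like this, using two random graphs as stand-ins:

import networkx as nx

g1 = nx.erdos_renyi_graph(200, 0.02, seed=42)
g2 = nx.erdos_renyi_graph(100, 0.04, seed=42)

# Prints node/edge counts and shows three pairs of overlaid histograms.
compareGraphs(g1, g2)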

Example 10: runGirvanNewman

def runGirvanNewman(G, Orig_deg, m_):
    #let's find the best split of the graph    
    BestQ = 0.0
    Q = 0.0
    Bestcomps = list(nx.connected_components(G))
    while True:    
        CmtyGirvanNewmanStep(G)
        Q = _GirvanNewmanGetModularity(G, Orig_deg, m_)
        #print "current modularity: %f" % Q
        if Q > BestQ:
            BestQ = Q
            Bestcomps = list(nx.connected_components(G))    #Best Split
            #print "comps:" 
            #print Bestcomps
        if G.number_of_edges() == 0:
            #print "at last use this break"
            break
    """
    if BestQ > 0.0:
        print "Best Q: %f" % BestQ
        print Bestcomps
    else:
        print "Best Q: %f" % BestQ
    """
    return Bestcomps
Author: liupenggl, Project: hybrid, Lines: 25, Source: gn.py

Example 11: betweenness_fracture

def betweenness_fracture(infile, outfile, fraction, recalculate = False):
    """
    Removes given fraction of nodes from infile network in reverse order of 
    betweenness centrality (with or without recalculation of centrality values 
    after each node removal) and saves the network in outfile.
    """

    g = networkx.read_gml(infile)
    m = networkx.betweenness_centrality(g)
    l = sorted(m.items(), key = operator.itemgetter(1), reverse = True)
    largest_component = max(networkx.connected_components(g), key = len)
    n = len(g.nodes())
    for i in range(1, n):
        g.remove_node(l.pop(0)[0])
        if recalculate:
            m = networkx.betweenness_centrality(g)
            l = sorted(m.items(), key = operator.itemgetter(1), 
                       reverse = True)
        largest_component = max(networkx.connected_components(g), key = len)
        if i * 1. / n >= fraction:
            break
    components = networkx.connected_components(g)
    component_id = 1
    for component in components:
        for node in component:
            g.nodes[node]["component"] = component_id  # g.node was removed in newer NetworkX versions; g.nodes works in 2.x and 3.x
        component_id += 1
    networkx.write_gml(g, outfile)
Author: swamiiyer, Project: robustness, Lines: 28, Source: robustness.py

Example 12: closeness

def closeness(infile, recalculate = False):
    """
    Performs robustness analysis based on closeness centrality,  
    on the network specified by infile using sequential (recalculate = True) 
    or simultaneous (recalculate = False) approach. Returns a list 
    with fraction of nodes removed, a list with the corresponding sizes of 
    the largest component of the network, and the overall vulnerability 
    of the network.
    """

    g = networkx.read_gml(infile)
    m = networkx.closeness_centrality(g)
    l = sorted(m.items(), key = operator.itemgetter(1), reverse = True)
    x = []
    y = []
    largest_component = max(networkx.connected_components(g), key = len)
    n = len(g.nodes())
    x.append(0)
    y.append(len(largest_component) * 1. / n)
    R = 0.0
    for i in range(1, n):
        g.remove_node(l.pop(0)[0])
        if recalculate:
            m = networkx.closeness_centrality(g)
            l = sorted(m.items(), key = operator.itemgetter(1), 
                       reverse = True)
        largest_component = max(networkx.connected_components(g), key = len)
        x.append(i * 1. / n)
        R += len(largest_component) * 1. / n
        y.append(len(largest_component) * 1. / n)
    return x, y, 0.5 - R / n
Author: swamiiyer, Project: robustness, Lines: 31, Source: robustness.py
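
The (x, y, vulnerability) triple returned by closeness (and by the other attack strategies in this file) is usually plotted as a robustness curve. The sketch below is an addition for this article and assumes a GML file named network.gml plus matplotlib:

import matplotlib.pyplot as plt

x, y, vulnerability = closeness("network.gml", recalculate=False)

plt.plot(x, y, marker=".")
plt.xlabel("Fraction of nodes removed")
plt.ylabel("Relative size of largest component")
plt.title("Closeness-based attack (vulnerability = %.3f)" % vulnerability)
plt.show()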

Example 13: eigenvector

def eigenvector(g, recalculate=False):
    """
    Performs robustness analysis based on eigenvector centrality
    on the given network g, using sequential (recalculate = True)
    or simultaneous (recalculate = False) approach. Returns a list 
    with fraction of nodes removed, a list with the corresponding sizes of 
    the largest component of the network, and the overall vulnerability 
    of the network.
    """

    m = networkx.eigenvector_centrality(g, max_iter=5000)
    l = sorted(m.items(), key=operator.itemgetter(1), reverse=True)
    x = []
    y = []
    largest_component = max(networkx.connected_components(g), key=len)
    n = len(g.nodes())
    x.append(0)
    y.append(len(largest_component) * 1. / n)
    r = 0.0
    for i in range(1, n - 1):
        g.remove_node(l.pop(0)[0])
        if recalculate:

            try:
                m = networkx.eigenvector_centrality(g, max_iter=5000)
            except networkx.NetworkXError:
                break

            l = sorted(m.items(), key=operator.itemgetter(1),
                       reverse=True)
        largest_component = max(networkx.connected_components(g), key=len)
        x.append(i * 1. / n)
        r += len(largest_component) * 1. / n
        y.append(len(largest_component) * 1. / n)
    return x, y, r / n
Author: computational-center, Project: complexNetworksMeasurements, Lines: 35, Source: robustness2.py

Example 14: rand

def rand(infile):
    """
    Performs robustness analysis based on random attack, on the network 
    specified by infile. Returns a list with fraction of nodes removed, a 
    list with the corresponding sizes of the largest component of the 
    network, and the overall vulnerability of the network.
    """

    g = networkx.read_gml(infile)
    l = [(node, 0) for node in g.nodes()]
    random.shuffle(l)
    x = []
    y = []
    largest_component = max(networkx.connected_components(g), key = len)
    n = len(g.nodes())
    x.append(0)
    y.append(len(largest_component) * 1. / n)
    R = 0.0
    for i in range(1, n):
        g.remove_node(l.pop(0)[0])
        largest_component = max(networkx.connected_components(g), key = len)
        x.append(i * 1. / n)
        R += len(largest_component) * 1. / n
        y.append(len(largest_component) * 1. / n)
    return x, y, 0.5 - R / n
Author: swamiiyer, Project: robustness, Lines: 25, Source: robustness.py

Example 15: find_communities_modularity

def find_communities_modularity(G, max_iter=None):
    '''
    INPUT:
        G: networkx Graph
        max_iter: (optional) if given, maximum number of iterations
    OUTPUT: list of lists of strings (node names)

    Run the Girvan-Newman algorithm on G and find the communities with the
    maximum modularity.
    '''
    degrees = G.degree()
    num_edges = G.number_of_edges()
    G1 = G.copy()
    best_modularity = -1.0
    best_comps = list(nx.connected_components(G1))
    i = 0
    while G1.number_of_edges() > 0:
        # connected_component_subgraphs was removed from newer NetworkX; build the subgraphs explicitly
        subgraphs = (G1.subgraph(c).copy() for c in nx.connected_components(G1))
        modularity = get_modularity(subgraphs, degrees, num_edges)
        if modularity > best_modularity:
            best_modularity = modularity
            best_comps = list(nx.connected_components(G1))
        girvan_newman_step(G1)
        i += 1
        if max_iter and i >= max_iter:
            break
    return best_comps
Author: sarubenfeld, Project: clusters, Lines: 27, Source: clusters_model.py
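
For comparison, here is an alternative sketch added for this article (not code from the clusters project): recent NetworkX releases ship both a Girvan-Newman generator and a modularity function, so the same "keep the split with maximum modularity" search can be written directly against the library API:

import networkx as nx
from networkx.algorithms import community

G = nx.karate_club_graph()

best_modularity = -1.0
best_comps = [set(G.nodes())]
for communities in community.girvan_newman(G):
    q = community.modularity(G, communities)
    if q > best_modularity:
        best_modularity = q
        best_comps = list(communities)

print("best modularity: %.4f" % best_modularity)
print([sorted(c) for c in best_comps])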


Note: The networkx.connected_components examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective authors, who retain copyright of the source code; please consult each project's license before using or redistributing it. Do not reproduce this article without permission.