

Python networkx.density Function Code Examples

This article collects typical usage examples of the networkx.density function in Python. If you are wondering what density does, how to call it, or what real-world usage looks like, the hand-picked code examples below may help.


The following presents 15 code examples of the density function, ordered by popularity by default.
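
Before the examples, here is a minimal sketch of what nx.density computes: for an undirected graph with n nodes and m edges it returns 2m / (n(n-1)), for a directed graph m / (n(n-1)), and for a graph with fewer than two nodes or no edges it returns 0. A complete simple graph therefore has density 1, while multigraphs or graphs with self-loops can exceed 1.

import networkx as nx

# Undirected: density = 2m / (n * (n - 1))
G = nx.complete_graph(4)           # 4 nodes, 6 edges
print(nx.density(G))               # 1.0

H = nx.path_graph(4)               # 4 nodes, 3 edges
print(nx.density(H))               # 0.5

# Directed: density = m / (n * (n - 1))
D = nx.DiGraph([(0, 1), (1, 2)])   # 3 nodes, 2 edges
print(nx.density(D))               # 0.333...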

Example 1: classify

def classify(request, pk):
	#gets object based on id given
	graph_file = get_object_or_404(Document, pk=pk)
	#reads file into networkx graph based on extension
	if graph_file.extension() == ".gml":
		G = nx.read_gml(graph_file.uploadfile)
	else:
		G = nx.read_gexf(graph_file.uploadfile)
	#closes file so we can delete it
	graph_file.uploadfile.close()
	#loads the algorithm and tests the algorithm against the graph
	g_json = json_graph.node_link_data(G)
	#save graph into json file
	with open(os.path.join(settings.MEDIA_ROOT, 'graph.json'), 'w') as graph:
			json.dump(g_json, graph)
	with open(os.path.join(settings.MEDIA_ROOT, 'rf_classifier.pkl'), 'rb') as malgo:
		algo_loaded = pickle.load(malgo, encoding="latin1")
		dataset = np.array([G.number_of_nodes(), G.number_of_edges(), nx.density(G), nx.degree_assortativity_coefficient(G), nx.average_clustering(G), nx.graph_clique_number(G)])
		print (dataset)
		#creates X to test against
		X = dataset
		prediction = algo_loaded.predict(X)
		
		
		
		graph_type = check_prediction(prediction)
		graph = GraphPasser(G.number_of_nodes(), G.number_of_edges(), nx.density(G), nx.degree_assortativity_coefficient(G), nx.average_clustering(G), nx.graph_clique_number(G))
	#gives certain variables to the view

	return render(
		request,
		'classification/classify.html',
		{'graph': graph, 'prediction': graph_type}
		)
Author: Kaahan, Project: networkclassification, Lines: 34, Source: views.py
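
One caveat for Example 1: scikit-learn estimators expect a 2-D feature matrix, so passing the 1-D dataset array straight to predict raises an error in recent scikit-learn versions. A hedged sketch of the fix, assuming algo_loaded is a fitted scikit-learn classifier as the pickle filename suggests:

# predict() expects shape (n_samples, n_features); wrap the single
# feature vector in a 2-D array before classifying it.
X = dataset.reshape(1, -1)
prediction = algo_loaded.predict(X)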

Example 2: show_network_metrics

def show_network_metrics(G):
    '''
    Print the local and global metrics of the network
    '''
    print(nx.info(G))

    # density
    print("Density of the network")
    print(nx.density(G))    
    
    # average betweenness
    print("Average betweenness of the network")
    print(np.sum(list(nx.betweenness_centrality(G).values()))/len(nx.betweenness_centrality(G)))

    # Average clustering coefficient
    print("Average clustering coefficient:")
    print(nx.average_clustering(G))


    #create metrics dataframe
    by_node_metrics = pd.DataFrame({"Betweenness_Centrality":nx.betweenness_centrality(G),"Degree_Centrality":nx.degree_centrality(G),
        "Clustering_Coefficient":nx.clustering(G), "Triangles":nx.algorithms.cluster.triangles(G)})
    print(by_node_metrics)

    by_node_metrics.to_excel("metrics.xlsx")
Author: tyty233, Project: Music-Classification-and-Ranking-Analysis, Lines: 25, Source: networkv2.py
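
A small efficiency note on Example 2: nx.betweenness_centrality(G) is one of the more expensive NetworkX calls, and the snippet computes it three times (twice for the average, once more for the dataframe). A sketch that computes it once and reuses the result, assuming numpy is imported as np:

betweenness = nx.betweenness_centrality(G)   # compute once, reuse everywhere
print("Average betweenness of the network")
print(np.mean(list(betweenness.values())))
# ...and pass the same dict into the metrics DataFrame:
# by_node_metrics = pd.DataFrame({"Betweenness_Centrality": betweenness, ...})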

Example 3: gen_network

def gen_network(graph,machines,basedata):
    """ Generates an LLD network from a graph
        distributing participants in a list of machines
    """
    network = ET.Element('network')
    #network.set('type',graphtype)
    network.set('participants',str(graph.number_of_nodes()))
    network.set('edges',str(graph.size()))
    network.set('density',str(NX.density(graph)))

    network.set('connected',str(NX.is_weakly_connected(graph)))
    network.set('stronglyconnected',str(NX.is_strongly_connected(graph)))

    for node in graph.nodes_iter():
        nodelement = ET.SubElement(network,'participant')
        nodelement.set('id','participant'+str(node))
        hostelem = ET.SubElement(nodelement,'host')
        #hostelem.text = 'node'+str(int(node) % len(machines))
        hostelem.text = machines[int(node) % len(machines)]
        portelem = ET.SubElement(nodelement,'port')
        portelem.text = str(20500+int(node))
        baseelem = ET.SubElement(nodelement,'basedata')
        baseelem.text = basedata
        nodelement.append(gen_dynamic())
        for source in gen_sources(graph,node):
            nodelement.append(source)
    return network
Author: ldibanyez, Project: livelinkeddata, Lines: 27, Source: lldgen.py
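
Example 3 targets the NetworkX 1.x API: graph.nodes_iter() was removed in NetworkX 2.0, where graph.nodes() already returns an iterable view. A compatibility sketch for the loop header, assuming a current NetworkX release:

# NetworkX 1.x:  for node in graph.nodes_iter():
# NetworkX 2.x+: nodes() returns an iterable NodeView.
for node in graph.nodes():
    pass  # build the participant element as in the original function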

Example 4: ltDecomposeTestBatFull

def ltDecomposeTestBatFull(dsName, path, outfile, cd, wccOnly, revEdges, undir, diaF, fillF):
    origNet = loadNw(dsName, path, cd, wccOnly, revEdges, undir)
    prodNet = origNet
    # prodNet = copy.deepcopy(origNet)
    # print("dc")
    outfile = open(path + outfile + ".csv", "w")
    intFlag = False
    print("NW-WIDE MEASURES:\n")

    nodeStr = str(origNet.number_of_nodes())
    edgeStr = str(origNet.number_of_edges())
    avgDeg = str(float(origNet.number_of_edges()) / float(origNet.number_of_nodes()))
    dens = str(nx.density(origNet))
    avgCl = "--"
    # avgCl = str(nx.average_clustering(origNet))

    if diaF:
        print("  Starting dia calc")
        diameter = str(nx.diameter(origNet))
        print("  --> done w. dia calc")
    else:
        diameter = "---"

        # outfile.write("Dataset,NumNodes,NumEdges,avgDeg,dens,avgCl,diameter\n")
        # outfile.write(dsName+","+nodeStr+","+edgeStr+","+avgDeg+","+dens+","+avgCl+","+diameter+"\n")
        # if fillF:
        # 	print("FULL THRESH TEST\n")
        # outfile.write("Dataset,ThreshType,ThreshVal,PercSize,NumNodes,NumEdges,TimeAlg,TimeAlgAndSetup,Check\n")
        # thresh=1.0
        # outfile.write(ltDecomposeNoSetWithCheck(prodNet,thresh,dsName,intFlag,origNet))

    outfile.close()
    print("Done.")
Author: joeyh321, Project: ORCA, Lines: 33, Source: ltDecomp3.py

Example 5: updateGraphStats

    def updateGraphStats(self, graph):

        origgraph = graph
        if nx.is_connected(graph):
            random = 0
        else:
            connectedcomp = nx.connected_component_subgraphs(graph)
            graph = max(connectedcomp)

        if len(graph) > 1:
            pathlength = nx.average_shortest_path_length(graph)
        else:
            pathlength = 0

        # print graph.nodes(), len(graph), nx.is_connected(graph)

        stats = {
            "radius": nx.radius(graph),
            "density": nx.density(graph),
            "nodecount": len(graph.nodes()),
            "center": nx.center(graph),
            "avgcluscoeff": nx.average_clustering(graph),
            "nodeconnectivity": nx.node_connectivity(graph),
            "components": nx.number_connected_components(graph),
            "avgpathlength": pathlength
        }

        # print "updated graph stats", stats
        return stats
Author: hopeatina, Project: flask_heroku, Lines: 29, Source: simulator.py
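
Example 5 has two portability issues: nx.connected_component_subgraphs was removed in NetworkX 2.4, and max(connectedcomp) does not select the largest component by node count. A sketch of the presumed intent (keep the largest connected component), assuming a current NetworkX release:

# Largest connected component as a subgraph, NetworkX 2.4+.
largest_cc = max(nx.connected_components(graph), key=len)
graph = graph.subgraph(largest_cc).copy()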

Example 6: NetStats

def NetStats(G):
    return { 'radius': nx.radius(G),
             'diameter': nx.diameter(G),
             'connected_components': nx.number_connected_components(G),
             'density' : nx.density(G),
             'shortest_path_length': nx.shortest_path_length(G),
             'clustering': nx.clustering(G)}
Author: CSB-IG, Project: NinNX, Lines: 7, Source: __init__.py
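
Note that nx.radius and nx.diameter in Example 6 raise an exception on disconnected graphs, so NetStats only works for connected input. A minimal usage sketch on a small connected test graph:

import networkx as nx

G = nx.karate_club_graph()   # connected toy graph shipped with NetworkX
stats = NetStats(G)
print(stats['radius'], stats['diameter'], stats['density'])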

Example 7: info

    def info(self, graph, title=None):
        degree = sorted(nx.degree(graph).items(), key=lambda x: x[1], reverse=True)
        print('Highest degree nodes: ')
        if not title:
            for (node, value) in degree:
                print('{}:{}'.format(self.singer_dict[int(node)].split('|')[0], str(value)))
                if value < 90:
                    break

        avg = (0.0 + sum(value for (node, value) in degree)) / (0.0 + len(degree))
        (max_node, max_value) = degree[0]
        (min_node, min_value) = degree[len(degree) - 1]
        inf = list()
        if not title:
            inf.append('Number of nodes: {0}'.format(nx.number_of_nodes(graph)))
            inf.append('Number of edges: {0}'.format(nx.number_of_edges(graph)))
            inf.append('Is connected: {0}'.format(nx.is_connected(graph)))
        if title:
            inf.append(title)
        inf.append('Degree:')
        inf.append('Avg: {0}'.format(round(avg, 4)))
        inf.append('Max: {1} ({0})'.format(max_node, max_value))
        inf.append('Min: {1} ({0})'.format(min_node, min_value))
        inf.append('Density: {}'.format(round(nx.density(graph), 4)))
        return inf
Author: vslovik, Project: ARS, Lines: 25, Source: analyzer.py

Example 8: plot_distribution

def plot_distribution(distribution_type,legend,graph,list_communities,out=None):
	x = [i for i in range(0,len(list_communities[0]))]
	for communities in list_communities:
		if distribution_type.lower() == "nodes":
			y = list(map(len,communities))
		else:
			y = []
			for l in communities:
				H = graph.subgraph(l)
				if distribution_type.lower() == "density":
					y.append(nx.density(H))
				elif distribution_type.lower() == "transitivity":
					y.append(nx.transitivity(H))
				else:
					return None
		plt.plot(x,y,linewidth=2,alpha=0.8)
		#plt.yscale("log")

	plt.legend(legend, loc='upper left')
	plt.xlabel("Community ID")
	plt.ylabel(distribution_type)

	if out == None:
		plt.show()
	else:
		plt.savefig(out+".svg",bbox_inches="tight")
	plt.close()
Author: pigna90, Project: lastfm_network_analysis, Lines: 27, Source: community_discovery.py

Example 9: calGraph

def calGraph(infile, mode = 1):
	#init Parameter
	inputpath = 'edge_list/'
	outputpath = 'network_output/'
	n = mode
	Data_G = inputpath+infile+'_'+str(n)+'.edgelist'
	
	#init Graph
	G = nx.read_edgelist(Data_G, create_using=nx.DiGraph())
	GU = nx.read_edgelist(Data_G)
	#basic info
	print nx.info(G),'\n', nx.info(GU) 
	average_degree = float(sum(nx.degree(G).values()))/len(G.nodes())
	print 'average degree :', average_degree 
	degree_histogram = nx.degree_histogram(G)
	print 'degree histogram max :', degree_histogram[1]
	density = nx.density(G)
	print 'density :', density

	#Approximation
	#Centrality
	degree_centrality = nx.degree_centrality(G)
	print 'degree centrality top 10 !', sorted_dict(degree_centrality)[:2]
	out_degree_centrality = nx.out_degree_centrality(G)
	print 'out degree centrality top 10 !', sorted_dict(out_degree_centrality)[:2]
Author: carlzhangxuan, Project: For_Recruit, Lines: 25, Source: L3_NetworkX_basic.py

Example 10: print_info

def print_info(G):
  #info prints name, type, number of nodes and edges, and average degree already
  print(nx.info(G))
  print "Density: ", nx.density(G)
  print "Number of connected components: ", nx.number_connected_components(G)

  all_degree_cent = nx.degree_centrality(G)
  all_bet_cent = nx.betweenness_centrality(G)
  all_close_cent = nx.closeness_centrality(G)
  
  oldest = []
  agerank = 0
  
  names = []
  
  print ("Node, Degree Centrality, Betweenness Centrality, Closeness Centrality:")
  for x in range(G.number_of_nodes()):
    names.append(G.nodes(data=True)[x][1]['label'])
    
    if G.nodes(data=True)[x][1]['agerank'] >= agerank:
      if G.nodes(data=True)[x][1]['agerank'] != agerank:
        oldest = [] 
        agerank = G.nodes(data=True)[x][1]['agerank']
        oldest.append(G.nodes(data=True)[x][1])
        
    print G.nodes(data=True)[x][1]['label'],' %.2f' % all_degree_cent.get(x),\
    ' %.2f' % all_bet_cent.get(x),\
    ' %.2f' % all_close_cent.get(x)
  
  print "Oldest facebook(s): ", ', '.join([x['label'] for x in oldest])

  return names
Author: lucasbibiano, Project: devdist-facebook, Lines: 32, Source: devdist.py

Example 11: calculateDensity

def calculateDensity(Graph, community):
	result = []
	for com in community:
		subg = Graph.subgraph(com[1:])
		# print subg.nodes()
		result.append(nx.density(subg))
	return result
Author: shawnzhesun, Project: Collecting-Hub-Modeling-for-Community-Detection, Lines: 7, Source: fb_main.py
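
Example 11 skips the first element of each community (com[1:]), presumably because the first entry is a community label rather than a node. A usage sketch under that assumption, with a hypothetical input format of [community_id, node, node, ...]:

import networkx as nx

# Hypothetical communities: a label followed by member node ids.
communities = [["c0", 1, 2, 3], ["c1", 4, 5, 6, 7]]
G = nx.complete_graph(8)
print(calculateDensity(G, communities))   # [1.0, 1.0] for complete subgraphs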

Example 12: get_single_network_measures

def get_single_network_measures(G, thr):
	f = open(out_prfx + 'single_network_measures.dat', 'a')
	N = nx.number_of_nodes(G)
	L = nx.number_of_edges(G)
	D = nx.density(G)
	cc = nx.average_clustering(G)
	compon = nx.number_connected_components(G)
	Con_sub = nx.connected_component_subgraphs(G)

	values = []
	values_2 =[]

	for node in G:
		values.append(G.degree(node))
	ave_deg = float(sum(values)) / float(N)
	
	f.write("%f\t%d\t%f\t%f\t%f\t%f\t" % (thr, L, D, cc, ave_deg, compon))
	#1. threshold, 2. edges, 3. density 4.clustering coefficient
	#5. average degree, 6. number of connected components
	
	for i in range(len(Con_sub)):
		if nx.number_of_nodes(Con_sub[i])>1:
			values_2.append(nx.average_shortest_path_length(Con_sub[i]))

	if len(values_2)==0:
		f.write("0.\n")
	else:
		f.write("%f\n" % (sum(values_2)/len(values_2)))
	#7. shortest pathway
	f.close()
Author: rudimeier, Project: MSc_Thesis, Lines: 30, Source: sb_randomization.py

Example 13: make_ground_truth

def make_ground_truth():
  edge_map, venue_edge_map, node_map = map_for_nx(CITEMAP_FILE)
  components = []
  for conference in venue_edge_map.keys():
    edges = venue_edge_map[conference]
    graph = nx.Graph()
    edge_ids = [(int(edge.source), int(edge.target)) for edge in edges]
    graph.add_edges_from(edge_ids)
    median_degree = np.median(graph.degree(graph.nodes()).values())
    for component in nx.connected_components(graph):
      if len(component) >= MIN_SIZE:
        community = graph.subgraph(component)
        v_count = len(community.nodes())
        fomd = sum([1 for v in component if len(set(graph.neighbors(v)) & set(component)) > median_degree]) / v_count
        internal_density = nx.density(community)
        components.append((component, fomd, internal_density))
  components = sorted(components, key=lambda x: x[1], reverse=True)[:3000]
  components = sorted(components, key=lambda x: x[2], reverse=True)[:int(0.75 * len(components))]
  f_id = open(TRUTH_ID_FILE, 'wb')
  f_name = open(TRUTH_NAME_FILE, 'wb')
  for component, fomd, internal_density in components:
    component = map(str, component)
    author_names = ", ".join([node_map[node_id].name for node_id in component])
    author_ids = ", ".join(component)
    f_id.write(author_ids + "\n")
    f_name.write(author_names + "\n")
  f_id.close()
  f_name.close()
Author: ai-se, Project: citemap, Lines: 28, Source: truther.py
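
A Python 3 note on Example 13: map(str, component) returns a one-shot iterator, so once the author_names list comprehension consumes it, the second join for author_ids produces an empty string (under Python 2, map returned a list, so the original worked). Materializing the list first avoids this; a sketch keeping the rest of the loop unchanged:

component = list(map(str, component))   # materialize so it can be iterated twice
author_names = ", ".join([node_map[node_id].name for node_id in component])
author_ids = ", ".join(component)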

Example 14: test_fast_versions_properties_threshold_graphs

    def test_fast_versions_properties_threshold_graphs(self):
        cs='ddiiddid'
        G=nxt.threshold_graph(cs)
        assert_equal(nxt.density('ddiiddid'), nx.density(G))
        assert_equal(sorted(nxt.degree_sequence(cs)),
                     sorted(G.degree().values()))

        ts=nxt.triangle_sequence(cs)
        assert_equal(ts, list(nx.triangles(G).values()))
        assert_equal(sum(ts) // 3, nxt.triangles(cs))

        c1=nxt.cluster_sequence(cs)
        c2=list(nx.clustering(G).values())
        assert_almost_equal(sum([abs(c-d) for c,d in zip(c1,c2)]), 0)

        b1=nx.betweenness_centrality(G).values()
        b2=nxt.betweenness_sequence(cs)
        assert_true(sum([abs(c-d) for c,d in zip(b1,b2)]) < 1e-14)

        assert_equal(nxt.eigenvalues(cs), [0, 1, 3, 3, 5, 7, 7, 8])

        # Degree Correlation
        assert_true(abs(nxt.degree_correlation(cs)+0.593038821954) < 1e-12)
        assert_equal(nxt.degree_correlation('diiiddi'), -0.8)
        assert_equal(nxt.degree_correlation('did'), -1.0)
        assert_equal(nxt.degree_correlation('ddd'), 1.0)
        assert_equal(nxt.eigenvalues('dddiii'), [0, 0, 0, 0, 3, 3])
        assert_equal(nxt.eigenvalues('dddiiid'), [0, 1, 1, 1, 4, 4, 7])
Author: NikitaVAP, Project: pycdb, Lines: 28, Source: test_threshold.py

Example 15: compute

 def compute(self, model):
     if self.show_progress is True:
         print("Calculating Number of Hosts")
     self.stats['Number of hosts'] = number_of_nodes(model[0])
     if self.show_progress is True:
         print("Calculating Risk")
     self.stats['Risk'] = model.risk
     if self.show_progress is True:
         print("Calculating Cost")
     self.stats['Cost'] = model.cost
     if self.show_progress is True:
         print("Calculating Mean of Path lengths")
     self.stats['Mean of attack path lengths'] = model[0].mean_path_length()
     if self.show_progress is True:
         print("Calculating Mode of Path lengths")
     self.stats['Mode of attack path lengths'] = model[0].mode_path_length()
     if self.show_progress is True:
         print("Calculating Standard deviation")
     self.stats['Standard Deviation of attack path lengths'] = \
         model[0].stdev_path_length()
     if self.show_progress is True:
         print("Calculating attack path length")
     self.stats['Shortest attack path length'] = model[0].shortest_path_length()
     if self.show_progress is True:
         print("Calculating Return on Attack")
     self.stats['Return on Attack'] = model[0].return_on_attack()
     if self.show_progress is True:
         print("Calculating Density")
     self.stats['Density'] = density(model[0])
     self.stats['Probability of attack success'] = model[0].probability_attack_success()
     self.compute_status = True
Author: whistlebee, Project: harmat, Lines: 31, Source: reports.py


Note: The networkx.density examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective authors, and copyright of the source code remains with the original authors; please consult each project's license before distributing or reusing the code. Do not reproduce without permission.