This page collects typical usage examples of the degree_centrality function from Python's networkx package. If you have been wondering what exactly degree_centrality does, how to call it, or where to find worked examples, the hand-picked samples below should help.
15 code examples of degree_centrality are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
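Before diving into the examples, here is a minimal sketch of the function itself: nx.degree_centrality(G) returns a dict mapping each node to its degree divided by n - 1, the largest degree possible in a graph with n nodes.

import networkx as nx

G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4)])
centrality = nx.degree_centrality(G)
# node 1 touches all three other nodes, so its score is 3 / 3 = 1.0
print(centrality)  # {1: 1.0, 2: 0.333..., 3: 0.666..., 4: 0.666...}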
Example 1: degree_removal
import operator
import networkx as nx
# fd is this project's helper module providing fractal_dimension()
def degree_removal(g, recalculate=False):
    """
    Performs robustness analysis based on degree centrality on the graph g,
    using the sequential (recalculate=True) or simultaneous
    (recalculate=False) removal strategy. Returns a list with the fraction
    of nodes removed and a list with the corresponding fractal dimensions
    of the network.
    """
m = nx.degree_centrality(g)
l = sorted(m.items(), key=operator.itemgetter(1), reverse=True)
x = []
y = []
dimension = fd.fractal_dimension(g, iterations=100, debug=False)
n = len(g.nodes())
x.append(0)
y.append(dimension)
for i in range(1, n-1):
g.remove_node(l.pop(0)[0])
if recalculate:
m = nx.degree_centrality(g)
l = sorted(m.items(), key=operator.itemgetter(1),
reverse=True)
dimension = fd.fractal_dimension(g, iterations=100, debug=False)
x.append(i * 1. / n)
y.append(dimension)
return x, y
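A usage sketch for this example, assuming the fractal-dimension helper module fd that the function depends on is importable; the Barabási–Albert graph is just a stand-in input:

import networkx as nx

g = nx.barabasi_albert_graph(200, 3)
x, y = degree_removal(g, recalculate=True)
# x[i] is the fraction of nodes removed, y[i] the fractal dimension after removal
# note: degree_removal removes nodes from g in place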
Example 2: degree_fracture
import operator
import networkx
def degree_fracture(infile, outfile, fraction, recalculate=False):
    """
    Removes the given fraction of nodes from the network in infile in
    decreasing order of degree centrality (with or without recalculation of
    centrality values after each removal) and saves the network to outfile.
    """
    g = networkx.read_gml(infile)
    m = networkx.degree_centrality(g)
    l = sorted(m.items(), key=operator.itemgetter(1), reverse=True)
n = len(g.nodes())
    for i in range(1, n - 1):
        g.remove_node(l.pop(0)[0])
        if recalculate:
            m = networkx.degree_centrality(g)
            l = sorted(m.items(), key=operator.itemgetter(1),
                       reverse=True)
        if i * 1. / n >= fraction:
            break
components = networkx.connected_components(g)
component_id = 1
for component in components:
for node in component:
            g.nodes[node]["component"] = component_id
component_id += 1
networkx.write_gml(g, outfile)
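A hedged usage sketch; net.gml and fractured.gml are placeholder file names:

# remove the 20% most-central nodes, recomputing centrality after each removal,
# and write the fractured network (with component labels) to fractured.gml
degree_fracture("net.gml", "fractured.gml", 0.2, recalculate=True)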
Example 3: degree
import operator
import networkx
def degree(infile, recalculate=False):
    """
    Performs robustness analysis based on degree centrality on the network
    specified by infile, using the sequential (recalculate=True) or
    simultaneous (recalculate=False) approach. Returns a list with the
    fraction of nodes removed, a list with the corresponding relative sizes
    of the largest component, and the overall vulnerability of the network.
    """
    g = networkx.read_gml(infile)
    m = networkx.degree_centrality(g)
    l = sorted(m.items(), key=operator.itemgetter(1), reverse=True)
    x = []
    y = []
    largest_component = max(networkx.connected_components(g), key=len)
    n = len(g.nodes())
    x.append(0)
    y.append(len(largest_component) * 1. / n)
R = 0.0
    for i in range(1, n - 1):
        g.remove_node(l.pop(0)[0])
        if recalculate:
            m = networkx.degree_centrality(g)
            l = sorted(m.items(), key=operator.itemgetter(1),
                       reverse=True)
        largest_component = max(networkx.connected_components(g), key=len)
x.append(i * 1. / n)
R += len(largest_component) * 1. / n
y.append(len(largest_component) * 1. / n)
return x, y, 0.5 - R / n
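A usage sketch (sample.gml is a placeholder path). The third return value is 0.5 - R/n, where R/n approximates the area under the curve of relative largest-component size versus fraction removed, so smaller values indicate a more robust network:

x, y, vulnerability = degree("sample.gml", recalculate=False)
print(vulnerability)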
Example 4: degree_component
import collections
import networkx as nx
def degree_component(seed_num, graph=None, graph_json_filename=None, graph_json_str=None):
    if graph_json_filename is None and graph_json_str is None and graph is None:
        return []
    if graph is not None:
        G = graph
    elif graph_json_str is None:
        # util is this project's graph-loading helper
        G = util.load_graph(graph_json_filename=graph_json_filename)
    else:
        G = util.load_graph(graph_json_str=graph_json_str)
    components = list(nx.connected_components(G))
    # keep only components that hold more than 10% of the nodes
    components = [c for c in components if len(c) > 0.1 * len(G)]
    total_size = sum(len(c) for c in components)
    total_nodes = 0
    rtn = []
    for comp in components[1:]:
        num_nodes = int(float(len(comp)) / total_size * seed_num)
        component = G.subgraph(list(comp))
        clse_cent = nx.degree_centrality(component)
        collector = collections.Counter(clse_cent)
        clse_cent = collector.most_common(num_nodes)
        rtn += [node for node, _ in clse_cent]
        total_nodes += num_nodes
    # the first listed component receives the remaining seed budget
    num_nodes = seed_num - total_nodes
    component = G.subgraph(list(components[0]))
    clse_cent = nx.degree_centrality(component)
    collector = collections.Counter(clse_cent)
    clse_cent = collector.most_common(num_nodes)
    rtn += [node for node, _ in clse_cent]
    return rtn
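A minimal usage sketch with an in-memory graph, so none of the JSON-loading branches (which rely on the project's util helper) are exercised:

import networkx as nx

G = nx.karate_club_graph()
seeds = degree_component(5, graph=G)
print(seeds)  # the 5 highest-degree-centrality nodes of the single large component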
Example 5: sna_calculations
import csv
import os
import networkx as nx
import numpy
def sna_calculations(g, play_file):
    """
    :param g: a NetworkX graph object
    :type g: object
    :param play_file: the location of a play in .txt format
    :type play_file: string
    :return: a dictionary containing various network-related figures
    :rtype: dict
    :note: also writes results/<file_name>-snaCalculations.csv and results/allCharacters.csv
    """
file_name = os.path.splitext(os.path.basename(play_file))[0]
sna_calculations_list = dict()
sna_calculations_list['playType'] = file_name[0]
sna_calculations_list['avDegreeCentrality'] = numpy.mean(numpy.fromiter(iter(nx.degree_centrality(g).values()),
dtype=float))
sna_calculations_list['avDegreeCentralityStd'] = numpy.std(
numpy.fromiter(iter(nx.degree_centrality(g).values()), dtype=float))
sna_calculations_list['avInDegreeCentrality'] = numpy.mean(
numpy.fromiter(iter(nx.in_degree_centrality(g).values()), dtype=float))
sna_calculations_list['avOutDegreeCentrality'] = numpy.mean(
numpy.fromiter(iter(nx.out_degree_centrality(g).values()), dtype=float))
    try:
        sna_calculations_list['avShortestPathLength'] = nx.average_shortest_path_length(g)
    except nx.NetworkXError:
        sna_calculations_list['avShortestPathLength'] = 'not connected'
sna_calculations_list['density'] = nx.density(g)
sna_calculations_list['avEigenvectorCentrality'] = numpy.mean(
numpy.fromiter(iter(nx.eigenvector_centrality(g).values()), dtype=float))
sna_calculations_list['avBetweennessCentrality'] = numpy.mean(
numpy.fromiter(iter(nx.betweenness_centrality(g).values()), dtype=float))
sna_calculations_list['DegreeCentrality'] = nx.degree_centrality(g)
sna_calculations_list['EigenvectorCentrality'] = nx.eigenvector_centrality(g)
sna_calculations_list['BetweennessCentrality'] = nx.betweenness_centrality(g)
    # per-play snaCalculations.csv file
    with open('results/' + file_name + '-snaCalculations.csv', 'w', newline='') as csv_file:
        sna_calc_file = csv.writer(csv_file, quoting=csv.QUOTE_ALL, delimiter=';')
        for key, value in sna_calculations_list.items():
            sna_calc_file.writerow([key, value])
# all_characters.csv file
if not os.path.isfile('results/allCharacters.csv'):
with open('results/allCharacters.csv', 'w') as f:
f.write(
'Name;PlayType;play_file;DegreeCentrality;EigenvectorCentrality;BetweennessCentrality;speech_amount;AverageUtteranceLength\n')
    character_speech_amount = speech_amount(play_file)  # project helper returning per-character speech figures
    with open('results/allCharacters.csv', 'a') as all_characters:
        for character in sna_calculations_list['DegreeCentrality']:
            all_characters.write(character + ';' + str(sna_calculations_list['playType']) + ';' + file_name + ';' + str(
                sna_calculations_list['DegreeCentrality'][character]) + ';' + str(
                sna_calculations_list['EigenvectorCentrality'][character]) + ';' + str(
                sna_calculations_list['BetweennessCentrality'][character]) + ';' + str(
                character_speech_amount[0][character]) + ';' + str(character_speech_amount[1][character]) + '\n')
    return sna_calculations_list
Example 6: __init__
def __init__(self, time, voteomat):
self.foldername = voteomat.network_func_name + voteomat.distribution_func_name
self.foldertime = time
self.path = "Statistics//"+self.foldername+"//"
self.path += g_candidates_affecting_nodes + "=" + str(voteomat.candidates_affecting) + "_"
self.path += g_candidates_affected_by_median + "=" + str(voteomat.candidates_affected) + "_"
self.path += g_neighbours_affecting_each_other + "=" + str(voteomat.affecting_neighbours) + "_"
self.path += g_counterforce_affecting_candidates + "=" + str(voteomat.counter_force_affecting) + "_"
self.path += "counterforce_left="+str(voteomat.counter_force_left)+"_"+"counterforce_right="+str(voteomat.counter_force_right)+ "_" + time
self.make_sure_path_exists(self.path)
self.file = open(self.path + "//statistic.csv", 'w')
self.statistic = {}
self.statistic["networkfunc"] = voteomat.network_func_name
self.statistic["distributionfunc"] = voteomat.distribution_func_name
self.statistic["acceptance"] = voteomat.acceptance
median, avg, std = voteomat.get_statistic()
self.statistic["median"] = []
self.statistic["median"].append(median)
self.statistic["avg"] = []
self.statistic["avg"].append(avg)
self.statistic["std"] = []
self.statistic["std"].append(std)
self.statistic["node_with_highest_degree_centrality"] = []
self.max_degree_node = max( nx.degree_centrality(voteomat.get_network()).items(),key = lambda x: x[1])[0]
self.statistic["node_with_highest_degree_centrality"].append(voteomat.get_network().nodes(data = True)[self.max_degree_node][1]["orientation"])
self.statistic["node_with_minimum_degree_centrality"] = []
self.min_degree_node = min(nx.degree_centrality(voteomat.get_network()).items(), key = lambda x: x[1])[0]
self.statistic["node_with_minimum_degree_centrality"].append(voteomat.get_network().nodes(data = True)[self.min_degree_node][1]["orientation"])
self.statistic["node_with_highest_closeness_centrality"] = []
self.max_closeness_node = max( nx.closeness_centrality(voteomat.get_network()).items(),key = lambda x: x[1])[0]
self.statistic["node_with_highest_closeness_centrality"].append(voteomat.get_network().nodes(data = True)[self.max_closeness_node][1]["orientation"])
self.statistic["node_with_highest_betweenness_centrality"] = []
self.max_betweenness_node = max(nx.betweenness_centrality(voteomat.get_network()).items() ,key = lambda x: x[1])[0]
self.statistic["node_with_highest_betweenness_centrality"].append(voteomat.get_network().nodes(data = True)[self.max_betweenness_node][1]["orientation"])
try:
self.statistic["node_with_highest_eigenvector_centrality"] = []
self.max_eigenvector_node = max( nx.eigenvector_centrality(voteomat.get_network(), max_iter = 1000).items(),key = lambda x: x[1])[0]
self.statistic["node_with_highest_eigenvector_centrality"].append(voteomat.get_network().nodes(data = True)[self.max_eigenvector_node][1]["orientation"])
except nx.NetworkXError:
print "Eigenvector centrality not possible."
freeman = self.freeman_centrality([x[1] for x in nx.degree_centrality(voteomat.get_network()).items()], max( nx.degree_centrality(voteomat.get_network()).items(),key = lambda x: x[1])[1])
self.statistic["freeman_centrality"] = round(freeman,2)
self.statistic["affecting_neighbours"] = voteomat.affecting_neighbours
self.statistic["affecting_candidates"] = voteomat.candidates_affecting
self.statistic["affected_canddiates"] = voteomat.candidates_affected
self.statistic["affecting_counter_force"] = voteomat.counter_force_affecting
self.statistic["affecting_counter_force_left"] = voteomat.counter_force_left
self.statistic["affecting_counter_force_right"] = voteomat.counter_force_right
self.statistic["candidates"] = []
for candidate in voteomat.candidates:
self.statistic["candidates"].append(candidate.to_save())
self.statistic["network"] = voteomat.get_network().nodes(data=True);
Example 7: degree_apl
import operator
import networkx
def degree_apl(g, recalculate=False):
    """
    Performs robustness analysis based on degree centrality on the graph g,
    using the sequential (recalculate=True) or simultaneous
    (recalculate=False) removal strategy. Returns a list with the fraction
    of nodes removed, a list with the corresponding average shortest path
    lengths (relative to the initial value), and an overall vulnerability
    score for the network.
    """
m = networkx.degree_centrality(g)
l = sorted(m.items(), key=operator.itemgetter(1), reverse=True)
x = []
y = []
average_path_length = 0.0
number_of_components = 0
n = len(g.nodes())
    # build component subgraphs explicitly (connected_component_subgraphs
    # is gone from modern NetworkX); skip single-node components
    for sg in (g.subgraph(c) for c in networkx.connected_components(g)):
        if len(sg) > 1:
            average_path_length += networkx.average_shortest_path_length(sg)
            number_of_components += 1
    average_path_length = average_path_length / number_of_components
initial_apl = average_path_length
x.append(0)
y.append(average_path_length * 1. / initial_apl)
r = 0.0
for i in range(1, n - 2):
g.remove_node(l.pop(0)[0])
if recalculate:
m = networkx.degree_centrality(g)
l = sorted(m.items(), key=operator.itemgetter(1),
reverse=True)
average_path_length = 0.0
number_of_components = 0
        for sg in (g.subgraph(c) for c in networkx.connected_components(g)):
            if len(sg.nodes()) > 1:
                average_path_length += networkx.average_shortest_path_length(sg)
                number_of_components += 1
        average_path_length = average_path_length / number_of_components
        x.append(i * 1. / n)  # fraction of nodes removed
        r += average_path_length * 1. / initial_apl
        y.append(average_path_length * 1. / initial_apl)
    return x, y, r / n
Example 8: labels
import networkx as nx
import numpy as np
def labels(G, threshold=95):
    '''Return a dictionary of labels for nodes whose degree centrality reaches the given percentile.'''
    labels = {}
    centrality = nx.degree_centrality(G)
    # create the cutoff from the given percentile of the centrality values
    cen_cutoff = np.percentile(list(centrality.values()), threshold)
    # keep a node's label if its centrality passes the threshold
    for key, value in centrality.items():
        if value >= cen_cutoff:
            labels[key] = key
    return labels
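A usage sketch: draw the network but label only the most central nodes (here, the top 10%):

import matplotlib.pyplot as plt
import networkx as nx

G = nx.karate_club_graph()
pos = nx.spring_layout(G)
nx.draw(G, pos, node_size=50)
nx.draw_networkx_labels(G, pos, labels=labels(G, 90))
plt.show()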
Example 9: __init__
def __init__(self):
    self.g = nx.barabasi_albert_graph(random.randint(100, 1000), random.randint(2, 7))
    self.degree_centrality = nx.degree_centrality(self.g)
    self.deg = nx.degree_centrality(self.g)
    self.sorted_deg = sorted(self.deg.items(), key=operator.itemgetter(1))
    self.nodes = len(self.g.nodes())
    self.edges = len(self.g.edges())
    self.degree_rank()
    self.degree_dict = dict(self.g.degree())  # dict() works across NetworkX versions
    self.avg_deg = sum(self.degree_dict.values()) / float(self.nodes)
    # print(self.rank)
    # print(self.degree_dict)
    self.form_dataset()
Example 10: degree_centrality
def degree_centrality(self, withme=True, node=None, average=False):
    if node is None:
        # whole-network centralities, with or without the ego node
        my_dict = nx.degree_centrality(self.mynet if withme else self.no_ego_net)
        new = {}
        new2 = {}
        for i in my_dict:
            new[self.id_to_name(i)] = my_dict[i]
            new2[i] = my_dict[i]
        if average:
            print("The average is " + str(round(sum(new.values()) / float(len(new.values())), 4)))
        else:
            for i, j in new.items():
                print(i, round(j, 4))
            return new2
    else:
        if withme:
            my_dict = nx.degree_centrality(self.mynet)
            try:
                print("The coefficient for node " + str(node) + " is " + str(round(my_dict[node], 4)))
            except KeyError:
                try:
                    return my_dict[self.name_to_id(node)]
                except KeyError:
                    print("Invalid node name")
        else:
            my_dict = nx.degree_centrality(self.no_ego_net)
            try:
                print("The coefficient for node " + str(node) + " is " + str(round(my_dict[node], 4)))
            except KeyError:
                try:
                    print("The coefficient for node " + str(node) + " is " + str(round(my_dict[self.name_to_id(node)], 4)))
                except KeyError:
                    print("Invalid node name")
Example 11: centralities
def centralities(self):
    '''
    Get info on centralities of data
    Params:
        None
    Returns:
        dictionary of centrality metrics with keys (centralities supported):
            degree - degree centrality
            betweenness - betweenness centrality
            eigenvector - eigenvector centrality
            hub - hub scores - not implemented
            authority - authority scores - not implemented
            katz - Katz centrality
            pagerank - PageRank centrality - not implemented
    '''
    output = {}
    output['degree'] = nx.degree_centrality(self.G)
    output['betweenness'] = nx.betweenness_centrality(self.G)
    try:
        output['eigenvector'] = nx.eigenvector_centrality(self.G)
        output['katz'] = nx.katz_centrality(self.G)
    except nx.NetworkXException:
        output['eigenvector'] = 'empty or exception'
        output['katz'] = 'empty or exception'
    # output['hub'] = 'Not implemented'
    # output['authority'] = 'Not implemented'
    # output['pagerank'] = 'Not implemented'
    return output
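The try/except above guards against the power iteration failing to converge on hard graphs. If that happens, raising max_iter or switching to the dense NumPy solver usually helps; a sketch:

import networkx as nx

G = nx.karate_club_graph()
ev = nx.eigenvector_centrality(G, max_iter=1000)
# the dense linear-algebra variant avoids the iteration limit entirely
ev_numpy = nx.eigenvector_centrality_numpy(G)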
Example 12: run_main
import networkx as nx
import numpy as np
def run_main(file):
    NumberOfStations = 465
    print(file)
    adjmatrix = np.loadtxt(file, delimiter=' ', dtype=np.dtype('int32'))
    # for i in range(0, NumberOfStations):
    #     if adjmatrix[i, i] == 1:
    #         print("position: [" + str(i) + "," + str(i) + "]")
    g = nx.from_numpy_array(adjmatrix, create_using=nx.MultiGraph)
    degree = g.degree()
    density = nx.density(g)
    degree_centrality = nx.degree_centrality(g)
    closeness_centrality = nx.closeness_centrality(g)
    betweenness_centrality = nx.betweenness_centrality(g)
    print(degree)
    print(density)
    print(degree_centrality)
    print(closeness_centrality)
    print(betweenness_centrality)
    # nx.draw(g)
    # np.savetxt(OutputFile, Matrix, delimiter=' ', newline='\n', fmt='%i')
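A hedged sketch of preparing an input file for run_main: a symmetric 0/1 adjacency matrix saved as space-delimited integers (stations.txt is a placeholder name):

import numpy as np

adj = np.array([[0, 1, 1],
                [1, 0, 0],
                [1, 0, 0]])
np.savetxt("stations.txt", adj, fmt="%i", delimiter=" ")
run_main("stations.txt")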
Example 13: calculate_network_measures
import networkx as nx
def calculate_network_measures(net, analyser):
    deg = nx.degree_centrality(net)
    if net.is_multigraph():
        # analyser is the source project's helper for flattening multigraphs
        net = analyser.flatGraph(net)
    if nx.is_directed(net):
        tmp_net = net.to_undirected()
        clust = nx.clustering(tmp_net)
        paths = dict(nx.shortest_path(tmp_net, source=None, target=None, weight=None))
    else:
        clust = nx.clustering(net)
        paths = dict(nx.shortest_path(net, source=None, target=None, weight=None))
    # record the node count of every shortest path, skipping each node's
    # trivial path to itself
    all_lengths = []
    for source, targets in paths.items():
        for target, path in targets.items():
            if target != source:
                all_lengths.append(len(path))
    max_value = max(all_lengths)
    # all_lengths = [x / float(max_value) for x in all_lengths]
    return list(deg.values()), list(clust.values()), all_lengths
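A usage sketch with a plain undirected graph; the analyser argument can be anything here because it is only consulted for multigraphs:

import networkx as nx

G = nx.karate_club_graph()
deg, clust, lengths = calculate_network_measures(G, None)
print(max(lengths))  # node count of the longest shortest path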
Example 14: mean_degree_centrality
import logging
import networkx
def mean_degree_centrality(pg, normalize=0):
    """
    mean_degree_centrality(pg) calculates mean in- and out-degree
    centralities for directed graphs and simple degree centralities
    for undirected graphs. If the normalize flag is set, each node's
    centralities are weighted by the number of edges in the (di)graph.
    """
    centrality = {}
    try:
        if networkx.is_directed(pg):
cent_sum_in, cent_sum_out = 0, 0
for n in pg.nodes():
n_cent_in = pg.in_degree(n)
n_cent_out = pg.out_degree(n)
if normalize:
n_cent_in = float(n_cent_in) / float(pg.size()-1)
n_cent_out = float(n_cent_out) / float(pg.size()-1)
cent_sum_in = cent_sum_in + n_cent_in
cent_sum_out = cent_sum_out + n_cent_out
centrality['in'] = cent_sum_in / float(pg.order())
centrality['out'] = cent_sum_out / float(pg.order())
else:
cent_sum = 0
            for n in pg.nodes():
                if not normalize:
                    n_cent = pg.degree(n)
                else:
                    # degree_centrality returns a dict keyed by node
                    n_cent = networkx.degree_centrality(pg)[n]
                cent_sum = cent_sum + n_cent
centrality['all'] = cent_sum / float(pg.order())
    except Exception:
        logging.error('pyp_network.mean_degree_centrality() failed!')
return centrality
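A usage sketch on a small directed graph; with four nodes and four edges, both mean centralities come out to 1.0:

import networkx

pg = networkx.DiGraph([(1, 2), (1, 3), (2, 4), (3, 4)])
print(mean_degree_centrality(pg))  # {'in': 1.0, 'out': 1.0}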
Example 15: allocate
import itertools
import networkx as nx
# log and ank_utils are module-level helpers in the source project
def allocate(G_phy, G_bgp):
    log.info("Allocating route reflectors")
    graph_phy = G_phy._graph
    for asn, devices in G_phy.groupby("asn").items():
        routers = [d for d in devices if d.is_router]
        router_ids = ank_utils.unwrap_nodes(routers)
        subgraph_phy = graph_phy.subgraph(router_ids)
        if len(subgraph_phy) == 1:
            continue  # single node in graph, no ibgp
        deg_cen = nx.degree_centrality(subgraph_phy)
        ordered = sorted(subgraph_phy.nodes(), key=lambda x: deg_cen[x], reverse=True)
        rr_count = len(subgraph_phy) // 5  # take the top 20% to be route reflectors
        route_reflectors = ordered[:rr_count]  # most connected 20%
        rr_clients = ordered[rr_count:]  # the other routers
        route_reflectors = list(ank_utils.wrap_nodes(G_bgp, route_reflectors))
        rr_clients = list(ank_utils.wrap_nodes(G_bgp, rr_clients))
        G_bgp.update(route_reflectors, route_reflector=True)  # mark as route reflector
        # rr <-> rr
        over_links = [(rr1, rr2) for rr1 in route_reflectors for rr2 in route_reflectors if rr1 != rr2]
        G_bgp.add_edges_from(over_links, type='ibgp', direction='over')
        # client -> rr
        up_links = [(client, rr) for (client, rr) in itertools.product(rr_clients, route_reflectors)]
        G_bgp.add_edges_from(up_links, type='ibgp', direction='up')
        # rr -> client
        down_links = [(rr, client) for (client, rr) in up_links]  # opposite of up
        G_bgp.add_edges_from(down_links, type='ibgp', direction='down')
    log.debug("iBGP done")