This article collects typical usage examples of the Python function networkx.eigenvector_centrality_numpy. If you have been wondering what exactly eigenvector_centrality_numpy does, how to call it, or what it looks like in real projects, the curated code samples here should help.
15 code examples of eigenvector_centrality_numpy are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code samples. All snippets assume import networkx as nx, plus whatever other modules (numpy, pandas, etc.) they reference.
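Before the numbered examples, here is a minimal self-contained sketch of the basic call (an illustrative addition, not taken from any of the projects below):

import networkx as nx

# Eigenvector centrality of a 4-node path graph; the interior nodes score
# higher because they are adjacent to better-connected nodes.
G = nx.path_graph(4)
centrality = nx.eigenvector_centrality_numpy(G)
for node, score in sorted(centrality.items(), key=lambda kv: kv[1], reverse=True):
    print("%s: %.4f" % (node, score))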
Example 1: eval_proximity_importance

def eval_proximity_importance(network, graph_xml):
    '''Return the proximity between the importance (eigenvector centrality) score
    distributions of the synthetic network (test) and the real network (goal).'''
    # Reverse the network so that a node's importance is related to the
    # importance of the nodes that point towards it.
    if network.is_directed():
        importance_test = nx.eigenvector_centrality_numpy(network.reverse()).values()
    else:
        importance_test = nx.eigenvector_centrality_numpy(network).values()
    importance_goal = eval(graph_xml.find('importance').get('value'))
    proximity = proximity_distributions_different_size(importance_goal, importance_test)
    return proximity
Example 2: calculate_eigenvector

def calculate_eigenvector(self):
    eigen_attack = []
    G = nx.Graph()
    G.add_nodes_from(range(self.node_num))
    G.add_weighted_edges_from(self.aggregated_list)
    eigen = nx.eigenvector_centrality_numpy(G)
    eigen_sort = sorted(eigen, key=eigen.__getitem__, reverse=True)
    eigen_attack.append(eigen_sort[0])
    # Repeatedly delete the currently most central node, recompute the
    # centralities and record the new most central node.
    for num_of_deletion in range(self.node_num // 2 - 1):
        G.remove_node(eigen_sort[0])
        eigen = nx.eigenvector_centrality_numpy(G)
        eigen_sort = sorted(eigen, key=eigen.__getitem__, reverse=True)
        eigen_attack.append(eigen_sort[0])
    return eigen_attack
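The same attack pattern can be tried standalone; a minimal sketch using the karate club graph bundled with networkx (an illustrative addition, not from the original project):

import networkx as nx

# Repeatedly remove the currently most central node (a targeted attack)
# and record the removal order.
G = nx.karate_club_graph()
removal_order = []
for _ in range(G.number_of_nodes() // 2):
    eigen = nx.eigenvector_centrality_numpy(G)
    target = max(eigen, key=eigen.get)
    removal_order.append(target)
    G.remove_node(target)
print("Removal order:", removal_order)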
Example 3: centrality

def centrality(net):
    values = {}
    close = nx.closeness_centrality(net, normalized=True)
    eigen = nx.eigenvector_centrality_numpy(net)
    page = nx.pagerank(net)
    bet = nx.betweenness_centrality(net, normalized=True)
    flow_c = nx.current_flow_closeness_centrality(net, normalized=True)
    flow_b = nx.current_flow_betweenness_centrality(net, normalized=True)
    load = nx.load_centrality(net, normalized=True)
    com_c = nx.communicability_centrality(net)  # renamed subgraph_centrality in NetworkX 2.0
    com_b = nx.communicability_betweenness_centrality(net, normalized=True)
    degree = dict(net.degree())
    # Write the pairwise correlations between all centrality measures to a CSV file.
    measures = [bet, load, degree, page, flow_b, com_c, com_b, eigen, close, flow_c]
    file3 = open("bl.csv", 'w')
    for xt in measures:
        for yt in measures:
            corr(xt.values(), yt.values(), file3)
        print()
        file3.write("\n")
    file3.close()
    return values
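The corr helper used above is not part of the snippet; a minimal hypothetical stand-in that appends a Pearson correlation coefficient to the open CSV file handle might look like:

import numpy as np

def corr(xs, ys, out):
    # Pearson correlation between two equal-length value sequences,
    # written as one cell of the CSV row currently being built.
    r = np.corrcoef(list(xs), list(ys))[0, 1]
    out.write("%0.3f," % r)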
Example 4: get_sna

def get_sna(path):
    sna_data = {}
    print('Building relations graph')
    G = nx.read_gexf(path)
    print('Nodes:', len(G.nodes()))
    print('Edges:', len(G.edges()))
    print('Calculating centralities:')
    print(' - degrees')
    degrees = dict(G.degree())
    for c in degrees:
        sna_data[c] = {'degree': degrees[c],
                       'betweenness': 0,
                       'closeness': 0,
                       'eigenvector': 0}
    print(' - betweenness')
    betweenness = nx.betweenness_centrality(G)
    for c in betweenness:
        sna_data[c]['betweenness'] = betweenness[c]
    print(' - closeness')
    closeness = nx.closeness_centrality(G)
    for c in closeness:
        sna_data[c]['closeness'] = closeness[c]
    print(' - eigenvector')
    eigenvector = nx.eigenvector_centrality_numpy(G)
    for c in eigenvector:
        sna_data[c]['eigenvector'] = eigenvector[c]
    return sna_data
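A hypothetical call, assuming a GEXF file exported from a tool such as Gephi (the path is illustrative only):

sna_data = get_sna('data/relations.gexf')
for node, metrics in list(sna_data.items())[:5]:
    print(node, metrics)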
Example 5: centrality_scores

def centrality_scores(vote_matrix, season_graph):
    deg = dict(season_graph.degree())
    deg = {k: round(v, 1) for k, v in deg.items()}
    close = nx.closeness_centrality(season_graph)
    close = {k: round(v, 3) for k, v in close.items()}
    btw = nx.betweenness_centrality(season_graph)
    btw = {k: round(v, 3) for k, v in btw.items()}
    eig = nx.eigenvector_centrality_numpy(season_graph)
    eig = {k: round(v, 3) for k, v in eig.items()}
    page = nx.pagerank(season_graph)
    page = {k: round(v, 3) for k, v in page.items()}
    # Add contestant placement (rank): row order in the vote matrix
    order = list(vote_matrix.index)
    place = {name: i + 1 for i, name in enumerate(order)}
    names = list(season_graph.nodes())
    # Build a table with the centralities and convert it to a pandas DataFrame
    table = [[name, deg[name], close[name], btw[name], eig[name], page[name], place[name]]
             for name in names]
    headers = ['name', 'deg', 'close', 'btw', 'eig', 'page', 'place']
    df = pd.DataFrame(table, columns=headers)
    df = df.sort_values(['page', 'eig', 'deg'], ascending=False)
    return df
Example 6: test_P3_unweighted
def test_P3_unweighted(self):
    """Eigenvector centrality: P3"""
    G = nx.path_graph(3)
    b_answer = {0: 0.5, 1: 0.7071, 2: 0.5}
    b = nx.eigenvector_centrality_numpy(G, weight=None)
    for n in sorted(G):
        assert_almost_equal(b[n], b_answer[n], places=4)
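Where do the expected values come from? The unit-norm dominant eigenvector of P3's adjacency matrix is (1, √2, 1)/2 ≈ (0.5, 0.7071, 0.5), which can be checked with a short numpy computation (numpy is an extra assumption here; the test itself only needs networkx):

import numpy as np

# Adjacency matrix of the path graph 0-1-2 and its dominant eigenvector.
A = np.array([[0, 1, 0],
              [1, 0, 1],
              [0, 1, 0]])
w, v = np.linalg.eigh(A)      # eigenvalues in ascending order
dominant = np.abs(v[:, -1])   # unit-norm eigenvector of the largest eigenvalue
print(np.round(dominant, 4))  # [0.5    0.7071 0.5   ]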
Example 7: concepts
def concepts(self, terms):
    paths = self._synset_paths(terms)
    root = _path_root(paths).split('.')[0]
    self.graph = _create_subgraph(paths, root)
    return sorted(nx.eigenvector_centrality_numpy(self.graph, weight='w').items(),
                  key=lambda x: x[1], reverse=True)
Example 8: augmentNodes

def augmentNodes(g):
    r1 = nx.eigenvector_centrality_numpy(g)
    r2 = nx.degree_centrality(g)
    r3 = nx.betweenness_centrality(g)
    # Load centrality: M. E. J. Newman, "Scientific collaboration networks. II.
    # Shortest paths, weighted networks, and centrality", Phys. Rev. E 64, 016132 (2001).
    r5 = nx.load_centrality(g, weight='weight')
    r6 = nx.pagerank(g, alpha=0.85, personalization=None, max_iter=100,
                     tol=1e-08, nstart=None, weight='weight')
    if g.is_directed():
        r8 = nx.in_degree_centrality(g)
        r9 = nx.out_degree_centrality(g)
        # r10 = nx.hits(g, max_iter=100, tol=1e-08, nstart=None)
    else:
        r4 = nx.communicability_centrality(g)  # renamed subgraph_centrality in NetworkX 2.0
        r7 = nx.clustering(g, weight='weight')
    for x in g.nodes():
        g.node[x]['eigenvector_centrality_numpy'] = r1[x]
        g.node[x]['degree_centrality'] = r2[x]
        g.node[x]['betweenness_centrality'] = r3[x]
        g.node[x]['load_centrality'] = r5[x]
        g.node[x]['pagerank'] = r6[x]
        if g.is_directed():
            g.node[x]['in_degree_centrality'] = r8[x]
            g.node[x]['out_degree_centrality'] = r9[x]
            # g.node[x]['hits'] = r10[x]
        else:
            g.node[x]['communicability_centrality'] = r4[x]
            g.node[x]['clustering'] = r7[x]
    return g
Example 9: test_eigenvector_v_katz_random

def test_eigenvector_v_katz_random(self):
    # With the attenuation factor alpha at 1/lambda_max (the spectral radius of
    # the adjacency matrix), Katz centrality is dominated by the principal
    # eigenvector, so it should match eigenvector centrality after normalisation.
    G = nx.gnp_random_graph(10, 0.5, seed=1234)
    l = float(max(eigvals(nx.adjacency_matrix(G).todense())))
    e = nx.eigenvector_centrality_numpy(G)
    k = nx.katz_centrality_numpy(G, 1.0 / l)
    for n in G:
        assert_almost_equal(e[n], k[n])
Example 10: Centrality

def Centrality(Au):
    """Return the betweenness, eigenvector, closeness and degree centrality
    dictionaries for the graph Au."""
    b = nx.betweenness_centrality(Au)
    e = nx.eigenvector_centrality_numpy(Au)
    c = nx.closeness_centrality(Au)
    d = nx.degree_centrality(Au)
    return b, e, c, d
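A hypothetical usage sketch (the karate club graph and the printout are illustrative additions, not part of the original project):

import networkx as nx

Au = nx.karate_club_graph()  # small example graph bundled with networkx
b, e, c, d = Centrality(Au)
top = max(e, key=e.get)      # node with the highest eigenvector centrality
print("Most central node:", top)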
Example 11: randomEigenvectorSampling

def randomEigenvectorSampling(G_, keptNodes):
    '''Roulette-wheel sampling: pick keptNodes distinct nodes with
    probability proportional to their eigenvector centrality.'''
    eigenvector = nx.eigenvector_centrality_numpy(G_)
    nodes = list(G_.nodes())
    sumEigen = sum(eigenvector[node] for node in nodes)
    probs = [eigenvector[node] / sumEigen for node in nodes]
    cumEigenProbs = cumulative_sum(probs)
    cumEigenProbs[-1] = 1.0  # guard against floating-point round-off
    picked = []
    num = 0
    while num < keptNodes:
        number = random.random()
        for i in range(len(nodes)):
            if number <= cumEigenProbs[i]:
                if nodes[i] not in picked:
                    print("Adding node " + str(nodes[i]))
                    picked.append(nodes[i])
                    num = num + 1
                # otherwise: collision with an already-picked node, draw again
                break
    return picked
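The cumulative_sum helper is not shown in the source; a minimal stand-in consistent with how it is used above (hypothetical, not the original implementation) could be:

def cumulative_sum(values):
    # Running totals, e.g. cumulative_sum([0.2, 0.3, 0.5]) -> [0.2, 0.5, 1.0]
    totals = []
    running = 0.0
    for v in values:
        running += v
        totals.append(running)
    return totals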
Example 12: analyze_graph

def analyze_graph(G):
    # Centralities and node metrics
    out_degrees = dict(G.out_degree())
    in_degrees = dict(G.in_degree())
    betweenness = nx.betweenness_centrality(G)
    eigenvector = nx.eigenvector_centrality_numpy(G)
    closeness = nx.closeness_centrality(G)
    pagerank = nx.pagerank(G)
    avg_neighbour_degree = nx.average_neighbor_degree(G)
    redundancy = bipartite.node_redundancy(G)
    load = nx.load_centrality(G)
    hubs, authorities = nx.hits(G)  # nx.hits returns a (hubs, authorities) pair
    vitality = nx.closeness_vitality(G)
    for name in G.nodes():
        G.node[name]['out_degree'] = out_degrees[name]
        G.node[name]['in_degree'] = in_degrees[name]
        G.node[name]['betweenness'] = betweenness[name]
        G.node[name]['eigenvector'] = eigenvector[name]
        G.node[name]['closeness'] = closeness[name]
        G.node[name]['pagerank'] = pagerank[name]
        G.node[name]['avg-neigh-degree'] = avg_neighbour_degree[name]
        G.node[name]['redundancy'] = redundancy[name]
        G.node[name]['load'] = load[name]
        G.node[name]['hits'] = authorities[name]  # authority score; hubs[name] is also available
        G.node[name]['vitality'] = vitality[name]
    # Communities (community is the python-louvain package)
    partitions = community.best_partition(G)
    for member, c in partitions.items():
        G.node[member]['community'] = c
    return G
Example 13: set_evaluation_datas

def set_evaluation_datas(graph, graph_xml, **kwargs):
    '''If no precise evaluation method is given, we compute every possible measure (wrong!!).'''
    evaluation_method = kwargs.get('evaluation_method', '')

    def add_sub(name, value):
        sub = xml.SubElement(graph_xml, name)
        sub.attrib['value'] = str(value)

    # The first relevant pieces of information are the numbers of nodes and edges.
    # They should depend on the method used to develop the network, but for now
    # they are necessary and always stored.
    add_sub('number_of_nodes', nx.number_of_nodes(graph))
    add_sub('number_of_edges', nx.number_of_edges(graph))
    nodes = nx.number_of_nodes(graph)
    # Should be replaced by a getattr(graph, variable) loop
    if graph.is_directed():
        if 'vertices' in evaluation_method:
            add_sub('vertices', nx.number_of_edges(graph) / (nodes * (nodes - 1)))
        if 'degrees' in evaluation_method:
            add_sub('degree_in', graph.in_degree().values())
            add_sub('degree_out', graph.out_degree().values())
        if 'importance' in evaluation_method:
            # Reverse so a node's importance reflects the nodes pointing towards it
            add_sub('importance', nx.eigenvector_centrality_numpy(graph.reverse()).values())
        if 'clustering' in evaluation_method or 'heterogeneity' in evaluation_method:
            add_sub('clustering', nx.clustering(graph.to_undirected()).values())
        if 'community_structure' in evaluation_method:
            add_sub('degree', graph.degree().values())
    else:
        if 'vertices' in evaluation_method:
            add_sub('vertices', 2 * nx.number_of_edges(graph) / (nodes * (nodes - 1)))
        if 'communities' in evaluation_method:
            add_sub('communities', get_communities(graph))
        if 'degrees' in evaluation_method or 'community_structure' in evaluation_method:
            add_sub('degrees', graph.degree().values())
        if 'clustering' in evaluation_method or 'heterogeneity' in evaluation_method:
            add_sub('clustering', nx.clustering(graph).values())
        if 'importance' in evaluation_method:
            add_sub('importance', nx.eigenvector_centrality_numpy(graph).values())
        if 'distances' in evaluation_method:
            add_sub('distances', list(it.chain.from_iterable(
                dict_of_length.values()
                for dict_of_length in nx.shortest_path_length(graph).values())))
Example 14: betweenness_centrality

def betweenness_centrality(graph):
    # Despite its name, this variant ranks nodes by eigenvector centrality;
    # the alternatives are kept commented out.
    #centrality = nx.betweenness_centrality(graph, normalized=True)
    #centrality = nx.closeness_centrality(graph)
    centrality = nx.eigenvector_centrality_numpy(graph)
    # NetworkX 1.x argument order; 2.0 changed it to (G, values, name)
    nx.set_node_attributes(graph, 'centrality', centrality)
    degrees = sorted(centrality.items(), key=itemgetter(1), reverse=True)
    for idx, item in enumerate(degrees[0:10]):
        item = (idx + 1,) + item
        print("%i. %s: %0.3f" % item)
Example 15: analyze_graph

def analyze_graph(G):
    betweenness = nx.betweenness_centrality(G)
    eigenvector = nx.eigenvector_centrality_numpy(G)
    closeness = nx.closeness_centrality(G)
    pagerank = nx.pagerank(G)
    degrees = dict(G.degree())
    for name in G.nodes():
        G.node[name]['betweenness'] = betweenness[name]
        G.node[name]['eigenvector'] = eigenvector[name]
        G.node[name]['closeness'] = closeness[name]
        G.node[name]['pagerank'] = pagerank[name]
        G.node[name]['degree'] = degrees[name]
    # connected_component_subgraphs was removed in NetworkX 2.4; use
    # (G.subgraph(c) for c in nx.connected_components(G)) on newer versions.
    components = nx.connected_component_subgraphs(G)
    i = 0
    for cc in components:
        # Set the connected component for each group
        for node in cc:
            G.node[node]['component'] = i
        i += 1
        # Recompute the centralities within each connected component
        cent_betweenness = nx.betweenness_centrality(cc)
        cent_eigenvector = nx.eigenvector_centrality_numpy(cc)
        cent_closeness = nx.closeness_centrality(cc)
        for name in cc.nodes():
            G.node[name]['cc-betweenness'] = cent_betweenness[name]
            G.node[name]['cc-eigenvector'] = cent_eigenvector[name]
            G.node[name]['cc-closeness'] = cent_closeness[name]
    # Assign each person to their biggest clique
    cliques = list(nx.find_cliques(G))
    j = 0
    for clique in cliques:
        clique_size = len(clique)
        for member in clique:
            # .get avoids a KeyError the first time a member is seen
            if G.node[member].get('clique-size', 0) < clique_size:
                G.node[member]['clique-size'] = clique_size
                G.node[member]['clique'] = j
        j += 1
    return G