This article collects typical usage examples of the networkx.transitivity function in Python. If you have been wondering what the Python transitivity function does, how to call it, or what it looks like in real code, the hand-picked examples below should help.

The following section shows 15 code examples of the transitivity function, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
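Before the project examples, here is a minimal, self-contained sketch of the basic call (the built-in karate-club graph is used only as an illustrative stand-in and is not taken from any of the examples below):

import networkx as nx

# transitivity(G) = 3 * (number of triangles) / (number of connected triples),
# i.e. the fraction of possible triangles actually present in G
G = nx.karate_club_graph()        # small built-in example graph
print(nx.transitivity(G))         # a float between 0.0 and 1.0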
Example 1: smallWorldness
def smallWorldness(graph):
    return_values = []
    # Small-worldness criteria
    n = len(nx.nodes(graph))
    e = len(nx.edges(graph))
    # probability of edges: (number of edges in real graph)/possible edges
    p = e/float((n*(n-1)/2.0))
    ##
    # generate random graph using probability
    rand_graph = nx.fast_gnp_random_graph(n, p, seed=1)
    # calculate values for real graph and random graph
    Creal = nx.transitivity(graph)  # float
    Crand = nx.transitivity(rand_graph)  # float
    Lreal = 0
    Lrand = 0
    real_sum = 0
    rand_sum = 0
    splReal = shortest_path_lengths(graph)
    splRand = shortest_path_lengths(rand_graph)
    for i in range(len(splReal)):
        real_sum += splReal[i]
        rand_sum += splRand[i]
    Lreal = real_sum / len(splReal)
    Lrand = rand_sum / len(splRand)
    # compare with actual graph
    if (Lreal != 0 and Lrand != 0 and Crand != 0):
        S = (Creal)/(Crand) / (float(Lreal)/(Lrand))
    else:
        S = 0
    return_values.append(S)
    return return_values
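Example 1 compares the real graph against a density-matched random graph: the small-world coefficient is S = (C_real/C_rand) / (L_real/L_rand), where C is the transitivity and L the average shortest path length. A compact sketch of the same comparison using only built-in NetworkX calls (assuming both graphs are connected; shortest_path_lengths above is a project-specific helper) might look like:

import networkx as nx

def small_world_coefficient(G, seed=1):
    # density-matched Erdos-Renyi reference graph
    n, e = G.number_of_nodes(), G.number_of_edges()
    p = e / float(n * (n - 1) / 2.0)
    R = nx.fast_gnp_random_graph(n, p, seed=seed)
    C, C_rand = nx.transitivity(G), nx.transitivity(R)
    # assumes both graphs are connected; otherwise restrict to the giant component
    L = nx.average_shortest_path_length(G)
    L_rand = nx.average_shortest_path_length(R)
    return (C / C_rand) / (L / L_rand) if C_rand and L_rand else 0.0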
Example 2: draw_graph
def draw_graph(label_flag=True, remove_isolated=True, different_size=True, iso_level=10, node_size=40):
    G = build_graph(fb.get_friends_network())
    betweenness = nx.betweenness_centrality(G)
    degree = nx.degree_centrality(G)
    degree_num = [degree[v] for v in G]
    maxdegree = max(degree_num); mindegree = min(degree_num)
    print maxdegree, mindegree
    clustering = nx.clustering(G)
    print nx.transitivity(G)
    # Decide whether to remove isolated components from the graph
    if remove_isolated is True:
        H = nx.empty_graph()
        for SG in nx.connected_component_subgraphs(G):
            if SG.number_of_nodes() > iso_level:
                H = nx.union(SG, H)
        G = H
    # Adjust graph for better presentation
    if different_size is True:
        L = nx.degree(G)
        G.dot_size = {}
        for k, v in L.items():
            G.dot_size[k] = v
        #node_size = [betweenness[v] * 1000 for v in G]
        node_size = [G.dot_size[v] * 10 for v in G]
    node_color = [((degree[v]-mindegree))/(maxdegree-mindegree) for v in G]
    #edge_width = [getcommonfriends(u,v) for u,v in G.edges()]
    pos = nx.spring_layout(G, iterations=15)
    nx.draw_networkx_edges(G, pos, alpha=0.05)
    nx.draw_networkx_nodes(G, pos, node_size=node_size, node_color=node_color, vmin=0.0, vmax=1.0, alpha=0.3)
    # Decide whether to show labels
    if label_flag is True:
        nx.draw_networkx_labels(G, pos, font_size=6, alpha=0.1)
    #nx.draw_graphviz(G)
    plt.show()
    return G
Example 3: gen_graph_stats
def gen_graph_stats(graph):
    G = nx.read_graphml(graph)
    stats = {}
    edges, nodes = 0, 0
    for e in G.edges_iter(): edges += 1
    for n in G.nodes_iter(): nodes += 1
    stats['Edges'] = (edges, 'The number of edges within the Graph')
    stats['Nodes'] = (nodes, 'The number of nodes within the Graph')
    print "%i edges, %i nodes" % (edges, nodes)
    # Accessing the highest degree node
    center, degree = sorted(G.degree().items(), key=itemgetter(1), reverse=True)[0]
    stats['Center Node'] = ('%s: %0.5f' % (center, degree), 'The center most node in the graph. Which has the highest degree')
    hairball = nx.subgraph(G, [x for x in nx.connected_components(G)][0])
    print "Average shortest path: %0.4f" % nx.average_shortest_path_length(hairball)
    stats['Average Shortest Path Length'] = (nx.average_shortest_path_length(hairball), '')
    # print "Center: %s" % G[center]
    # print "Shortest Path to Center: %s" % p
    print "Degree: %0.5f" % degree
    stats['Degree'] = (degree, 'The node degree is the number of edges adjacent to that node.')
    print "Order: %i" % G.number_of_nodes()
    stats['Order'] = (G.number_of_nodes(), 'The number of nodes in the graph.')
    print "Size: %i" % G.number_of_edges()
    stats['Size'] = (G.number_of_edges(), 'The number of edges in the graph.')
    print "Clustering: %0.5f" % nx.average_clustering(G)
    stats['Average Clustering'] = (nx.average_clustering(G), 'The average clustering coefficient for the graph.')
    print "Transitivity: %0.5f" % nx.transitivity(G)
    stats['Transitivity'] = (nx.transitivity(G), 'The fraction of all possible triangles present in the graph.')
    part = community.best_partition(G)
    # values = [part.get(node) for node in G.nodes()]
    # nx.draw_spring(G, cmap = plt.get_cmap('jet'), node_color = values, node_size=30, with_labels=False)
    # plt.show()
    mod = community.modularity(part, G)
    print "modularity: %0.5f" % mod
    stats['Modularity'] = (mod, 'The modularity of a partition of a graph.')
    knn = nx.k_nearest_neighbors(G)
    print knn
    stats['K Nearest Neighbors'] = (knn, 'the average degree connectivity of graph.\nThe average degree connectivity is the average nearest neighbor degree of nodes with degree k. For weighted graphs, an analogous measure can be computed using the weighted average neighbors degre')
    return G, stats
Example 4: get_small_worldness
def get_small_worldness(filename):
    import networkx as nx
    threshold = 0
    f = open(filename[:-4]+'_small_worldness.dat', 'w')
    for i in range(0, 101):
        threshold = float(i)/100
        G = get_threshold_matrix(filename, threshold)
        ER_graph = nx.erdos_renyi_graph(nx.number_of_nodes(G), nx.density(G))
        cluster = nx.average_clustering(G)
        ER_cluster = nx.average_clustering(ER_graph)
        transi = nx.transitivity(G)
        ER_transi = nx.transitivity(ER_graph)
        print 'threshold: %f, average cluster coefficient: %f, random nw: %f, transitivity: %f, random nw: %f' % (threshold, cluster, ER_cluster, transi, ER_transi)
        f.write("%f\t%f\t%f" % (threshold, cluster, ER_cluster))
        components = nx.connected_component_subgraphs(G)
        ER_components = nx.connected_component_subgraphs(ER_graph)
        values = []
        ER_values = []
        for i in range(len(components)):
            if nx.number_of_nodes(components[i]) > 1:
                values.append(nx.average_shortest_path_length(components[i]))
        for i in range(len(ER_components)):
            if nx.number_of_nodes(ER_components[i]) > 1:
                ER_values.append(nx.average_shortest_path_length(ER_components[i]))
        if len(values) == 0:
            f.write("\t0.")
        else:
            f.write("\t%f" % (sum(values)/len(values)))
        if len(ER_values) == 0:
            f.write("\t0.")
        else:
            f.write("\t%f" % (sum(ER_values)/len(ER_values)))
        f.write("\t%f\t%f" % (transi, ER_transi))
        if (ER_cluster*sum(values)*len(values)*sum(ER_values)*len(ER_values)) > 0:
            S_WS = (cluster/ER_cluster) / ((sum(values)/len(values)) / (sum(ER_values)/len(ER_values)))
        else:
            S_WS = 0.
        if (ER_transi*sum(values)*len(values)*sum(ER_values)*len(ER_values)) > 0:
            S_Delta = (transi/ER_transi) / ((sum(values)/len(values)) / (sum(ER_values)/len(ER_values)))
        else:
            S_Delta = 0.
        f.write("\t%f\t%f" % (S_WS, S_Delta))
        f.write("\n")
    f.close()
    print "1:threshold 2:cluster-coefficient 3:random-cluster-coefficient 4:shortest-pathlength 5:random-shortest-pathlength 6:transitivity 7:random-transitivity 8:S-Watts-Strogatz 9:S-transitivity"
Example 5: get_small_worldness
def get_small_worldness(G, thr):
    f = open(out_prfx + 'small_worldness.dat', 'a')
    g = open(out_prfx + 'cc_trans_ER.dat', 'a')
    #g.write('r(thre.)\t\cc_A\tcc_ER\ttran_A\ttran_ER\n')
    ER_graph = nx.erdos_renyi_graph(nx.number_of_nodes(G), nx.density(G))
    # Erdos-Renyi, binomial random graph generator ... (N, D: density)
    cluster = nx.average_clustering(G)  # clustering coef. of whole network
    ER_cluster = nx.average_clustering(ER_graph)  # cc of random graph
    transi = nx.transitivity(G)
    ER_transi = nx.transitivity(ER_graph)
    g.write("%f\t%f\t%f\t%f\t%f\n" % (thr, cluster, ER_cluster, transi, ER_transi))
    f.write("%f\t%f\t%f" % (thr, cluster, ER_cluster))
    components = nx.connected_component_subgraphs(G)
    ER_components = nx.connected_component_subgraphs(ER_graph)
    values = []
    ER_values = []
    for i in range(len(components)):
        if nx.number_of_nodes(components[i]) > 1:
            values.append(nx.average_shortest_path_length(components[i]))
    for i in range(len(ER_components)):
        if nx.number_of_nodes(ER_components[i]) > 1:
            ER_values.append(nx.average_shortest_path_length(ER_components[i]))
    if len(values) == 0:
        f.write("\t0.")
    else:
        f.write("\t%f" % (sum(values)/len(values)))  # path length
    if len(ER_values) == 0:
        f.write("\t0.")
    else:
        f.write("\t%f" % (sum(ER_values)/len(ER_values)))
    f.write("\t%f\t%f" % (transi, ER_transi))
    if (ER_cluster*sum(values)*len(values)*sum(ER_values)*len(ER_values)) > 0:
        S_WS = (cluster/ER_cluster) / ((sum(values)/len(values)) / (sum(ER_values)/len(ER_values)))
    else:
        S_WS = 0.
    if (ER_transi*sum(values)*len(values)*sum(ER_values)*len(ER_values)) > 0:
        S_Delta = (transi/ER_transi) / ((sum(values)/len(values)) / (sum(ER_values)/len(ER_values)))
    else:
        S_Delta = 0.
    f.write("\t%f\t%f" % (S_WS, S_Delta))  # S_WS ~ small-worldness
    f.write("\n")
    f.close()
    g.close()
Example 6: printStats
def printStats(filename):
    '''
    Converts a json adjacency list into a networkx graph to calculate and print the
    graph's
    - average clustering coefficient
    - overall clustering coefficient
    - maximum diameter
    - average diameter
    - number of partitions using community.best_partition
    - modularity of community.best_partition
    '''
    g = makeGraphFromJSON(filename)
    print "Average Clustering Coefficient: %f" % nx.average_clustering(g)
    print "Overall Clustering Coefficient: %f" % nx.transitivity(g)
    connected_subgraphs = list(nx.connected_component_subgraphs(g))
    largest = max(nx.connected_component_subgraphs(g), key=len)
    print "# Connected Components: %d" % len(connected_subgraphs)
    print " Maximal Diameter: %d" % nx.diameter(largest)
    print " Average Diameter: %f" % nx.average_shortest_path_length(largest)
    # Find the partition that maximizes modularity using the Louvain algorithm
    part = community.best_partition(g)
    print "# Partitions: %d" % (max(part.values()) + 1)
    print "Louvain Modularity: %f" % community.modularity(part, g)
Example 7: netstats_simple
def netstats_simple(graph):
    G = graph
    if nx.is_connected(G):
        d = nx.diameter(G)
        r = nx.radius(G)
    else:
        d = 'NA - graph is not connected'  # should be calculable on an unconnected graph - see example code for a hack
        r = 'NA - graph is not connected'
    # using a dictionary to pack values and variables; dot, eps, ps, pdf break equally
    result = {  # single-value measures
        'nn': G.number_of_nodes(),
        'ne': G.number_of_edges(),
        'd': d,
        'r': r,
        'conn': nx.number_connected_components(G),
        'asp': nx.average_shortest_path_length(G),
        # size of the largest clique
        'cn': nx.graph_clique_number(G),
        # number of maximal cliques
        'mcn': nx.graph_number_of_cliques(G),
        # transitivity
        'tr': nx.transitivity(G),
        # cc = nx.clustering(G)  -- per-node clustering coefficients
        'avgcc': nx.average_clustering(G)}
    # result['d'] = nx.diameter(G)
    print result
    return result
Example 8: compute_singlevalued_measures
def compute_singlevalued_measures(ntwk, weighted=True, calculate_cliques=False):
    """
    Returns a single value per network
    """
    iflogger.info("Computing single valued measures:")
    measures = {}
    iflogger.info("...Computing degree assortativity (pearson number) ...")
    try:
        measures["degree_pearsonr"] = nx.degree_pearsonr(ntwk)
    except AttributeError:  # For NetworkX 1.6
        measures["degree_pearsonr"] = nx.degree_pearson_correlation_coefficient(ntwk)
    iflogger.info("...Computing degree assortativity...")
    try:
        measures["degree_assortativity"] = nx.degree_assortativity(ntwk)
    except AttributeError:
        measures["degree_assortativity"] = nx.degree_assortativity_coefficient(ntwk)
    iflogger.info("...Computing transitivity...")
    measures["transitivity"] = nx.transitivity(ntwk)
    iflogger.info("...Computing number of connected_components...")
    measures["number_connected_components"] = nx.number_connected_components(ntwk)
    iflogger.info("...Computing average clustering...")
    measures["average_clustering"] = nx.average_clustering(ntwk)
    if nx.is_connected(ntwk):
        iflogger.info("...Calculating average shortest path length...")
        measures["average_shortest_path_length"] = nx.average_shortest_path_length(ntwk, weighted)
    if calculate_cliques:
        iflogger.info("...Computing graph clique number...")
        measures["graph_clique_number"] = nx.graph_clique_number(ntwk)  # out of memory error
    return measures
Example 9: get_motifs
def get_motifs(filename):
    import networkx as nx
    from math import factorial
    threshold = 0
    f = open(filename[:-4]+'_motifs.dat', 'w')
    for i in range(0, 101):
        threshold = float(i)/100
        G = get_threshold_matrix(filename, threshold)
        tri_dict = nx.triangles(G)
        summe = 0
        for node in tri_dict:
            summe += tri_dict[node]
        N = nx.number_of_nodes(G)
        ratio = summe / (3. * binomialCoefficient(N, 3))
        transi = nx.transitivity(G)
        if transi > 0:
            triads = summe / transi
            ratio_triads = triads / (3 * binomialCoefficient(N, 3))
        else:
            triads = 0.
            ratio_triads = 0.
        print 'threshold: %f, number of triangles: %f, ratio: %f, triads: %f, ratio: %f' % (threshold, summe/3, ratio, triads, ratio_triads)
        f.write("%f\t%d\t%f\t%f\t%f\n" % (threshold, summe/3, ratio, triads, ratio_triads))
    f.close()
    print "1:threshold 2:#triangles 3:ratio-to-potential-triangles 4:triads 5:ratio-to-potential-triads"
Example 10: connected_components
def connected_components(self):
    """
    Returns basic statistics about the connected components of the
    graph. This includes their number, order, size, diameter, radius,
    average clustering coefficient, transitivity, in addition to basic
    info about the largest and smallest connected components.
    """
    cc_stats = {}
    cc = nx.connected_components(self.graph.structure)
    for index, component in enumerate(cc):
        cc_stats[index] = {}
        this_cc = cc_stats[index]
        this_cc["order"] = len(component)
        this_cc["size"] = len(self.graph.structure.edges(component))
        subgraph = self.graph.structure.subgraph(component)
        this_cc["avg_cluster"] = nx.average_clustering(subgraph)
        this_cc["transitivity"] = nx.transitivity(subgraph)
        eccentricity = nx.eccentricity(subgraph)
        ecc_values = eccentricity.values()
        this_cc["diameter"] = max(ecc_values)
        this_cc["radius"] = min(ecc_values)
    return cc_stats
Example 11: plot_distribution
def plot_distribution(distribution_type, legend, graph, list_communities, out=None):
    x = [i for i in range(0, len(list_communities[0]))]
    for communities in list_communities:
        if distribution_type.lower() == "nodes":
            y = list(map(len, communities))
        else:
            y = []
            for l in communities:
                H = graph.subgraph(l)
                if distribution_type.lower() == "density":
                    y.append(nx.density(H))
                elif distribution_type.lower() == "transitivity":
                    y.append(nx.transitivity(H))
                else:
                    return None
        plt.plot(x, y, linewidth=2, alpha=0.8)
    #plt.yscale("log")
    plt.legend(legend, loc='upper left')
    plt.xlabel("Community ID")
    plt.ylabel(distribution_type)
    if out == None:
        plt.show()
    else:
        plt.savefig(out+".svg", bbox_inches="tight")
        plt.close()
Example 12: get_network_property
def get_network_property(graph):
    """Returns various properties of the graph.
    It calculates the richness coefficient, triangles and transitivity
    coefficient. To do so, it removes self-loops *in-place*. So, there
    is a possibility that the graph passed as parameter has been
    changed.
    """
    remove_self_loop(graph)
    # If the number of nodes is less than three,
    # there is no point in calculating these properties.
    if len(graph.nodes()) < 3:
        return ({0: 0.0}, 0, 0)
    try:
        richness = nx.rich_club_coefficient(graph)
    except nx.NetworkXAlgorithmError:
        # NetworkXAlgorithmError is raised when
        # it fails to achieve the desired swaps after
        # the maximum number of attempts. It happened
        # for a really small graph. But, just to
        # guard against those cases.
        richness = nx.rich_club_coefficient(graph, False)
    triangle = nx.triangles(graph)
    transitivity = nx.transitivity(graph)
    return (richness, triangle, transitivity)
Example 13: degree_statistics
def degree_statistics(G):
    n_nodes = G.number_of_nodes()
    start = time.clock()
    # list of sampled graphs
    g_list[:] = []
    for i in range(N_SAMPLES):
        g_list.append(generate_sample(G))
    print "Sampling graphs - Elapsed ", (time.clock() - start)
    #####
    # number of edges s_NE
    s_NE = sum(e[2]['p'] for e in G.edges_iter(data=True))
    # average degree s_AD
    s_AD = 2*s_NE / n_nodes
    # maximal degree s_MD
    sum_MD = 0.0
    for aG in g_list:
        max_deg = max(aG.degree().itervalues())
        sum_MD += max_deg
    s_MD = sum_MD/N_SAMPLES
    # degree variance s_DV
    sum_DV = 0.0
    for aG in g_list:
        deg_var = 1.0/n_nodes * sum((d - s_AD)*(d - s_AD) for d in aG.degree().itervalues())
        sum_DV += deg_var
    s_DV = sum_DV/N_SAMPLES
    # clustering coefficient s_CC
    sum_CC = 0.0
    for aG in g_list:
        cc = nx.transitivity(aG)
        sum_CC += cc
    s_CC = sum_CC/N_SAMPLES
    # degree distribution
    deg_list = [0 for i in range(MAX_DEG)]
    for aG in g_list:
        for d in aG.degree().itervalues():
            deg_list[d] += 1
    i = MAX_DEG-1
    while deg_list[i] == 0:
        i = i-1
    deg_list = deg_list[:i+1]
    print "len(deg_list) =", len(deg_list)
    print deg_list
    for i in range(len(deg_list)):
        deg_list[i] = float(deg_list[i])/N_SAMPLES
    #
    return s_NE, s_AD, s_MD, s_DV, s_CC, deg_list
Example 14: cluster
def cluster():
    if created == 0:
        print 'No graph created!'
    elif created == 1:
        try:
            print 'The clustering coefficient for the whole graph is %0.4f.' % (nx.transitivity(G))
        except nx.NetworkXError, e:
            print e
Example 15: preferentialAttachment
def preferentialAttachment(G):
    n = G.number_of_nodes()
    m = random.randrange(15, 20)
    PG = nx.barabasi_albert_graph(n, m)
    plot(PG)
    l = math.log(n)/math.log(math.log(n))
    print 'Global Clustering: {0}\t'.format(str(nx.transitivity(PG))),
    print 'Average path length : {0}\n'.format(str(l))