本文整理汇总了Python中networkx.closeness_centrality函数的典型用法代码示例。如果您正苦于以下问题:Python closeness_centrality函数的具体用法?Python closeness_centrality怎么用?Python closeness_centrality使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了closeness_centrality函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: closeness_component
def closeness_component(seed_num, graph_json_filename=None, graph_json_str=None):
    """Pick seed nodes spread across the large connected components of a graph.

    Each component holding more than 10% of the nodes gets a share of the
    seed budget proportional to its size; within a component the nodes with
    the highest closeness centrality are chosen.  The first component absorbs
    whatever budget remains after integer truncation.

    Returns a list of node identifiers (empty if no graph source is given).
    """
    if graph_json_filename is None and graph_json_str is None:
        return []
    if graph_json_str is None:
        G = util.load_graph(graph_json_filename=graph_json_filename)
    else:
        G = util.load_graph(graph_json_str=graph_json_str)
    # BUG FIX: materialize the filter as a list -- Python 2 `filter` returned
    # a list, but on Python 3 it is a lazy iterator and the `components[1:]`
    # / `components[0]` indexing below would raise TypeError.
    components = [c for c in nx.connected_components(G) if len(c) > 0.1 * len(G)]
    total_size = sum(len(c) for c in components)
    total_nodes = 0
    rtn = []
    for comp in components[1:]:
        num_nodes = int(float(len(comp)) / total_size * seed_num)
        component = G.subgraph(list(comp))
        clse_cent = nx.closeness_centrality(component)
        collector = collections.Counter(clse_cent)
        # BUG FIX: `lambda (x, y): x` is Python 2-only tuple-parameter
        # unpacking (SyntaxError on Python 3).
        rtn += [node for node, _ in collector.most_common(num_nodes)]
        total_nodes += num_nodes
    # The first component receives the remaining seed budget.
    num_nodes = seed_num - total_nodes
    component = G.subgraph(list(components[0]))
    clse_cent = nx.closeness_centrality(component)
    collector = collections.Counter(clse_cent)
    rtn += [node for node, _ in collector.most_common(num_nodes)]
    return rtn
开发者ID:shimmy1996,项目名称:Pandemaniac,代码行数:31,代码来源:closeness_component.py
示例2: closeness_fracture
def closeness_fracture(infile, outfile, fraction, recalculate = False):
    """
    Removes given fraction of nodes from infile network in reverse order of
    closeness centrality (with or without recalculation of centrality values
    after each node removal) and saves the network in outfile.  Surviving
    nodes are tagged with a "component" attribute identifying their
    connected component.
    """
    g = networkx.read_gml(infile)
    m = networkx.closeness_centrality(g)
    l = sorted(m.items(), key = operator.itemgetter(1), reverse = True)
    n = len(g.nodes())
    for i in range(1, n):
        # Remove the currently highest-ranked node.
        g.remove_node(l.pop(0)[0])
        if recalculate:
            m = networkx.closeness_centrality(g)
            l = sorted(m.items(), key = operator.itemgetter(1),
                       reverse = True)
        # FIX: dropped the per-iteration `largest_component = max(...)`
        # computation present in the original -- its result was never read,
        # so it was pure wasted O(V + E) work on every removal.
        if i * 1. / n >= fraction:
            break
    # Tag each surviving node with the id of its connected component.
    components = networkx.connected_components(g)
    component_id = 1
    for component in components:
        for node in component:
            # NOTE(review): `g.node` is the networkx 1.x node-attribute dict;
            # networkx >= 2.4 renamed it to `g.nodes`.
            g.node[node]["component"] = component_id
        component_id += 1
    networkx.write_gml(g, outfile)
开发者ID:swamiiyer,项目名称:robustness,代码行数:28,代码来源:robustness.py
示例3: compute_static_graph_statistics
def compute_static_graph_statistics(G, start_time, end_time):
    """Compute per-vertex centrality statistics over a temporal graph.

    Aggregates the snapshots of G between start_time and end_time into one
    static graph and computes degree/closeness/betweenness centrality on it
    (agg_statistics), and also averages the per-snapshot centralities over
    the window (avg_statistics).

    Returns (agg_statistics, avg_statistics); each is a list of three dicts
    keyed by vertex: [degree, closeness, betweenness].
    """
    verts = G.vertices
    n = len(verts)
    m = float(end_time - start_time)
    # BUG FIX: the original multiplied the 3-element list by 3, yielding nine
    # entries that all aliased the same three dict objects.  Only indices 0-2
    # were ever written/read below, so build exactly three independent dicts.
    agg_statistics = [dict.fromkeys(verts, 0) for _ in range(3)]
    avg_statistics = [dict.fromkeys(verts, 0) for _ in range(3)]
    aggregated_graph = nx.Graph()
    aggregated_graph.add_nodes_from(verts)
    start_time = max(1, start_time)
    # FIX: xrange is Python 2-only; range behaves the same on Python 3.
    for t in range(start_time, end_time + 1):
        aggregated_graph.add_edges_from(G.snapshots[t].edges_iter())
        dc = G.snapshots[t].degree()
        cc = nx.closeness_centrality(G.snapshots[t])
        bc = nx.betweenness_centrality(G.snapshots[t])
        for v in verts:
            # Normalize degree to a centrality in [0, 1].
            avg_statistics[0][v] += dc[v] / (n - 1.0)
            avg_statistics[1][v] += cc[v]
            avg_statistics[2][v] += bc[v]
    for v in verts:
        avg_statistics[0][v] = avg_statistics[0][v] / m
        avg_statistics[1][v] = avg_statistics[1][v] / m
        avg_statistics[2][v] = avg_statistics[2][v] / m
    dc = nx.degree_centrality(aggregated_graph)
    cc = nx.closeness_centrality(aggregated_graph)
    bc = nx.betweenness_centrality(aggregated_graph)
    for v in verts:
        agg_statistics[0][v] = dc[v]
        agg_statistics[1][v] = cc[v]
        agg_statistics[2][v] = bc[v]
    return (agg_statistics, avg_statistics)
开发者ID:juancamilog,项目名称:temporal_centrality,代码行数:33,代码来源:temporal_graph.py
示例4: closeness_removal
def closeness_removal(g, recalculate=False):
    """Robustness analysis driven by closeness centrality.

    Nodes are deleted from g in decreasing order of closeness centrality,
    ranked once up front (recalculate=False) or re-ranked after every
    removal (recalculate=True).  Returns two parallel lists: the fraction
    of nodes removed and the fractal dimension of the remaining network.
    """
    ranking = sorted(nx.closeness_centrality(g).items(),
                     key=operator.itemgetter(1), reverse=True)
    total = len(g.nodes())
    fractions = [0]
    dimensions = [fd.fractal_dimension(g, iterations=100, debug=False)]
    for step in range(1, total - 1):
        victim, _ = ranking.pop(0)
        g.remove_node(victim)
        if recalculate:
            ranking = sorted(nx.closeness_centrality(g).items(),
                             key=operator.itemgetter(1), reverse=True)
        fractions.append(step * 1. / total)
        dimensions.append(fd.fractal_dimension(g, iterations=100, debug=False))
    return fractions, dimensions
开发者ID:hernandcb,项目名称:complexNetworksMeasurements,代码行数:31,代码来源:dimensionPlotsOBCA.py
示例5: closeness
def closeness(infile, recalculate = False):
    """
    Performs robustness analysis based on closeness centrality,
    on the network specified by infile using sequential (recalculate = True)
    or simultaneous (recalculate = False) approach. Returns a list
    with fraction of nodes removed, a list with the corresponding sizes of
    the largest component of the network, and the overall vulnerability
    of the network.
    """
    graph = networkx.read_gml(infile)
    centrality = networkx.closeness_centrality(graph)
    queue = sorted(centrality.items(), key = operator.itemgetter(1),
                   reverse = True)
    node_count = len(graph.nodes())
    giant = max(networkx.connected_components(graph), key = len)
    xs = [0]
    ys = [len(giant) * 1. / node_count]
    area = 0.0
    for step in range(1, node_count):
        # Remove the top-ranked node, then re-rank if requested.
        graph.remove_node(queue.pop(0)[0])
        if recalculate:
            centrality = networkx.closeness_centrality(graph)
            queue = sorted(centrality.items(), key = operator.itemgetter(1),
                           reverse = True)
        giant = max(networkx.connected_components(graph), key = len)
        xs.append(step * 1. / node_count)
        area += len(giant) * 1. / node_count
        ys.append(len(giant) * 1. / node_count)
    return xs, ys, 0.5 - area / node_count
开发者ID:swamiiyer,项目名称:robustness,代码行数:31,代码来源:robustness.py
示例6: test_digraph
def test_digraph(self):
    # Closeness on a directed 3-node path and on its reverse should
    # mirror each other end for end.
    G = nx.path_graph(3, create_using=nx.DiGraph())
    forward = nx.closeness_centrality(G)
    backward = nx.closeness_centrality(G.reverse())
    expected_fwd = {0: 0.0, 1: 0.500, 2: 0.667}
    expected_bwd = {0: 0.667, 1: 0.500, 2: 0.0}
    for node in sorted(self.P3):
        assert_almost_equal(forward[node], expected_fwd[node], places=3)
        assert_almost_equal(backward[node], expected_bwd[node], places=3)
开发者ID:networkx,项目名称:networkx,代码行数:9,代码来源:test_closeness_centrality.py
示例7: closeness_centrality_distribution
def closeness_centrality_distribution(G, return_dictionary=False):
    """Return the distribution of unweighted closeness centralities, as used
    in Borges, Coppersmith, Meyer, and Priebe 2011.

    By default only the centrality values are returned; pass
    return_dictionary=True to get a dictionary indexed by vertex name
    instead.
    """
    centralities = nx.closeness_centrality(G)
    return centralities if return_dictionary else centralities.values()
开发者ID:jovo,项目名称:shuffled-graph-theory,代码行数:10,代码来源:graph_invariants.py
示例8: test_wf_improved
def test_wf_improved(self):
    # Disjoint union of two paths: compare the default (Wasserman-Faust
    # improved) scores against the plain formula (wf_improved=False).
    G = nx.union(self.P4, nx.path_graph([4, 5, 6]))
    default_scores = nx.closeness_centrality(G)
    plain_scores = nx.closeness_centrality(G, wf_improved=False)
    expected_default = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25,
                        4: 0.222, 5: 0.333, 6: 0.222}
    expected_plain = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5,
                      4: 0.667, 5: 1.0, 6: 0.667}
    for node in G:
        assert_almost_equal(default_scores[node], expected_default[node], places=3)
        assert_almost_equal(plain_scores[node], expected_plain[node], places=3)
开发者ID:networkx,项目名称:networkx,代码行数:11,代码来源:test_closeness_centrality.py
示例9: closeness_apl
def closeness_apl(g, recalculate=False):
    """
    Performs robustness analysis based on closeness centrality, removing
    nodes in decreasing order of centrality using the sequential
    (recalculate = True) or simultaneous (recalculate = False) approach.
    Returns a list with the fraction of nodes removed, a list with the
    corresponding average path lengths normalized by the initial value,
    and an overall vulnerability measure.
    """
    def _mean_component_apl(graph):
        # Average shortest-path length over non-trivial connected components;
        # singleton components are skipped (a lone node has no paths).
        total, count = 0.0, 0
        for sg in networkx.connected_component_subgraphs(graph):
            if len(sg.nodes()) > 1:
                total += networkx.average_shortest_path_length(sg)
                count += 1
        return total / count if count else 0.0

    m = networkx.closeness_centrality(g)
    l = sorted(m.items(), key=operator.itemgetter(1), reverse=True)
    x = []
    y = []
    n = len(g.nodes())
    # CONSISTENCY FIX: the original initial-APL loop lacked the `len > 1`
    # singleton guard that the in-loop recomputation applied; both paths now
    # share _mean_component_apl.
    average_path_length = _mean_component_apl(g)
    initial_apl = average_path_length
    x.append(0)
    y.append(average_path_length * 1. / initial_apl)
    r = 0.0
    for i in range(1, n):
        g.remove_node(l.pop(0)[0])
        if recalculate:
            m = networkx.closeness_centrality(g)
            l = sorted(m.items(), key=operator.itemgetter(1),
                       reverse=True)
        average_path_length = _mean_component_apl(g)
        # BUG FIX: x is documented as the fraction of nodes removed, so it
        # must be normalized by n (as the sibling robustness functions do),
        # not by the initial path length.
        x.append(i * 1. / n)
        r += average_path_length * 1. / initial_apl
        y.append(average_path_length * 1. / initial_apl)
    return x, y, r / initial_apl
开发者ID:computational-center,项目名称:complexNetworksMeasurements,代码行数:51,代码来源:robustness2.py
示例10: closeness_centrality
def closeness_centrality(self, withme=False, node=None, average=False):
    """Print closeness centralities for the ego network.

    withme selects the network that includes ego (self.mynet) versus the
    one without ego (self.no_ego_net).  With node=None every vertex is
    reported -- or just the average when average=True -- and a dict keyed
    by internal node id is returned.  With a specific node, only that
    node's coefficient is printed (nothing is returned).
    """
    # The two original branches were identical except for the graph used;
    # select the graph once and share the rest of the logic.
    net = self.mynet if withme else self.no_ego_net
    my_dict = nx.closeness_centrality(net)
    if node is None:
        by_name = {}
        by_id = {}
        for i in my_dict:
            by_name[self.id_to_name(i)] = my_dict[i]
            by_id[i] = my_dict[i]
        # FIX: Python 2 print statements converted to print() calls.
        if average:
            print("The average is " + str(round(sum(by_name.values()) / float(len(by_name.values())), 4)))
        else:
            for name, value in by_name.items():
                print(name, round(value, 4))
        return by_id
    try:
        print("The coefficient for node " + str(node) + "is " + str(round(my_dict[node], 4)))
    except Exception:
        try:
            # BUG FIX: the original indexed the dict with a one-element
            # list (my_dict[[self.name_to_id(node)]]), which always raised
            # TypeError; index with the translated id directly.  Also
            # rounds consistently in both branches.
            print("The coefficient for node " + str(node) + "is " + str(round(my_dict[self.name_to_id(node)], 4)))
        except Exception:
            print("Invalid node name")
开发者ID:atwel,项目名称:BigData2015,代码行数:48,代码来源:networks_lab.py
示例11: attack_based_max_closeness
def attack_based_max_closeness(G):
    """Recalculated max-closeness attack.

    Repeatedly isolates the node with the highest closeness centrality
    (removing all its incident edges) and records the number of driver
    nodes after each step.

    Returns (tot_ND, tot_T, max_closeness_zero_t), where
    max_closeness_zero_t is the first step at which every node's closeness
    was zero (no edges remain), or n if that never happened.
    """
    n = G.number_of_nodes()
    tot_ND = [0] * (n + 1)
    tot_T = [0] * (n + 1)
    ND, ND_lambda = ECT.get_number_of_driver_nodes(G)
    tot_ND[0] = ND
    tot_T[0] = 0
    # remember when all the closeness have been zero for all nodes
    max_closeness_zero_t = n
    for i in range(1, n + 1):
        all_closeness = nx.closeness_centrality(G)
        if max_closeness_zero_t == n and max(all_closeness.values()) == 0.0:
            max_closeness_zero_t = i
        # get node with max closeness
        node = max(all_closeness, key=all_closeness.get)
        # remove all the edges adjacent to node
        if not nx.is_directed(G):  # undirected graph
            # BUG FIX: wrap in list() -- removing edges while iterating the
            # adjacency view raises RuntimeError on Python 3.
            for key in list(G[node].keys()):
                G.remove_edge(node, key)
        else:  # directed graph
            for x in [v for u, v in G.out_edges(node)]:
                G.remove_edge(node, x)
            for x in [u for u, v in G.in_edges(node)]:
                G.remove_edge(x, node)
        # calculate driver node number ND
        ND, ND_lambda = ECT.get_number_of_driver_nodes(G)
        tot_ND[i] = ND
        tot_T[i] = i
    # BUG FIX: the original returned the undefined name
    # Max_Betweenness_Zero_T, which raised NameError on every call; return
    # the zero-closeness step tracked above instead (matching the comment's
    # stated intent -- TODO confirm against callers).
    return (tot_ND, tot_T, max_closeness_zero_t)
开发者ID:python27,项目名称:NetworkControllability,代码行数:31,代码来源:AttackBasedOnNode.py
示例12: plot_closeness_dist
def plot_closeness_dist(graph, path):
    """Plot distribution of closeness centrality of the graph and save the figure
    at the given path. On X-axis we have closeness centrality values and on
    Y-axis we have percentage of the nodes that have that closeness value"""
    total_nodes = float(graph.order())
    node_to_closeness = nx.closeness_centrality(graph)
    closeness_to_percent = {}
    # Count how many nodes share each closeness value...
    for closeness in node_to_closeness.values():
        closeness_to_percent[closeness] = closeness_to_percent.get(closeness, 0) + 1
    # ...then convert the counts into percentages of the whole graph.
    for value in closeness_to_percent:
        closeness_to_percent[value] = closeness_to_percent[value] / total_nodes * 100
    xs = sorted(closeness_to_percent.keys(), reverse=True)
    ys = [closeness_to_percent[value] for value in xs]
    plt.loglog(xs, ys, 'b-', marker='.')
    plt.title("Closeness Centrality Distribution")
    plt.ylabel("Percentage")
    plt.xlabel("Closeness value")
    plt.axis('tight')
    plt.savefig(path)
开发者ID:jillzz,项目名称:protein-interaction,代码行数:25,代码来源:interaction_graph_info.py
示例13: computeLeague
def computeLeague(libSNA, session):
    """Rank the union of the top-10 nodes by degree, closeness and
    betweenness with a weighted combination of the three normalized
    centralities, then resolve Facebook names for the chosen uids.

    Returns rows of [uid, name, degree, closeness, betweenness].
    """
    degree = nx.degree(libSNA.graph)
    closeness = nx.closeness_centrality(libSNA.graph)
    betweenness = nx.betweenness_centrality(libSNA.graph)
    degree_rank = sorted_map(degree)
    closeness_rank = sorted_map(closeness)
    betweenness_rank = sorted_map(betweenness)
    weights = [.50, .30, .20]
    # Union of the top ten nodes under each measure.
    candidates = set(entry[0] for entry in degree_rank[:10])
    candidates |= set(entry[0] for entry in closeness_rank[:10])
    candidates |= set(entry[0] for entry in betweenness_rank[:10])

    def combined_score(name):
        # Each centrality is normalized by its top-ranked value, then mixed.
        return ((float(degree[name]) / degree_rank[0][1]) * weights[0]
                + (float(closeness[name]) / closeness_rank[0][1]) * weights[1]
                + (float(betweenness[name]) / betweenness_rank[0][1]) * weights[2])

    names = sorted(list(candidates), key=combined_score, reverse=True)
    result = fbutils.fql(
        "SELECT uid, name FROM user WHERE uid IN ( " \
        "SELECT uid2 FROM friend WHERE uid1 = me() )",
        session['access_token'])
    nodes = {}
    for row in result:
        nodes[str(row['uid'])] = row['name']
    return [[name, nodes[name], str(degree[name]), str(closeness[name]), str(betweenness[name])] for name in names]
开发者ID:biancini,项目名称:Rorschach-Test-Platform,代码行数:28,代码来源:networkleague.py
示例14: closeness_neighbors
def closeness_neighbors(seed_num, graph=None, graph_json_filename=None, graph_json_str=None):
    """Pick seed_num seed nodes from the neighborhoods of the most central nodes.

    The SURROUND_TOP nodes with the highest closeness centrality are cycled
    through; at each step one random neighbor not already chosen (and not a
    top node itself) is added.  Returns the list of chosen nodes (empty if
    no graph source is given).
    """
    if graph_json_filename is None and graph_json_str is None and graph is None:
        return []
    if graph is not None:
        G = graph
    elif graph_json_str is None:
        G = util.load_graph(graph_json_filename=graph_json_filename)
    else:
        G = util.load_graph(graph_json_str=graph_json_str)
    # Reuse centralities cached on the graph when present.
    clse_cent = nx.get_node_attributes(G, "centrality")
    if len(clse_cent) == 0:
        clse_cent = nx.closeness_centrality(G)
        # NOTE(review): (graph, name, values) is the networkx 1.x argument
        # order for set_node_attributes; 2.x expects (graph, values, name).
        nx.set_node_attributes(G, "centrality", clse_cent)
    # FIX: Python 2 print statement converted to a print() call.
    print("closeness neighbors")
    collector = collections.Counter(clse_cent)
    clse_cent = collector.most_common(SURROUND_TOP)
    # BUG FIX: `lambda (x, y): x` is Python 2-only tuple-parameter
    # unpacking (SyntaxError on Python 3).
    nodes = [node for node, _ in clse_cent]
    current_seed = 0
    rtn = []
    while current_seed < seed_num:
        current_node = nodes[current_seed % len(nodes)]
        current_neighbors = G.neighbors(current_node)
        rtn += random.sample(set(current_neighbors) - set(rtn) - set(nodes), 1)
        current_seed += 1
    return rtn
开发者ID:shimmy1996,项目名称:Pandemaniac,代码行数:31,代码来源:closeness_neighbors.py
示例15: centrality_scores
def centrality_scores(vote_matrix, season_graph):
    """Assemble a per-contestant table of network centrality scores.

    Returns a pandas DataFrame with degree, closeness, betweenness,
    eigenvector and PageRank centralities plus final placement (row order
    of vote_matrix), sorted by PageRank, eigenvector and degree descending.
    """
    # BUG FIX: dict.iteritems() is Python 2-only; use items().
    deg = nx.degree(season_graph)
    deg = {k: round(v, 1) for k, v in deg.items()}
    close = nx.closeness_centrality(season_graph)
    close = {k: round(v, 3) for k, v in close.items()}
    btw = nx.betweenness_centrality(season_graph)
    btw = {k: round(v, 3) for k, v in btw.items()}
    eig = nx.eigenvector_centrality_numpy(season_graph)
    eig = {k: round(v, 3) for k, v in eig.items()}
    page = nx.pagerank(season_graph)
    page = {k: round(v, 3) for k, v in page.items()}
    # Contestant placement (rank) follows the vote matrix row order.
    order = list(vote_matrix.index)
    place = {name: i + 1 for i, name in enumerate(order)}
    names = season_graph.nodes()
    # Build a table with centralities.
    table = [[name, deg[name], close[name], btw[name], eig[name], page[name], place[name]]
             for name in names]
    headers = ['name', 'deg', 'close', 'btw', 'eig', 'page', 'place']
    df = pd.DataFrame(table, columns=headers)
    df = df.sort_values(['page', 'eig', 'deg'], ascending=False)
    return df
开发者ID:bchugit,项目名称:Survivor-Project,代码行数:32,代码来源:network.py
示例16: run_main
def run_main(file):
    """Load a space-separated adjacency matrix from *file* and print basic
    graph metrics (degree, density, and three centrality measures).

    Note: the parameter name `file` shadows the builtin; kept for
    backward compatibility with existing callers.
    """
    NumberOfStations = 465  # NOTE(review): unused here; kept from original.
    # FIX: Python 2 print statements converted to print() calls.
    print(file)
    adjmatrix = np.loadtxt(file, delimiter=' ', dtype=np.dtype('int32'))
    g = nx.from_numpy_matrix(adjmatrix, create_using=nx.MultiGraph())
    degree = g.degree()
    density = nx.density(g)
    degree_centrality = nx.degree_centrality(g)
    # FIX: corrected the misspelled local names (clossness/betweenless).
    closeness_centrality = nx.closeness_centrality(g)
    betweenness_centrality = nx.betweenness_centrality(g)
    print(degree)
    print(density)
    print(degree_centrality)
    print(closeness_centrality)
    print(betweenness_centrality)
开发者ID:Joan93,项目名称:BigData,代码行数:25,代码来源:AdjMatrix_Analisys.py
示例17: generate_seeds
def generate_seeds(num_players, num_seeds, G):
    """Choose num_seeds seed nodes by a weighted mix of normalized degree
    and closeness centrality.

    Assumes G's nodes are labelled 0..n-1 (the original code relied on the
    same assumption through dict insertion order).  num_players is unused
    but kept for interface compatibility.  Returns a numpy int array of
    node ids.
    """
    # FIX: np.int was removed from NumPy (>= 1.24); plain int is equivalent.
    seeds = np.zeros(num_seeds, dtype=int)
    # BUG FIX: the original indexed dict.values() directly, which is not
    # indexable on Python 3 and whose ordering was only coincidentally
    # aligned with node ids; index the dicts by node id explicitly.
    adjacency = nx.to_dict_of_lists(G)
    centrality = nx.closeness_centrality(G)
    node_count = len(adjacency)
    degrees = np.zeros(node_count, dtype=int)
    centralities = [centrality[i] for i in range(node_count)]
    for i in range(node_count):
        degrees[i] = len(adjacency[i])
    degree_max = max(degrees)
    cent_max = max(centralities)
    scores = []
    for i in range(node_count):
        norm_degree = float(degrees[i]) / degree_max
        norm_cent = float(centralities[i]) / cent_max
        scores.append((i, norm_degree * DEGREE_WEIGHT + norm_cent * CENT_WEIGHT))
    sorted_scores = sorted(scores, key=itemgetter(1), reverse=True)
    for i in range(num_seeds):
        seeds[i] = sorted_scores[i][0]
    return seeds
开发者ID:jordanbonilla,项目名称:pandemaniac,代码行数:26,代码来源:central_and_degree.py
示例18: test_networkx_roundtrip
def test_networkx_roundtrip(self):
    """Round-trip a NetworkX graph through the cyjs format and back,
    checking that nodes, edges and node attributes survive conversion."""
    print("\n---------- NetworkX Data Roundtrip Test Start -----------\n")
    g = nx.newman_watts_strogatz_graph(100, 3, 0.5)
    nodes = g.nodes()
    edges = g.edges()
    # Add some attributes
    g.graph["name"] = "original"
    g.graph["density"] = nx.density(g)
    # NOTE(review): this (graph, name, values) argument order is the
    # networkx 1.x signature of set_node_attributes; 2.x swapped it to
    # (graph, values, name) -- confirm the pinned networkx version.
    nx.set_node_attributes(g, "betweenness", nx.betweenness_centrality(g))
    nx.set_node_attributes(g, "degree", nx.degree(g))
    nx.set_node_attributes(g, "closeness", nx.closeness_centrality(g))
    nx.set_edge_attributes(g, "eb", nx.edge_betweenness(g))
    # Convert to cyjs and back.
    cyjs1 = util.from_networkx(g)
    g2 = util.to_networkx(cyjs1)
    # Node and edge counts must be preserved.
    self.assertEqual(len(g2.nodes()), len(nodes))
    self.assertEqual(len(g2.edges()), len(edges))
    # Every original edge must still be present (node ids come back as
    # strings, hence the int() conversion).
    edge_set = set(list(map(lambda x: (int(x[0]), int(x[1])), g2.edges())))
    self.assertEqual(0, len(edge_set.difference(set(edges))))
    # NOTE(review): g.node is the networkx 1.x attribute accessor
    # (renamed g.nodes in 2.4); round-tripped node keys become strings.
    node_original = g.node[1]
    node_generated = g2.node["1"]
    print(node_original)
    print(node_generated)
    self.assertEqual(node_original["degree"], node_generated["degree"])
    self.assertEqual(node_original["betweenness"], node_generated["betweenness"])
    self.assertEqual(node_original["closeness"], node_generated["closeness"])
开发者ID:scholer,项目名称:py2cytoscape,代码行数:35,代码来源:test_util.py
示例19: closeness_centrality_report
def closeness_centrality_report(graph, n):
    """ reports on the top n most central individuals on the graph,
    ranked by closeness centrality """
    pr = nx.closeness_centrality(graph)
    top = sorted(pr.items(), key=operator.itemgetter(1), reverse=True)[:n]
    # BUG FIX: the header previously said "degree centrality" although this
    # report ranks by closeness centrality.
    print("closeness centrality - top {} individuals".format(n))
    # BUG FIX: the loop variable previously shadowed the parameter n.
    for name, score in top:
        print("  {:30}:\t{}".format(name, score))
开发者ID:csrhau,项目名称:sandpit,代码行数:7,代码来源:networker.py
示例20: closeness_centrality
def closeness_centrality(graph, outfile, records=10):
    """ Perform a closeness centrality analysis on graph, writing the top
    *records* employees as CSV to *outfile* """
    ranking = nx.closeness_centrality(graph)
    ordering = sorted(ranking.items(), key=operator.itemgetter(1), reverse=True)[:records]
    # BUG FIX: the CSV header said "Degree Centrality" (copy-paste from the
    # degree report); this function ranks by closeness centrality.
    print("Employee,Closeness Centrality", file=outfile)
    for employee, rank in ordering:
        print("{},{}".format(employee, rank), file=outfile)
开发者ID:csrhau,项目名称:sandpit,代码行数:7,代码来源:graph_summary.py
注:本文中的networkx.closeness_centrality函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论