
Python networkx.number_of_nodes Function Code Examples


This article collects typical usage examples of the networkx.number_of_nodes function in Python. If you are wondering what number_of_nodes does, how to call it, or what real-world code that uses it looks like, the hand-picked examples below should help.



A total of 20 code examples of the number_of_nodes function are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the site recommend better Python code examples.
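As a quick orientation before the collected examples, here is a minimal sketch (using a small made-up graph purely for illustration) of the two equivalent ways to count nodes with networkx:

import networkx as nx

# Build a small illustrative graph; the nodes and edges are arbitrary.
G = nx.Graph()
G.add_edges_from([(1, 2), (2, 3), (3, 4), (4, 1)])

# number_of_nodes is available both as a module-level function and as a graph method.
print(nx.number_of_nodes(G))  # -> 4
print(G.number_of_nodes())    # -> 4, equivalent method form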

Example 1: eccentricityAttributes

def eccentricityAttributes(graph):
	return_values = []
	#Average effective eccentricity
	eccVals = []
	e = 0
	for n in graph.nodes():
		try: 
			eccVals.append(nx.eccentricity(graph, v=n))	
		except nx.NetworkXError:
			eccVals.append(0)
	eccSum = 0
	center_nodes = 0
	phobic = 0
	diameter = max(eccVals)
	radius = min(eccVals)
	for i in range(len(eccVals)):
		if eccVals[i] ==  radius:
			center_nodes += 1
			if graph.node[i]['hydro'] == 'phobic':
				phobic += 1
		eccSum += eccVals[i]
	return_values.append(eccSum / float(nx.number_of_nodes(graph)))	
	#Effective diameter
	return_values.append(diameter)
	#Effective radius
	return_values.append(radius)
	#Percentage central nodes
	return_values.append(center_nodes / float(nx.number_of_nodes(graph)))
	#Percentage central nodes that are hydrophobic
	return_values.append(phobic / float(center_nodes))
	return return_values
Developer: hlhendy, Project: Protein-Structure-Prediction-ML, Lines: 31, Source file: GraphAttributes.py


Example 2: bfs

    def bfs(self, Asmall, start, path=True):
        queue = [start]
        chain = []
        extra_nodes_search = np.ones((nx.number_of_nodes(self.graph),), dtype=np.int)
        node_list = -1 * np.ones((nx.number_of_nodes(self.graph),), dtype=np.int)
        node_list[start] = start

        while queue:
            neighbors = np.array(np.nonzero(Asmall[queue[0], :])[1])  # get neighbors as numpy array
            for i in range(0, np.size(neighbors)):
                if node_list[neighbors[i]] == -1:  # Simon's computer dies here
                    node_list[neighbors[i]] = queue[0]
                    queue.append(neighbors[i])
            qsize = len(queue)
            furthest_node = queue[qsize - 1]
            queue = queue[1:]
        if path:
            curr = furthest_node
            while curr != start:
                chain.append(curr)
                extra_nodes_search[curr] = 0
                curr = node_list[curr]
            chain.append(start)
            chain = list(reversed(chain))
            extra_nodes_search[start] = 0

        return furthest_node, chain
Developer: nanddalal, Project: VoG_Graph_Summarization, Lines: 27, Source file: structures.py


Example 3: RWall2

def RWall2(G,p):
    check = False
    while not check:
        count = 0
        G1 = nx.Graph()
        n = int(p*nx.number_of_nodes(G))
        start_id = random.randint(0,nx.number_of_nodes(G)-1)
        s_node = nx.nodes(G)[start_id]
        G1.add_node(s_node)

        now_node = s_node
        while True:
            neighbor_list = G.neighbors(now_node)
            next_id = random.randint(0,len(neighbor_list)-1)
            next_node = neighbor_list[next_id]

            G1.add_node(next_node)
            count += 1

            if random.random() < 0:
                next_id = random.randint(0,nx.number_of_nodes(G1)-1)
                next_node = G1.nodes()[next_id]
            now_node = next_node
            if nx.number_of_nodes(G1) >= n:
                check = True
                break
            if count > 50*n:
                break

    return NodeConnect(G,G1)
Developer: iwaken71, Project: sampling, Lines: 30, Source file: sampling.py


Example 4: allRW

def allRW(G,p):
    if p > 1:
        p = 1
    n = int(p*nx.number_of_nodes(G))
    G1 = nx.Graph()
    while nx.number_of_nodes(G1) < n:
        count = 0
        start_id = random.randint(0,nx.number_of_nodes(G)-1)
        s_node = nx.nodes(G)[start_id]
        now_node = s_node
        while count < 5*n:
            #print(now_node)
            neighber_list = G.neighbors(now_node)
            for node in neighber_list:
                G1.add_edge(node,now_node)
            next_id = random.randint(0,len(neighber_list)-1)
            next_node = neighber_list[next_id]
            #G1.add_edge(now_node,next_node)
            count += 1

            now_node = next_node
            if nx.number_of_nodes(G1) >= n:
                break
    #print(now_node)
    #print(G.neighbors(now_node))

    return G1
Developer: iwaken71, Project: sampling, Lines: 27, Source file: sampling.py


Example 5: BFS

def BFS(G,p):
    if p > 1:
        p = 1
    n = int(p*nx.number_of_nodes(G))
    # print(n)
    G1 = nx.Graph()
    check = False
    while nx.number_of_nodes(G1) < n:
        process = []
        start_id = random.randint(0,nx.number_of_nodes(G)-1)
        s_node = nx.nodes(G)[start_id]
        process.append(s_node)
        ######### print(process)
        while True:
            now_node = process[0]
            neighbor = G.neighbors(now_node)
            for next_node in neighbor:
                if next_node not in G1.nodes():
                    G1.add_node(now_node)
                    G1.add_node(next_node)
                    process.append(next_node)
                else:
                    G1.add_node(now_node)
                    G1.add_node(next_node)

            process.remove(now_node)
            if nx.number_of_nodes(G1)>=n:
                check = True
                break
            if len(process) == 0:
                break
        if check:
            break
    return NodeConnect(G,G1)
Developer: iwaken71, Project: sampling, Lines: 34, Source file: sampling.py


Example 6: init

def init():
    global projectname
    global version_array
    global pos
    global x
    global y
    global size_array
    global numframes
    global sg
    for i in range(6):
        data_directory = projectname + "_history/" + projectname + version_array[i] + "/" + projectname
        [g, lines] = creategraph.readfile(data_directory)
        if i == 0:
            sg = creategraph.refine(g, 45)
            [pos, x, y] = creategraph.coordinate(sg)
            size = creategraph.point_sizes(sg, lines)
            zeros = np.array([0] * len(size))
            print 'len(size) = ', len(size)
            print 'zeros = ', zeros
            size_array.append(zeros)
            size_array.append(size)
        else:
            # create the graph induced by nodes from sg
            subg = nx.subgraph(g, nx.nodes(sg))
            print subg, sg
            if nx.number_of_nodes(subg) != nx.number_of_nodes(sg):
                print 'panic at 34' 
            else:  # the line below looks like an error, but it is not
                size = creategraph.point_sizes(sg, lines)
                size_array.append(size)


    x = np.array(x)
    y = np.array(y)
    size_array = np.array(size_array)
Developer: plumer, Project: codana, Lines: 35, Source file: animate.py


Example 7: algorithm

def algorithm(w1,w2,w3,w4,G1,G2,G3,G4):
	try:
		cc=np.array([nx.average_clustering(G1,weight='weight'),nx.average_clustering(G2,weight='weight'),nx.average_clustering(G3,weight='weight'),nx.average_clustering(G4,weight='weight')])
		spl=np.array([nx.average_shortest_path_length(G1,weight='weight'),nx.average_shortest_path_length(G2,weight='weight'),nx.average_shortest_path_length(G3,weight='weight'),nx.average_shortest_path_length(G4,weight='weight')])
		nds=np.array([nx.number_of_nodes(G1),nx.number_of_nodes(G2),nx.number_of_nodes(G3),nx.number_of_nodes(G4)])
		edgs= np.array([nx.number_of_edges(G1),nx.number_of_edges(G2),nx.number_of_edges(G3),nx.number_of_edges(G4)])
		if valid(cc):
			cc=stats.zscore(cc)
		else:
			cc=np.array([.1,.1,.1,.1])
		cc= cc-min(cc)+.1
		if valid(spl):
			spl=stats.zscore(spl)
		else:
			spl=np.array([.1,.1,.1,.1])
		spl= spl-min(spl)+.1
		if valid(nds):
			nds=stats.zscore(nds)
		else:
			nds=np.array([.1,.1,.1,.1])
		nds = nds-min(nds)+.1
		if valid(edgs):
			edgs=stats.zscore(edgs)
		else:
			edgs=np.array([.1,.1,.1,.1])
		edgs=edgs-min(edgs)+.1
		r1=(w1*cc[0]+w2*spl[0]+w3*nds[0]+w4*edgs[0])*1000
		r2=(w1*cc[1]+w2*spl[1]+w3*nds[1]+w4*edgs[1])*1000
		r3=(w1*cc[2]+w2*spl[2]+w3*nds[2]+w4*edgs[2])*1000
		r4=(w1*cc[3]+w2*spl[3]+w3*nds[3]+w4*edgs[3])*1000
		d={'Player 1:': r1, 'Player 2:': r2,'Player 3:': r3, 'Player 4:': r4}
		rank = sorted(d.items(), key=lambda x: x[1], reverse=True)
		return ["USAU RANKINGS",str(rank[0][0])+ " " + str(int(rank[0][1])),str(rank[1][0])+" "+ str(int(rank[1][1])),str(rank[2][0])+" "+ str(int(rank[2][1])),str(rank[3][0])+" "+str(int(rank[3][1]))]
	except:
		return ["Unable to compute rankings!  Need data","Player 1","Player 2","Player 3","Player 4"]
Developer: dagley11, Project: Garuda_Game, Lines: 35, Source file: Graph.py


Example 8: main

def main():
    timeStart = time.time()
    if rank == 0:
        proc = random.sample(range(1, nx.number_of_nodes(G)), size)
        for i in range(1, size):
            comm.send(proc[i], dest=i)
        starting_node = proc[0]
        if check_neighbours(starting_node, None):
            print "Graph is hamiltonian! (process", rank, "starting node", starting_node,")"
        else:
            print "Graph is not hamiltonian (process", rank, "starting node", starting_node,")"
        timeEnd = time.time() - timeStart
        print timeEnd
        comm.Abort()
    elif rank < nx.number_of_nodes(G):
        starting_node = comm.recv(source=0)
        if check_neighbours(starting_node, None):
            print "Graph is hamiltonian! (process", rank, "starting node", starting_node,")"
        else:
            print "Graph is not hamiltonian (process", rank, "starting node", starting_node,")"
        timeEnd = time.time() - timeStart
        print timeEnd
        comm.Abort()
    else:
        MPI.Finalize()        
Developer: cmarok, Project: Hamiltonian-Completion, Lines: 25, Source file: parallel_starts.py


Example 9: reduceGraph

def reduceGraph(read_g, write_g, minEdgeWeight, minNodeDegree, Lp, Sp):
    """
    Simplify the undirected graph and then update the 3 undirected weight properties.
    :param read_g: is the graph pickle to read
    :param write_g: is the updated graph pickle to write
    :param minEdgeWeight: the original weight of each edge should be >= minEdgeWeight
    :param minNodeDegree: the degree of each node should be >= minNodeDegree. The degree here is G.degree(node), NOT G.degree(node, weight='weight')
    :return: None
    """
    G=nx.read_gpickle(read_g)
    print 'number of original nodes: ', nx.number_of_nodes(G)
    print 'number of original edges: ', nx.number_of_edges(G)

    for (u,v,w) in G.edges(data='weight'):
        if w < minEdgeWeight:
            G.remove_edge(u,v)

    for n in G.nodes():
        if G.degree(n)<minNodeDegree:
            G.remove_node(n)

    print 'number of new nodes: ', nx.number_of_nodes(G)
    print 'number of new edges: ', nx.number_of_edges(G)

    for (a, b, w) in G.edges_iter(data='weight'):
        unweight_allocation(G, a, b, w,Lp,Sp)

    print 'update weight ok'
    nx.write_gpickle(G, write_g)

    return
Developer: FengShi0705, Project: webapp, Lines: 31, Source file: main.py


Example 10: run_main

def run_main():
    file = str(sys.argv[1])
    f = open(file, 'r')
    print "\nReading inputfile:", file, "..."
    
    edgelist = []
    for line in f.readlines():
        edgelist.append((int(line.split()[0]), int(line.split()[1])))
    
    
    Directed_G = nx.DiGraph(edgelist)
    Undirected_G = Directed_G.to_undirected()
    #plt.figure(figsize=(8,8))
    #nx.draw(Directed_G,pos=nx.spring_layout(Directed_G))
    #plt.draw()
    #time.sleep(0.1)

    # compute other things
    print "Number of nodes involved in network:", nx.number_of_nodes(Undirected_G)
    print "Number of edges:", nx.number_of_edges(Undirected_G)
    print "Average degree:", nx.number_of_edges(Undirected_G) / float(nx.number_of_nodes(Undirected_G))
    t0 = time.clock()
    print "Average clustering coefficient:", compute_clustering_coefficient(Directed_G, Undirected_G)
    print "Took:", time.clock() - t0, "seconds"
    t1 = time.clock()
    print "Average path length:", average_shortest_path(Directed_G, Undirected_G)
    print "Took:", time.clock() - t1, "seconds"
    print "Total time:", time.clock() - t0, "seconds"
           
    report_final_stats()
    counter += 1
    second_counter += 1
Developer: kryczko, Project: twitterexp, Lines: 32, Source file: analyze_network.py


Example 11: attacco

def attacco(compagnia):
    adiacenzaFinal = numpy.genfromtxt(
        ("/home/protoss/Documenti/Siscomp_datas/data/AdiacenzaEuclidea_{0}.csv".format(compagnia)),
        delimiter=",",
        dtype="int",
    )
    grafoFinal = networkx.Graph(adiacenzaFinal)

    graphSize = networkx.number_of_nodes(grafoFinal)
    steps = graphSize
    passo = 1
    i = 0
    ascisse.append(i)
    aziendaFinal.append(compagnia)
    diametro.append(2)
    relSizeGC.append(1)

    while networkx.number_of_nodes(grafoFinal) > passo:
        gradiFinal = pandas.DataFrame(grafoFinal.degree().items(), columns=["index", "grado"])
        gradiFinal.sort(["grado"], ascending=[False], inplace=True)
        sortedIDnode = gradiFinal["index"].values

        grafoFinal.remove_node(sortedIDnode[0])

        giantCluster = max(networkx.connected_component_subgraphs(grafoFinal), key=len)

        i += 100 / float(steps)
        ascisse.append(i)
        aziendaFinal.append(compagnia)

        newGraphSize = networkx.number_of_nodes(grafoFinal)
        #        diametro.append(networkx.diameter(giantCluster, e=None))
        relSizeGC.append((networkx.number_of_nodes(giantCluster)) / (float(newGraphSize)))
Developer: FedericoMuciaccia, Project: SistemiComplessi, Lines: 33, Source file: Iuri.py


Example 12: get_global_efficiency

def get_global_efficiency(filename):
    import networkx as nx

    threshold = 0
    f = open(filename[:-4] + "_global_efficiency.dat", "w")
    g = open(filename[:-4] + "_node_global_efficiency.dat", "w")
    for i in range(0, 101):
        threshold = float(i) / 100
        G = get_threshold_matrix(filename, threshold)
        global_efficiency = 0.0
        for node_i in G:
            sum_inverse_dist = 0.0
            for node_j in G:
                if node_i != node_j:
                    if nx.has_path(G, node_i, node_j) == True:
                        sum_inverse_dist += 1.0 / nx.shortest_path_length(G, node_i, node_j)
            g.write("%d\t%f\t%f\n" % ((node_i + 1), threshold, (sum_inverse_dist / nx.number_of_nodes(G))))
            global_efficiency += sum_inverse_dist / (nx.number_of_nodes(G) - 1.0)
        g.write("\n")
        global_efficiency = global_efficiency / nx.number_of_nodes(G)
        f.write("%f\t%f\n" % (threshold, global_efficiency))
        print "global efficiency for threshold %f: %f " % (threshold, global_efficiency)

    f.close()
    g.close()
Developer: rudimeier, Project: MSc_Thesis, Lines: 25, Source file: threshold_matrix.py


Example 13: failure

def failure(compagnia):
    adiacenzaFinal = numpy.genfromtxt(("/home/protoss/Documenti/Siscomp_datas/data/AdiacenzaEuclidea_{0}.csv".format(compagnia)),delimiter=',',dtype='int')
    grafoFinal = networkx.Graph(adiacenzaFinal)

    graphSize = networkx.number_of_nodes(grafoFinal)
    steps = graphSize
    passo = 1
    i = 0
    ascisse.append(i)
    aziendaFinal.append(compagnia)
    diametro.append(2)
    relSizeGC.append(1)
    
    while (networkx.number_of_nodes(grafoFinal) > passo):
        gradiFinal = pandas.DataFrame(grafoFinal.degree().items(), columns=['index', 'grado'])
        randomante = gradiFinal['index'].values
        randomante = numpy.random.permutation(randomante)

        grafoFinal.remove_node(randomante[0])
    
        giantCluster = max(networkx.connected_component_subgraphs(grafoFinal), key = len)
                            
        i += 100/steps
        ascisse.append(i)
        aziendaFinal.append(compagnia)

        graphSize = networkx.number_of_nodes(grafoFinal)
        diametro.append(networkx.diameter(giantCluster, e=None))
        relSizeGC.append((networkx.number_of_nodes(giantCluster))/(float(graphSize)))
Developer: FedericoMuciaccia, Project: SistemiComplessi, Lines: 29, Source file: Attack+&+Failure.py


Example 14: get_my_degree_distribution

def get_my_degree_distribution(filename) :
	threshold = 0
	f = open(filename[:-4]+'_degree_distr.dat','w')
	print(f)
	for i in range(0,101) :
		threshold = float(i)/100
		G = get_my_threshold_matrix(filename,threshold)
		check_sum = 0
		degree_hist = {}
		for node in G :
			if G.degree(node) not in degree_hist :		
				degree_hist[G.degree(node)] = 1
			else :
				degree_hist[G.degree(node)] += 1
		keys = degree_hist.keys()
		keys.sort()
		degrees = range(0, nx.number_of_nodes(G)+1 , 1) #?
		for item in degrees :
			if item in keys :
				prob = float(degree_hist[item])/float(nx.number_of_nodes(G))
				check_sum += prob
				f.write('%d\t%f\t%d\t%f\n'%(item, threshold, degree_hist[item], prob))
			else :
				f.write('%d\t%f\t0\t0.\n' % (item, threshold))
		f.write("\n")
		print 'degree distr of threshold: %f, check sum: %f' % (threshold, check_sum)
	f.close()
Developer: sheyma, Project: lab_rot_berlin, Lines: 27, Source file: python_begin_03.py


Example 15: nc_recursive

def nc_recursive(node, G, ind):
	if networkx.number_of_nodes(G) < 3:
		n = Node(None)
		n.add_child(Node(ind[G.nodes()[0]]))
		n.add_child(Node(ind[G.nodes()[1]]))
		node.add_child(n)
	else:
		C =  normalized_cut(G)
#		print(C)
	
		(G1, G2) = get_subgraphs(G, C)

		if networkx.number_of_nodes(G1) > 1:
			l = Node(None)
			nc_recursive(l, G1, ind)
			node.add_child(l)
		else:
			l = Node(ind[G1.nodes()[0]])
			node.add_child(l)
	
#		print(C)
#		print("P1 = ")
#		print(P1)
#		print("P2 = ")
#		print(P2)

		if networkx.number_of_nodes(G2) > 1:
			r = Node(None)
			nc_recursive(r, G2, ind)
			node.add_child(r)
		else:
			r = Node(ind[G2.nodes()[0]])
			node.add_child(r)
Developer: arleilps, Project: network-process-discovery, Lines: 33, Source file: graph_signal_proc.py


Example 16: get_single_network_measures

def get_single_network_measures(G, thr):
	f = open(out_prfx + 'single_network_measures.dat', 'a')
	N = nx.number_of_nodes(G)
	L = nx.number_of_edges(G)
	D = nx.density(G)
	cc = nx.average_clustering(G)
	compon = nx.number_connected_components(G)
	Con_sub = nx.connected_component_subgraphs(G)

	values = []
	values_2 =[]

	for node in G:
		values.append(G.degree(node))
	ave_deg = float(sum(values)) / float(N)
	
	f.write("%f\t%d\t%f\t%f\t%f\t%f\t" % (thr, L, D, cc, ave_deg, compon))
	#1. threshold, 2. edges, 3. density 4.clustering coefficient
	#5. average degree, 6. number of connected components
	
	for i in range(len(Con_sub)):
		if nx.number_of_nodes(Con_sub[i])>1:
			values_2.append(nx.average_shortest_path_length(Con_sub[i]))

	if len(values_2)==0:
		f.write("0.\n")
	else:
		f.write("%f\n" % (sum(values_2)/len(values_2)))
	#7. shortest pathway
	f.close()
Developer: rudimeier, Project: MSc_Thesis, Lines: 30, Source file: sb_randomization.py


Example 17: create_graph_of_agent

def create_graph_of_agent(agent):
    # takes a string as input and outputs a Networkx graph object
    (nodes, edges) = parse(agent)
    G = nx.MultiDiGraph()
    G.add_nodes_from(nodes)
    G.add_edges_from(edges)
    active_nodes = [1]
    next_nodes = []
    # print nx.number_of_nodes(G)

    for i in range(nx.number_of_nodes(G)):
        
        active_nodes = list(set(active_nodes + next_nodes))
        next_nodes = []

        for node in active_nodes:          
            next_nodes += G.successors(node) #0 is predecessors, 1 is successors
            #print "next_nodes ",next_nodes
    #print active_nodes
    #active_nodes=list(set(sorted(active_nodes)))
    
    #print active_nodes
    #print nodes
    #print edges
    
    
    extra_nodes = sorted(list(set(range(1, nx.number_of_nodes(G) +1)) 
                       - set(active_nodes)), reverse=True)
    #print extra_nodes
    for i in extra_nodes:
        G.remove_node(i)
        #print nx.number_of_nodes(G)
    
    return G
Developer: Frybo, Project: msc-dissertation, Lines: 34, Source file: IPD_simulation.py


Example 18: fast_search

def fast_search(G, F, k, n, ind):
	start = numpy.ones(networkx.number_of_nodes(G))
	C = laplacian_complete(networkx.number_of_nodes(G))
	A = weighted_adjacency_complete(G, F, ind)
	CAC = fast_cac(G, F, ind)

	return simple_spectral_cut(CAC, start, F, G, 1., k, n, ind)
Developer: arleilps, Project: network-process-discovery, Lines: 7, Source file: optimal_cut.py


Example 19: get_degree_distr

def get_degree_distr(filename):
  import networkx as nx
  threshold = 0
  f = open(filename[:-4]+'_degreedistr.dat','w')
  for i in range(0,101):
    threshold = float(i)/100
    G = get_threshold_matrix(filename, threshold)
    check_sum = 0.
    degree_hist = {}
    for node in G:
      if G.degree(node) not in degree_hist:
        degree_hist[G.degree(node)] = 1
      else:
        degree_hist[G.degree(node)] += 1
    degrees = range(0, nx.number_of_nodes(G)+1, 1)
    keys = degree_hist.keys()
    keys.sort()
    for item in degrees:
      if item in keys:
        check_sum += float(degree_hist[item])/float(nx.number_of_nodes(G))
        f.write('%d\t%f\t%d\t%f\n' % (item, threshold, degree_hist[item], float(degree_hist[item])/float(nx.number_of_nodes(G))))
        #print item, degree_hist[item], float(degree_hist[item])/float(nx.number_of_nodes(G)))
      else:
        f.write('%d\t%f\t0\t0.\n' % (item, threshold))
    f.write("\n")
    print 'degree distribution for threshold: %f, check sum: %f' % (threshold, check_sum)
  f.close()
Developer: sheyma, Project: lab_rot_berlin, Lines: 27, Source file: threshold_matrix.py


Example 20: get_my_global_efficiency

def get_my_global_efficiency(filename) :
	threshold = 0
	f = open(filename[:-4]+'_global_efficiency.dat','w')
	g = open(filename[:-4]+'_node_global_efficiency.dat','w')
	print f
	print g
	f.write('threshold\tglob_effic\n')
	g.write('node\tthreshold\tnode_glob_effc\n')	
	for i in range(0,101):
		threshold = float(i)/100
		G = get_my_threshold_matrix(filename, threshold)
		global_efficiency = 0.
		for node_i in G :
			sum_inverse_dist = 0.
			for node_j in G :
				if node_i != node_j :
					if nx.has_path(G, node_i, node_j) == True :
						sum_inverse_dist += 1. / nx.shortest_path_length(G, node_i, node_j) 
			g.write('%d\t%f\t%f\n' % ((node_i+1), threshold, (sum_inverse_dist / nx.number_of_nodes(G)) )) ##?
			global_efficiency += sum_inverse_dist / (nx.number_of_nodes(G) -1.)
		g.write("\n")
		global_efficiency = global_efficiency / nx.number_of_nodes(G)
		f.write("%f\t%f\n" % (threshold, global_efficiency))
	f.close()
	g.close()
Developer: sheyma, Project: lab_rot_berlin, Lines: 25, Source file: python_begin_03.py



Note: The networkx.number_of_nodes examples in this article were compiled by 纯净天空 from source code and documentation hosted on platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors. Refer to each project's License before distributing or reusing the code; do not reproduce without permission.

