author     Jarrod Millman <jarrod.millman@gmail.com>  2018-01-19 15:11:00 -0800
committer  Jarrod Millman <jarrod.millman@gmail.com>  2018-01-20 01:27:28 -0800
commit     7e63811866e86e87e244790b5310098737c22ec4 (patch)
tree       b8f6c3c711f72e7dd0b96312116c1ebba0899ad7 /networkx
parent     ea3a8826385f543775e2c6b147cbffcd289527da (diff)
download   networkx-7e63811866e86e87e244790b5310098737c22ec4.tar.gz
Pep8 fixes
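The patch below is a mechanical PEP 8 cleanup: spaces after commas and around operators, and two blank lines between top-level definitions. As an illustrative sketch only (the names answer, check, and nd are hypothetical and not taken from the patch), the recurring before/after pattern is:

# Before: the style this patch removes (no space after commas or around
# operators, no blank lines separating top-level definitions).
answer={1:2.0,2:1.5}
def check(nd):
    assert nd==answer

# After: the PEP 8 style this patch applies (spaces after ',' and around
# '=' / '==', two blank lines before a top-level def).
answer = {1: 2.0, 2: 1.5}


def check(nd):
    assert nd == answer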
Diffstat (limited to 'networkx')
-rw-r--r--  networkx/algorithms/__init__.py | 4
-rw-r--r--  networkx/algorithms/approximation/clique.py | 1
-rw-r--r--  networkx/algorithms/approximation/clustering_coefficient.py | 1
-rw-r--r--  networkx/algorithms/approximation/connectivity.py | 27
-rw-r--r--  networkx/algorithms/approximation/ramsey.py | 1
-rw-r--r--  networkx/algorithms/approximation/tests/test_approx_clust_coeff.py | 20
-rw-r--r--  networkx/algorithms/approximation/tests/test_connectivity.py | 52
-rw-r--r--  networkx/algorithms/approximation/tests/test_independent_set.py | 3
-rw-r--r--  networkx/algorithms/approximation/tests/test_kcomponents.py | 128
-rw-r--r--  networkx/algorithms/approximation/tests/test_matching.py | 3
-rw-r--r--  networkx/algorithms/approximation/tests/test_ramsey.py | 1
-rw-r--r--  networkx/algorithms/assortativity/connectivity.py | 1
-rw-r--r--  networkx/algorithms/assortativity/correlation.py | 65
-rw-r--r--  networkx/algorithms/assortativity/neighbor_degree.py | 20
-rw-r--r--  networkx/algorithms/assortativity/pairs.py | 33
-rw-r--r--  networkx/algorithms/assortativity/tests/base_test.py | 79
-rw-r--r--  networkx/algorithms/assortativity/tests/test_connectivity.py | 144
-rw-r--r--  networkx/algorithms/assortativity/tests/test_correlation.py | 80
-rw-r--r--  networkx/algorithms/assortativity/tests/test_mixing.py | 239
-rw-r--r--  networkx/algorithms/assortativity/tests/test_neighbor_degree.py | 104
-rw-r--r--  networkx/algorithms/assortativity/tests/test_pairs.py | 161
-rw-r--r--  networkx/algorithms/bipartite/basic.py | 62
-rw-r--r--  networkx/algorithms/bipartite/cluster.py | 79
-rw-r--r--  networkx/algorithms/bipartite/covering.py | 2
-rw-r--r--  networkx/algorithms/bipartite/edgelist.py | 82
-rw-r--r--  networkx/algorithms/bipartite/projection.py | 1
-rw-r--r--  networkx/algorithms/bipartite/spectral.py | 8
-rw-r--r--  networkx/algorithms/bipartite/tests/test_basic.py | 112
-rw-r--r--  networkx/algorithms/bipartite/tests/test_centrality.py | 223
-rw-r--r--  networkx/algorithms/bipartite/tests/test_cluster.py | 61
-rw-r--r--  networkx/algorithms/bipartite/tests/test_edgelist.py | 174
-rw-r--r--  networkx/algorithms/bipartite/tests/test_generators.py | 202
-rw-r--r--  networkx/algorithms/bipartite/tests/test_matching.py | 4
-rw-r--r--  networkx/algorithms/bipartite/tests/test_matrix.py | 68
-rw-r--r--  networkx/algorithms/bipartite/tests/test_project.py | 533
-rw-r--r--  networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py | 119
-rw-r--r--  networkx/algorithms/centrality/betweenness_subset.py | 4
-rw-r--r--  networkx/algorithms/centrality/degree_alg.py | 12
-rw-r--r--  networkx/algorithms/centrality/flow_matrix.py | 14
-rw-r--r--  networkx/algorithms/centrality/harmonic.py | 2
-rw-r--r--  networkx/algorithms/centrality/katz.py | 12
-rw-r--r--  networkx/algorithms/centrality/load.py | 4
-rw-r--r--  networkx/algorithms/centrality/reaching.py | 4
-rw-r--r--  networkx/algorithms/centrality/tests/test_betweenness_centrality.py | 564
-rw-r--r--  networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py | 14
-rw-r--r--  networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py | 209
-rw-r--r--  networkx/algorithms/centrality/tests/test_current_flow_closeness.py | 33
-rw-r--r--  networkx/algorithms/centrality/tests/test_degree_centrality.py | 85
-rw-r--r--  networkx/algorithms/centrality/tests/test_eigenvector_centrality.py | 130
-rw-r--r--  networkx/algorithms/centrality/tests/test_harmonic_centrality.py | 32
-rw-r--r--  networkx/algorithms/centrality/tests/test_katz_centrality.py | 6
-rw-r--r--  networkx/algorithms/centrality/tests/test_subgraph.py | 77
-rw-r--r--  networkx/algorithms/coloring/tests/test_coloring.py | 96
-rw-r--r--  networkx/algorithms/community/community_generators.py | 12
-rw-r--r--  networkx/algorithms/community/kclique.py | 6
-rw-r--r--  networkx/algorithms/community/label_propagation.py | 2
-rw-r--r--  networkx/algorithms/community/tests/test_asyn_fluidc.py | 2
-rw-r--r--  networkx/algorithms/community/tests/test_centrality.py | 3
-rw-r--r--  networkx/algorithms/community/tests/test_label_propagation.py | 2
-rw-r--r--  networkx/algorithms/community/tests/test_quality.py | 6
-rw-r--r--  networkx/algorithms/components/tests/test_attracting.py | 2
-rw-r--r--  networkx/algorithms/components/tests/test_biconnected.py | 51
-rw-r--r--  networkx/algorithms/components/tests/test_connected.py | 16
-rw-r--r--  networkx/algorithms/components/tests/test_semiconnected.py | 1
-rw-r--r--  networkx/algorithms/components/tests/test_strongly_connected.py | 13
-rw-r--r--  networkx/algorithms/components/tests/test_subgraph_copies.py | 4
-rw-r--r--  networkx/algorithms/components/tests/test_weakly_connected.py | 26
-rw-r--r--  networkx/algorithms/connectivity/__init__.py | 2
-rw-r--r--  networkx/algorithms/connectivity/cuts.py | 11
-rw-r--r--  networkx/algorithms/connectivity/disjoint_paths.py | 6
-rw-r--r--  networkx/algorithms/connectivity/kcutsets.py | 21
-rw-r--r--  networkx/algorithms/connectivity/tests/test_connectivity.py | 64
-rw-r--r--  networkx/algorithms/connectivity/tests/test_cuts.py | 47
-rw-r--r--  networkx/algorithms/connectivity/tests/test_disjoint_paths.py | 6
-rw-r--r--  networkx/algorithms/connectivity/tests/test_edge_augmentation.py | 2
-rw-r--r--  networkx/algorithms/connectivity/tests/test_edge_kcomponents.py | 4
-rw-r--r--  networkx/algorithms/connectivity/tests/test_stoer_wagner.py | 32
-rw-r--r--  networkx/algorithms/connectivity/utils.py | 2
-rw-r--r--  networkx/algorithms/covering.py | 2
-rw-r--r--  networkx/algorithms/cycles.py | 4
-rw-r--r--  networkx/algorithms/dag.py | 2
-rw-r--r--  networkx/algorithms/distance_measures.py | 4
-rw-r--r--  networkx/algorithms/flow/capacityscaling.py | 3
-rw-r--r--  networkx/algorithms/flow/gomory_hu.py | 10
-rw-r--r--  networkx/algorithms/flow/mincost.py | 30
-rw-r--r--  networkx/algorithms/flow/networksimplex.py | 3
-rw-r--r--  networkx/algorithms/flow/tests/test_gomory_hu.py | 1
-rw-r--r--  networkx/algorithms/flow/tests/test_maxflow.py | 185
-rw-r--r--  networkx/algorithms/flow/tests/test_maxflow_large_graph.py | 39
-rw-r--r--  networkx/algorithms/flow/tests/test_mincost.py | 151
-rw-r--r--  networkx/algorithms/isomorphism/__init__.py | 1
-rw-r--r--  networkx/algorithms/isomorphism/isomorph.py | 52
-rw-r--r--  networkx/algorithms/isomorphism/isomorphvf2.py | 92
-rw-r--r--  networkx/algorithms/isomorphism/temporalisomorphvf2.py | 3
-rw-r--r--  networkx/algorithms/isomorphism/tests/test_isomorphism.py | 33
-rw-r--r--  networkx/algorithms/isomorphism/tests/test_match_helpers.py | 14
-rw-r--r--  networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py | 14
-rw-r--r--  networkx/algorithms/isomorphism/tests/test_vf2userfunc.py | 54
-rw-r--r--  networkx/algorithms/isomorphism/vf2userfunc.py | 14
-rw-r--r--  networkx/algorithms/link_analysis/pagerank_alg.py | 6
-rw-r--r--  networkx/algorithms/link_analysis/tests/test_hits.py | 75
-rw-r--r--  networkx/algorithms/link_analysis/tests/test_pagerank.py | 11
-rw-r--r--  networkx/algorithms/lowest_common_ancestors.py | 4
-rw-r--r--  networkx/algorithms/matching.py | 12
-rw-r--r--  networkx/algorithms/minors.py | 1
-rw-r--r--  networkx/algorithms/mis.py | 4
-rw-r--r--  networkx/algorithms/operators/tests/test_all.py | 195
-rw-r--r--  networkx/algorithms/operators/tests/test_binary.py | 296
-rw-r--r--  networkx/algorithms/operators/tests/test_unary.py | 59
-rw-r--r--  networkx/algorithms/operators/unary.py | 4
-rw-r--r--  networkx/algorithms/shortest_paths/__init__.py | 1
-rw-r--r--  networkx/algorithms/shortest_paths/generic.py | 15
-rw-r--r--  networkx/algorithms/shortest_paths/tests/test_dense.py | 102
-rw-r--r--  networkx/algorithms/shortest_paths/tests/test_dense_numpy.py | 47
-rw-r--r--  networkx/algorithms/shortest_paths/tests/test_generic.py | 93
-rw-r--r--  networkx/algorithms/shortest_paths/tests/test_unweighted.py | 88
-rw-r--r--  networkx/algorithms/shortest_paths/tests/test_weighted.py | 76
-rw-r--r--  networkx/algorithms/similarity.py | 50
-rw-r--r--  networkx/algorithms/tests/test_cycles.py | 28
-rw-r--r--  networkx/algorithms/tests/test_dag.py | 3
-rw-r--r--  networkx/algorithms/tests/test_link_prediction.py | 2
-rw-r--r--  networkx/algorithms/tests/test_matching.py | 55
-rw-r--r--  networkx/algorithms/tests/test_mis.py | 2
-rw-r--r--  networkx/algorithms/tests/test_similarity.py | 23
-rw-r--r--  networkx/algorithms/tests/test_simple_paths.py | 40
-rw-r--r--  networkx/algorithms/traversal/depth_first_search.py | 8
-rw-r--r--  networkx/algorithms/traversal/edgedfs.py | 3
-rw-r--r--  networkx/algorithms/traversal/tests/test_dfs.py | 72
-rw-r--r--  networkx/algorithms/traversal/tests/test_edgedfs.py | 9
-rw-r--r--  networkx/algorithms/tree/branchings.py | 32
-rw-r--r--  networkx/algorithms/tree/recognition.py | 1
-rw-r--r--  networkx/algorithms/tree/tests/test_operations.py | 1
-rw-r--r--  networkx/algorithms/tree/tests/test_recognition.py | 34
-rw-r--r--  networkx/classes/coreviews.py | 2
-rw-r--r--  networkx/classes/graphviews.py | 6
-rw-r--r--  networkx/classes/reportviews.py | 16
-rw-r--r--  networkx/classes/tests/test_multigraph.py | 2
-rw-r--r--  networkx/classes/tests/test_reportviews.py | 6
-rw-r--r--  networkx/classes/tests/test_subgraphviews.py | 2
-rw-r--r--  networkx/drawing/layout.py | 16
-rw-r--r--  networkx/drawing/nx_agraph.py | 6
-rw-r--r--  networkx/drawing/nx_pylab.py | 2
-rw-r--r--  networkx/drawing/tests/test_agraph.py | 8
-rw-r--r--  networkx/drawing/tests/test_pylab.py | 8
-rw-r--r--  networkx/generators/geometric.py | 33
-rw-r--r--  networkx/generators/lattice.py | 6
-rw-r--r--  networkx/generators/line.py | 4
-rw-r--r--  networkx/generators/mycielski.py | 12
-rw-r--r--  networkx/generators/tests/test_expanders.py | 4
-rw-r--r--  networkx/generators/tests/test_geometric.py | 27
-rw-r--r--  networkx/generators/tests/test_lattice.py | 11
-rw-r--r--  networkx/generators/tests/test_mycielski.py | 3
-rw-r--r--  networkx/generators/tests/test_trees.py | 4
-rw-r--r--  networkx/linalg/algebraicconnectivity.py | 3
-rw-r--r--  networkx/readwrite/edgelist.py | 92
-rw-r--r--  networkx/readwrite/gml.py | 6
-rw-r--r--  networkx/readwrite/graph6.py | 28
-rw-r--r--  networkx/readwrite/graphml.py | 4
-rw-r--r--  networkx/readwrite/json_graph/cytoscape.py | 28
-rw-r--r--  networkx/readwrite/json_graph/jit.py | 1
-rw-r--r--  networkx/readwrite/json_graph/tests/test_adjacency.py | 35
-rw-r--r--  networkx/readwrite/json_graph/tests/test_cytoscape.py | 53
-rw-r--r--  networkx/readwrite/json_graph/tests/test_node_link.py | 3
-rw-r--r--  networkx/readwrite/json_graph/tests/test_tree.py | 33
-rw-r--r--  networkx/readwrite/nx_yaml.py | 14
-rw-r--r--  networkx/readwrite/sparse6.py | 40
-rw-r--r--  networkx/readwrite/tests/test_adjlist.py | 212
-rw-r--r--  networkx/readwrite/tests/test_edgelist.py | 174
-rw-r--r--  networkx/readwrite/tests/test_gpickle.py | 38
-rw-r--r--  networkx/readwrite/tests/test_graph6.py | 15
-rw-r--r--  networkx/readwrite/tests/test_leda.py | 32
-rw-r--r--  networkx/readwrite/tests/test_p2g.py | 44
-rw-r--r--  networkx/readwrite/tests/test_pajek.py | 38
-rw-r--r--  networkx/readwrite/tests/test_shp.py | 31
-rw-r--r--  networkx/readwrite/tests/test_sparse6.py | 34
-rw-r--r--  networkx/readwrite/tests/test_yaml.py | 11
-rw-r--r--  networkx/relabel.py | 2
-rw-r--r--  networkx/tests/test_convert_pandas.py | 10
-rw-r--r--  networkx/utils/heaps.py | 1
-rw-r--r--  networkx/utils/tests/test_decorators.py | 4
-rw-r--r--  networkx/utils/tests/test_heaps.py | 110
-rw-r--r--  networkx/utils/tests/test_unionfind.py | 1
182 files changed, 4090 insertions, 3886 deletions
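The commit message does not say how the cleanup was produced or verified, so as an assumption-labelled sketch, one way to confirm the package is PEP 8 clean is to run the pycodestyle checker (the reference PEP 8 tool, formerly named pep8) over the source tree:

# Minimal sketch, not part of this commit: check PEP 8 conformance of the
# networkx package with pycodestyle. The path 'networkx' is assumed to be the
# package directory in the current working copy.
import pycodestyle

style = pycodestyle.StyleGuide(quiet=False)
report = style.check_files(['networkx'])
print('PEP 8 violations found:', report.total_errors)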
diff --git a/networkx/algorithms/__init__.py b/networkx/algorithms/__init__.py
index a187067a..c77e966e 100644
--- a/networkx/algorithms/__init__.py
+++ b/networkx/algorithms/__init__.py
@@ -78,7 +78,7 @@ from networkx.algorithms.connectivity import all_pairs_node_connectivity
from networkx.algorithms.connectivity import all_node_cuts
from networkx.algorithms.connectivity import average_node_connectivity
from networkx.algorithms.connectivity import edge_connectivity
-from networkx.algorithms.connectivity import edge_disjoint_paths
+from networkx.algorithms.connectivity import edge_disjoint_paths
from networkx.algorithms.connectivity import k_components
from networkx.algorithms.connectivity import k_edge_components
from networkx.algorithms.connectivity import k_edge_subgraphs
@@ -87,7 +87,7 @@ from networkx.algorithms.connectivity import is_k_edge_connected
from networkx.algorithms.connectivity import minimum_edge_cut
from networkx.algorithms.connectivity import minimum_node_cut
from networkx.algorithms.connectivity import node_connectivity
-from networkx.algorithms.connectivity import node_disjoint_paths
+from networkx.algorithms.connectivity import node_disjoint_paths
from networkx.algorithms.connectivity import stoer_wagner
from networkx.algorithms.flow import capacity_scaling
from networkx.algorithms.flow import cost_of_flow
diff --git a/networkx/algorithms/approximation/clique.py b/networkx/algorithms/approximation/clique.py
index eff7e81f..5bfc70d2 100644
--- a/networkx/algorithms/approximation/clique.py
+++ b/networkx/algorithms/approximation/clique.py
@@ -155,6 +155,7 @@ def large_clique_size(G):
"""
degrees = G.degree
+
def _clique_heuristic(G, U, size, best_size):
if not U:
return max(best_size, size)
diff --git a/networkx/algorithms/approximation/clustering_coefficient.py b/networkx/algorithms/approximation/clustering_coefficient.py
index f6d2d254..352dd5dd 100644
--- a/networkx/algorithms/approximation/clustering_coefficient.py
+++ b/networkx/algorithms/approximation/clustering_coefficient.py
@@ -11,6 +11,7 @@ __all__ = ['average_clustering']
__author__ = """\n""".join(['Fred Morstatter <fred.morstatter@asu.edu>',
'Jordi Torrents <jtorrents@milnou.net>'])
+
@not_implemented_for('directed')
def average_clustering(G, trials=1000):
r"""Estimates the average clustering coefficient of G.
diff --git a/networkx/algorithms/approximation/connectivity.py b/networkx/algorithms/approximation/connectivity.py
index 67b2ee47..a0e91bfe 100644
--- a/networkx/algorithms/approximation/connectivity.py
+++ b/networkx/algorithms/approximation/connectivity.py
@@ -1,6 +1,6 @@
""" Fast approximation for node connectivity
"""
-# Copyright (C) 2015 by
+# Copyright (C) 2015 by
# Jordi Torrents <jtorrents@milnou.net>
# All rights reserved.
# BSD license.
@@ -20,7 +20,7 @@ INF = float('inf')
def local_node_connectivity(G, source, target, cutoff=None):
"""Compute node connectivity between source and target.
-
+
Pairwise or local node connectivity between two distinct and nonadjacent
nodes is the minimum number of nodes that must be removed (minimum
separating cutset) to disconnect them. By Menger's theorem, this is equal
@@ -83,7 +83,7 @@ def local_node_connectivity(G, source, target, cutoff=None):
.. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
http://eclectic.ss.uci.edu/~drwhite/working.pdf
-
+
"""
if target == source:
raise nx.NetworkXError("source and target have to be different nodes.")
@@ -97,7 +97,7 @@ def local_node_connectivity(G, source, target, cutoff=None):
K = 0
if not possible:
return K
-
+
if cutoff is None:
cutoff = INF
@@ -128,7 +128,7 @@ def node_connectivity(G, s=None, t=None):
This algorithm is based on a fast approximation that gives an strict lower
bound on the actual number of node independent paths between two nodes [1]_.
It works for both directed and undirected graphs.
-
+
Parameters
----------
G : NetworkX graph
@@ -153,7 +153,7 @@ def node_connectivity(G, s=None, t=None):
>>> G = nx.octahedral_graph()
>>> approx.node_connectivity(G)
4
-
+
Notes
-----
This algorithm [1]_ finds node independents paths between two nodes by
@@ -191,6 +191,7 @@ def node_connectivity(G, s=None, t=None):
if G.is_directed():
connected_func = nx.is_weakly_connected
iter_func = itertools.permutations
+
def neighbors(v):
return itertools.chain(G.predecessors(v), G.successors(v))
else:
@@ -322,7 +323,7 @@ def _bidirectional_shortest_path(G, source, target, exclude):
.. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
http://eclectic.ss.uci.edu/~drwhite/working.pdf
-
+
"""
# call helper to do the real work
results = _bidirectional_pred_succ(G, source, target, exclude)
@@ -348,10 +349,10 @@ def _bidirectional_pred_succ(G, source, target, exclude):
# does BFS from both source and target and meets in the middle
# excludes nodes in the container "exclude" from the search
if source is None or target is None:
- raise nx.NetworkXException(\
+ raise nx.NetworkXException(
"Bidirectional shortest path called without source or target")
if target == source:
- return ({target:None},{source:None},source)
+ return ({target: None}, {source: None}, source)
# handle either directed or undirected
if G.is_directed():
@@ -370,7 +371,7 @@ def _bidirectional_pred_succ(G, source, target, exclude):
reverse_fringe = [target]
level = 0
-
+
while forward_fringe and reverse_fringe:
# Make sure that we iterate one step forward and one step backwards
# thus source and target will only tigger "found path" when they are
@@ -387,7 +388,7 @@ def _bidirectional_pred_succ(G, source, target, exclude):
forward_fringe.append(w)
pred[w] = v
if w in succ:
- return pred, succ, w # found path
+ return pred, succ, w # found path
else:
this_level = reverse_fringe
reverse_fringe = []
@@ -398,7 +399,7 @@ def _bidirectional_pred_succ(G, source, target, exclude):
if w not in succ:
succ[w] = v
reverse_fringe.append(w)
- if w in pred:
- return pred, succ, w # found path
+ if w in pred:
+ return pred, succ, w # found path
raise nx.NetworkXNoPath("No path between %s and %s." % (source, target))
diff --git a/networkx/algorithms/approximation/ramsey.py b/networkx/algorithms/approximation/ramsey.py
index 96e8049b..48a9df79 100644
--- a/networkx/algorithms/approximation/ramsey.py
+++ b/networkx/algorithms/approximation/ramsey.py
@@ -12,6 +12,7 @@ from ...utils import arbitrary_element
__all__ = ["ramsey_R2"]
__author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)"""
+
def ramsey_R2(G):
r"""Approximately computes the Ramsey number `R(2;s,t)` for graph.
diff --git a/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py b/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py
index e0e9ed5c..b2cae934 100644
--- a/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py
+++ b/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py
@@ -2,32 +2,38 @@ from nose.tools import assert_equal
import networkx as nx
from networkx.algorithms.approximation import average_clustering
-# This approximation has to be be exact in regular graphs
+# This approximation has to be be exact in regular graphs
# with no triangles or with all possible triangles.
+
+
def test_petersen():
# Actual coefficient is 0
G = nx.petersen_graph()
- assert_equal(average_clustering(G, trials=int(len(G)/2)),
+ assert_equal(average_clustering(G, trials=int(len(G) / 2)),
nx.average_clustering(G))
+
def test_tetrahedral():
# Actual coefficient is 1
G = nx.tetrahedral_graph()
- assert_equal(average_clustering(G, trials=int(len(G)/2)),
+ assert_equal(average_clustering(G, trials=int(len(G) / 2)),
nx.average_clustering(G))
+
def test_dodecahedral():
# Actual coefficient is 0
G = nx.dodecahedral_graph()
- assert_equal(average_clustering(G, trials=int(len(G)/2)),
+ assert_equal(average_clustering(G, trials=int(len(G) / 2)),
nx.average_clustering(G))
+
def test_empty():
G = nx.empty_graph(5)
- assert_equal(average_clustering(G, trials=int(len(G)/2)), 0)
+ assert_equal(average_clustering(G, trials=int(len(G) / 2)), 0)
+
def test_complete():
G = nx.complete_graph(5)
- assert_equal(average_clustering(G, trials=int(len(G)/2)), 1)
+ assert_equal(average_clustering(G, trials=int(len(G) / 2)), 1)
G = nx.complete_graph(7)
- assert_equal(average_clustering(G, trials=int(len(G)/2)), 1)
+ assert_equal(average_clustering(G, trials=int(len(G) / 2)), 1)
diff --git a/networkx/algorithms/approximation/tests/test_connectivity.py b/networkx/algorithms/approximation/tests/test_connectivity.py
index 991f5526..057ec177 100644
--- a/networkx/algorithms/approximation/tests/test_connectivity.py
+++ b/networkx/algorithms/approximation/tests/test_connectivity.py
@@ -8,12 +8,13 @@ from networkx.algorithms import approximation as approx
def test_global_node_connectivity():
# Figure 1 chapter on Connectivity
G = nx.Graph()
- G.add_edges_from([(1,2),(1,3),(1,4),(1,5),(2,3),(2,6),(3,4),
- (3,6),(4,6),(4,7),(5,7),(6,8),(6,9),(7,8),
- (7,10),(8,11),(9,10),(9,11),(10,11)])
- assert_equal(2, approx.local_node_connectivity(G,1,11))
+ G.add_edges_from([(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4),
+ (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8),
+ (7, 10), (8, 11), (9, 10), (9, 11), (10, 11)])
+ assert_equal(2, approx.local_node_connectivity(G, 1, 11))
assert_equal(2, approx.node_connectivity(G))
- assert_equal(2, approx.node_connectivity(G,1,11))
+ assert_equal(2, approx.node_connectivity(G, 1, 11))
+
def test_white_harary1():
# Figure 1b white and harary (2001)
@@ -21,19 +22,21 @@ def test_white_harary1():
# (node connectivity)
G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4))
G.remove_node(7)
- for i in range(4,7):
- G.add_edge(0,i)
+ for i in range(4, 7):
+ G.add_edge(0, i)
G = nx.disjoint_union(G, nx.complete_graph(4))
- G.remove_node(G.order()-1)
- for i in range(7,10):
- G.add_edge(0,i)
+ G.remove_node(G.order() - 1)
+ for i in range(7, 10):
+ G.add_edge(0, i)
assert_equal(1, approx.node_connectivity(G))
+
def test_complete_graphs():
for n in range(5, 25, 5):
G = nx.complete_graph(n)
- assert_equal(n-1, approx.node_connectivity(G))
- assert_equal(n-1, approx.node_connectivity(G, 0, 3))
+ assert_equal(n - 1, approx.node_connectivity(G))
+ assert_equal(n - 1, approx.node_connectivity(G, 0, 3))
+
def test_empty_graphs():
for k in range(5, 25, 5):
@@ -41,57 +44,65 @@ def test_empty_graphs():
assert_equal(0, approx.node_connectivity(G))
assert_equal(0, approx.node_connectivity(G, 0, 3))
+
def test_petersen():
G = nx.petersen_graph()
assert_equal(3, approx.node_connectivity(G))
assert_equal(3, approx.node_connectivity(G, 0, 5))
# Approximation fails with tutte graph
-#def test_tutte():
+# def test_tutte():
# G = nx.tutte_graph()
# assert_equal(3, approx.node_connectivity(G))
+
def test_dodecahedral():
G = nx.dodecahedral_graph()
assert_equal(3, approx.node_connectivity(G))
assert_equal(3, approx.node_connectivity(G, 0, 5))
+
def test_octahedral():
- G=nx.octahedral_graph()
+ G = nx.octahedral_graph()
assert_equal(4, approx.node_connectivity(G))
assert_equal(4, approx.node_connectivity(G, 0, 5))
# Approximation can fail with icosahedral graph depending
# on iteration order.
-#def test_icosahedral():
+# def test_icosahedral():
# G=nx.icosahedral_graph()
# assert_equal(5, approx.node_connectivity(G))
# assert_equal(5, approx.node_connectivity(G, 0, 5))
+
def test_only_source():
G = nx.complete_graph(5)
assert_raises(nx.NetworkXError, approx.node_connectivity, G, s=0)
+
def test_only_target():
G = nx.complete_graph(5)
assert_raises(nx.NetworkXError, approx.node_connectivity, G, t=0)
+
def test_missing_source():
G = nx.path_graph(4)
assert_raises(nx.NetworkXError, approx.node_connectivity, G, 10, 1)
+
def test_missing_target():
G = nx.path_graph(4)
assert_raises(nx.NetworkXError, approx.node_connectivity, G, 1, 10)
+
def test_source_equals_target():
G = nx.complete_graph(5)
assert_raises(nx.NetworkXError, approx.local_node_connectivity, G, 0, 0)
def test_directed_node_connectivity():
- G = nx.cycle_graph(10, create_using=nx.DiGraph()) # only one direction
- D = nx.cycle_graph(10).to_directed() # 2 reciprocal edges
+ G = nx.cycle_graph(10, create_using=nx.DiGraph()) # only one direction
+ D = nx.cycle_graph(10).to_directed() # 2 reciprocal edges
assert_equal(1, approx.node_connectivity(G))
assert_equal(1, approx.node_connectivity(G, 1, 4))
assert_equal(2, approx.node_connectivity(D))
@@ -111,8 +122,8 @@ class TestAllPairsNodeConnectivityApprox:
self.K10 = nx.complete_graph(10)
self.K5 = nx.complete_graph(5)
self.G_list = [self.path, self.directed_path, self.cycle,
- self.directed_cycle, self.gnp, self.directed_gnp, self.K10,
- self.K5, self.K20]
+ self.directed_cycle, self.gnp, self.directed_gnp, self.K10,
+ self.K5, self.K20]
def test_cycles(self):
K_undir = approx.all_pairs_node_connectivity(self.cycle)
@@ -129,7 +140,7 @@ class TestAllPairsNodeConnectivityApprox:
K = approx.all_pairs_node_connectivity(G)
for source in K:
for target, k in K[source].items():
- assert_true(k == len(G)-1)
+ assert_true(k == len(G) - 1)
def test_paths(self):
K_undir = approx.all_pairs_node_connectivity(self.path)
@@ -157,4 +168,3 @@ class TestAllPairsNodeConnectivityApprox:
nbunch = [0, 2, 3]
C = approx.all_pairs_node_connectivity(G, nbunch=nbunch)
assert_equal(len(C), len(nbunch))
-
diff --git a/networkx/algorithms/approximation/tests/test_independent_set.py b/networkx/algorithms/approximation/tests/test_independent_set.py
index 8825ec89..199152dc 100644
--- a/networkx/algorithms/approximation/tests/test_independent_set.py
+++ b/networkx/algorithms/approximation/tests/test_independent_set.py
@@ -2,7 +2,8 @@ from nose.tools import *
import networkx as nx
import networkx.algorithms.approximation as a
+
def test_independent_set():
# smoke test
G = nx.Graph()
- assert_equal(len(a.maximum_independent_set(G)),0)
+ assert_equal(len(a.maximum_independent_set(G)), 0)
diff --git a/networkx/algorithms/approximation/tests/test_kcomponents.py b/networkx/algorithms/approximation/tests/test_kcomponents.py
index dde83bcb..e3b0eff4 100644
--- a/networkx/algorithms/approximation/tests/test_kcomponents.py
+++ b/networkx/algorithms/approximation/tests/test_kcomponents.py
@@ -15,102 +15,107 @@ def build_k_number_dict(k_components):
return k_num
##
-## Some nice synthetic graphs
+# Some nice synthetic graphs
##
+
+
def graph_example_1():
- G = nx.convert_node_labels_to_integers(nx.grid_graph([5,5]),
- label_attribute='labels')
+ G = nx.convert_node_labels_to_integers(nx.grid_graph([5, 5]),
+ label_attribute='labels')
rlabels = nx.get_node_attributes(G, 'labels')
labels = {v: k for k, v in rlabels.items()}
- for nodes in [(labels[(0,0)], labels[(1,0)]),
- (labels[(0,4)], labels[(1,4)]),
- (labels[(3,0)], labels[(4,0)]),
- (labels[(3,4)], labels[(4,4)]) ]:
- new_node = G.order()+1
+ for nodes in [(labels[(0, 0)], labels[(1, 0)]),
+ (labels[(0, 4)], labels[(1, 4)]),
+ (labels[(3, 0)], labels[(4, 0)]),
+ (labels[(3, 4)], labels[(4, 4)])]:
+ new_node = G.order() + 1
# Petersen graph is triconnected
P = nx.petersen_graph()
- G = nx.disjoint_union(G,P)
+ G = nx.disjoint_union(G, P)
# Add two edges between the grid and P
- G.add_edge(new_node+1, nodes[0])
+ G.add_edge(new_node + 1, nodes[0])
G.add_edge(new_node, nodes[1])
# K5 is 4-connected
K = nx.complete_graph(5)
- G = nx.disjoint_union(G,K)
+ G = nx.disjoint_union(G, K)
# Add three edges between P and K5
- G.add_edge(new_node+2,new_node+11)
- G.add_edge(new_node+3,new_node+12)
- G.add_edge(new_node+4,new_node+13)
+ G.add_edge(new_node + 2, new_node + 11)
+ G.add_edge(new_node + 3, new_node + 12)
+ G.add_edge(new_node + 4, new_node + 13)
# Add another K5 sharing a node
- G = nx.disjoint_union(G,K)
- nbrs = G[new_node+10]
- G.remove_node(new_node+10)
+ G = nx.disjoint_union(G, K)
+ nbrs = G[new_node + 10]
+ G.remove_node(new_node + 10)
for nbr in nbrs:
- G.add_edge(new_node+17, nbr)
- G.add_edge(new_node+16, new_node+5)
+ G.add_edge(new_node + 17, nbr)
+ G.add_edge(new_node + 16, new_node + 5)
return G
+
def torrents_and_ferraro_graph():
- G = nx.convert_node_labels_to_integers(nx.grid_graph([5,5]),
- label_attribute='labels')
+ G = nx.convert_node_labels_to_integers(nx.grid_graph([5, 5]),
+ label_attribute='labels')
rlabels = nx.get_node_attributes(G, 'labels')
labels = {v: k for k, v in rlabels.items()}
- for nodes in [ (labels[(0,4)], labels[(1,4)]),
- (labels[(3,4)], labels[(4,4)]) ]:
- new_node = G.order()+1
+ for nodes in [(labels[(0, 4)], labels[(1, 4)]),
+ (labels[(3, 4)], labels[(4, 4)])]:
+ new_node = G.order() + 1
# Petersen graph is triconnected
P = nx.petersen_graph()
- G = nx.disjoint_union(G,P)
+ G = nx.disjoint_union(G, P)
# Add two edges between the grid and P
- G.add_edge(new_node+1, nodes[0])
+ G.add_edge(new_node + 1, nodes[0])
G.add_edge(new_node, nodes[1])
# K5 is 4-connected
K = nx.complete_graph(5)
- G = nx.disjoint_union(G,K)
+ G = nx.disjoint_union(G, K)
# Add three edges between P and K5
- G.add_edge(new_node+2,new_node+11)
- G.add_edge(new_node+3,new_node+12)
- G.add_edge(new_node+4,new_node+13)
+ G.add_edge(new_node + 2, new_node + 11)
+ G.add_edge(new_node + 3, new_node + 12)
+ G.add_edge(new_node + 4, new_node + 13)
# Add another K5 sharing a node
- G = nx.disjoint_union(G,K)
- nbrs = G[new_node+10]
- G.remove_node(new_node+10)
+ G = nx.disjoint_union(G, K)
+ nbrs = G[new_node + 10]
+ G.remove_node(new_node + 10)
for nbr in nbrs:
- G.add_edge(new_node+17, nbr)
+ G.add_edge(new_node + 17, nbr)
# Commenting this makes the graph not biconnected !!
# This stupid mistake make one reviewer very angry :P
- G.add_edge(new_node+16, new_node+8)
+ G.add_edge(new_node + 16, new_node + 8)
- for nodes in [(labels[(0,0)], labels[(1,0)]),
- (labels[(3,0)], labels[(4,0)])]:
- new_node = G.order()+1
+ for nodes in [(labels[(0, 0)], labels[(1, 0)]),
+ (labels[(3, 0)], labels[(4, 0)])]:
+ new_node = G.order() + 1
# Petersen graph is triconnected
P = nx.petersen_graph()
- G = nx.disjoint_union(G,P)
+ G = nx.disjoint_union(G, P)
# Add two edges between the grid and P
- G.add_edge(new_node+1, nodes[0])
+ G.add_edge(new_node + 1, nodes[0])
G.add_edge(new_node, nodes[1])
# K5 is 4-connected
K = nx.complete_graph(5)
- G = nx.disjoint_union(G,K)
+ G = nx.disjoint_union(G, K)
# Add three edges between P and K5
- G.add_edge(new_node+2,new_node+11)
- G.add_edge(new_node+3,new_node+12)
- G.add_edge(new_node+4,new_node+13)
+ G.add_edge(new_node + 2, new_node + 11)
+ G.add_edge(new_node + 3, new_node + 12)
+ G.add_edge(new_node + 4, new_node + 13)
# Add another K5 sharing two nodes
- G = nx.disjoint_union(G,K)
- nbrs = G[new_node+10]
- G.remove_node(new_node+10)
+ G = nx.disjoint_union(G, K)
+ nbrs = G[new_node + 10]
+ G.remove_node(new_node + 10)
for nbr in nbrs:
- G.add_edge(new_node+17, nbr)
- nbrs2 = G[new_node+9]
- G.remove_node(new_node+9)
+ G.add_edge(new_node + 17, nbr)
+ nbrs2 = G[new_node + 9]
+ G.remove_node(new_node + 9)
for nbr in nbrs2:
- G.add_edge(new_node+18, nbr)
+ G.add_edge(new_node + 18, nbr)
return G
# Helper function
+
+
def _check_connectivity(G):
result = k_components(G)
for k, components in result.items():
@@ -121,18 +126,22 @@ def _check_connectivity(G):
K = nx.node_connectivity(C)
assert_greater_equal(K, k)
+
def test_torrents_and_ferraro_graph():
G = torrents_and_ferraro_graph()
_check_connectivity(G)
+
def test_example_1():
G = graph_example_1()
_check_connectivity(G)
+
def test_karate_0():
G = nx.karate_club_graph()
_check_connectivity(G)
+
def test_karate_1():
karate_k_num = {0: 4, 1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 3, 7: 4, 8: 4, 9: 2,
10: 3, 11: 1, 12: 2, 13: 4, 14: 2, 15: 2, 16: 2, 17: 2, 18: 2,
@@ -146,6 +155,7 @@ def test_karate_1():
k_num = build_k_number_dict(k_comps)
assert_in(k_num, (karate_k_num, approx_karate_k_num))
+
def test_example_1_detail_3_and_4():
G = graph_example_1()
result = k_components(G)
@@ -166,14 +176,16 @@ def test_example_1_detail_3_and_4():
K = nx.node_connectivity(G.subgraph(component))
assert_greater_equal(K, k)
+
@raises(nx.NetworkXNotImplemented)
def test_directed():
G = nx.gnp_random_graph(10, 0.4, directed=True)
kc = k_components(G)
+
def test_same():
equal = {'A': 2, 'B': 2, 'C': 2}
- slightly_different = {'A': 2, 'B': 1, 'C': 2}
+ slightly_different = {'A': 2, 'B': 1, 'C': 2}
different = {'A': 2, 'B': 8, 'C': 18}
assert_true(_same(equal))
assert_false(_same(slightly_different))
@@ -184,21 +196,21 @@ def test_same():
class TestAntiGraph:
def setUp(self):
- self.Gnp = nx.gnp_random_graph(20,0.8)
+ self.Gnp = nx.gnp_random_graph(20, 0.8)
self.Anp = _AntiGraph(nx.complement(self.Gnp))
self.Gd = nx.davis_southern_women_graph()
self.Ad = _AntiGraph(nx.complement(self.Gd))
self.Gk = nx.karate_club_graph()
self.Ak = _AntiGraph(nx.complement(self.Gk))
self.GA = [(self.Gnp, self.Anp),
- (self.Gd,self.Ad),
- (self.Gk, self.Ak)]
+ (self.Gd, self.Ad),
+ (self.Gk, self.Ak)]
def test_size(self):
for G, A in self.GA:
n = G.order()
s = len(list(G.edges())) + len(list(A.edges()))
- assert_true(s == (n*(n-1))/2)
+ assert_true(s == (n * (n - 1)) / 2)
def test_degree(self):
for G, A in self.GA:
@@ -218,8 +230,8 @@ class TestAntiGraph:
def test_adj(self):
for G, A in self.GA:
for n, nbrs in G.adj.items():
- a_adj = sorted((n,sorted(ad)) for n, ad in A.adj.items())
- g_adj = sorted((n,sorted(ad)) for n, ad in G.adj.items())
+ a_adj = sorted((n, sorted(ad)) for n, ad in A.adj.items())
+ g_adj = sorted((n, sorted(ad)) for n, ad in G.adj.items())
assert_equal(a_adj, g_adj)
def test_adjacency(self):
diff --git a/networkx/algorithms/approximation/tests/test_matching.py b/networkx/algorithms/approximation/tests/test_matching.py
index b768c39c..5286b313 100644
--- a/networkx/algorithms/approximation/tests/test_matching.py
+++ b/networkx/algorithms/approximation/tests/test_matching.py
@@ -2,7 +2,8 @@ from nose.tools import *
import networkx as nx
import networkx.algorithms.approximation as a
+
def test_min_maximal_matching():
# smoke test
G = nx.Graph()
- assert_equal(len(a.min_maximal_matching(G)),0)
+ assert_equal(len(a.min_maximal_matching(G)), 0)
diff --git a/networkx/algorithms/approximation/tests/test_ramsey.py b/networkx/algorithms/approximation/tests/test_ramsey.py
index 7ab8dac2..ae5fd235 100644
--- a/networkx/algorithms/approximation/tests/test_ramsey.py
+++ b/networkx/algorithms/approximation/tests/test_ramsey.py
@@ -2,6 +2,7 @@ from nose.tools import *
import networkx as nx
import networkx.algorithms.approximation as apxa
+
def test_ramsey():
# this should only find the complete graph
graph = nx.complete_graph(10)
diff --git a/networkx/algorithms/assortativity/connectivity.py b/networkx/algorithms/assortativity/connectivity.py
index c01842c0..5c0d574f 100644
--- a/networkx/algorithms/assortativity/connectivity.py
+++ b/networkx/algorithms/assortativity/connectivity.py
@@ -139,4 +139,5 @@ def average_degree_connectivity(G, source="in+out", target="in+out",
dc[k] /= norm
return dc
+
k_nearest_neighbors = average_degree_connectivity
diff --git a/networkx/algorithms/assortativity/correlation.py b/networkx/algorithms/assortativity/correlation.py
index d6b34f3d..feec0a31 100644
--- a/networkx/algorithms/assortativity/correlation.py
+++ b/networkx/algorithms/assortativity/correlation.py
@@ -13,7 +13,8 @@ __all__ = ['degree_pearson_correlation_coefficient',
'attribute_assortativity_coefficient',
'numeric_assortativity_coefficient']
-def degree_assortativity_coefficient(G, x='out', y='in', weight=None,
+
+def degree_assortativity_coefficient(G, x='out', y='in', weight=None,
nodes=None):
"""Compute degree assortativity of graph.
@@ -26,7 +27,7 @@ def degree_assortativity_coefficient(G, x='out', y='in', weight=None,
x: string ('in','out')
The degree type for source node (directed graphs only).
-
+
y: string ('in','out')
The degree type for target node (directed graphs only).
@@ -43,7 +44,7 @@ def degree_assortativity_coefficient(G, x='out', y='in', weight=None,
-------
r : float
Assortativity of graph by degree.
-
+
Examples
--------
>>> G=nx.path_graph(4)
@@ -77,7 +78,7 @@ def degree_assortativity_coefficient(G, x='out', y='in', weight=None,
return numeric_ac(M)
-def degree_pearson_correlation_coefficient(G, x='out', y='in',
+def degree_pearson_correlation_coefficient(G, x='out', y='in',
weight=None, nodes=None):
"""Compute degree assortativity of graph.
@@ -110,7 +111,7 @@ def degree_pearson_correlation_coefficient(G, x='out', y='in',
-------
r : float
Assortativity of graph by degree.
-
+
Examples
--------
>>> G=nx.path_graph(4)
@@ -133,18 +134,18 @@ def degree_pearson_correlation_coefficient(G, x='out', y='in',
import scipy.stats as stats
except ImportError:
raise ImportError(
- "Assortativity requires SciPy: http://scipy.org/ ")
- xy=node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
- x,y=zip(*xy)
- return stats.pearsonr(x,y)[0]
+ "Assortativity requires SciPy: http://scipy.org/ ")
+ xy = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
+ x, y = zip(*xy)
+ return stats.pearsonr(x, y)[0]
-def attribute_assortativity_coefficient(G,attribute,nodes=None):
+def attribute_assortativity_coefficient(G, attribute, nodes=None):
"""Compute assortativity for node attributes.
Assortativity measures the similarity of connections
in the graph with respect to the given attribute.
-
+
Parameters
----------
G : NetworkX graph
@@ -160,7 +161,7 @@ def attribute_assortativity_coefficient(G,attribute,nodes=None):
-------
r: float
Assortativity of graph for given attribute
-
+
Examples
--------
>>> G=nx.Graph()
@@ -181,7 +182,7 @@ def attribute_assortativity_coefficient(G,attribute,nodes=None):
.. [1] M. E. J. Newman, Mixing patterns in networks,
Physical Review E, 67 026126, 2003
"""
- M = attribute_mixing_matrix(G,attribute,nodes)
+ M = attribute_mixing_matrix(G, attribute, nodes)
return attribute_ac(M)
@@ -208,7 +209,7 @@ def numeric_assortativity_coefficient(G, attribute, nodes=None):
-------
r: float
Assortativity of graph for given attribute
-
+
Examples
--------
>>> G=nx.Graph()
@@ -228,7 +229,7 @@ def numeric_assortativity_coefficient(G, attribute, nodes=None):
.. [1] M. E. J. Newman, Mixing patterns in networks
Physical Review E, 67 026126, 2003
"""
- a = numeric_mixing_matrix(G,attribute,nodes)
+ a = numeric_mixing_matrix(G, attribute, nodes)
return numeric_ac(a)
@@ -255,13 +256,13 @@ def attribute_ac(M):
import numpy
except ImportError:
raise ImportError(
- "attribute_assortativity requires NumPy: http://scipy.org/ ")
+ "attribute_assortativity requires NumPy: http://scipy.org/ ")
if M.sum() != 1.0:
- M=M/float(M.sum())
- M=numpy.asmatrix(M)
- s=(M*M).sum()
- t=M.trace()
- r=(t-s)/(1-s)
+ M = M / float(M.sum())
+ M = numpy.asmatrix(M)
+ s = (M * M).sum()
+ t = M.trace()
+ r = (t - s) / (1 - s)
return float(r)
@@ -274,17 +275,17 @@ def numeric_ac(M):
raise ImportError('numeric_assortativity requires ',
'NumPy: http://scipy.org/')
if M.sum() != 1.0:
- M=M/float(M.sum())
- nx,ny=M.shape # nx=ny
- x=numpy.arange(nx)
- y=numpy.arange(ny)
- a=M.sum(axis=0)
- b=M.sum(axis=1)
- vara=(a*x**2).sum()-((a*x).sum())**2
- varb=(b*x**2).sum()-((b*x).sum())**2
- xy=numpy.outer(x,y)
- ab=numpy.outer(a,b)
- return (xy*(M-ab)).sum()/numpy.sqrt(vara*varb)
+ M = M / float(M.sum())
+ nx, ny = M.shape # nx=ny
+ x = numpy.arange(nx)
+ y = numpy.arange(ny)
+ a = M.sum(axis=0)
+ b = M.sum(axis=1)
+ vara = (a * x**2).sum() - ((a * x).sum())**2
+ varb = (b * x**2).sum() - ((b * x).sum())**2
+ xy = numpy.outer(x, y)
+ ab = numpy.outer(a, b)
+ return (xy * (M - ab)).sum() / numpy.sqrt(vara * varb)
# fixture for nose tests
diff --git a/networkx/algorithms/assortativity/neighbor_degree.py b/networkx/algorithms/assortativity/neighbor_degree.py
index 46d13e80..1a0dbebb 100644
--- a/networkx/algorithms/assortativity/neighbor_degree.py
+++ b/networkx/algorithms/assortativity/neighbor_degree.py
@@ -1,5 +1,5 @@
#-*- coding: utf-8 -*-
-# Copyright (C) 2011 by
+# Copyright (C) 2011 by
# Jordi Torrents <jtorrents@milnou.net>
# Aric Hagberg <hagberg@lanl.gov>
# All rights reserved.
@@ -13,7 +13,7 @@ __all__ = ["average_neighbor_degree"]
def _average_nbr_deg(G, source_degree, target_degree, nodes=None, weight=None):
# average degree of neighbors
avg = {}
- for n,deg in source_degree(nodes,weight=weight):
+ for n, deg in source_degree(nodes, weight=weight):
# normalize but not by zero degree
if deg == 0:
deg = 1
@@ -21,10 +21,11 @@ def _average_nbr_deg(G, source_degree, target_degree, nodes=None, weight=None):
if weight is None:
avg[n] = sum(d for n, d in nbrdeg) / float(deg)
else:
- avg[n] = sum((G[n][nbr].get(weight,1)*d
- for nbr,d in nbrdeg)) / float(deg)
+ avg[n] = sum((G[n][nbr].get(weight, 1) * d
+ for nbr, d in nbrdeg)) / float(deg)
return avg
+
def average_neighbor_degree(G, source='out', target='out',
nodes=None, weight=None):
r"""Returns the average degree of the neighborhood of each node.
@@ -91,7 +92,7 @@ def average_neighbor_degree(G, source='out', target='out',
>>> nx.average_neighbor_degree(G, source='out', target='out')
{0: 1.0, 1: 1.0, 2: 0.0, 3: 0.0}
-
+
Notes
-----
For directed graphs you can also specify in-degree or out-degree
@@ -100,7 +101,7 @@ def average_neighbor_degree(G, source='out', target='out',
See Also
--------
average_degree_connectivity
-
+
References
----------
.. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
@@ -110,11 +111,11 @@ def average_neighbor_degree(G, source='out', target='out',
source_degree = G.degree
target_degree = G.degree
if G.is_directed():
- direction = {'out':G.out_degree,
- 'in':G.in_degree}
+ direction = {'out': G.out_degree,
+ 'in': G.in_degree}
source_degree = direction[source]
target_degree = direction[target]
- return _average_nbr_deg(G, source_degree, target_degree,
+ return _average_nbr_deg(G, source_degree, target_degree,
nodes=nodes, weight=weight)
# obsolete
@@ -129,4 +130,3 @@ def average_neighbor_degree(G, source='out', target='out',
# raise nx.NetworkXError("Not defined for undirected graphs.")
# return _average_nbr_deg(G, G.out_degree, G.out_degree, nodes, weight)
# average_neighbor_out_degree.__doc__=average_neighbor_degree.__doc__
-
diff --git a/networkx/algorithms/assortativity/pairs.py b/networkx/algorithms/assortativity/pairs.py
index 9a31222e..73d3be5c 100644
--- a/networkx/algorithms/assortativity/pairs.py
+++ b/networkx/algorithms/assortativity/pairs.py
@@ -5,6 +5,7 @@ __author__ = ' '.join(['Aric Hagberg <aric.hagberg@gmail.com>'])
__all__ = ['node_attribute_xy',
'node_degree_xy']
+
def node_attribute_xy(G, attribute, nodes=None):
"""Return iterator of node-attribute pairs for all edges in G.
@@ -44,19 +45,19 @@ def node_attribute_xy(G, attribute, nodes=None):
else:
nodes = set(nodes)
Gnodes = G.nodes
- for u,nbrsdict in G.adjacency():
+ for u, nbrsdict in G.adjacency():
if u not in nodes:
continue
- uattr = Gnodes[u].get(attribute,None)
+ uattr = Gnodes[u].get(attribute, None)
if G.is_multigraph():
- for v,keys in nbrsdict.items():
- vattr = Gnodes[v].get(attribute,None)
- for k,d in keys.items():
- yield (uattr,vattr)
+ for v, keys in nbrsdict.items():
+ vattr = Gnodes[v].get(attribute, None)
+ for k, d in keys.items():
+ yield (uattr, vattr)
else:
- for v,eattr in nbrsdict.items():
- vattr = Gnodes[v].get(attribute,None)
- yield (uattr,vattr)
+ for v, eattr in nbrsdict.items():
+ vattr = Gnodes[v].get(attribute, None)
+ yield (uattr, vattr)
def node_degree_xy(G, x='out', y='in', weight=None, nodes=None):
@@ -109,16 +110,16 @@ def node_degree_xy(G, x='out', y='in', weight=None, nodes=None):
xdeg = G.degree
ydeg = G.degree
if G.is_directed():
- direction = {'out':G.out_degree,
- 'in':G.in_degree}
+ direction = {'out': G.out_degree,
+ 'in': G.in_degree}
xdeg = direction[x]
ydeg = direction[y]
- for u,degu in xdeg(nodes, weight=weight):
- neighbors = (nbr for _,nbr in G.edges(u) if nbr in nodes)
- for v,degv in ydeg(neighbors, weight=weight):
- yield degu,degv
-
+ for u, degu in xdeg(nodes, weight=weight):
+ neighbors = (nbr for _, nbr in G.edges(u) if nbr in nodes)
+ for v, degv in ydeg(neighbors, weight=weight):
+ yield degu, degv
+
# fixture for nose tests
def setup_module(module):
diff --git a/networkx/algorithms/assortativity/tests/base_test.py b/networkx/algorithms/assortativity/tests/base_test.py
index 14be0f53..d0539122 100644
--- a/networkx/algorithms/assortativity/tests/base_test.py
+++ b/networkx/algorithms/assortativity/tests/base_test.py
@@ -1,50 +1,51 @@
import networkx as nx
+
class BaseTestAttributeMixing(object):
def setUp(self):
- G=nx.Graph()
- G.add_nodes_from([0,1],fish='one')
- G.add_nodes_from([2,3],fish='two')
- G.add_nodes_from([4],fish='red')
- G.add_nodes_from([5],fish='blue')
- G.add_edges_from([(0,1),(2,3),(0,4),(2,5)])
- self.G=G
-
- D=nx.DiGraph()
- D.add_nodes_from([0,1],fish='one')
- D.add_nodes_from([2,3],fish='two')
- D.add_nodes_from([4],fish='red')
- D.add_nodes_from([5],fish='blue')
- D.add_edges_from([(0,1),(2,3),(0,4),(2,5)])
- self.D=D
-
- M=nx.MultiGraph()
- M.add_nodes_from([0,1],fish='one')
- M.add_nodes_from([2,3],fish='two')
- M.add_nodes_from([4],fish='red')
- M.add_nodes_from([5],fish='blue')
- M.add_edges_from([(0,1),(0,1),(2,3)])
- self.M=M
-
- S=nx.Graph()
- S.add_nodes_from([0,1],fish='one')
- S.add_nodes_from([2,3],fish='two')
- S.add_nodes_from([4],fish='red')
- S.add_nodes_from([5],fish='blue')
- S.add_edge(0,0)
- S.add_edge(2,2)
- self.S=S
+ G = nx.Graph()
+ G.add_nodes_from([0, 1], fish='one')
+ G.add_nodes_from([2, 3], fish='two')
+ G.add_nodes_from([4], fish='red')
+ G.add_nodes_from([5], fish='blue')
+ G.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
+ self.G = G
+
+ D = nx.DiGraph()
+ D.add_nodes_from([0, 1], fish='one')
+ D.add_nodes_from([2, 3], fish='two')
+ D.add_nodes_from([4], fish='red')
+ D.add_nodes_from([5], fish='blue')
+ D.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
+ self.D = D
+
+ M = nx.MultiGraph()
+ M.add_nodes_from([0, 1], fish='one')
+ M.add_nodes_from([2, 3], fish='two')
+ M.add_nodes_from([4], fish='red')
+ M.add_nodes_from([5], fish='blue')
+ M.add_edges_from([(0, 1), (0, 1), (2, 3)])
+ self.M = M
+
+ S = nx.Graph()
+ S.add_nodes_from([0, 1], fish='one')
+ S.add_nodes_from([2, 3], fish='two')
+ S.add_nodes_from([4], fish='red')
+ S.add_nodes_from([5], fish='blue')
+ S.add_edge(0, 0)
+ S.add_edge(2, 2)
+ self.S = S
+
class BaseTestDegreeMixing(object):
def setUp(self):
- self.P4=nx.path_graph(4)
- self.D=nx.DiGraph()
+ self.P4 = nx.path_graph(4)
+ self.D = nx.DiGraph()
self.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)])
- self.M=nx.MultiGraph()
+ self.M = nx.MultiGraph()
nx.add_path(self.M, range(4))
- self.M.add_edge(0,1)
- self.S=nx.Graph()
- self.S.add_edges_from([(0,0),(1,1)])
-
+ self.M.add_edge(0, 1)
+ self.S = nx.Graph()
+ self.S.add_edges_from([(0, 0), (1, 1)])
diff --git a/networkx/algorithms/assortativity/tests/test_connectivity.py b/networkx/algorithms/assortativity/tests/test_connectivity.py
index 0700b396..701d30f3 100644
--- a/networkx/algorithms/assortativity/tests/test_connectivity.py
+++ b/networkx/algorithms/assortativity/tests/test_connectivity.py
@@ -10,107 +10,107 @@ import networkx as nx
class TestNeighborConnectivity(object):
def test_degree_p4(self):
- G=nx.path_graph(4)
- answer={1:2.0,2:1.5}
+ G = nx.path_graph(4)
+ answer = {1: 2.0, 2: 1.5}
nd = nx.average_degree_connectivity(G)
- assert_equal(nd,answer)
-
- D=G.to_directed()
- answer={2:2.0,4:1.5}
+ assert_equal(nd, answer)
+
+ D = G.to_directed()
+ answer = {2: 2.0, 4: 1.5}
nd = nx.average_degree_connectivity(D)
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
- answer={1:2.0,2:1.5}
- D=G.to_directed()
+ answer = {1: 2.0, 2: 1.5}
+ D = G.to_directed()
nd = nx.average_degree_connectivity(D, source='in', target='in')
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
- D=G.to_directed()
+ D = G.to_directed()
nd = nx.average_degree_connectivity(D, source='in', target='in')
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
def test_degree_p4_weighted(self):
- G=nx.path_graph(4)
- G[1][2]['weight']=4
- answer={1:2.0,2:1.8}
- nd = nx.average_degree_connectivity(G,weight='weight')
- assert_equal(nd,answer)
- answer={1:2.0,2:1.5}
+ G = nx.path_graph(4)
+ G[1][2]['weight'] = 4
+ answer = {1: 2.0, 2: 1.8}
+ nd = nx.average_degree_connectivity(G, weight='weight')
+ assert_equal(nd, answer)
+ answer = {1: 2.0, 2: 1.5}
nd = nx.average_degree_connectivity(G)
- assert_equal(nd,answer)
-
- D=G.to_directed()
- answer={2:2.0,4:1.8}
- nd = nx.average_degree_connectivity(D,weight='weight')
- assert_equal(nd,answer)
-
- answer={1:2.0,2:1.8}
- D=G.to_directed()
- nd = nx.average_degree_connectivity(D,weight='weight', source='in',
+ assert_equal(nd, answer)
+
+ D = G.to_directed()
+ answer = {2: 2.0, 4: 1.8}
+ nd = nx.average_degree_connectivity(D, weight='weight')
+ assert_equal(nd, answer)
+
+ answer = {1: 2.0, 2: 1.8}
+ D = G.to_directed()
+ nd = nx.average_degree_connectivity(D, weight='weight', source='in',
target='in')
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
- D=G.to_directed()
- nd = nx.average_degree_connectivity(D,source='in',target='out',
+ D = G.to_directed()
+ nd = nx.average_degree_connectivity(D, source='in', target='out',
weight='weight')
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
def test_weight_keyword(self):
- G=nx.path_graph(4)
- G[1][2]['other']=4
- answer={1:2.0,2:1.8}
- nd = nx.average_degree_connectivity(G,weight='other')
- assert_equal(nd,answer)
- answer={1:2.0,2:1.5}
- nd = nx.average_degree_connectivity(G,weight=None)
- assert_equal(nd,answer)
-
- D=G.to_directed()
- answer={2:2.0,4:1.8}
- nd = nx.average_degree_connectivity(D,weight='other')
- assert_equal(nd,answer)
-
- answer={1:2.0,2:1.8}
- D=G.to_directed()
- nd = nx.average_degree_connectivity(D,weight='other', source='in',
+ G = nx.path_graph(4)
+ G[1][2]['other'] = 4
+ answer = {1: 2.0, 2: 1.8}
+ nd = nx.average_degree_connectivity(G, weight='other')
+ assert_equal(nd, answer)
+ answer = {1: 2.0, 2: 1.5}
+ nd = nx.average_degree_connectivity(G, weight=None)
+ assert_equal(nd, answer)
+
+ D = G.to_directed()
+ answer = {2: 2.0, 4: 1.8}
+ nd = nx.average_degree_connectivity(D, weight='other')
+ assert_equal(nd, answer)
+
+ answer = {1: 2.0, 2: 1.8}
+ D = G.to_directed()
+ nd = nx.average_degree_connectivity(D, weight='other', source='in',
target='in')
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
- D=G.to_directed()
- nd = nx.average_degree_connectivity(D,weight='other',source='in',
+ D = G.to_directed()
+ nd = nx.average_degree_connectivity(D, weight='other', source='in',
target='in')
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
def test_degree_barrat(self):
- G=nx.star_graph(5)
- G.add_edges_from([(5,6),(5,7),(5,8),(5,9)])
- G[0][5]['weight']=5
+ G = nx.star_graph(5)
+ G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
+ G[0][5]['weight'] = 5
nd = nx.average_degree_connectivity(G)[5]
- assert_equal(nd,1.8)
- nd = nx.average_degree_connectivity(G,weight='weight')[5]
- assert_almost_equal(nd,3.222222,places=5)
- nd = nx.k_nearest_neighbors(G,weight='weight')[5]
- assert_almost_equal(nd,3.222222,places=5)
+ assert_equal(nd, 1.8)
+ nd = nx.average_degree_connectivity(G, weight='weight')[5]
+ assert_almost_equal(nd, 3.222222, places=5)
+ nd = nx.k_nearest_neighbors(G, weight='weight')[5]
+ assert_almost_equal(nd, 3.222222, places=5)
def test_zero_deg(self):
- G=nx.DiGraph()
- G.add_edge(1,2)
- G.add_edge(1,3)
- G.add_edge(1,4)
+ G = nx.DiGraph()
+ G.add_edge(1, 2)
+ G.add_edge(1, 3)
+ G.add_edge(1, 4)
c = nx.average_degree_connectivity(G)
- assert_equal(c,{1:0,3:1})
+ assert_equal(c, {1: 0, 3: 1})
c = nx.average_degree_connectivity(G, source='in', target='in')
- assert_equal(c,{0:0,1:0})
+ assert_equal(c, {0: 0, 1: 0})
c = nx.average_degree_connectivity(G, source='in', target='out')
- assert_equal(c,{0:0,1:3})
+ assert_equal(c, {0: 0, 1: 3})
c = nx.average_degree_connectivity(G, source='in', target='in+out')
- assert_equal(c,{0:0,1:3})
+ assert_equal(c, {0: 0, 1: 3})
c = nx.average_degree_connectivity(G, source='out', target='out')
- assert_equal(c,{0:0,3:0})
+ assert_equal(c, {0: 0, 3: 0})
c = nx.average_degree_connectivity(G, source='out', target='in')
- assert_equal(c,{0:0,3:1})
+ assert_equal(c, {0: 0, 3: 1})
c = nx.average_degree_connectivity(G, source='out', target='in+out')
- assert_equal(c,{0:0,3:1})
+ assert_equal(c, {0: 0, 3: 1})
def test_in_out_weight(self):
G = nx.DiGraph()
diff --git a/networkx/algorithms/assortativity/tests/test_correlation.py b/networkx/algorithms/assortativity/tests/test_correlation.py
index fbb2d517..46837ec9 100644
--- a/networkx/algorithms/assortativity/tests/test_correlation.py
+++ b/networkx/algorithms/assortativity/tests/test_correlation.py
@@ -2,7 +2,7 @@
from nose.tools import *
from nose import SkipTest
import networkx as nx
-from base_test import BaseTestAttributeMixing,BaseTestDegreeMixing
+from base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
from networkx.algorithms.assortativity.correlation import attribute_ac
@@ -15,40 +15,36 @@ class TestDegreeMixingCorrelation(BaseTestDegreeMixing):
import numpy as np
import numpy.testing as npt
except ImportError:
- raise SkipTest('NumPy not available.')
+ raise SkipTest('NumPy not available.')
try:
import scipy
import scipy.stats
except ImportError:
- raise SkipTest('SciPy not available.')
-
-
+ raise SkipTest('SciPy not available.')
def test_degree_assortativity_undirected(self):
- r=nx.degree_assortativity_coefficient(self.P4)
- npt.assert_almost_equal(r,-1.0/2,decimal=4)
+ r = nx.degree_assortativity_coefficient(self.P4)
+ npt.assert_almost_equal(r, -1.0 / 2, decimal=4)
def test_degree_assortativity_directed(self):
- r=nx.degree_assortativity_coefficient(self.D)
- npt.assert_almost_equal(r,-0.57735,decimal=4)
+ r = nx.degree_assortativity_coefficient(self.D)
+ npt.assert_almost_equal(r, -0.57735, decimal=4)
def test_degree_assortativity_multigraph(self):
- r=nx.degree_assortativity_coefficient(self.M)
- npt.assert_almost_equal(r,-1.0/7.0,decimal=4)
-
+ r = nx.degree_assortativity_coefficient(self.M)
+ npt.assert_almost_equal(r, -1.0 / 7.0, decimal=4)
def test_degree_assortativity_undirected(self):
- r=nx.degree_pearson_correlation_coefficient(self.P4)
- npt.assert_almost_equal(r,-1.0/2,decimal=4)
+ r = nx.degree_pearson_correlation_coefficient(self.P4)
+ npt.assert_almost_equal(r, -1.0 / 2, decimal=4)
def test_degree_assortativity_directed(self):
- r=nx.degree_pearson_correlation_coefficient(self.D)
- npt.assert_almost_equal(r,-0.57735,decimal=4)
+ r = nx.degree_pearson_correlation_coefficient(self.D)
+ npt.assert_almost_equal(r, -0.57735, decimal=4)
def test_degree_assortativity_multigraph(self):
- r=nx.degree_pearson_correlation_coefficient(self.M)
- npt.assert_almost_equal(r,-1.0/7.0,decimal=4)
-
+ r = nx.degree_pearson_correlation_coefficient(self.M)
+ npt.assert_almost_equal(r, -1.0 / 7.0, decimal=4)
class TestAttributeMixingCorrelation(BaseTestAttributeMixing):
@@ -61,41 +57,39 @@ class TestAttributeMixingCorrelation(BaseTestAttributeMixing):
import numpy.testing as npt
except ImportError:
- raise SkipTest('NumPy not available.')
-
+ raise SkipTest('NumPy not available.')
def test_attribute_assortativity_undirected(self):
- r=nx.attribute_assortativity_coefficient(self.G,'fish')
- assert_equal(r,6.0/22.0)
+ r = nx.attribute_assortativity_coefficient(self.G, 'fish')
+ assert_equal(r, 6.0 / 22.0)
def test_attribute_assortativity_directed(self):
- r=nx.attribute_assortativity_coefficient(self.D,'fish')
- assert_equal(r,1.0/3.0)
+ r = nx.attribute_assortativity_coefficient(self.D, 'fish')
+ assert_equal(r, 1.0 / 3.0)
def test_attribute_assortativity_multigraph(self):
- r=nx.attribute_assortativity_coefficient(self.M,'fish')
- assert_equal(r,1.0)
+ r = nx.attribute_assortativity_coefficient(self.M, 'fish')
+ assert_equal(r, 1.0)
def test_attribute_assortativity_coefficient(self):
# from "Mixing patterns in networks"
- a=np.array([[0.258,0.016,0.035,0.013],
- [0.012,0.157,0.058,0.019],
- [0.013,0.023,0.306,0.035],
- [0.005,0.007,0.024,0.016]])
- r=attribute_ac(a)
- npt.assert_almost_equal(r,0.623,decimal=3)
+ a = np.array([[0.258, 0.016, 0.035, 0.013],
+ [0.012, 0.157, 0.058, 0.019],
+ [0.013, 0.023, 0.306, 0.035],
+ [0.005, 0.007, 0.024, 0.016]])
+ r = attribute_ac(a)
+ npt.assert_almost_equal(r, 0.623, decimal=3)
def test_attribute_assortativity_coefficient2(self):
- a=np.array([[0.18,0.02,0.01,0.03],
- [0.02,0.20,0.03,0.02],
- [0.01,0.03,0.16,0.01],
- [0.03,0.02,0.01,0.22]])
+ a = np.array([[0.18, 0.02, 0.01, 0.03],
+ [0.02, 0.20, 0.03, 0.02],
+ [0.01, 0.03, 0.16, 0.01],
+ [0.03, 0.02, 0.01, 0.22]])
- r=attribute_ac(a)
- npt.assert_almost_equal(r,0.68,decimal=2)
+ r = attribute_ac(a)
+ npt.assert_almost_equal(r, 0.68, decimal=2)
def test_attribute_assortativity(self):
- a=np.array([[50,50,0],[50,50,0],[0,0,2]])
- r=attribute_ac(a)
- npt.assert_almost_equal(r,0.029,decimal=3)
-
+ a = np.array([[50, 50, 0], [50, 50, 0], [0, 0, 2]])
+ r = attribute_ac(a)
+ npt.assert_almost_equal(r, 0.029, decimal=3)
diff --git a/networkx/algorithms/assortativity/tests/test_mixing.py b/networkx/algorithms/assortativity/tests/test_mixing.py
index ce60a94b..703c0192 100644
--- a/networkx/algorithms/assortativity/tests/test_mixing.py
+++ b/networkx/algorithms/assortativity/tests/test_mixing.py
@@ -2,42 +2,41 @@
from nose.tools import *
from nose import SkipTest
import networkx as nx
-from base_test import BaseTestAttributeMixing,BaseTestDegreeMixing
+from base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
class TestDegreeMixingDict(BaseTestDegreeMixing):
-
def test_degree_mixing_dict_undirected(self):
- d=nx.degree_mixing_dict(self.P4)
- d_result={1:{2:2},
- 2:{1:2,2:2},
- }
- assert_equal(d,d_result)
+ d = nx.degree_mixing_dict(self.P4)
+ d_result = {1: {2: 2},
+ 2: {1: 2, 2: 2},
+ }
+ assert_equal(d, d_result)
def test_degree_mixing_dict_undirected_normalized(self):
- d=nx.degree_mixing_dict(self.P4, normalized=True)
- d_result={1:{2:1.0/3},
- 2:{1:1.0/3,2:1.0/3},
- }
- assert_equal(d,d_result)
+ d = nx.degree_mixing_dict(self.P4, normalized=True)
+ d_result = {1: {2: 1.0 / 3},
+ 2: {1: 1.0 / 3, 2: 1.0 / 3},
+ }
+ assert_equal(d, d_result)
def test_degree_mixing_dict_directed(self):
- d=nx.degree_mixing_dict(self.D)
+ d = nx.degree_mixing_dict(self.D)
print(d)
- d_result={1:{3:2},
- 2:{1:1,3:1},
- 3:{}
- }
- assert_equal(d,d_result)
+ d_result = {1: {3: 2},
+ 2: {1: 1, 3: 1},
+ 3: {}
+ }
+ assert_equal(d, d_result)
def test_degree_mixing_dict_multigraph(self):
- d=nx.degree_mixing_dict(self.M)
- d_result={1:{2:1},
- 2:{1:1,3:3},
- 3:{2:3}
- }
- assert_equal(d,d_result)
+ d = nx.degree_mixing_dict(self.M)
+ d_result = {1: {2: 1},
+ 2: {1: 1, 3: 3},
+ 3: {2: 3}
+ }
+ assert_equal(d, d_result)
class TestDegreeMixingMatrix(BaseTestDegreeMixing):
@@ -51,80 +50,77 @@ class TestDegreeMixingMatrix(BaseTestDegreeMixing):
import numpy.testing as npt
except ImportError:
- raise SkipTest('NumPy not available.')
+ raise SkipTest('NumPy not available.')
def test_degree_mixing_matrix_undirected(self):
- a_result=np.array([[0,0,0],
- [0,0,2],
- [0,2,2]]
- )
- a=nx.degree_mixing_matrix(self.P4,normalized=False)
- npt.assert_equal(a,a_result)
- a=nx.degree_mixing_matrix(self.P4)
- npt.assert_equal(a,a_result/float(a_result.sum()))
+ a_result = np.array([[0, 0, 0],
+ [0, 0, 2],
+ [0, 2, 2]]
+ )
+ a = nx.degree_mixing_matrix(self.P4, normalized=False)
+ npt.assert_equal(a, a_result)
+ a = nx.degree_mixing_matrix(self.P4)
+ npt.assert_equal(a, a_result / float(a_result.sum()))
def test_degree_mixing_matrix_directed(self):
- a_result=np.array([[0,0,0,0],
- [0,0,0,2],
- [0,1,0,1],
- [0,0,0,0]]
- )
- a=nx.degree_mixing_matrix(self.D,normalized=False)
- npt.assert_equal(a,a_result)
- a=nx.degree_mixing_matrix(self.D)
- npt.assert_equal(a,a_result/float(a_result.sum()))
+ a_result = np.array([[0, 0, 0, 0],
+ [0, 0, 0, 2],
+ [0, 1, 0, 1],
+ [0, 0, 0, 0]]
+ )
+ a = nx.degree_mixing_matrix(self.D, normalized=False)
+ npt.assert_equal(a, a_result)
+ a = nx.degree_mixing_matrix(self.D)
+ npt.assert_equal(a, a_result / float(a_result.sum()))
def test_degree_mixing_matrix_multigraph(self):
- a_result=np.array([[0,0,0,0],
- [0,0,1,0],
- [0,1,0,3],
- [0,0,3,0]]
- )
- a=nx.degree_mixing_matrix(self.M,normalized=False)
- npt.assert_equal(a,a_result)
- a=nx.degree_mixing_matrix(self.M)
- npt.assert_equal(a,a_result/float(a_result.sum()))
-
+ a_result = np.array([[0, 0, 0, 0],
+ [0, 0, 1, 0],
+ [0, 1, 0, 3],
+ [0, 0, 3, 0]]
+ )
+ a = nx.degree_mixing_matrix(self.M, normalized=False)
+ npt.assert_equal(a, a_result)
+ a = nx.degree_mixing_matrix(self.M)
+ npt.assert_equal(a, a_result / float(a_result.sum()))
def test_degree_mixing_matrix_selfloop(self):
- a_result=np.array([[0,0,0],
- [0,0,0],
- [0,0,2]]
- )
- a=nx.degree_mixing_matrix(self.S,normalized=False)
- npt.assert_equal(a,a_result)
- a=nx.degree_mixing_matrix(self.S)
- npt.assert_equal(a,a_result/float(a_result.sum()))
+ a_result = np.array([[0, 0, 0],
+ [0, 0, 0],
+ [0, 0, 2]]
+ )
+ a = nx.degree_mixing_matrix(self.S, normalized=False)
+ npt.assert_equal(a, a_result)
+ a = nx.degree_mixing_matrix(self.S)
+ npt.assert_equal(a, a_result / float(a_result.sum()))
class TestAttributeMixingDict(BaseTestAttributeMixing):
def test_attribute_mixing_dict_undirected(self):
- d=nx.attribute_mixing_dict(self.G,'fish')
- d_result={'one':{'one':2,'red':1},
- 'two':{'two':2,'blue':1},
- 'red':{'one':1},
- 'blue':{'two':1}
- }
- assert_equal(d,d_result)
+ d = nx.attribute_mixing_dict(self.G, 'fish')
+ d_result = {'one': {'one': 2, 'red': 1},
+ 'two': {'two': 2, 'blue': 1},
+ 'red': {'one': 1},
+ 'blue': {'two': 1}
+ }
+ assert_equal(d, d_result)
def test_attribute_mixing_dict_directed(self):
- d=nx.attribute_mixing_dict(self.D,'fish')
- d_result={'one':{'one':1,'red':1},
- 'two':{'two':1,'blue':1},
- 'red':{},
- 'blue':{}
- }
- assert_equal(d,d_result)
-
+ d = nx.attribute_mixing_dict(self.D, 'fish')
+ d_result = {'one': {'one': 1, 'red': 1},
+ 'two': {'two': 1, 'blue': 1},
+ 'red': {},
+ 'blue': {}
+ }
+ assert_equal(d, d_result)
def test_attribute_mixing_dict_multigraph(self):
- d=nx.attribute_mixing_dict(self.M,'fish')
- d_result={'one':{'one':4},
- 'two':{'two':2},
- }
- assert_equal(d,d_result)
-
+ d = nx.attribute_mixing_dict(self.M, 'fish')
+ d_result = {'one': {'one': 4},
+ 'two': {'two': 2},
+ }
+ assert_equal(d, d_result)
class TestAttributeMixingMatrix(BaseTestAttributeMixing):
@@ -137,50 +133,49 @@ class TestAttributeMixingMatrix(BaseTestAttributeMixing):
import numpy.testing as npt
except ImportError:
- raise SkipTest('NumPy not available.')
+ raise SkipTest('NumPy not available.')
def test_attribute_mixing_matrix_undirected(self):
- mapping={'one':0,'two':1,'red':2,'blue':3}
- a_result=np.array([[2,0,1,0],
- [0,2,0,1],
- [1,0,0,0],
- [0,1,0,0]]
- )
- a=nx.attribute_mixing_matrix(self.G,'fish',
- mapping=mapping,
- normalized=False)
- npt.assert_equal(a,a_result)
- a=nx.attribute_mixing_matrix(self.G,'fish',
- mapping=mapping)
- npt.assert_equal(a,a_result/float(a_result.sum()))
+ mapping = {'one': 0, 'two': 1, 'red': 2, 'blue': 3}
+ a_result = np.array([[2, 0, 1, 0],
+ [0, 2, 0, 1],
+ [1, 0, 0, 0],
+ [0, 1, 0, 0]]
+ )
+ a = nx.attribute_mixing_matrix(self.G, 'fish',
+ mapping=mapping,
+ normalized=False)
+ npt.assert_equal(a, a_result)
+ a = nx.attribute_mixing_matrix(self.G, 'fish',
+ mapping=mapping)
+ npt.assert_equal(a, a_result / float(a_result.sum()))
def test_attribute_mixing_matrix_directed(self):
- mapping={'one':0,'two':1,'red':2,'blue':3}
- a_result=np.array([[1,0,1,0],
- [0,1,0,1],
- [0,0,0,0],
- [0,0,0,0]]
- )
- a=nx.attribute_mixing_matrix(self.D,'fish',
- mapping=mapping,
- normalized=False)
- npt.assert_equal(a,a_result)
- a=nx.attribute_mixing_matrix(self.D,'fish',
- mapping=mapping)
- npt.assert_equal(a,a_result/float(a_result.sum()))
+ mapping = {'one': 0, 'two': 1, 'red': 2, 'blue': 3}
+ a_result = np.array([[1, 0, 1, 0],
+ [0, 1, 0, 1],
+ [0, 0, 0, 0],
+ [0, 0, 0, 0]]
+ )
+ a = nx.attribute_mixing_matrix(self.D, 'fish',
+ mapping=mapping,
+ normalized=False)
+ npt.assert_equal(a, a_result)
+ a = nx.attribute_mixing_matrix(self.D, 'fish',
+ mapping=mapping)
+ npt.assert_equal(a, a_result / float(a_result.sum()))
def test_attribute_mixing_matrix_multigraph(self):
- mapping={'one':0,'two':1,'red':2,'blue':3}
- a_result=np.array([[4,0,0,0],
- [0,2,0,0],
- [0,0,0,0],
- [0,0,0,0]]
- )
- a=nx.attribute_mixing_matrix(self.M,'fish',
- mapping=mapping,
- normalized=False)
- npt.assert_equal(a,a_result)
- a=nx.attribute_mixing_matrix(self.M,'fish',
- mapping=mapping)
- npt.assert_equal(a,a_result/float(a_result.sum()))
-
+ mapping = {'one': 0, 'two': 1, 'red': 2, 'blue': 3}
+ a_result = np.array([[4, 0, 0, 0],
+ [0, 2, 0, 0],
+ [0, 0, 0, 0],
+ [0, 0, 0, 0]]
+ )
+ a = nx.attribute_mixing_matrix(self.M, 'fish',
+ mapping=mapping,
+ normalized=False)
+ npt.assert_equal(a, a_result)
+ a = nx.attribute_mixing_matrix(self.M, 'fish',
+ mapping=mapping)
+ npt.assert_equal(a, a_result / float(a_result.sum()))
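
The matrix tests above always compare against a_result and a_result / a_result.sum(). A rough sketch of where those counts come from, using only networkx's node_degree_xy (exercised later in test_pairs.py): tally the (source degree, target degree) pairs and divide by the total so the entries sum to one.

    import numpy as np
    import networkx as nx

    G = nx.path_graph(4)                        # the P4 fixture used above
    pairs = list(nx.node_degree_xy(G))          # both orientations for undirected edges
    size = max(max(x, y) for x, y in pairs) + 1
    m = np.zeros((size, size))
    for x, y in pairs:
        m[x, y] += 1
    print(m)                                    # matches a_result for P4 (normalized=False)
    print(m / m.sum())                          # matches nx.degree_mixing_matrix(G)
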
diff --git a/networkx/algorithms/assortativity/tests/test_neighbor_degree.py b/networkx/algorithms/assortativity/tests/test_neighbor_degree.py
index 7ab99fb9..c294de48 100644
--- a/networkx/algorithms/assortativity/tests/test_neighbor_degree.py
+++ b/networkx/algorithms/assortativity/tests/test_neighbor_degree.py
@@ -2,81 +2,79 @@
from nose.tools import *
import networkx as nx
+
class TestAverageNeighbor(object):
def test_degree_p4(self):
- G=nx.path_graph(4)
- answer={0:2,1:1.5,2:1.5,3:2}
+ G = nx.path_graph(4)
+ answer = {0: 2, 1: 1.5, 2: 1.5, 3: 2}
nd = nx.average_neighbor_degree(G)
- assert_equal(nd,answer)
-
- D=G.to_directed()
+ assert_equal(nd, answer)
+
+ D = G.to_directed()
nd = nx.average_neighbor_degree(D)
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
- D=G.to_directed()
+ D = G.to_directed()
nd = nx.average_neighbor_degree(D)
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
- D=G.to_directed()
+ D = G.to_directed()
nd = nx.average_neighbor_degree(D, source='in', target='in')
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
def test_degree_p4_weighted(self):
- G=nx.path_graph(4)
- G[1][2]['weight']=4
- answer={0:2,1:1.8,2:1.8,3:2}
- nd = nx.average_neighbor_degree(G,weight='weight')
- assert_equal(nd,answer)
-
- D=G.to_directed()
- nd = nx.average_neighbor_degree(D,weight='weight')
- assert_equal(nd,answer)
-
- D=G.to_directed()
- nd = nx.average_neighbor_degree(D,weight='weight')
- assert_equal(nd,answer)
- nd = nx.average_neighbor_degree(D,source='out',target='out',
- weight='weight')
- assert_equal(nd,answer)
+ G = nx.path_graph(4)
+ G[1][2]['weight'] = 4
+ answer = {0: 2, 1: 1.8, 2: 1.8, 3: 2}
+ nd = nx.average_neighbor_degree(G, weight='weight')
+ assert_equal(nd, answer)
- D=G.to_directed()
- nd = nx.average_neighbor_degree(D,source='in',target='in',
- weight='weight')
- assert_equal(nd,answer)
+ D = G.to_directed()
+ nd = nx.average_neighbor_degree(D, weight='weight')
+ assert_equal(nd, answer)
+ D = G.to_directed()
+ nd = nx.average_neighbor_degree(D, weight='weight')
+ assert_equal(nd, answer)
+ nd = nx.average_neighbor_degree(D, source='out', target='out',
+ weight='weight')
+ assert_equal(nd, answer)
+
+ D = G.to_directed()
+ nd = nx.average_neighbor_degree(D, source='in', target='in',
+ weight='weight')
+ assert_equal(nd, answer)
def test_degree_k4(self):
- G=nx.complete_graph(4)
- answer={0:3,1:3,2:3,3:3}
+ G = nx.complete_graph(4)
+ answer = {0: 3, 1: 3, 2: 3, 3: 3}
nd = nx.average_neighbor_degree(G)
- assert_equal(nd,answer)
-
- D=G.to_directed()
+ assert_equal(nd, answer)
+
+ D = G.to_directed()
nd = nx.average_neighbor_degree(D)
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
- D=G.to_directed()
+ D = G.to_directed()
nd = nx.average_neighbor_degree(D)
- assert_equal(nd,answer)
+ assert_equal(nd, answer)
- D=G.to_directed()
- nd = nx.average_neighbor_degree(D,source='in',target='in')
- assert_equal(nd,answer)
+ D = G.to_directed()
+ nd = nx.average_neighbor_degree(D, source='in', target='in')
+ assert_equal(nd, answer)
def test_degree_k4_nodes(self):
- G=nx.complete_graph(4)
- answer={1:3.0,2:3.0}
- nd = nx.average_neighbor_degree(G,nodes=[1,2])
- assert_equal(nd,answer)
+ G = nx.complete_graph(4)
+ answer = {1: 3.0, 2: 3.0}
+ nd = nx.average_neighbor_degree(G, nodes=[1, 2])
+ assert_equal(nd, answer)
def test_degree_barrat(self):
- G=nx.star_graph(5)
- G.add_edges_from([(5,6),(5,7),(5,8),(5,9)])
- G[0][5]['weight']=5
+ G = nx.star_graph(5)
+ G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
+ G[0][5]['weight'] = 5
nd = nx.average_neighbor_degree(G)[5]
- assert_equal(nd,1.8)
- nd = nx.average_neighbor_degree(G,weight='weight')[5]
- assert_almost_equal(nd,3.222222,places=5)
-
-
+ assert_equal(nd, 1.8)
+ nd = nx.average_neighbor_degree(G, weight='weight')[5]
+ assert_almost_equal(nd, 3.222222, places=5)
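
The 3.222222 value in test_degree_barrat is the Barrat-style weighted average neighbour degree, k_nn,5^w = sum_j w_5j * k_j / s_5, where s_5 is the weighted degree of node 5. A short check of that arithmetic (a sketch of the formula the test exercises, not the library internals):

    import networkx as nx

    G = nx.star_graph(5)
    G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
    G[0][5]['weight'] = 5

    s5 = G.degree(5, weight='weight')                                # 5 + 1 + 1 + 1 + 1 = 9
    num = sum(G[5][j].get('weight', 1) * G.degree(j) for j in G[5])  # 5*5 + 4*1 = 29
    print(num / float(s5))                              # 3.222..., the weighted value asserted above
    print(sum(G.degree(j) for j in G[5]) / float(G.degree(5)))       # 1.8, the unweighted value
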
diff --git a/networkx/algorithms/assortativity/tests/test_pairs.py b/networkx/algorithms/assortativity/tests/test_pairs.py
index fa67a454..b3d80120 100644
--- a/networkx/algorithms/assortativity/tests/test_pairs.py
+++ b/networkx/algorithms/assortativity/tests/test_pairs.py
@@ -1,113 +1,110 @@
#!/usr/bin/env python
from nose.tools import *
import networkx as nx
-from base_test import BaseTestAttributeMixing,BaseTestDegreeMixing
+from base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
+
class TestAttributeMixingXY(BaseTestAttributeMixing):
def test_node_attribute_xy_undirected(self):
- attrxy=sorted(nx.node_attribute_xy(self.G,'fish'))
- attrxy_result=sorted([('one','one'),
- ('one','one'),
- ('two','two'),
- ('two','two'),
- ('one','red'),
- ('red','one'),
- ('blue','two'),
- ('two','blue')
- ])
- assert_equal(attrxy,attrxy_result)
+ attrxy = sorted(nx.node_attribute_xy(self.G, 'fish'))
+ attrxy_result = sorted([('one', 'one'),
+ ('one', 'one'),
+ ('two', 'two'),
+ ('two', 'two'),
+ ('one', 'red'),
+ ('red', 'one'),
+ ('blue', 'two'),
+ ('two', 'blue')
+ ])
+ assert_equal(attrxy, attrxy_result)
def test_node_attribute_xy_undirected_nodes(self):
- attrxy=sorted(nx.node_attribute_xy(self.G,'fish',
- nodes=['one','yellow']))
- attrxy_result=sorted( [
- ])
- assert_equal(attrxy,attrxy_result)
-
+ attrxy = sorted(nx.node_attribute_xy(self.G, 'fish',
+ nodes=['one', 'yellow']))
+ attrxy_result = sorted([
+ ])
+ assert_equal(attrxy, attrxy_result)
def test_node_attribute_xy_directed(self):
- attrxy=sorted(nx.node_attribute_xy(self.D,'fish'))
- attrxy_result=sorted([('one','one'),
- ('two','two'),
- ('one','red'),
- ('two','blue')
- ])
- assert_equal(attrxy,attrxy_result)
+ attrxy = sorted(nx.node_attribute_xy(self.D, 'fish'))
+ attrxy_result = sorted([('one', 'one'),
+ ('two', 'two'),
+ ('one', 'red'),
+ ('two', 'blue')
+ ])
+ assert_equal(attrxy, attrxy_result)
def test_node_attribute_xy_multigraph(self):
- attrxy=sorted(nx.node_attribute_xy(self.M,'fish'))
- attrxy_result=[('one','one'),
- ('one','one'),
- ('one','one'),
- ('one','one'),
- ('two','two'),
- ('two','two')
- ]
- assert_equal(attrxy,attrxy_result)
+ attrxy = sorted(nx.node_attribute_xy(self.M, 'fish'))
+ attrxy_result = [('one', 'one'),
+ ('one', 'one'),
+ ('one', 'one'),
+ ('one', 'one'),
+ ('two', 'two'),
+ ('two', 'two')
+ ]
+ assert_equal(attrxy, attrxy_result)
def test_node_attribute_xy_selfloop(self):
- attrxy=sorted(nx.node_attribute_xy(self.S,'fish'))
- attrxy_result=[('one','one'),
- ('two','two')
- ]
- assert_equal(attrxy,attrxy_result)
+ attrxy = sorted(nx.node_attribute_xy(self.S, 'fish'))
+ attrxy_result = [('one', 'one'),
+ ('two', 'two')
+ ]
+ assert_equal(attrxy, attrxy_result)
class TestDegreeMixingXY(BaseTestDegreeMixing):
def test_node_degree_xy_undirected(self):
- xy=sorted(nx.node_degree_xy(self.P4))
- xy_result=sorted([(1,2),
- (2,1),
- (2,2),
- (2,2),
- (1,2),
- (2,1)])
- assert_equal(xy,xy_result)
+ xy = sorted(nx.node_degree_xy(self.P4))
+ xy_result = sorted([(1, 2),
+ (2, 1),
+ (2, 2),
+ (2, 2),
+ (1, 2),
+ (2, 1)])
+ assert_equal(xy, xy_result)
def test_node_degree_xy_undirected_nodes(self):
- xy=sorted(nx.node_degree_xy(self.P4,nodes=[0,1,-1]))
- xy_result=sorted([(1,2),
- (2,1),])
- assert_equal(xy,xy_result)
-
+ xy = sorted(nx.node_degree_xy(self.P4, nodes=[0, 1, -1]))
+ xy_result = sorted([(1, 2),
+ (2, 1), ])
+ assert_equal(xy, xy_result)
def test_node_degree_xy_directed(self):
- xy=sorted(nx.node_degree_xy(self.D))
- xy_result=sorted([(2,1),
- (2,3),
- (1,3),
- (1,3)])
- assert_equal(xy,xy_result)
+ xy = sorted(nx.node_degree_xy(self.D))
+ xy_result = sorted([(2, 1),
+ (2, 3),
+ (1, 3),
+ (1, 3)])
+ assert_equal(xy, xy_result)
def test_node_degree_xy_multigraph(self):
- xy=sorted(nx.node_degree_xy(self.M))
- xy_result=sorted([(2,3),
- (2,3),
- (3,2),
- (3,2),
- (2,3),
- (3,2),
- (1,2),
- (2,1)])
- assert_equal(xy,xy_result)
-
+ xy = sorted(nx.node_degree_xy(self.M))
+ xy_result = sorted([(2, 3),
+ (2, 3),
+ (3, 2),
+ (3, 2),
+ (2, 3),
+ (3, 2),
+ (1, 2),
+ (2, 1)])
+ assert_equal(xy, xy_result)
def test_node_degree_xy_selfloop(self):
- xy=sorted(nx.node_degree_xy(self.S))
- xy_result=sorted([(2,2),
- (2,2)])
- assert_equal(xy,xy_result)
+ xy = sorted(nx.node_degree_xy(self.S))
+ xy_result = sorted([(2, 2),
+ (2, 2)])
+ assert_equal(xy, xy_result)
def test_node_degree_xy_weighted(self):
G = nx.Graph()
- G.add_edge(1,2,weight=7)
- G.add_edge(2,3,weight=10)
- xy=sorted(nx.node_degree_xy(G,weight='weight'))
- xy_result=sorted([(7,17),
- (17,10),
- (17,7),
- (10,17)])
- assert_equal(xy,xy_result)
-
+ G.add_edge(1, 2, weight=7)
+ G.add_edge(2, 3, weight=10)
+ xy = sorted(nx.node_degree_xy(G, weight='weight'))
+ xy_result = sorted([(7, 17),
+ (17, 10),
+ (17, 7),
+ (10, 17)])
+ assert_equal(xy, xy_result)
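
The degree-pair tests above feed the assortativity checks in test_correlation.py: degree_pearson_correlation_coefficient is (up to floating point) the Pearson correlation of the node_degree_xy pairs. A small sketch, assuming SciPy is available as in the guarded tests:

    import networkx as nx
    from scipy import stats

    G = nx.path_graph(4)
    x, y = zip(*nx.node_degree_xy(G))            # the six (1, 2)/(2, 1)/(2, 2) pairs above
    r, _ = stats.pearsonr(x, y)
    print(round(r, 4))                                             # -0.5
    print(round(nx.degree_pearson_correlation_coefficient(G), 4))  # same value
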
diff --git a/networkx/algorithms/bipartite/basic.py b/networkx/algorithms/bipartite/basic.py
index 92116a62..6dc0de5f 100644
--- a/networkx/algorithms/bipartite/basic.py
+++ b/networkx/algorithms/bipartite/basic.py
@@ -13,12 +13,12 @@ Bipartite Graph Algorithms
import networkx as nx
__author__ = """\n""".join(['Jordi Torrents <jtorrents@milnou.net>',
'Aric Hagberg <aric.hagberg@gmail.com>'])
-__all__ = [ 'is_bipartite',
- 'is_bipartite_node_set',
- 'color',
- 'sets',
- 'density',
- 'degrees']
+__all__ = ['is_bipartite',
+ 'is_bipartite_node_set',
+ 'color',
+ 'sets',
+ 'density',
+ 'degrees']
def color(G):
@@ -57,21 +57,22 @@ def color(G):
"""
if G.is_directed():
import itertools
+
def neighbors(v):
return itertools.chain.from_iterable([G.predecessors(v),
G.successors(v)])
else:
- neighbors=G.neighbors
+ neighbors = G.neighbors
color = {}
- for n in G: # handle disconnected graphs
- if n in color or len(G[n])==0: # skip isolates
+ for n in G: # handle disconnected graphs
+ if n in color or len(G[n]) == 0: # skip isolates
continue
queue = [n]
- color[n] = 1 # nodes seen with color (1 or 0)
+ color[n] = 1 # nodes seen with color (1 or 0)
while queue:
v = queue.pop()
- c = 1 - color[v] # opposite color of node v
+ c = 1 - color[v] # opposite color of node v
for w in neighbors(v):
if w in color:
if color[w] == color[v]:
@@ -80,9 +81,10 @@ def color(G):
color[w] = c
queue.append(w)
# color isolates with 0
- color.update(dict.fromkeys(nx.isolates(G),0))
+ color.update(dict.fromkeys(nx.isolates(G), 0))
return color
+
def is_bipartite(G):
""" Returns True if graph G is bipartite, False if not.
@@ -107,7 +109,8 @@ def is_bipartite(G):
except nx.NetworkXError:
return False
-def is_bipartite_node_set(G,nodes):
+
+def is_bipartite_node_set(G, nodes):
"""Returns True if nodes and G/nodes are a bipartition of G.
Parameters
@@ -130,11 +133,11 @@ def is_bipartite_node_set(G,nodes):
For connected graphs the bipartite sets are unique. This function handles
disconnected graphs.
"""
- S=set(nodes)
+ S = set(nodes)
for CC in nx.connected_component_subgraphs(G):
- X,Y=sets(CC)
- if not ( (X.issubset(S) and Y.isdisjoint(S)) or
- (Y.issubset(S) and X.isdisjoint(S)) ):
+ X, Y = sets(CC)
+ if not ((X.issubset(S) and Y.isdisjoint(S)) or
+ (Y.issubset(S) and X.isdisjoint(S))):
return False
return True
@@ -206,6 +209,7 @@ def sets(G, top_nodes=None):
Y = {n for n, is_top in c.items() if not is_top}
return (X, Y)
+
def density(B, nodes):
"""Return density of bipartite graph B.
@@ -244,19 +248,20 @@ def density(B, nodes):
--------
color
"""
- n=len(B)
- m=nx.number_of_edges(B)
- nb=len(nodes)
- nt=n-nb
- if m==0: # includes cases n==0 and n==1
- d=0.0
+ n = len(B)
+ m = nx.number_of_edges(B)
+ nb = len(nodes)
+ nt = n - nb
+ if m == 0: # includes cases n==0 and n==1
+ d = 0.0
else:
if B.is_directed():
- d=m/(2.0*float(nb*nt))
+ d = m / (2.0 * float(nb * nt))
else:
- d= m/float(nb*nt)
+ d = m / float(nb * nt)
return d
+
def degrees(B, nodes, weight=None):
"""Return the degrees of the two node sets in the bipartite graph B.
@@ -298,7 +303,6 @@ def degrees(B, nodes, weight=None):
--------
color, density
"""
- bottom=set(nodes)
- top=set(B)-bottom
- return (B.degree(top,weight),B.degree(bottom,weight))
-
+ bottom = set(nodes)
+ top = set(B) - bottom
+ return (B.degree(top, weight), B.degree(bottom, weight))
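
A worked instance of the density formula in the hunk above, d = m / (|X| * |Y|) for undirected graphs (halved for directed ones), on a five-node path:

    import networkx as nx
    from networkx.algorithms import bipartite

    G = nx.path_graph(5)                   # bipartite sets {0, 2, 4} and {1, 3}
    X, Y = bipartite.sets(G)
    m = G.number_of_edges()                # 4
    print(m / float(len(X) * len(Y)))      # 4 / 6 = 0.666...
    print(bipartite.density(G, X))         # same value from the function above
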
diff --git a/networkx/algorithms/bipartite/cluster.py b/networkx/algorithms/bipartite/cluster.py
index 73a8a9dc..282a8129 100644
--- a/networkx/algorithms/bipartite/cluster.py
+++ b/networkx/algorithms/bipartite/cluster.py
@@ -1,5 +1,5 @@
#-*- coding: utf-8 -*-
-# Copyright (C) 2011 by
+# Copyright (C) 2011 by
# Jordi Torrents <jtorrents@milnou.net>
# Aric Hagberg <hagberg@lanl.gov>
# All rights reserved.
@@ -8,31 +8,37 @@ import itertools
import networkx as nx
__author__ = """\n""".join(['Jordi Torrents <jtorrents@milnou.net>',
'Aric Hagberg (hagberg@lanl.gov)'])
-__all__ = [ 'clustering',
- 'average_clustering',
- 'latapy_clustering',
- 'robins_alexander_clustering']
+__all__ = ['clustering',
+ 'average_clustering',
+ 'latapy_clustering',
+ 'robins_alexander_clustering']
# functions for computing clustering of pairs
-def cc_dot(nu,nv):
- return float(len(nu & nv))/len(nu | nv)
-def cc_max(nu,nv):
- return float(len(nu & nv))/max(len(nu),len(nv))
-def cc_min(nu,nv):
- return float(len(nu & nv))/min(len(nu),len(nv))
-
-modes={'dot':cc_dot,
- 'min':cc_min,
- 'max':cc_max}
+def cc_dot(nu, nv):
+ return float(len(nu & nv)) / len(nu | nv)
+
+
+def cc_max(nu, nv):
+ return float(len(nu & nv)) / max(len(nu), len(nv))
+
+
+def cc_min(nu, nv):
+ return float(len(nu & nv)) / min(len(nu), len(nv))
+
+
+modes = {'dot': cc_dot,
+ 'min': cc_min,
+ 'max': cc_max}
+
def latapy_clustering(G, nodes=None, mode='dot'):
r"""Compute a bipartite clustering coefficient for nodes.
The bipartite clustering coefficient is a measure of local density
of connections defined as [1]_:
-
+
.. math::
c_u = \frac{\sum_{v \in N(N(u))} c_{uv}}{|N(N(u))|}
@@ -74,7 +80,7 @@ def latapy_clustering(G, nodes=None, mode='dot'):
mode : string
The pairwise bipartite clustering method to be used in the computation.
It must be "dot", "max", or "min".
-
+
Returns
-------
clustering : dictionary
@@ -97,7 +103,7 @@ def latapy_clustering(G, nodes=None, mode='dot'):
robins_alexander_clustering
square_clustering
average_clustering
-
+
References
----------
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
@@ -106,28 +112,30 @@ def latapy_clustering(G, nodes=None, mode='dot'):
"""
if not nx.algorithms.bipartite.is_bipartite(G):
raise nx.NetworkXError("Graph is not bipartite")
-
+
try:
cc_func = modes[mode]
except KeyError:
- raise nx.NetworkXError(\
- "Mode for bipartite clustering must be: dot, min or max")
+ raise nx.NetworkXError(
+ "Mode for bipartite clustering must be: dot, min or max")
if nodes is None:
nodes = G
ccs = {}
for v in nodes:
cc = 0.0
- nbrs2=set([u for nbr in G[v] for u in G[nbr]])-set([v])
+ nbrs2 = set([u for nbr in G[v] for u in G[nbr]]) - set([v])
for u in nbrs2:
- cc += cc_func(set(G[u]),set(G[v]))
- if cc > 0.0: # len(nbrs2)>0
+ cc += cc_func(set(G[u]), set(G[v]))
+ if cc > 0.0: # len(nbrs2)>0
cc /= len(nbrs2)
ccs[v] = cc
return ccs
+
clustering = latapy_clustering
+
def average_clustering(G, nodes=None, mode='dot'):
r"""Compute the average bipartite clustering coefficient.
@@ -136,15 +144,15 @@ def average_clustering(G, nodes=None, mode='dot'):
.. math::
C = \frac{1}{n}\sum_{v \in G} c_v,
-
+
where `n` is the number of nodes in `G`.
Similar measures for the two bipartite sets can be defined [1]_
-
+
.. math::
C_X = \frac{1}{|X|}\sum_{v \in X} c_v,
-
+
where `X` is a bipartite set of `G`.
Parameters
@@ -160,7 +168,7 @@ def average_clustering(G, nodes=None, mode='dot'):
mode : string
The pairwise bipartite clustering method.
It must be "dot", "max", or "min"
-
+
Returns
-------
clustering : float
@@ -182,7 +190,7 @@ def average_clustering(G, nodes=None, mode='dot'):
See Also
--------
clustering
-
+
Notes
-----
The container of nodes passed to this function must contain all of the nodes
@@ -199,9 +207,10 @@ def average_clustering(G, nodes=None, mode='dot'):
Social Networks 30(1), 31--48.
"""
if nodes is None:
- nodes=G
- ccs=latapy_clustering(G, nodes=nodes, mode=mode)
- return float(sum(ccs[v] for v in nodes))/len(nodes)
+ nodes = G
+ ccs = latapy_clustering(G, nodes=nodes, mode=mode)
+ return float(sum(ccs[v] for v in nodes)) / len(nodes)
+
def robins_alexander_clustering(G):
r"""Compute the bipartite clustering of G.
@@ -213,7 +222,7 @@ def robins_alexander_clustering(G):
.. math::
CC_4 = \frac{4 * C_4}{L_3}
-
+
Parameters
----------
G : graph
@@ -235,7 +244,7 @@ def robins_alexander_clustering(G):
--------
latapy_clustering
square_clustering
-
+
References
----------
.. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
@@ -251,6 +260,7 @@ def robins_alexander_clustering(G):
C_4 = _four_cycles(G)
return (4. * C_4) / L_3
+
def _four_cycles(G):
cycles = 0
for v in G:
@@ -258,6 +268,7 @@ def _four_cycles(G):
cycles += len((set(G[u]) & set(G[w])) - set([v]))
return cycles / 4
+
def _threepaths(G):
paths = 0
for v in G:
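
For reference, the pairwise coefficients defined at the top of this hunk compare the neighbourhoods of two nodes in the same bipartite set; cc_dot is |N(u) & N(v)| / |N(u) | N(v)|. A quick usage sketch on a path graph:

    import networkx as nx
    from networkx.algorithms import bipartite

    G = nx.path_graph(4)                          # nodes 0 and 2 share neighbour 1
    nu, nv = set(G[0]), set(G[2])
    print(len(nu & nv) / float(len(nu | nv)))     # 0.5, the "dot" coefficient
    print(bipartite.clustering(G, mode='dot'))    # {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5}
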
diff --git a/networkx/algorithms/bipartite/covering.py b/networkx/algorithms/bipartite/covering.py
index c4201c6d..551dbbeb 100644
--- a/networkx/algorithms/bipartite/covering.py
+++ b/networkx/algorithms/bipartite/covering.py
@@ -54,7 +54,7 @@ def min_edge_cover(G, matching_algorithm=None):
is bounded by the worst-case running time of the function
``matching_algorithm``.
"""
- if G.order() == 0: # Special case for the empty graph
+ if G.order() == 0: # Special case for the empty graph
return set()
if matching_algorithm is None:
matching_algorithm = hopcroft_karp_matching
diff --git a/networkx/algorithms/bipartite/edgelist.py b/networkx/algorithms/bipartite/edgelist.py
index 5b9e0605..ca0b7080 100644
--- a/networkx/algorithms/bipartite/edgelist.py
+++ b/networkx/algorithms/bipartite/edgelist.py
@@ -37,9 +37,10 @@ import networkx as nx
from networkx.utils import open_file, make_str, not_implemented_for
from networkx.convert import _prep_create_using
+
@open_file(1, mode='wb')
def write_edgelist(G, path, comments="#", delimiter=' ', data=True,
- encoding = 'utf-8'):
+ encoding='utf-8'):
"""Write a bipartite graph as a list of edges.
Parameters
@@ -138,22 +139,22 @@ def generate_edgelist(G, delimiter=' ', data=True):
2 3
"""
try:
- part0 = [n for n,d in G.nodes.items() if d['bipartite'] == 0]
+ part0 = [n for n, d in G.nodes.items() if d['bipartite'] == 0]
except:
- raise AttributeError("Missing node attribute `bipartite`")
+ raise AttributeError("Missing node attribute `bipartite`")
if data is True or data is False:
for n in part0:
for e in G.edges(n, data=data):
- yield delimiter.join(map(make_str,e))
+ yield delimiter.join(map(make_str, e))
else:
for n in part0:
- for u,v,d in G.edges(n, data=True):
- e = [u,v]
+ for u, v, d in G.edges(n, data=True):
+ e = [u, v]
try:
e.extend(d[k] for k in data)
except KeyError:
- pass # missing data for this edge, should warn?
- yield delimiter.join(map(make_str,e))
+ pass # missing data for this edge, should warn?
+ yield delimiter.join(map(make_str, e))
def parse_edgelist(lines, comments='#', delimiter=None,
@@ -226,62 +227,62 @@ def parse_edgelist(lines, comments='#', delimiter=None,
from ast import literal_eval
G = _prep_create_using(create_using)
for line in lines:
- p=line.find(comments)
- if p>=0:
+ p = line.find(comments)
+ if p >= 0:
line = line[:p]
if not len(line):
continue
# split line, should have 2 or more
- s=line.strip().split(delimiter)
- if len(s)<2:
+ s = line.strip().split(delimiter)
+ if len(s) < 2:
continue
- u=s.pop(0)
- v=s.pop(0)
- d=s
+ u = s.pop(0)
+ v = s.pop(0)
+ d = s
if nodetype is not None:
try:
- u=nodetype(u)
- v=nodetype(v)
+ u = nodetype(u)
+ v = nodetype(v)
except:
raise TypeError("Failed to convert nodes %s,%s to type %s."
- %(u,v,nodetype))
+ % (u, v, nodetype))
- if len(d)==0 or data is False:
+ if len(d) == 0 or data is False:
# no data or data type specified
- edgedata={}
+ edgedata = {}
elif data is True:
# no edge types specified
- try: # try to evaluate as dictionary
- edgedata=dict(literal_eval(' '.join(d)))
+ try: # try to evaluate as dictionary
+ edgedata = dict(literal_eval(' '.join(d)))
except:
raise TypeError(
- "Failed to convert edge data (%s) to dictionary."%(d))
+ "Failed to convert edge data (%s) to dictionary." % (d))
else:
# convert edge data to dictionary with specified keys and type
- if len(d)!=len(data):
+ if len(d) != len(data):
raise IndexError(
- "Edge data %s and data_keys %s are not the same length"%
+ "Edge data %s and data_keys %s are not the same length" %
(d, data))
- edgedata={}
- for (edge_key,edge_type),edge_value in zip(data,d):
+ edgedata = {}
+ for (edge_key, edge_type), edge_value in zip(data, d):
try:
- edge_value=edge_type(edge_value)
+ edge_value = edge_type(edge_value)
except:
raise TypeError(
"Failed to convert %s data %s to type %s."
- %(edge_key, edge_value, edge_type))
- edgedata.update({edge_key:edge_value})
+ % (edge_key, edge_value, edge_type))
+ edgedata.update({edge_key: edge_value})
G.add_node(u, bipartite=0)
G.add_node(v, bipartite=1)
G.add_edge(u, v, **edgedata)
return G
-@open_file(0,mode='rb')
+@open_file(0, mode='rb')
def read_edgelist(path, comments="#",
- delimiter=None, create_using=None,
- nodetype=None, data=True, edgetype=None,
- encoding='utf-8'):
+ delimiter=None, create_using=None,
+ nodetype=None, data=True, edgetype=None,
+ encoding='utf-8'):
"""Read a bipartite graph from a list of edges.
Parameters
@@ -350,11 +351,8 @@ def read_edgelist(path, comments="#",
types (e.g. int, float, str, frozenset - or tuples of those, etc.)
"""
lines = (line.decode(encoding) for line in path)
- return parse_edgelist(lines,comments=comments,
- delimiter=delimiter,
- create_using=create_using,
- nodetype=nodetype,
- data=data)
-
-
-
+ return parse_edgelist(lines, comments=comments,
+ delimiter=delimiter,
+ create_using=create_using,
+ nodetype=nodetype,
+ data=data)
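
A minimal round trip through the functions patched above; generate_edgelist iterates from the bipartite=0 side, which is why the node attributes are required (a sketch, not part of the patch):

    import io
    import networkx as nx
    from networkx.algorithms import bipartite

    G = nx.Graph()
    G.add_edges_from([(1, 2), (2, 3)])
    G.add_nodes_from([1, 3], bipartite=0)
    G.add_node(2, bipartite=1)
    print(list(bipartite.generate_edgelist(G, data=False)))   # ['1 2', '3 2']

    buf = io.BytesIO()
    bipartite.write_edgelist(G, buf, data=False)
    buf.seek(0)
    H = bipartite.read_edgelist(buf, nodetype=int)
    print(sorted(H.edges()))                                  # [(1, 2), (2, 3)]
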
diff --git a/networkx/algorithms/bipartite/projection.py b/networkx/algorithms/bipartite/projection.py
index 29b8ad2e..92bade4c 100644
--- a/networkx/algorithms/bipartite/projection.py
+++ b/networkx/algorithms/bipartite/projection.py
@@ -512,5 +512,6 @@ def generic_weighted_projected_graph(B, nodes, weight_function=None):
G.add_edge(u, v, weight=weight)
return G
+
def project(B, nodes, create_using=None):
return projected_graph(B, nodes)
diff --git a/networkx/algorithms/bipartite/spectral.py b/networkx/algorithms/bipartite/spectral.py
index 2b288400..ad6cd9bf 100644
--- a/networkx/algorithms/bipartite/spectral.py
+++ b/networkx/algorithms/bipartite/spectral.py
@@ -4,7 +4,7 @@ Spectral bipartivity measure.
"""
import networkx as nx
__author__ = """Aric Hagberg (hagberg@lanl.gov)"""
-# Copyright (C) 2011 by
+# Copyright (C) 2011 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
@@ -12,6 +12,7 @@ __author__ = """Aric Hagberg (hagberg@lanl.gov)"""
# BSD license.
__all__ = ['spectral_bipartivity']
+
def spectral_bipartivity(G, nodes=None, weight='weight'):
"""Returns the spectral bipartivity.
@@ -31,7 +32,7 @@ def spectral_bipartivity(G, nodes=None, weight='weight'):
A single number if the keyword nodes is not specified, or
a dictionary keyed by node with the spectral bipartivity contribution
of that node as the value.
-
+
Examples
--------
>>> from networkx.algorithms import bipartite
@@ -58,7 +59,7 @@ def spectral_bipartivity(G, nodes=None, weight='weight'):
except ImportError:
raise ImportError('spectral_bipartivity() requires SciPy: ',
'http://scipy.org/')
- nodelist = list(G) # ordering of nodes in matrix
+ nodelist = list(G) # ordering of nodes in matrix
A = nx.to_numpy_matrix(G, nodelist, weight=weight)
expA = scipy.linalg.expm(A)
expmA = scipy.linalg.expm(-A)
@@ -75,6 +76,7 @@ def spectral_bipartivity(G, nodes=None, weight='weight'):
sb[n] = coshA[i, i] / expA[i, i]
return sb
+
def setup_module(module):
"""Fixture for nose tests."""
from nose import SkipTest
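
In eigenvalue form the quantity computed above is sb(G) = sum_j cosh(lambda_j) / sum_j exp(lambda_j), which equals 1 exactly when the adjacency spectrum is symmetric, i.e. when G is bipartite. A small numerical sketch (the library call still needs SciPy, as the docstring notes):

    import numpy as np
    import networkx as nx
    from networkx.algorithms import bipartite

    G = nx.path_graph(4)
    lam = np.linalg.eigvalsh(nx.to_numpy_matrix(G))
    print(np.cosh(lam).sum() / np.exp(lam).sum())   # ~1.0 for a bipartite path
    print(bipartite.spectral_bipartivity(G))        # library value
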
diff --git a/networkx/algorithms/bipartite/tests/test_basic.py b/networkx/algorithms/bipartite/tests/test_basic.py
index d33fe6bc..cbd0738e 100644
--- a/networkx/algorithms/bipartite/tests/test_basic.py
+++ b/networkx/algorithms/bipartite/tests/test_basic.py
@@ -5,23 +5,22 @@ from nose.plugins.attrib import attr
import networkx as nx
from networkx.algorithms import bipartite
+
class TestBipartiteBasic:
def test_is_bipartite(self):
assert_true(bipartite.is_bipartite(nx.path_graph(4)))
- assert_true(bipartite.is_bipartite(nx.DiGraph([(1,0)])))
+ assert_true(bipartite.is_bipartite(nx.DiGraph([(1, 0)])))
assert_false(bipartite.is_bipartite(nx.complete_graph(3)))
-
def test_bipartite_color(self):
- G=nx.path_graph(4)
- c=bipartite.color(G)
- assert_equal(c,{0: 1, 1: 0, 2: 1, 3: 0})
+ G = nx.path_graph(4)
+ c = bipartite.color(G)
+ assert_equal(c, {0: 1, 1: 0, 2: 1, 3: 0})
@raises(nx.NetworkXError)
def test_not_bipartite_color(self):
- c=bipartite.color(nx.complete_graph(4))
-
+ c = bipartite.color(nx.complete_graph(4))
def test_bipartite_directed(self):
G = bipartite.random_graph(10, 10, 0.1, directed=True)
@@ -41,7 +40,7 @@ class TestBipartiteBasic:
assert_equal(Y, {1, 3})
def test_bipartite_sets_given_top_nodes(self):
- G=nx.path_graph(4)
+ G = nx.path_graph(4)
top_nodes = [0, 2]
X, Y = bipartite.sets(G, top_nodes)
assert_equal(X, {0, 2})
@@ -54,45 +53,44 @@ class TestBipartiteBasic:
X, Y = bipartite.sets(G)
def test_is_bipartite_node_set(self):
- G=nx.path_graph(4)
- assert_true(bipartite.is_bipartite_node_set(G,[0,2]))
- assert_true(bipartite.is_bipartite_node_set(G,[1,3]))
- assert_false(bipartite.is_bipartite_node_set(G,[1,2]))
+ G = nx.path_graph(4)
+ assert_true(bipartite.is_bipartite_node_set(G, [0, 2]))
+ assert_true(bipartite.is_bipartite_node_set(G, [1, 3]))
+ assert_false(bipartite.is_bipartite_node_set(G, [1, 2]))
G.add_edge(10, 20)
- assert_true(bipartite.is_bipartite_node_set(G,[0,2,10]))
- assert_true(bipartite.is_bipartite_node_set(G,[0,2,20]))
- assert_true(bipartite.is_bipartite_node_set(G,[1,3,10]))
- assert_true(bipartite.is_bipartite_node_set(G,[1,3,20]))
+ assert_true(bipartite.is_bipartite_node_set(G, [0, 2, 10]))
+ assert_true(bipartite.is_bipartite_node_set(G, [0, 2, 20]))
+ assert_true(bipartite.is_bipartite_node_set(G, [1, 3, 10]))
+ assert_true(bipartite.is_bipartite_node_set(G, [1, 3, 20]))
def test_bipartite_density(self):
- G=nx.path_graph(5)
- X,Y=bipartite.sets(G)
- density=float(len(list(G.edges())))/(len(X)*len(Y))
- assert_equal(bipartite.density(G,X),density)
+ G = nx.path_graph(5)
+ X, Y = bipartite.sets(G)
+ density = float(len(list(G.edges()))) / (len(X) * len(Y))
+ assert_equal(bipartite.density(G, X), density)
D = nx.DiGraph(G.edges())
- assert_equal(bipartite.density(D,X),density/2.0)
- assert_equal(bipartite.density(nx.Graph(),{}),0.0)
+ assert_equal(bipartite.density(D, X), density / 2.0)
+ assert_equal(bipartite.density(nx.Graph(), {}), 0.0)
def test_bipartite_degrees(self):
- G=nx.path_graph(5)
- X=set([1,3])
- Y=set([0,2,4])
- u,d=bipartite.degrees(G,Y)
- assert_equal(dict(u), {1:2,3:2})
- assert_equal(dict(d), {0:1,2:2,4:1})
+ G = nx.path_graph(5)
+ X = set([1, 3])
+ Y = set([0, 2, 4])
+ u, d = bipartite.degrees(G, Y)
+ assert_equal(dict(u), {1: 2, 3: 2})
+ assert_equal(dict(d), {0: 1, 2: 2, 4: 1})
def test_bipartite_weighted_degrees(self):
- G=nx.path_graph(5)
- G.add_edge(0,1,weight=0.1,other=0.2)
- X=set([1,3])
- Y=set([0,2,4])
- u,d=bipartite.degrees(G,Y,weight='weight')
- assert_equal(dict(u), {1:1.1,3:2})
- assert_equal(dict(d), {0:0.1,2:2,4:1})
- u,d=bipartite.degrees(G,Y,weight='other')
- assert_equal(dict(u), {1:1.2,3:2})
- assert_equal(dict(d), {0:0.2,2:2,4:1})
-
+ G = nx.path_graph(5)
+ G.add_edge(0, 1, weight=0.1, other=0.2)
+ X = set([1, 3])
+ Y = set([0, 2, 4])
+ u, d = bipartite.degrees(G, Y, weight='weight')
+ assert_equal(dict(u), {1: 1.1, 3: 2})
+ assert_equal(dict(d), {0: 0.1, 2: 2, 4: 1})
+ u, d = bipartite.degrees(G, Y, weight='other')
+ assert_equal(dict(u), {1: 1.2, 3: 2})
+ assert_equal(dict(d), {0: 0.2, 2: 2, 4: 1})
@attr('numpy')
def test_biadjacency_matrix_weight(self):
@@ -100,14 +98,14 @@ class TestBipartiteBasic:
import scipy
except ImportError:
raise SkipTest('SciPy not available.')
- G=nx.path_graph(5)
- G.add_edge(0,1,weight=2,other=4)
- X=[1,3]
- Y=[0,2,4]
- M = bipartite.biadjacency_matrix(G,X,weight='weight')
- assert_equal(M[0,0], 2)
+ G = nx.path_graph(5)
+ G.add_edge(0, 1, weight=2, other=4)
+ X = [1, 3]
+ Y = [0, 2, 4]
+ M = bipartite.biadjacency_matrix(G, X, weight='weight')
+ assert_equal(M[0, 0], 2)
M = bipartite.biadjacency_matrix(G, X, weight='other')
- assert_equal(M[0,0], 4)
+ assert_equal(M[0, 0], 4)
@attr('numpy')
def test_biadjacency_matrix(self):
@@ -115,14 +113,14 @@ class TestBipartiteBasic:
import scipy
except ImportError:
raise SkipTest('SciPy not available.')
- tops = [2,5,10]
- bots = [5,10,15]
+ tops = [2, 5, 10]
+ bots = [5, 10, 15]
for i in range(len(tops)):
G = bipartite.random_graph(tops[i], bots[i], 0.2)
- top = [n for n,d in G.nodes(data=True) if d['bipartite']==0]
+ top = [n for n, d in G.nodes(data=True) if d['bipartite'] == 0]
M = bipartite.biadjacency_matrix(G, top)
- assert_equal(M.shape[0],tops[i])
- assert_equal(M.shape[1],bots[i])
+ assert_equal(M.shape[0], tops[i])
+ assert_equal(M.shape[1], bots[i])
@attr('numpy')
def test_biadjacency_matrix_order(self):
@@ -130,9 +128,9 @@ class TestBipartiteBasic:
import scipy
except ImportError:
raise SkipTest('SciPy not available.')
- G=nx.path_graph(5)
- G.add_edge(0,1,weight=2)
- X=[3,1]
- Y=[4,2,0]
- M = bipartite.biadjacency_matrix(G,X,Y,weight='weight')
- assert_equal(M[1,2], 2)
+ G = nx.path_graph(5)
+ G.add_edge(0, 1, weight=2)
+ X = [3, 1]
+ Y = [4, 2, 0]
+ M = bipartite.biadjacency_matrix(G, X, Y, weight='weight')
+ assert_equal(M[1, 2], 2)
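
The biadjacency tests above only check shapes and single entries; the matrix itself has one row per node in the given row order and one column per remaining node (SciPy required, as in the guarded tests). A short sketch:

    import networkx as nx
    from networkx.algorithms import bipartite

    G = nx.path_graph(5)                         # bipartite sets {1, 3} and {0, 2, 4}
    M = bipartite.biadjacency_matrix(G, [1, 3])  # rows for 1 and 3
    print(M.shape)                               # (2, 3)
    print(M.todense())                           # 1 where a row node meets a column node
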
diff --git a/networkx/algorithms/bipartite/tests/test_centrality.py b/networkx/algorithms/bipartite/tests/test_centrality.py
index 992d643e..2745915f 100644
--- a/networkx/algorithms/bipartite/tests/test_centrality.py
+++ b/networkx/algorithms/bipartite/tests/test_centrality.py
@@ -2,46 +2,47 @@ from nose.tools import *
import networkx as nx
from networkx.algorithms import bipartite
+
class TestBipartiteCentrality(object):
def setUp(self):
self.P4 = nx.path_graph(4)
- self.K3 = nx.complete_bipartite_graph(3,3)
+ self.K3 = nx.complete_bipartite_graph(3, 3)
self.C4 = nx.cycle_graph(4)
self.davis = nx.davis_southern_women_graph()
- self.top_nodes = [n for n,d in self.davis.nodes(data=True)
- if d['bipartite']==0]
+ self.top_nodes = [n for n, d in self.davis.nodes(data=True)
+ if d['bipartite'] == 0]
def test_degree_centrality(self):
- d = bipartite.degree_centrality(self.P4, [1,3])
+ d = bipartite.degree_centrality(self.P4, [1, 3])
answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5}
assert_equal(d, answer)
- d = bipartite.degree_centrality(self.K3, [0,1,2])
+ d = bipartite.degree_centrality(self.K3, [0, 1, 2])
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
assert_equal(d, answer)
- d = bipartite.degree_centrality(self.C4, [0,2])
+ d = bipartite.degree_centrality(self.C4, [0, 2])
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
- assert_equal(d,answer)
+ assert_equal(d, answer)
def test_betweenness_centrality(self):
- c = bipartite.betweenness_centrality(self.P4, [1,3])
+ c = bipartite.betweenness_centrality(self.P4, [1, 3])
answer = {0: 0.0, 1: 1.0, 2: 1.0, 3: 0.0}
assert_equal(c, answer)
- c = bipartite.betweenness_centrality(self.K3, [0,1,2])
+ c = bipartite.betweenness_centrality(self.K3, [0, 1, 2])
answer = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, 4: 0.125, 5: 0.125}
assert_equal(c, answer)
- c = bipartite.betweenness_centrality(self.C4, [0,2])
+ c = bipartite.betweenness_centrality(self.C4, [0, 2])
answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
assert_equal(c, answer)
def test_closeness_centrality(self):
- c = bipartite.closeness_centrality(self.P4, [1,3])
- answer = {0: 2.0/3, 1: 1.0, 2: 1.0, 3:2.0/3}
+ c = bipartite.closeness_centrality(self.P4, [1, 3])
+ answer = {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3}
assert_equal(c, answer)
- c = bipartite.closeness_centrality(self.K3, [0,1,2])
+ c = bipartite.closeness_centrality(self.K3, [0, 1, 2])
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
assert_equal(c, answer)
- c = bipartite.closeness_centrality(self.C4, [0,2])
+ c = bipartite.closeness_centrality(self.C4, [0, 2])
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
assert_equal(c, answer)
G = nx.Graph()
@@ -55,115 +56,113 @@ class TestBipartiteCentrality(object):
def test_davis_degree_centrality(self):
G = self.davis
deg = bipartite.degree_centrality(G, self.top_nodes)
- answer = {'E8':0.78,
- 'E9':0.67,
- 'E7':0.56,
- 'Nora Fayette':0.57,
- 'Evelyn Jefferson':0.57,
- 'Theresa Anderson':0.57,
- 'E6':0.44,
- 'Sylvia Avondale':0.50,
- 'Laura Mandeville':0.50,
- 'Brenda Rogers':0.50,
- 'Katherina Rogers':0.43,
- 'E5':0.44,
- 'Helen Lloyd':0.36,
- 'E3':0.33,
- 'Ruth DeSand':0.29,
- 'Verne Sanderson':0.29,
- 'E12':0.33,
- 'Myra Liddel':0.29,
- 'E11':0.22,
- 'Eleanor Nye':0.29,
- 'Frances Anderson':0.29,
- 'Pearl Oglethorpe':0.21,
- 'E4':0.22,
- 'Charlotte McDowd':0.29,
- 'E10':0.28,
- 'Olivia Carleton':0.14,
- 'Flora Price':0.14,
- 'E2':0.17,
- 'E1':0.17,
- 'Dorothy Murchison':0.14,
- 'E13':0.17,
- 'E14':0.17}
+ answer = {'E8': 0.78,
+ 'E9': 0.67,
+ 'E7': 0.56,
+ 'Nora Fayette': 0.57,
+ 'Evelyn Jefferson': 0.57,
+ 'Theresa Anderson': 0.57,
+ 'E6': 0.44,
+ 'Sylvia Avondale': 0.50,
+ 'Laura Mandeville': 0.50,
+ 'Brenda Rogers': 0.50,
+ 'Katherina Rogers': 0.43,
+ 'E5': 0.44,
+ 'Helen Lloyd': 0.36,
+ 'E3': 0.33,
+ 'Ruth DeSand': 0.29,
+ 'Verne Sanderson': 0.29,
+ 'E12': 0.33,
+ 'Myra Liddel': 0.29,
+ 'E11': 0.22,
+ 'Eleanor Nye': 0.29,
+ 'Frances Anderson': 0.29,
+ 'Pearl Oglethorpe': 0.21,
+ 'E4': 0.22,
+ 'Charlotte McDowd': 0.29,
+ 'E10': 0.28,
+ 'Olivia Carleton': 0.14,
+ 'Flora Price': 0.14,
+ 'E2': 0.17,
+ 'E1': 0.17,
+ 'Dorothy Murchison': 0.14,
+ 'E13': 0.17,
+ 'E14': 0.17}
for node, value in answer.items():
assert_almost_equal(value, deg[node], places=2)
def test_davis_betweenness_centrality(self):
G = self.davis
bet = bipartite.betweenness_centrality(G, self.top_nodes)
- answer = {'E8':0.24,
- 'E9':0.23,
- 'E7':0.13,
- 'Nora Fayette':0.11,
- 'Evelyn Jefferson':0.10,
- 'Theresa Anderson':0.09,
- 'E6':0.07,
- 'Sylvia Avondale':0.07,
- 'Laura Mandeville':0.05,
- 'Brenda Rogers':0.05,
- 'Katherina Rogers':0.05,
- 'E5':0.04,
- 'Helen Lloyd':0.04,
- 'E3':0.02,
- 'Ruth DeSand':0.02,
- 'Verne Sanderson':0.02,
- 'E12':0.02,
- 'Myra Liddel':0.02,
- 'E11':0.02,
- 'Eleanor Nye':0.01,
- 'Frances Anderson':0.01,
- 'Pearl Oglethorpe':0.01,
- 'E4':0.01,
- 'Charlotte McDowd':0.01,
- 'E10':0.01,
- 'Olivia Carleton':0.01,
- 'Flora Price':0.01,
- 'E2':0.00,
- 'E1':0.00,
- 'Dorothy Murchison':0.00,
- 'E13':0.00,
- 'E14':0.00}
+ answer = {'E8': 0.24,
+ 'E9': 0.23,
+ 'E7': 0.13,
+ 'Nora Fayette': 0.11,
+ 'Evelyn Jefferson': 0.10,
+ 'Theresa Anderson': 0.09,
+ 'E6': 0.07,
+ 'Sylvia Avondale': 0.07,
+ 'Laura Mandeville': 0.05,
+ 'Brenda Rogers': 0.05,
+ 'Katherina Rogers': 0.05,
+ 'E5': 0.04,
+ 'Helen Lloyd': 0.04,
+ 'E3': 0.02,
+ 'Ruth DeSand': 0.02,
+ 'Verne Sanderson': 0.02,
+ 'E12': 0.02,
+ 'Myra Liddel': 0.02,
+ 'E11': 0.02,
+ 'Eleanor Nye': 0.01,
+ 'Frances Anderson': 0.01,
+ 'Pearl Oglethorpe': 0.01,
+ 'E4': 0.01,
+ 'Charlotte McDowd': 0.01,
+ 'E10': 0.01,
+ 'Olivia Carleton': 0.01,
+ 'Flora Price': 0.01,
+ 'E2': 0.00,
+ 'E1': 0.00,
+ 'Dorothy Murchison': 0.00,
+ 'E13': 0.00,
+ 'E14': 0.00}
for node, value in answer.items():
assert_almost_equal(value, bet[node], places=2)
def test_davis_closeness_centrality(self):
G = self.davis
clos = bipartite.closeness_centrality(G, self.top_nodes)
- answer = {'E8':0.85,
- 'E9':0.79,
- 'E7':0.73,
- 'Nora Fayette':0.80,
- 'Evelyn Jefferson':0.80,
- 'Theresa Anderson':0.80,
- 'E6':0.69,
- 'Sylvia Avondale':0.77,
- 'Laura Mandeville':0.73,
- 'Brenda Rogers':0.73,
- 'Katherina Rogers':0.73,
- 'E5':0.59,
- 'Helen Lloyd':0.73,
- 'E3':0.56,
- 'Ruth DeSand':0.71,
- 'Verne Sanderson':0.71,
- 'E12':0.56,
- 'Myra Liddel':0.69,
- 'E11':0.54,
- 'Eleanor Nye':0.67,
- 'Frances Anderson':0.67,
- 'Pearl Oglethorpe':0.67,
- 'E4':0.54,
- 'Charlotte McDowd':0.60,
- 'E10':0.55,
- 'Olivia Carleton':0.59,
- 'Flora Price':0.59,
- 'E2':0.52,
- 'E1':0.52,
- 'Dorothy Murchison':0.65,
- 'E13':0.52,
- 'E14':0.52}
+ answer = {'E8': 0.85,
+ 'E9': 0.79,
+ 'E7': 0.73,
+ 'Nora Fayette': 0.80,
+ 'Evelyn Jefferson': 0.80,
+ 'Theresa Anderson': 0.80,
+ 'E6': 0.69,
+ 'Sylvia Avondale': 0.77,
+ 'Laura Mandeville': 0.73,
+ 'Brenda Rogers': 0.73,
+ 'Katherina Rogers': 0.73,
+ 'E5': 0.59,
+ 'Helen Lloyd': 0.73,
+ 'E3': 0.56,
+ 'Ruth DeSand': 0.71,
+ 'Verne Sanderson': 0.71,
+ 'E12': 0.56,
+ 'Myra Liddel': 0.69,
+ 'E11': 0.54,
+ 'Eleanor Nye': 0.67,
+ 'Frances Anderson': 0.67,
+ 'Pearl Oglethorpe': 0.67,
+ 'E4': 0.54,
+ 'Charlotte McDowd': 0.60,
+ 'E10': 0.55,
+ 'Olivia Carleton': 0.59,
+ 'Flora Price': 0.59,
+ 'E2': 0.52,
+ 'E1': 0.52,
+ 'Dorothy Murchison': 0.65,
+ 'E13': 0.52,
+ 'E14': 0.52}
for node, value in answer.items():
assert_almost_equal(value, clos[node], places=2)
-
-
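
The degree-centrality answers above come from the bipartite normalization: each node's degree is divided by the size of the opposite node set rather than by n - 1. A sketch of that rule for the P4 case (the dict comprehension is mine, not the library code):

    import networkx as nx
    from networkx.algorithms import bipartite

    G = nx.path_graph(4)
    top = {1, 3}
    bottom = set(G) - top
    manual = {n: G.degree(n) / float(len(bottom) if n in top else len(top)) for n in G}
    print(manual)                               # {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5}
    print(bipartite.degree_centrality(G, top))  # same values
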
diff --git a/networkx/algorithms/bipartite/tests/test_cluster.py b/networkx/algorithms/bipartite/tests/test_cluster.py
index aa158f9c..ce0c1e88 100644
--- a/networkx/algorithms/bipartite/tests/test_cluster.py
+++ b/networkx/algorithms/bipartite/tests/test_cluster.py
@@ -1,70 +1,79 @@
import networkx as nx
from nose.tools import *
-from networkx.algorithms.bipartite.cluster import cc_dot,cc_min,cc_max
+from networkx.algorithms.bipartite.cluster import cc_dot, cc_min, cc_max
import networkx.algorithms.bipartite as bipartite
+
def test_pairwise_bipartite_cc_functions():
# Test functions for different kinds of bipartite clustering coefficients
- # between pairs of nodes using 3 example graphs from figure 5 p. 40
+ # between pairs of nodes using 3 example graphs from figure 5 p. 40
# Latapy et al (2008)
- G1 = nx.Graph([(0,2),(0,3),(0,4),(0,5),(0,6),(1,5),(1,6),(1,7)])
- G2 = nx.Graph([(0,2),(0,3),(0,4),(1,3),(1,4),(1,5)])
- G3 = nx.Graph([(0,2),(0,3),(0,4),(0,5),(0,6),(1,5),(1,6),(1,7),(1,8),(1,9)])
- result = {0:[1/3.0, 2/3.0, 2/5.0],
- 1:[1/2.0, 2/3.0, 2/3.0],
- 2:[2/8.0, 2/5.0, 2/5.0]}
+ G1 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7)])
+ G2 = nx.Graph([(0, 2), (0, 3), (0, 4), (1, 3), (1, 4), (1, 5)])
+ G3 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)])
+ result = {0: [1 / 3.0, 2 / 3.0, 2 / 5.0],
+ 1: [1 / 2.0, 2 / 3.0, 2 / 3.0],
+ 2: [2 / 8.0, 2 / 5.0, 2 / 5.0]}
for i, G in enumerate([G1, G2, G3]):
assert(bipartite.is_bipartite(G))
assert(cc_dot(set(G[0]), set(G[1])) == result[i][0])
assert(cc_min(set(G[0]), set(G[1])) == result[i][1])
assert(cc_max(set(G[0]), set(G[1])) == result[i][2])
+
def test_star_graph():
- G=nx.star_graph(3)
+ G = nx.star_graph(3)
# all modes are the same
- answer={0:0,1:1,2:1,3:1}
- assert_equal(bipartite.clustering(G,mode='dot'),answer)
- assert_equal(bipartite.clustering(G,mode='min'),answer)
- assert_equal(bipartite.clustering(G,mode='max'),answer)
+ answer = {0: 0, 1: 1, 2: 1, 3: 1}
+ assert_equal(bipartite.clustering(G, mode='dot'), answer)
+ assert_equal(bipartite.clustering(G, mode='min'), answer)
+ assert_equal(bipartite.clustering(G, mode='max'), answer)
+
@raises(nx.NetworkXError)
def test_not_bipartite():
bipartite.clustering(nx.complete_graph(4))
+
@raises(nx.NetworkXError)
def test_bad_mode():
- bipartite.clustering(nx.path_graph(4),mode='foo')
+ bipartite.clustering(nx.path_graph(4), mode='foo')
+
def test_path_graph():
- G=nx.path_graph(4)
- answer={0:0.5,1:0.5,2:0.5,3:0.5}
- assert_equal(bipartite.clustering(G,mode='dot'),answer)
- assert_equal(bipartite.clustering(G,mode='max'),answer)
- answer={0:1,1:1,2:1,3:1}
- assert_equal(bipartite.clustering(G,mode='min'),answer)
+ G = nx.path_graph(4)
+ answer = {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5}
+ assert_equal(bipartite.clustering(G, mode='dot'), answer)
+ assert_equal(bipartite.clustering(G, mode='max'), answer)
+ answer = {0: 1, 1: 1, 2: 1, 3: 1}
+ assert_equal(bipartite.clustering(G, mode='min'), answer)
+
def test_average_path_graph():
- G=nx.path_graph(4)
- assert_equal(bipartite.average_clustering(G,mode='dot'),0.5)
- assert_equal(bipartite.average_clustering(G,mode='max'),0.5)
- assert_equal(bipartite.average_clustering(G,mode='min'),1)
+ G = nx.path_graph(4)
+ assert_equal(bipartite.average_clustering(G, mode='dot'), 0.5)
+ assert_equal(bipartite.average_clustering(G, mode='max'), 0.5)
+ assert_equal(bipartite.average_clustering(G, mode='min'), 1)
+
def test_ra_clustering_davis():
G = nx.davis_southern_women_graph()
cc4 = round(bipartite.robins_alexander_clustering(G), 3)
assert_equal(cc4, 0.468)
+
def test_ra_clustering_square():
G = nx.path_graph(4)
G.add_edge(0, 3)
assert_equal(bipartite.robins_alexander_clustering(G), 1.0)
+
def test_ra_clustering_zero():
G = nx.Graph()
assert_equal(bipartite.robins_alexander_clustering(G), 0)
G.add_nodes_from(range(4))
assert_equal(bipartite.robins_alexander_clustering(G), 0)
- G.add_edges_from([(0,1),(2,3),(3,4)])
+ G.add_edges_from([(0, 1), (2, 3), (3, 4)])
assert_equal(bipartite.robins_alexander_clustering(G), 0)
- G.add_edge(1,2)
+ G.add_edge(1, 2)
assert_equal(bipartite.robins_alexander_clustering(G), 0)
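
test_ra_clustering_square above builds a single 4-cycle, the simplest instance of CC_4 = 4 * C_4 / L_3 from the cluster.py hunk: one 4-cycle and (by my count) four paths of three edges, hence 4 * 1 / 4 = 1.0. The same value, spelled out:

    import networkx as nx
    from networkx.algorithms import bipartite

    G = nx.cycle_graph(4)                            # same graph as path_graph(4) + edge (0, 3)
    print(bipartite.robins_alexander_clustering(G))  # 4 * 1 / 4 = 1.0
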
diff --git a/networkx/algorithms/bipartite/tests/test_edgelist.py b/networkx/algorithms/bipartite/tests/test_edgelist.py
index 419f978e..896a818a 100644
--- a/networkx/algorithms/bipartite/tests/test_edgelist.py
+++ b/networkx/algorithms/bipartite/tests/test_edgelist.py
@@ -8,23 +8,24 @@ import os
import networkx as nx
from networkx.testing import (assert_edges_equal, assert_nodes_equal,
- assert_graphs_equal)
+ assert_graphs_equal)
from networkx.algorithms import bipartite
+
class TestEdgelist:
def setUp(self):
- self.G=nx.Graph(name="test")
- e=[('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')]
+ self.G = nx.Graph(name="test")
+ e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')]
self.G.add_edges_from(e)
- self.G.add_nodes_from(['a','c','e'],bipartite=0)
- self.G.add_nodes_from(['b','d','f'],bipartite=1)
- self.G.add_node('g',bipartite=0)
- self.DG=nx.DiGraph(self.G)
- self.MG=nx.MultiGraph()
- self.MG.add_edges_from([(1,2),(1,2),(1,2)])
- self.MG.add_node(1,bipartite=0)
- self.MG.add_node(2,bipartite=1)
+ self.G.add_nodes_from(['a', 'c', 'e'], bipartite=0)
+ self.G.add_nodes_from(['b', 'd', 'f'], bipartite=1)
+ self.G.add_node('g', bipartite=0)
+ self.DG = nx.DiGraph(self.G)
+ self.MG = nx.MultiGraph()
+ self.MG.add_edges_from([(1, 2), (1, 2), (1, 2)])
+ self.MG.add_node(1, bipartite=0)
+ self.MG.add_node(2, bipartite=1)
def test_read_edgelist_1(self):
s = b"""\
@@ -34,8 +35,8 @@ class TestEdgelist:
2 3
"""
bytesIO = io.BytesIO(s)
- G = bipartite.read_edgelist(bytesIO,nodetype=int)
- assert_edges_equal(G.edges(),[(1,2),(2,3)])
+ G = bipartite.read_edgelist(bytesIO, nodetype=int)
+ assert_edges_equal(G.edges(), [(1, 2), (2, 3)])
def test_read_edgelist_3(self):
s = b"""\
@@ -45,71 +46,71 @@ class TestEdgelist:
2 3 {'weight':3.0}
"""
bytesIO = io.BytesIO(s)
- G = bipartite.read_edgelist(bytesIO,nodetype=int,data=False)
- assert_edges_equal(G.edges(),[(1,2),(2,3)])
+ G = bipartite.read_edgelist(bytesIO, nodetype=int, data=False)
+ assert_edges_equal(G.edges(), [(1, 2), (2, 3)])
bytesIO = io.BytesIO(s)
- G = bipartite.read_edgelist(bytesIO,nodetype=int,data=True)
+ G = bipartite.read_edgelist(bytesIO, nodetype=int, data=True)
assert_edges_equal(G.edges(data=True),
- [(1,2,{'weight':2.0}),(2,3,{'weight':3.0})])
+ [(1, 2, {'weight': 2.0}), (2, 3, {'weight': 3.0})])
def test_write_edgelist_1(self):
- fh=io.BytesIO()
- G=nx.Graph()
- G.add_edges_from([(1,2),(2,3)])
- G.add_node(1,bipartite=0)
- G.add_node(2,bipartite=1)
- G.add_node(3,bipartite=0)
- bipartite.write_edgelist(G,fh,data=False)
+ fh = io.BytesIO()
+ G = nx.Graph()
+ G.add_edges_from([(1, 2), (2, 3)])
+ G.add_node(1, bipartite=0)
+ G.add_node(2, bipartite=1)
+ G.add_node(3, bipartite=0)
+ bipartite.write_edgelist(G, fh, data=False)
fh.seek(0)
- assert_equal(fh.read(),b"1 2\n3 2\n")
+ assert_equal(fh.read(), b"1 2\n3 2\n")
def test_write_edgelist_2(self):
- fh=io.BytesIO()
- G=nx.Graph()
- G.add_edges_from([(1,2),(2,3)])
- G.add_node(1,bipartite=0)
- G.add_node(2,bipartite=1)
- G.add_node(3,bipartite=0)
- bipartite.write_edgelist(G,fh,data=True)
+ fh = io.BytesIO()
+ G = nx.Graph()
+ G.add_edges_from([(1, 2), (2, 3)])
+ G.add_node(1, bipartite=0)
+ G.add_node(2, bipartite=1)
+ G.add_node(3, bipartite=0)
+ bipartite.write_edgelist(G, fh, data=True)
fh.seek(0)
- assert_equal(fh.read(),b"1 2 {}\n3 2 {}\n")
+ assert_equal(fh.read(), b"1 2 {}\n3 2 {}\n")
def test_write_edgelist_3(self):
- fh=io.BytesIO()
- G=nx.Graph()
- G.add_edge(1,2,weight=2.0)
- G.add_edge(2,3,weight=3.0)
- G.add_node(1,bipartite=0)
- G.add_node(2,bipartite=1)
- G.add_node(3,bipartite=0)
- bipartite.write_edgelist(G,fh,data=True)
+ fh = io.BytesIO()
+ G = nx.Graph()
+ G.add_edge(1, 2, weight=2.0)
+ G.add_edge(2, 3, weight=3.0)
+ G.add_node(1, bipartite=0)
+ G.add_node(2, bipartite=1)
+ G.add_node(3, bipartite=0)
+ bipartite.write_edgelist(G, fh, data=True)
fh.seek(0)
- assert_equal(fh.read(),b"1 2 {'weight': 2.0}\n3 2 {'weight': 3.0}\n")
+ assert_equal(fh.read(), b"1 2 {'weight': 2.0}\n3 2 {'weight': 3.0}\n")
def test_write_edgelist_4(self):
- fh=io.BytesIO()
- G=nx.Graph()
- G.add_edge(1,2,weight=2.0)
- G.add_edge(2,3,weight=3.0)
- G.add_node(1,bipartite=0)
- G.add_node(2,bipartite=1)
- G.add_node(3,bipartite=0)
- bipartite.write_edgelist(G,fh,data=[('weight')])
+ fh = io.BytesIO()
+ G = nx.Graph()
+ G.add_edge(1, 2, weight=2.0)
+ G.add_edge(2, 3, weight=3.0)
+ G.add_node(1, bipartite=0)
+ G.add_node(2, bipartite=1)
+ G.add_node(3, bipartite=0)
+ bipartite.write_edgelist(G, fh, data=[('weight')])
fh.seek(0)
- assert_equal(fh.read(),b"1 2 2.0\n3 2 3.0\n")
+ assert_equal(fh.read(), b"1 2 2.0\n3 2 3.0\n")
def test_unicode(self):
G = nx.Graph()
- try: # Python 3.x
+ try: # Python 3.x
name1 = chr(2344) + chr(123) + chr(6543)
name2 = chr(5543) + chr(1543) + chr(324)
- except ValueError: # Python 2.6+
+ except ValueError: # Python 2.6+
name1 = unichr(2344) + unichr(123) + unichr(6543)
name2 = unichr(5543) + unichr(1543) + unichr(324)
G.add_edge(name1, 'Radiohead', **{name2: 3})
- G.add_node(name1,bipartite=0)
- G.add_node('Radiohead',bipartite=1)
+ G.add_node(name1, bipartite=0)
+ G.add_node('Radiohead', bipartite=1)
fd, fname = tempfile.mkstemp()
bipartite.write_edgelist(G, fname)
H = bipartite.read_edgelist(fname)
@@ -119,59 +120,59 @@ class TestEdgelist:
def test_latin1_issue(self):
G = nx.Graph()
- try: # Python 3.x
+ try: # Python 3.x
name1 = chr(2344) + chr(123) + chr(6543)
name2 = chr(5543) + chr(1543) + chr(324)
- except ValueError: # Python 2.6+
+ except ValueError: # Python 2.6+
name1 = unichr(2344) + unichr(123) + unichr(6543)
name2 = unichr(5543) + unichr(1543) + unichr(324)
G.add_edge(name1, 'Radiohead', **{name2: 3})
- G.add_node(name1,bipartite=0)
- G.add_node('Radiohead',bipartite=1)
+ G.add_node(name1, bipartite=0)
+ G.add_node('Radiohead', bipartite=1)
fd, fname = tempfile.mkstemp()
assert_raises(UnicodeEncodeError,
bipartite.write_edgelist,
- G, fname, encoding = 'latin-1')
+ G, fname, encoding='latin-1')
os.close(fd)
os.unlink(fname)
def test_latin1(self):
G = nx.Graph()
- try: # Python 3.x
- blurb = chr(1245) # just to trigger the exception
+ try: # Python 3.x
+ blurb = chr(1245) # just to trigger the exception
name1 = 'Bj' + chr(246) + 'rk'
name2 = chr(220) + 'ber'
- except ValueError: # Python 2.6+
+ except ValueError: # Python 2.6+
name1 = 'Bj' + unichr(246) + 'rk'
name2 = unichr(220) + 'ber'
G.add_edge(name1, 'Radiohead', **{name2: 3})
- G.add_node(name1,bipartite=0)
- G.add_node('Radiohead',bipartite=1)
+ G.add_node(name1, bipartite=0)
+ G.add_node('Radiohead', bipartite=1)
fd, fname = tempfile.mkstemp()
- bipartite.write_edgelist(G, fname, encoding = 'latin-1')
- H = bipartite.read_edgelist(fname, encoding = 'latin-1')
+ bipartite.write_edgelist(G, fname, encoding='latin-1')
+ H = bipartite.read_edgelist(fname, encoding='latin-1')
assert_graphs_equal(G, H)
os.close(fd)
os.unlink(fname)
def test_edgelist_graph(self):
- G=self.G
- (fd,fname)=tempfile.mkstemp()
- bipartite.write_edgelist(G,fname)
- H=bipartite.read_edgelist(fname)
- H2=bipartite.read_edgelist(fname)
- assert_not_equal(H,H2) # they should be different graphs
- G.remove_node('g') # isolated nodes are not written in edgelist
+ G = self.G
+ (fd, fname) = tempfile.mkstemp()
+ bipartite.write_edgelist(G, fname)
+ H = bipartite.read_edgelist(fname)
+ H2 = bipartite.read_edgelist(fname)
+ assert_not_equal(H, H2) # they should be different graphs
+ G.remove_node('g') # isolated nodes are not written in edgelist
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_edgelist_integers(self):
- G=nx.convert_node_labels_to_integers(self.G)
- (fd,fname)=tempfile.mkstemp()
- bipartite.write_edgelist(G,fname)
- H=bipartite.read_edgelist(fname,nodetype=int)
+ G = nx.convert_node_labels_to_integers(self.G)
+ (fd, fname) = tempfile.mkstemp()
+ bipartite.write_edgelist(G, fname)
+ H = bipartite.read_edgelist(fname, nodetype=int)
# isolated nodes are not written in edgelist
G.remove_nodes_from(list(nx.isolates(G)))
assert_nodes_equal(list(H), list(G))
@@ -180,12 +181,12 @@ class TestEdgelist:
os.unlink(fname)
def test_edgelist_multigraph(self):
- G=self.MG
- (fd,fname)=tempfile.mkstemp()
- bipartite.write_edgelist(G,fname)
- H=bipartite.read_edgelist(fname,nodetype=int,create_using=nx.MultiGraph())
- H2=bipartite.read_edgelist(fname,nodetype=int,create_using=nx.MultiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.MG
+ (fd, fname) = tempfile.mkstemp()
+ bipartite.write_edgelist(G, fname)
+ H = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
+ H2 = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
@@ -194,11 +195,10 @@ class TestEdgelist:
@raises(nx.NetworkXNotImplemented)
def test_empty_digraph(self):
bytesIO = io.BytesIO()
- bipartite.write_edgelist(nx.DiGraph(),bytesIO)
+ bipartite.write_edgelist(nx.DiGraph(), bytesIO)
@raises(AttributeError)
def test_raise_attribute(self):
G = nx.path_graph(4)
bytesIO = io.BytesIO()
- bipartite.write_edgelist(G,bytesIO)
-
+ bipartite.write_edgelist(G, bytesIO)
diff --git a/networkx/algorithms/bipartite/tests/test_generators.py b/networkx/algorithms/bipartite/tests/test_generators.py
index 41551c07..4392bb21 100644
--- a/networkx/algorithms/bipartite/tests/test_generators.py
+++ b/networkx/algorithms/bipartite/tests/test_generators.py
@@ -8,75 +8,75 @@ from networkx.algorithms.bipartite.generators import *
----------------------
"""
+
class TestGeneratorsBipartite():
def test_complete_bipartite_graph(self):
- G=complete_bipartite_graph(0,0)
- assert_true(is_isomorphic( G, null_graph() ))
+ G = complete_bipartite_graph(0, 0)
+ assert_true(is_isomorphic(G, null_graph()))
for i in [1, 5]:
- G=complete_bipartite_graph(i,0)
- assert_true(is_isomorphic( G, empty_graph(i) ))
- G=complete_bipartite_graph(0,i)
- assert_true(is_isomorphic( G, empty_graph(i) ))
+ G = complete_bipartite_graph(i, 0)
+ assert_true(is_isomorphic(G, empty_graph(i)))
+ G = complete_bipartite_graph(0, i)
+ assert_true(is_isomorphic(G, empty_graph(i)))
- G=complete_bipartite_graph(2,2)
- assert_true(is_isomorphic( G, cycle_graph(4) ))
+ G = complete_bipartite_graph(2, 2)
+ assert_true(is_isomorphic(G, cycle_graph(4)))
- G=complete_bipartite_graph(1,5)
- assert_true(is_isomorphic( G, star_graph(5) ))
+ G = complete_bipartite_graph(1, 5)
+ assert_true(is_isomorphic(G, star_graph(5)))
- G=complete_bipartite_graph(5,1)
- assert_true(is_isomorphic( G, star_graph(5) ))
+ G = complete_bipartite_graph(5, 1)
+ assert_true(is_isomorphic(G, star_graph(5)))
# complete_bipartite_graph(m1,m2) is a connected graph with
# m1+m2 nodes and m1*m2 edges
for m1, m2 in [(5, 11), (7, 3)]:
- G=complete_bipartite_graph(m1,m2)
+ G = complete_bipartite_graph(m1, m2)
assert_equal(number_of_nodes(G), m1 + m2)
assert_equal(number_of_edges(G), m1 * m2)
assert_raises(networkx.exception.NetworkXError,
complete_bipartite_graph, 7, 3, create_using=DiGraph())
- mG=complete_bipartite_graph(7, 3, create_using=MultiGraph())
+ mG = complete_bipartite_graph(7, 3, create_using=MultiGraph())
assert_equal(sorted(mG.edges()), sorted(G.edges()))
# specify nodes rather than number of nodes
G = complete_bipartite_graph([1, 2], ['a', 'b'])
- has_edges = G.has_edge(1,'a') & G.has_edge(1,'b') &\
- G.has_edge(2,'a') & G.has_edge(2,'b')
+ has_edges = G.has_edge(1, 'a') & G.has_edge(1, 'b') &\
+ G.has_edge(2, 'a') & G.has_edge(2, 'b')
assert_true(has_edges)
assert_equal(G.size(), 4)
-
def test_configuration_model(self):
- aseq=[3,3,3,3]
- bseq=[2,2,2,2,2]
+ aseq = [3, 3, 3, 3]
+ bseq = [2, 2, 2, 2, 2]
assert_raises(networkx.exception.NetworkXError,
configuration_model, aseq, bseq)
-
- aseq=[3,3,3,3]
- bseq=[2,2,2,2,2,2]
- G=configuration_model(aseq,bseq)
- assert_equal(sorted(d for n,d in G.degree()),
+
+ aseq = [3, 3, 3, 3]
+ bseq = [2, 2, 2, 2, 2, 2]
+ G = configuration_model(aseq, bseq)
+ assert_equal(sorted(d for n, d in G.degree()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
- aseq=[2,2,2,2,2,2]
- bseq=[3,3,3,3]
- G=configuration_model(aseq,bseq)
- assert_equal(sorted(d for n,d in G.degree()),
+ aseq = [2, 2, 2, 2, 2, 2]
+ bseq = [3, 3, 3, 3]
+ G = configuration_model(aseq, bseq)
+ assert_equal(sorted(d for n, d in G.degree()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
- aseq=[2,2,2,1,1,1]
- bseq=[3,3,3]
- G=configuration_model(aseq,bseq)
- assert_equal(sorted(d for n,d in G.degree()),
+ aseq = [2, 2, 2, 1, 1, 1]
+ bseq = [3, 3, 3]
+ G = configuration_model(aseq, bseq)
+ assert_equal(sorted(d for n, d in G.degree()),
[1, 1, 1, 2, 2, 2, 3, 3, 3])
- GU=project(Graph(G),range(len(aseq)))
+ GU = project(Graph(G), range(len(aseq)))
assert_equal(GU.number_of_nodes(), 6)
- GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq)))
+ GD = project(Graph(G), range(len(aseq), len(aseq) + len(bseq)))
assert_equal(GD.number_of_nodes(), 3)
assert_raises(networkx.exception.NetworkXError,
@@ -84,132 +84,132 @@ class TestGeneratorsBipartite():
create_using=DiGraph())
def test_havel_hakimi_graph(self):
- aseq=[3,3,3,3]
- bseq=[2,2,2,2,2]
+ aseq = [3, 3, 3, 3]
+ bseq = [2, 2, 2, 2, 2]
assert_raises(networkx.exception.NetworkXError,
havel_hakimi_graph, aseq, bseq)
-
- bseq=[2,2,2,2,2,2]
- G=havel_hakimi_graph(aseq,bseq)
- assert_equal(sorted(d for n,d in G.degree()),
+
+ bseq = [2, 2, 2, 2, 2, 2]
+ G = havel_hakimi_graph(aseq, bseq)
+ assert_equal(sorted(d for n, d in G.degree()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
- aseq=[2,2,2,2,2,2]
- bseq=[3,3,3,3]
- G=havel_hakimi_graph(aseq,bseq)
- assert_equal(sorted(d for n,d in G.degree()),
+ aseq = [2, 2, 2, 2, 2, 2]
+ bseq = [3, 3, 3, 3]
+ G = havel_hakimi_graph(aseq, bseq)
+ assert_equal(sorted(d for n, d in G.degree()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
- GU=project(Graph(G),range(len(aseq)))
+ GU = project(Graph(G), range(len(aseq)))
assert_equal(GU.number_of_nodes(), 6)
- GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq)))
+ GD = project(Graph(G), range(len(aseq), len(aseq) + len(bseq)))
assert_equal(GD.number_of_nodes(), 4)
assert_raises(networkx.exception.NetworkXError,
havel_hakimi_graph, aseq, bseq,
create_using=DiGraph())
-
+
def test_reverse_havel_hakimi_graph(self):
- aseq=[3,3,3,3]
- bseq=[2,2,2,2,2]
+ aseq = [3, 3, 3, 3]
+ bseq = [2, 2, 2, 2, 2]
assert_raises(networkx.exception.NetworkXError,
reverse_havel_hakimi_graph, aseq, bseq)
-
- bseq=[2,2,2,2,2,2]
- G=reverse_havel_hakimi_graph(aseq,bseq)
- assert_equal(sorted(d for n,d in G.degree()),
+
+ bseq = [2, 2, 2, 2, 2, 2]
+ G = reverse_havel_hakimi_graph(aseq, bseq)
+ assert_equal(sorted(d for n, d in G.degree()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
- aseq=[2,2,2,2,2,2]
- bseq=[3,3,3,3]
- G=reverse_havel_hakimi_graph(aseq,bseq)
- assert_equal(sorted(d for n,d in G.degree()),
+ aseq = [2, 2, 2, 2, 2, 2]
+ bseq = [3, 3, 3, 3]
+ G = reverse_havel_hakimi_graph(aseq, bseq)
+ assert_equal(sorted(d for n, d in G.degree()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
- aseq=[2,2,2,1,1,1]
- bseq=[3,3,3]
- G=reverse_havel_hakimi_graph(aseq,bseq)
- assert_equal(sorted(d for n,d in G.degree()),
+ aseq = [2, 2, 2, 1, 1, 1]
+ bseq = [3, 3, 3]
+ G = reverse_havel_hakimi_graph(aseq, bseq)
+ assert_equal(sorted(d for n, d in G.degree()),
[1, 1, 1, 2, 2, 2, 3, 3, 3])
- GU=project(Graph(G),range(len(aseq)))
+ GU = project(Graph(G), range(len(aseq)))
assert_equal(GU.number_of_nodes(), 6)
- GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq)))
+ GD = project(Graph(G), range(len(aseq), len(aseq) + len(bseq)))
assert_equal(GD.number_of_nodes(), 3)
assert_raises(networkx.exception.NetworkXError,
reverse_havel_hakimi_graph, aseq, bseq,
create_using=DiGraph())
-
+
def test_alternating_havel_hakimi_graph(self):
- aseq=[3,3,3,3]
- bseq=[2,2,2,2,2]
+ aseq = [3, 3, 3, 3]
+ bseq = [2, 2, 2, 2, 2]
assert_raises(networkx.exception.NetworkXError,
alternating_havel_hakimi_graph, aseq, bseq)
-
- bseq=[2,2,2,2,2,2]
- G=alternating_havel_hakimi_graph(aseq,bseq)
- assert_equal(sorted(d for n,d in G.degree()),
+
+ bseq = [2, 2, 2, 2, 2, 2]
+ G = alternating_havel_hakimi_graph(aseq, bseq)
+ assert_equal(sorted(d for n, d in G.degree()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
- aseq=[2,2,2,2,2,2]
- bseq=[3,3,3,3]
- G=alternating_havel_hakimi_graph(aseq,bseq)
- assert_equal(sorted(d for n,d in G.degree()),
+ aseq = [2, 2, 2, 2, 2, 2]
+ bseq = [3, 3, 3, 3]
+ G = alternating_havel_hakimi_graph(aseq, bseq)
+ assert_equal(sorted(d for n, d in G.degree()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
- aseq=[2,2,2,1,1,1]
- bseq=[3,3,3]
- G=alternating_havel_hakimi_graph(aseq,bseq)
- assert_equal(sorted(d for n,d in G.degree()),
+ aseq = [2, 2, 2, 1, 1, 1]
+ bseq = [3, 3, 3]
+ G = alternating_havel_hakimi_graph(aseq, bseq)
+ assert_equal(sorted(d for n, d in G.degree()),
[1, 1, 1, 2, 2, 2, 3, 3, 3])
- GU=project(Graph(G),range(len(aseq)))
+ GU = project(Graph(G), range(len(aseq)))
assert_equal(GU.number_of_nodes(), 6)
- GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq)))
+ GD = project(Graph(G), range(len(aseq), len(aseq) + len(bseq)))
assert_equal(GD.number_of_nodes(), 3)
assert_raises(networkx.exception.NetworkXError,
alternating_havel_hakimi_graph, aseq, bseq,
create_using=DiGraph())
-
+
def test_preferential_attachment(self):
- aseq=[3,2,1,1]
- G=preferential_attachment_graph(aseq,0.5)
+ aseq = [3, 2, 1, 1]
+ G = preferential_attachment_graph(aseq, 0.5)
assert_raises(networkx.exception.NetworkXError,
preferential_attachment_graph, aseq, 0.5,
create_using=DiGraph())
def test_random_graph(self):
- n=10
- m=20
- G=random_graph(n,m,0.9)
- assert_equal(len(G),30)
+ n = 10
+ m = 20
+ G = random_graph(n, m, 0.9)
+ assert_equal(len(G), 30)
assert_true(is_bipartite(G))
- X,Y=nx.algorithms.bipartite.sets(G)
- assert_equal(set(range(n)),X)
- assert_equal(set(range(n,n+m)),Y)
+ X, Y = nx.algorithms.bipartite.sets(G)
+ assert_equal(set(range(n)), X)
+ assert_equal(set(range(n, n + m)), Y)
    def test_random_digraph(self):
- n=10
- m=20
- G=random_graph(n,m,0.9,directed=True)
- assert_equal(len(G),30)
+ n = 10
+ m = 20
+ G = random_graph(n, m, 0.9, directed=True)
+ assert_equal(len(G), 30)
assert_true(is_bipartite(G))
- X,Y=nx.algorithms.bipartite.sets(G)
- assert_equal(set(range(n)),X)
- assert_equal(set(range(n,n+m)),Y)
+ X, Y = nx.algorithms.bipartite.sets(G)
+ assert_equal(set(range(n)), X)
+ assert_equal(set(range(n, n + m)), Y)
def test_gnmk_random_graph(self):
n = 10
m = 20
edges = 200
G = gnmk_random_graph(n, m, edges)
- assert_equal(len(G),30)
+ assert_equal(len(G), 30)
assert_true(is_bipartite(G))
- X,Y=nx.algorithms.bipartite.sets(G)
+ X, Y = nx.algorithms.bipartite.sets(G)
print(X)
- assert_equal(set(range(n)),X)
- assert_equal(set(range(n,n+m)),Y)
+ assert_equal(set(range(n)), X)
+ assert_equal(set(range(n, n + m)), Y)
assert_equal(edges, len(list(G.edges())))
diff --git a/networkx/algorithms/bipartite/tests/test_matching.py b/networkx/algorithms/bipartite/tests/test_matching.py
index 22944c2c..bd4653d5 100644
--- a/networkx/algorithms/bipartite/tests/test_matching.py
+++ b/networkx/algorithms/bipartite/tests/test_matching.py
@@ -160,7 +160,7 @@ class TestMatching():
for u, v in tc.edges():
btc.add_edge((0, u), (1, v))
- top_nodes = {n for n in btc if n[0]==0}
+ top_nodes = {n for n in btc if n[0] == 0}
matching = hopcroft_karp_matching(btc, top_nodes)
vertex_cover = to_vertex_cover(btc, matching, top_nodes)
independent_set = set(G) - {v for _, v in vertex_cover}
@@ -194,5 +194,5 @@ def test_eppstein_matching():
G.add_edges_from([('a', 1), ('a', 'b'), (2, 'b'),
(2, 'c'), (3, 'c'), (4, 1)])
matching = eppstein_matching(G)
- assert_true(len(matching)==len(maximum_matching(G)))
+ assert_true(len(matching) == len(maximum_matching(G)))
assert all(x in set(matching.keys()) for x in set(matching.values()))
diff --git a/networkx/algorithms/bipartite/tests/test_matrix.py b/networkx/algorithms/bipartite/tests/test_matrix.py
index 6a1ec296..dbe5d527 100644
--- a/networkx/algorithms/bipartite/tests/test_matrix.py
+++ b/networkx/algorithms/bipartite/tests/test_matrix.py
@@ -5,6 +5,7 @@ import networkx as nx
from networkx.algorithms import bipartite
from networkx.testing.utils import assert_edges_equal
+
class TestBiadjacencyMatrix:
@classmethod
def setupClass(cls):
@@ -13,78 +14,77 @@ class TestBiadjacencyMatrix:
import numpy as np
import scipy as sp
import scipy.sparse as sparse
- np_assert_equal=np.testing.assert_equal
+ np_assert_equal = np.testing.assert_equal
except ImportError:
raise SkipTest('SciPy sparse library not available.')
def test_biadjacency_matrix_weight(self):
- G=nx.path_graph(5)
- G.add_edge(0,1,weight=2,other=4)
- X=[1,3]
- Y=[0,2,4]
- M = bipartite.biadjacency_matrix(G,X,weight='weight')
- assert_equal(M[0,0], 2)
+ G = nx.path_graph(5)
+ G.add_edge(0, 1, weight=2, other=4)
+ X = [1, 3]
+ Y = [0, 2, 4]
+ M = bipartite.biadjacency_matrix(G, X, weight='weight')
+ assert_equal(M[0, 0], 2)
M = bipartite.biadjacency_matrix(G, X, weight='other')
- assert_equal(M[0,0], 4)
+ assert_equal(M[0, 0], 4)
def test_biadjacency_matrix(self):
- tops = [2,5,10]
- bots = [5,10,15]
+ tops = [2, 5, 10]
+ bots = [5, 10, 15]
for i in range(len(tops)):
G = bipartite.random_graph(tops[i], bots[i], 0.2)
- top = [n for n,d in G.nodes(data=True) if d['bipartite']==0]
+ top = [n for n, d in G.nodes(data=True) if d['bipartite'] == 0]
M = bipartite.biadjacency_matrix(G, top)
- assert_equal(M.shape[0],tops[i])
- assert_equal(M.shape[1],bots[i])
+ assert_equal(M.shape[0], tops[i])
+ assert_equal(M.shape[1], bots[i])
def test_biadjacency_matrix_order(self):
- G=nx.path_graph(5)
- G.add_edge(0,1,weight=2)
- X=[3,1]
- Y=[4,2,0]
- M = bipartite.biadjacency_matrix(G,X,Y,weight='weight')
- assert_equal(M[1,2], 2)
+ G = nx.path_graph(5)
+ G.add_edge(0, 1, weight=2)
+ X = [3, 1]
+ Y = [4, 2, 0]
+ M = bipartite.biadjacency_matrix(G, X, Y, weight='weight')
+ assert_equal(M[1, 2], 2)
@raises(nx.NetworkXError)
def test_null_graph(self):
- bipartite.biadjacency_matrix(nx.Graph(),[])
+ bipartite.biadjacency_matrix(nx.Graph(), [])
@raises(nx.NetworkXError)
def test_empty_graph(self):
- bipartite.biadjacency_matrix(nx.Graph([(1,0)]),[])
+ bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [])
@raises(nx.NetworkXError)
def test_duplicate_row(self):
- bipartite.biadjacency_matrix(nx.Graph([(1,0)]),[1,1])
+ bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [1, 1])
@raises(nx.NetworkXError)
def test_duplicate_col(self):
- bipartite.biadjacency_matrix(nx.Graph([(1,0)]),[0],[1,1])
+ bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], [1, 1])
@raises(nx.NetworkXError)
def test_format_keyword(self):
- bipartite.biadjacency_matrix(nx.Graph([(1,0)]),[0],format='foo')
+ bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], format='foo')
def test_from_biadjacency_roundtrip(self):
B1 = nx.path_graph(5)
- M = bipartite.biadjacency_matrix(B1, [0,2,4])
+ M = bipartite.biadjacency_matrix(B1, [0, 2, 4])
B2 = bipartite.from_biadjacency_matrix(M)
- assert_true(nx.is_isomorphic(B1,B2))
+ assert_true(nx.is_isomorphic(B1, B2))
def test_from_biadjacency_weight(self):
- M = sparse.csc_matrix([[1,2],[0,3]])
+ M = sparse.csc_matrix([[1, 2], [0, 3]])
B = bipartite.from_biadjacency_matrix(M)
- assert_edges_equal(B.edges(),[(0,2),(0,3),(1,3)])
+ assert_edges_equal(B.edges(), [(0, 2), (0, 3), (1, 3)])
B = bipartite.from_biadjacency_matrix(M, edge_attribute='weight')
- e = [(0,2,{'weight':1}),(0,3,{'weight':2}),(1,3,{'weight':3})]
- assert_edges_equal(B.edges(data=True),e)
+ e = [(0, 2, {'weight': 1}), (0, 3, {'weight': 2}), (1, 3, {'weight': 3})]
+ assert_edges_equal(B.edges(data=True), e)
def test_from_biadjacency_multigraph(self):
- M = sparse.csc_matrix([[1,2],[0,3]])
+ M = sparse.csc_matrix([[1, 2], [0, 3]])
B = bipartite.from_biadjacency_matrix(M, create_using=nx.MultiGraph())
- assert_edges_equal(B.edges(),[(0,2),(0,3),(0,3),(1,3),(1,3),(1,3)])
-
+ assert_edges_equal(B.edges(), [(0, 2), (0, 3), (0, 3), (1, 3), (1, 3), (1, 3)])
diff --git a/networkx/algorithms/bipartite/tests/test_project.py b/networkx/algorithms/bipartite/tests/test_project.py
index 831cca56..327c576a 100644
--- a/networkx/algorithms/bipartite/tests/test_project.py
+++ b/networkx/algorithms/bipartite/tests/test_project.py
@@ -4,144 +4,144 @@ import networkx as nx
from networkx.algorithms import bipartite
from networkx.testing import assert_edges_equal, assert_nodes_equal
+
class TestBipartiteProject:
def test_path_projected_graph(self):
- G=nx.path_graph(4)
- P=bipartite.projected_graph(G, [1, 3])
+ G = nx.path_graph(4)
+ P = bipartite.projected_graph(G, [1, 3])
assert_nodes_equal(list(P), [1, 3])
assert_edges_equal(list(P.edges()), [(1, 3)])
- P=bipartite.projected_graph(G, [0, 2])
+ P = bipartite.projected_graph(G, [0, 2])
assert_nodes_equal(list(P), [0, 2])
assert_edges_equal(list(P.edges()), [(0, 2)])
def test_path_projected_properties_graph(self):
- G=nx.path_graph(4)
- G.add_node(1,name='one')
- G.add_node(2,name='two')
- P=bipartite.projected_graph(G,[1,3])
- assert_nodes_equal(list(P),[1,3])
- assert_edges_equal(list(P.edges()),[(1,3)])
- assert_equal(P.nodes[1]['name'],G.nodes[1]['name'])
- P=bipartite.projected_graph(G,[0,2])
- assert_nodes_equal(list(P),[0,2])
- assert_edges_equal(list(P.edges()),[(0,2)])
- assert_equal(P.nodes[2]['name'],G.nodes[2]['name'])
+ G = nx.path_graph(4)
+ G.add_node(1, name='one')
+ G.add_node(2, name='two')
+ P = bipartite.projected_graph(G, [1, 3])
+ assert_nodes_equal(list(P), [1, 3])
+ assert_edges_equal(list(P.edges()), [(1, 3)])
+ assert_equal(P.nodes[1]['name'], G.nodes[1]['name'])
+ P = bipartite.projected_graph(G, [0, 2])
+ assert_nodes_equal(list(P), [0, 2])
+ assert_edges_equal(list(P.edges()), [(0, 2)])
+ assert_equal(P.nodes[2]['name'], G.nodes[2]['name'])
def test_path_collaboration_projected_graph(self):
- G=nx.path_graph(4)
- P=bipartite.collaboration_weighted_projected_graph(G,[1,3])
- assert_nodes_equal(list(P),[1,3])
- assert_edges_equal(list(P.edges()),[(1,3)])
- P[1][3]['weight']=1
- P=bipartite.collaboration_weighted_projected_graph(G,[0,2])
- assert_nodes_equal(list(P),[0,2])
- assert_edges_equal(list(P.edges()),[(0,2)])
- P[0][2]['weight']=1
+ G = nx.path_graph(4)
+ P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
+ assert_nodes_equal(list(P), [1, 3])
+ assert_edges_equal(list(P.edges()), [(1, 3)])
+ P[1][3]['weight'] = 1
+ P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
+ assert_nodes_equal(list(P), [0, 2])
+ assert_edges_equal(list(P.edges()), [(0, 2)])
+ P[0][2]['weight'] = 1
def test_directed_path_collaboration_projected_graph(self):
- G=nx.DiGraph()
+ G = nx.DiGraph()
nx.add_path(G, range(4))
- P=bipartite.collaboration_weighted_projected_graph(G,[1,3])
- assert_nodes_equal(list(P),[1,3])
- assert_edges_equal(list(P.edges()),[(1,3)])
- P[1][3]['weight']=1
- P=bipartite.collaboration_weighted_projected_graph(G,[0,2])
- assert_nodes_equal(list(P),[0,2])
- assert_edges_equal(list(P.edges()),[(0,2)])
- P[0][2]['weight']=1
+ P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
+ assert_nodes_equal(list(P), [1, 3])
+ assert_edges_equal(list(P.edges()), [(1, 3)])
+ P[1][3]['weight'] = 1
+ P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
+ assert_nodes_equal(list(P), [0, 2])
+ assert_edges_equal(list(P.edges()), [(0, 2)])
+ P[0][2]['weight'] = 1
def test_path_weighted_projected_graph(self):
- G=nx.path_graph(4)
- P=bipartite.weighted_projected_graph(G,[1,3])
- assert_nodes_equal(list(P),[1,3])
- assert_edges_equal(list(P.edges()),[(1,3)])
- P[1][3]['weight']=1
- P=bipartite.weighted_projected_graph(G,[0,2])
- assert_nodes_equal(list(P),[0,2])
- assert_edges_equal(list(P.edges()),[(0,2)])
- P[0][2]['weight']=1
+ G = nx.path_graph(4)
+ P = bipartite.weighted_projected_graph(G, [1, 3])
+ assert_nodes_equal(list(P), [1, 3])
+ assert_edges_equal(list(P.edges()), [(1, 3)])
+ P[1][3]['weight'] = 1
+ P = bipartite.weighted_projected_graph(G, [0, 2])
+ assert_nodes_equal(list(P), [0, 2])
+ assert_edges_equal(list(P.edges()), [(0, 2)])
+ P[0][2]['weight'] = 1
def test_path_weighted_projected_directed_graph(self):
- G=nx.DiGraph()
+ G = nx.DiGraph()
nx.add_path(G, range(4))
- P=bipartite.weighted_projected_graph(G,[1,3])
- assert_nodes_equal(list(P),[1,3])
- assert_edges_equal(list(P.edges()),[(1,3)])
- P[1][3]['weight']=1
- P=bipartite.weighted_projected_graph(G,[0,2])
- assert_nodes_equal(list(P),[0,2])
- assert_edges_equal(list(P.edges()),[(0,2)])
- P[0][2]['weight']=1
-
+ P = bipartite.weighted_projected_graph(G, [1, 3])
+ assert_nodes_equal(list(P), [1, 3])
+ assert_edges_equal(list(P.edges()), [(1, 3)])
+ P[1][3]['weight'] = 1
+ P = bipartite.weighted_projected_graph(G, [0, 2])
+ assert_nodes_equal(list(P), [0, 2])
+ assert_edges_equal(list(P.edges()), [(0, 2)])
+ P[0][2]['weight'] = 1
def test_star_projected_graph(self):
- G=nx.star_graph(3)
- P=bipartite.projected_graph(G,[1,2,3])
- assert_nodes_equal(list(P),[1,2,3])
- assert_edges_equal(list(P.edges()),[(1,2),(1,3),(2,3)])
- P=bipartite.weighted_projected_graph(G,[1,2,3])
- assert_nodes_equal(list(P),[1,2,3])
- assert_edges_equal(list(P.edges()),[(1,2),(1,3),(2,3)])
-
- P=bipartite.projected_graph(G,[0])
- assert_nodes_equal(list(P),[0])
- assert_edges_equal(list(P.edges()),[])
+ G = nx.star_graph(3)
+ P = bipartite.projected_graph(G, [1, 2, 3])
+ assert_nodes_equal(list(P), [1, 2, 3])
+ assert_edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])
+ P = bipartite.weighted_projected_graph(G, [1, 2, 3])
+ assert_nodes_equal(list(P), [1, 2, 3])
+ assert_edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])
+
+ P = bipartite.projected_graph(G, [0])
+ assert_nodes_equal(list(P), [0])
+ assert_edges_equal(list(P.edges()), [])
def test_project_multigraph(self):
- G=nx.Graph()
- G.add_edge('a',1)
- G.add_edge('b',1)
- G.add_edge('a',2)
- G.add_edge('b',2)
- P=bipartite.projected_graph(G,'ab')
- assert_edges_equal(list(P.edges()),[('a','b')])
- P=bipartite.weighted_projected_graph(G,'ab')
- assert_edges_equal(list(P.edges()),[('a','b')])
- P=bipartite.projected_graph(G,'ab',multigraph=True)
- assert_edges_equal(list(P.edges()),[('a','b'),('a','b')])
+ G = nx.Graph()
+ G.add_edge('a', 1)
+ G.add_edge('b', 1)
+ G.add_edge('a', 2)
+ G.add_edge('b', 2)
+ P = bipartite.projected_graph(G, 'ab')
+ assert_edges_equal(list(P.edges()), [('a', 'b')])
+ P = bipartite.weighted_projected_graph(G, 'ab')
+ assert_edges_equal(list(P.edges()), [('a', 'b')])
+ P = bipartite.projected_graph(G, 'ab', multigraph=True)
+ assert_edges_equal(list(P.edges()), [('a', 'b'), ('a', 'b')])
def test_project_collaboration(self):
- G=nx.Graph()
- G.add_edge('a',1)
- G.add_edge('b',1)
- G.add_edge('b',2)
- G.add_edge('c',2)
- G.add_edge('c',3)
- G.add_edge('c',4)
- G.add_edge('b',4)
- P=bipartite.collaboration_weighted_projected_graph(G,'abc')
- assert_equal(P['a']['b']['weight'],1)
- assert_equal(P['b']['c']['weight'],2)
+ G = nx.Graph()
+ G.add_edge('a', 1)
+ G.add_edge('b', 1)
+ G.add_edge('b', 2)
+ G.add_edge('c', 2)
+ G.add_edge('c', 3)
+ G.add_edge('c', 4)
+ G.add_edge('b', 4)
+ P = bipartite.collaboration_weighted_projected_graph(G, 'abc')
+ assert_equal(P['a']['b']['weight'], 1)
+ assert_equal(P['b']['c']['weight'], 2)
def test_directed_projection(self):
- G=nx.DiGraph()
- G.add_edge('A',1)
- G.add_edge(1,'B')
- G.add_edge('A',2)
- G.add_edge('B',2)
- P=bipartite.projected_graph(G,'AB')
- assert_edges_equal(list(P.edges()),[('A','B')])
- P=bipartite.weighted_projected_graph(G,'AB')
- assert_edges_equal(list(P.edges()),[('A','B')])
- assert_equal(P['A']['B']['weight'],1)
-
- P=bipartite.projected_graph(G,'AB',multigraph=True)
- assert_edges_equal(list(P.edges()),[('A','B')])
-
- G=nx.DiGraph()
- G.add_edge('A',1)
- G.add_edge(1,'B')
- G.add_edge('A',2)
- G.add_edge(2,'B')
- P=bipartite.projected_graph(G,'AB')
- assert_edges_equal(list(P.edges()),[('A','B')])
- P=bipartite.weighted_projected_graph(G,'AB')
- assert_edges_equal(list(P.edges()),[('A','B')])
- assert_equal(P['A']['B']['weight'],2)
-
- P=bipartite.projected_graph(G,'AB',multigraph=True)
- assert_edges_equal(list(P.edges()),[('A','B'),('A','B')])
+ G = nx.DiGraph()
+ G.add_edge('A', 1)
+ G.add_edge(1, 'B')
+ G.add_edge('A', 2)
+ G.add_edge('B', 2)
+ P = bipartite.projected_graph(G, 'AB')
+ assert_edges_equal(list(P.edges()), [('A', 'B')])
+ P = bipartite.weighted_projected_graph(G, 'AB')
+ assert_edges_equal(list(P.edges()), [('A', 'B')])
+ assert_equal(P['A']['B']['weight'], 1)
+
+ P = bipartite.projected_graph(G, 'AB', multigraph=True)
+ assert_edges_equal(list(P.edges()), [('A', 'B')])
+
+ G = nx.DiGraph()
+ G.add_edge('A', 1)
+ G.add_edge(1, 'B')
+ G.add_edge('A', 2)
+ G.add_edge(2, 'B')
+ P = bipartite.projected_graph(G, 'AB')
+ assert_edges_equal(list(P.edges()), [('A', 'B')])
+ P = bipartite.weighted_projected_graph(G, 'AB')
+ assert_edges_equal(list(P.edges()), [('A', 'B')])
+ assert_equal(P['A']['B']['weight'], 2)
+
+ P = bipartite.projected_graph(G, 'AB', multigraph=True)
+ assert_edges_equal(list(P.edges()), [('A', 'B'), ('A', 'B')])
class TestBipartiteWeightedProjection:
@@ -149,176 +149,176 @@ class TestBipartiteWeightedProjection:
def setUp(self):
# Tore Opsahl's example
# http://toreopsahl.com/2009/05/01/projecting-two-mode-networks-onto-weighted-one-mode-networks/
- self.G=nx.Graph()
- self.G.add_edge('A',1)
- self.G.add_edge('A',2)
- self.G.add_edge('B',1)
- self.G.add_edge('B',2)
- self.G.add_edge('B',3)
- self.G.add_edge('B',4)
- self.G.add_edge('B',5)
- self.G.add_edge('C',1)
- self.G.add_edge('D',3)
- self.G.add_edge('E',4)
- self.G.add_edge('E',5)
- self.G.add_edge('E',6)
- self.G.add_edge('F',6)
+ self.G = nx.Graph()
+ self.G.add_edge('A', 1)
+ self.G.add_edge('A', 2)
+ self.G.add_edge('B', 1)
+ self.G.add_edge('B', 2)
+ self.G.add_edge('B', 3)
+ self.G.add_edge('B', 4)
+ self.G.add_edge('B', 5)
+ self.G.add_edge('C', 1)
+ self.G.add_edge('D', 3)
+ self.G.add_edge('E', 4)
+ self.G.add_edge('E', 5)
+ self.G.add_edge('E', 6)
+ self.G.add_edge('F', 6)
# Graph based on figure 6 from Newman (2001)
- self.N=nx.Graph()
- self.N.add_edge('A',1)
- self.N.add_edge('A',2)
- self.N.add_edge('A',3)
- self.N.add_edge('B',1)
- self.N.add_edge('B',2)
- self.N.add_edge('B',3)
- self.N.add_edge('C',1)
- self.N.add_edge('D',1)
- self.N.add_edge('E',3)
+ self.N = nx.Graph()
+ self.N.add_edge('A', 1)
+ self.N.add_edge('A', 2)
+ self.N.add_edge('A', 3)
+ self.N.add_edge('B', 1)
+ self.N.add_edge('B', 2)
+ self.N.add_edge('B', 3)
+ self.N.add_edge('C', 1)
+ self.N.add_edge('D', 1)
+ self.N.add_edge('E', 3)
def test_project_weighted_shared(self):
- edges=[('A','B',2),
- ('A','C',1),
- ('B','C',1),
- ('B','D',1),
- ('B','E',2),
- ('E','F',1)]
- Panswer=nx.Graph()
+ edges = [('A', 'B', 2),
+ ('A', 'C', 1),
+ ('B', 'C', 1),
+ ('B', 'D', 1),
+ ('B', 'E', 2),
+ ('E', 'F', 1)]
+ Panswer = nx.Graph()
Panswer.add_weighted_edges_from(edges)
- P=bipartite.weighted_projected_graph(self.G,'ABCDEF')
- assert_edges_equal(list(P.edges()),Panswer.edges())
- for u,v in list(P.edges()):
- assert_equal(P[u][v]['weight'],Panswer[u][v]['weight'])
-
- edges=[('A','B',3),
- ('A','E',1),
- ('A','C',1),
- ('A','D',1),
- ('B','E',1),
- ('B','C',1),
- ('B','D',1),
- ('C','D',1)]
- Panswer=nx.Graph()
+ P = bipartite.weighted_projected_graph(self.G, 'ABCDEF')
+ assert_edges_equal(list(P.edges()), Panswer.edges())
+ for u, v in list(P.edges()):
+ assert_equal(P[u][v]['weight'], Panswer[u][v]['weight'])
+
+ edges = [('A', 'B', 3),
+ ('A', 'E', 1),
+ ('A', 'C', 1),
+ ('A', 'D', 1),
+ ('B', 'E', 1),
+ ('B', 'C', 1),
+ ('B', 'D', 1),
+ ('C', 'D', 1)]
+ Panswer = nx.Graph()
Panswer.add_weighted_edges_from(edges)
- P=bipartite.weighted_projected_graph(self.N,'ABCDE')
- assert_edges_equal(list(P.edges()),Panswer.edges())
- for u,v in list(P.edges()):
- assert_equal(P[u][v]['weight'],Panswer[u][v]['weight'])
+ P = bipartite.weighted_projected_graph(self.N, 'ABCDE')
+ assert_edges_equal(list(P.edges()), Panswer.edges())
+ for u, v in list(P.edges()):
+ assert_equal(P[u][v]['weight'], Panswer[u][v]['weight'])
def test_project_weighted_newman(self):
- edges=[('A','B',1.5),
- ('A','C',0.5),
- ('B','C',0.5),
- ('B','D',1),
- ('B','E',2),
- ('E','F',1)]
- Panswer=nx.Graph()
+ edges = [('A', 'B', 1.5),
+ ('A', 'C', 0.5),
+ ('B', 'C', 0.5),
+ ('B', 'D', 1),
+ ('B', 'E', 2),
+ ('E', 'F', 1)]
+ Panswer = nx.Graph()
Panswer.add_weighted_edges_from(edges)
- P=bipartite.collaboration_weighted_projected_graph(self.G,'ABCDEF')
- assert_edges_equal(list(P.edges()),Panswer.edges())
- for u,v in list(P.edges()):
- assert_equal(P[u][v]['weight'],Panswer[u][v]['weight'])
-
- edges=[('A','B',11/6.0),
- ('A','E',1/2.0),
- ('A','C',1/3.0),
- ('A','D',1/3.0),
- ('B','E',1/2.0),
- ('B','C',1/3.0),
- ('B','D',1/3.0),
- ('C','D',1/3.0)]
- Panswer=nx.Graph()
+ P = bipartite.collaboration_weighted_projected_graph(self.G, 'ABCDEF')
+ assert_edges_equal(list(P.edges()), Panswer.edges())
+ for u, v in list(P.edges()):
+ assert_equal(P[u][v]['weight'], Panswer[u][v]['weight'])
+
+ edges = [('A', 'B', 11 / 6.0),
+ ('A', 'E', 1 / 2.0),
+ ('A', 'C', 1 / 3.0),
+ ('A', 'D', 1 / 3.0),
+ ('B', 'E', 1 / 2.0),
+ ('B', 'C', 1 / 3.0),
+ ('B', 'D', 1 / 3.0),
+ ('C', 'D', 1 / 3.0)]
+ Panswer = nx.Graph()
Panswer.add_weighted_edges_from(edges)
- P=bipartite.collaboration_weighted_projected_graph(self.N,'ABCDE')
- assert_edges_equal(list(P.edges()),Panswer.edges())
- for u,v in list(P.edges()):
- assert_equal(P[u][v]['weight'],Panswer[u][v]['weight'])
+ P = bipartite.collaboration_weighted_projected_graph(self.N, 'ABCDE')
+ assert_edges_equal(list(P.edges()), Panswer.edges())
+ for u, v in list(P.edges()):
+ assert_equal(P[u][v]['weight'], Panswer[u][v]['weight'])
def test_project_weighted_ratio(self):
- edges=[('A','B',2/6.0),
- ('A','C',1/6.0),
- ('B','C',1/6.0),
- ('B','D',1/6.0),
- ('B','E',2/6.0),
- ('E','F',1/6.0)]
- Panswer=nx.Graph()
+ edges = [('A', 'B', 2 / 6.0),
+ ('A', 'C', 1 / 6.0),
+ ('B', 'C', 1 / 6.0),
+ ('B', 'D', 1 / 6.0),
+ ('B', 'E', 2 / 6.0),
+ ('E', 'F', 1 / 6.0)]
+ Panswer = nx.Graph()
Panswer.add_weighted_edges_from(edges)
- P=bipartite.weighted_projected_graph(self.G, 'ABCDEF', ratio=True)
- assert_edges_equal(list(P.edges()),Panswer.edges())
- for u,v in list(P.edges()):
- assert_equal(P[u][v]['weight'],Panswer[u][v]['weight'])
-
- edges=[('A','B',3/3.0),
- ('A','E',1/3.0),
- ('A','C',1/3.0),
- ('A','D',1/3.0),
- ('B','E',1/3.0),
- ('B','C',1/3.0),
- ('B','D',1/3.0),
- ('C','D',1/3.0)]
- Panswer=nx.Graph()
+ P = bipartite.weighted_projected_graph(self.G, 'ABCDEF', ratio=True)
+ assert_edges_equal(list(P.edges()), Panswer.edges())
+ for u, v in list(P.edges()):
+ assert_equal(P[u][v]['weight'], Panswer[u][v]['weight'])
+
+ edges = [('A', 'B', 3 / 3.0),
+ ('A', 'E', 1 / 3.0),
+ ('A', 'C', 1 / 3.0),
+ ('A', 'D', 1 / 3.0),
+ ('B', 'E', 1 / 3.0),
+ ('B', 'C', 1 / 3.0),
+ ('B', 'D', 1 / 3.0),
+ ('C', 'D', 1 / 3.0)]
+ Panswer = nx.Graph()
Panswer.add_weighted_edges_from(edges)
- P=bipartite.weighted_projected_graph(self.N, 'ABCDE', ratio=True)
- assert_edges_equal(list(P.edges()),Panswer.edges())
- for u,v in list(P.edges()):
- assert_equal(P[u][v]['weight'],Panswer[u][v]['weight'])
+ P = bipartite.weighted_projected_graph(self.N, 'ABCDE', ratio=True)
+ assert_edges_equal(list(P.edges()), Panswer.edges())
+ for u, v in list(P.edges()):
+ assert_equal(P[u][v]['weight'], Panswer[u][v]['weight'])
def test_project_weighted_overlap(self):
- edges=[('A','B',2/2.0),
- ('A','C',1/1.0),
- ('B','C',1/1.0),
- ('B','D',1/1.0),
- ('B','E',2/3.0),
- ('E','F',1/1.0)]
- Panswer=nx.Graph()
+ edges = [('A', 'B', 2 / 2.0),
+ ('A', 'C', 1 / 1.0),
+ ('B', 'C', 1 / 1.0),
+ ('B', 'D', 1 / 1.0),
+ ('B', 'E', 2 / 3.0),
+ ('E', 'F', 1 / 1.0)]
+ Panswer = nx.Graph()
Panswer.add_weighted_edges_from(edges)
- P=bipartite.overlap_weighted_projected_graph(self.G,'ABCDEF', jaccard=False)
- assert_edges_equal(list(P.edges()),Panswer.edges())
- for u,v in list(P.edges()):
- assert_equal(P[u][v]['weight'],Panswer[u][v]['weight'])
-
- edges=[('A','B',3/3.0),
- ('A','E',1/1.0),
- ('A','C',1/1.0),
- ('A','D',1/1.0),
- ('B','E',1/1.0),
- ('B','C',1/1.0),
- ('B','D',1/1.0),
- ('C','D',1/1.0)]
- Panswer=nx.Graph()
+ P = bipartite.overlap_weighted_projected_graph(self.G, 'ABCDEF', jaccard=False)
+ assert_edges_equal(list(P.edges()), Panswer.edges())
+ for u, v in list(P.edges()):
+ assert_equal(P[u][v]['weight'], Panswer[u][v]['weight'])
+
+ edges = [('A', 'B', 3 / 3.0),
+ ('A', 'E', 1 / 1.0),
+ ('A', 'C', 1 / 1.0),
+ ('A', 'D', 1 / 1.0),
+ ('B', 'E', 1 / 1.0),
+ ('B', 'C', 1 / 1.0),
+ ('B', 'D', 1 / 1.0),
+ ('C', 'D', 1 / 1.0)]
+ Panswer = nx.Graph()
Panswer.add_weighted_edges_from(edges)
- P=bipartite.overlap_weighted_projected_graph(self.N,'ABCDE', jaccard=False)
- assert_edges_equal(list(P.edges()),Panswer.edges())
- for u,v in list(P.edges()):
- assert_equal(P[u][v]['weight'],Panswer[u][v]['weight'])
+ P = bipartite.overlap_weighted_projected_graph(self.N, 'ABCDE', jaccard=False)
+ assert_edges_equal(list(P.edges()), Panswer.edges())
+ for u, v in list(P.edges()):
+ assert_equal(P[u][v]['weight'], Panswer[u][v]['weight'])
def test_project_weighted_jaccard(self):
- edges=[('A','B',2/5.0),
- ('A','C',1/2.0),
- ('B','C',1/5.0),
- ('B','D',1/5.0),
- ('B','E',2/6.0),
- ('E','F',1/3.0)]
- Panswer=nx.Graph()
+ edges = [('A', 'B', 2 / 5.0),
+ ('A', 'C', 1 / 2.0),
+ ('B', 'C', 1 / 5.0),
+ ('B', 'D', 1 / 5.0),
+ ('B', 'E', 2 / 6.0),
+ ('E', 'F', 1 / 3.0)]
+ Panswer = nx.Graph()
Panswer.add_weighted_edges_from(edges)
- P=bipartite.overlap_weighted_projected_graph(self.G,'ABCDEF')
- assert_edges_equal(list(P.edges()),Panswer.edges())
- for u,v in list(P.edges()):
- assert_equal(P[u][v]['weight'],Panswer[u][v]['weight'])
-
- edges=[('A','B',3/3.0),
- ('A','E',1/3.0),
- ('A','C',1/3.0),
- ('A','D',1/3.0),
- ('B','E',1/3.0),
- ('B','C',1/3.0),
- ('B','D',1/3.0),
- ('C','D',1/1.0)]
- Panswer=nx.Graph()
+ P = bipartite.overlap_weighted_projected_graph(self.G, 'ABCDEF')
+ assert_edges_equal(list(P.edges()), Panswer.edges())
+ for u, v in list(P.edges()):
+ assert_equal(P[u][v]['weight'], Panswer[u][v]['weight'])
+
+ edges = [('A', 'B', 3 / 3.0),
+ ('A', 'E', 1 / 3.0),
+ ('A', 'C', 1 / 3.0),
+ ('A', 'D', 1 / 3.0),
+ ('B', 'E', 1 / 3.0),
+ ('B', 'C', 1 / 3.0),
+ ('B', 'D', 1 / 3.0),
+ ('C', 'D', 1 / 1.0)]
+ Panswer = nx.Graph()
Panswer.add_weighted_edges_from(edges)
- P=bipartite.overlap_weighted_projected_graph(self.N,'ABCDE')
- assert_edges_equal(list(P.edges()),Panswer.edges())
- for u,v in P.edges():
- assert_equal(P[u][v]['weight'],Panswer[u][v]['weight'])
+ P = bipartite.overlap_weighted_projected_graph(self.N, 'ABCDE')
+ assert_edges_equal(list(P.edges()), Panswer.edges())
+ for u, v in P.edges():
+ assert_equal(P[u][v]['weight'], Panswer[u][v]['weight'])
def test_generic_weighted_projected_graph_simple(self):
def shared(G, u, v):
@@ -327,37 +327,38 @@ class TestBipartiteWeightedProjection:
G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4], weight_function=shared)
assert_nodes_equal(list(G), [0, 2, 4])
assert_edges_equal(list(list(G.edges(data=True))),
- [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})] )
+ [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})])
G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
assert_nodes_equal(list(G), [0, 2, 4])
assert_edges_equal(list(list(G.edges(data=True))),
- [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})] )
+ [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})])
B = nx.DiGraph()
nx.add_path(B, range(5))
G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
assert_nodes_equal(list(G), [0, 2, 4])
assert_edges_equal(list(G.edges(data=True)),
- [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})] )
+ [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})])
def test_generic_weighted_projected_graph_custom(self):
def jaccard(G, u, v):
unbrs = set(G[u])
vnbrs = set(G[v])
return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)
+
def my_weight(G, u, v, weight='weight'):
w = 0
for nbr in set(G[u]) & set(G[v]):
w += G.edges[u, nbr].get(weight, 1) + G.edges[v, nbr].get(weight, 1)
return w
B = nx.bipartite.complete_bipartite_graph(2, 2)
- for i,(u,v) in enumerate(B.edges()):
+ for i, (u, v) in enumerate(B.edges()):
B.edges[u, v]['weight'] = i + 1
G = bipartite.generic_weighted_projected_graph(B, [0, 1],
- weight_function=jaccard)
+ weight_function=jaccard)
assert_edges_equal(list(G.edges(data=True)), [(0, 1, {'weight': 1.0})])
G = bipartite.generic_weighted_projected_graph(B, [0, 1],
- weight_function=my_weight)
+ weight_function=my_weight)
assert_edges_equal(list(G.edges(data=True)), [(0, 1, {'weight': 10})])
G = bipartite.generic_weighted_projected_graph(B, [0, 1])
assert_edges_equal(list(G.edges(data=True)), [(0, 1, {'weight': 2})])
diff --git a/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py b/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py
index 456a1a9c..fa4203bc 100644
--- a/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py
+++ b/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py
@@ -8,6 +8,7 @@ from networkx.algorithms.bipartite import spectral_bipartivity as sb
# E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
# bipartivity in complex networks", PhysRev E 72, 046105 (2005)
+
class TestSpectralBipartivity(object):
@classmethod
def setupClass(cls):
@@ -17,77 +18,73 @@ class TestSpectralBipartivity(object):
try:
import scipy.linalg
except ImportError:
- raise SkipTest('SciPy not available.')
-
+ raise SkipTest('SciPy not available.')
def test_star_like(self):
# star-like
- G=nx.star_graph(2)
- G.add_edge(1,2)
- assert_almost_equal(sb(G),0.843,places=3)
+ G = nx.star_graph(2)
+ G.add_edge(1, 2)
+ assert_almost_equal(sb(G), 0.843, places=3)
- G=nx.star_graph(3)
- G.add_edge(1,2)
- assert_almost_equal(sb(G),0.871,places=3)
-
- G=nx.star_graph(4)
- G.add_edge(1,2)
- assert_almost_equal(sb(G),0.890,places=3)
+ G = nx.star_graph(3)
+ G.add_edge(1, 2)
+ assert_almost_equal(sb(G), 0.871, places=3)
+ G = nx.star_graph(4)
+ G.add_edge(1, 2)
+ assert_almost_equal(sb(G), 0.890, places=3)
    def test_k23_like(self):
# K2,3-like
- G=nx.complete_bipartite_graph(2,3)
- G.add_edge(0,1)
- assert_almost_equal(sb(G),0.769,places=3)
-
- G=nx.complete_bipartite_graph(2,3)
- G.add_edge(2,4)
- assert_almost_equal(sb(G),0.829,places=3)
-
- G=nx.complete_bipartite_graph(2,3)
- G.add_edge(2,4)
- G.add_edge(3,4)
- assert_almost_equal(sb(G),0.731,places=3)
-
-
- G=nx.complete_bipartite_graph(2,3)
- G.add_edge(0,1)
- G.add_edge(2,4)
- assert_almost_equal(sb(G),0.692,places=3)
-
- G=nx.complete_bipartite_graph(2,3)
- G.add_edge(2,4)
- G.add_edge(3,4)
- G.add_edge(0,1)
- assert_almost_equal(sb(G),0.645,places=3)
-
- G=nx.complete_bipartite_graph(2,3)
- G.add_edge(2,4)
- G.add_edge(3,4)
- G.add_edge(2,3)
- assert_almost_equal(sb(G),0.645,places=3)
-
- G=nx.complete_bipartite_graph(2,3)
- G.add_edge(2,4)
- G.add_edge(3,4)
- G.add_edge(2,3)
- G.add_edge(0,1)
- assert_almost_equal(sb(G),0.597,places=3)
+ G = nx.complete_bipartite_graph(2, 3)
+ G.add_edge(0, 1)
+ assert_almost_equal(sb(G), 0.769, places=3)
+
+ G = nx.complete_bipartite_graph(2, 3)
+ G.add_edge(2, 4)
+ assert_almost_equal(sb(G), 0.829, places=3)
+
+ G = nx.complete_bipartite_graph(2, 3)
+ G.add_edge(2, 4)
+ G.add_edge(3, 4)
+ assert_almost_equal(sb(G), 0.731, places=3)
+
+ G = nx.complete_bipartite_graph(2, 3)
+ G.add_edge(0, 1)
+ G.add_edge(2, 4)
+ assert_almost_equal(sb(G), 0.692, places=3)
+
+ G = nx.complete_bipartite_graph(2, 3)
+ G.add_edge(2, 4)
+ G.add_edge(3, 4)
+ G.add_edge(0, 1)
+ assert_almost_equal(sb(G), 0.645, places=3)
+
+ G = nx.complete_bipartite_graph(2, 3)
+ G.add_edge(2, 4)
+ G.add_edge(3, 4)
+ G.add_edge(2, 3)
+ assert_almost_equal(sb(G), 0.645, places=3)
+
+ G = nx.complete_bipartite_graph(2, 3)
+ G.add_edge(2, 4)
+ G.add_edge(3, 4)
+ G.add_edge(2, 3)
+ G.add_edge(0, 1)
+ assert_almost_equal(sb(G), 0.597, places=3)
def test_single_nodes(self):
# single nodes
- G=nx.complete_bipartite_graph(2, 3)
- G.add_edge(2,4)
- sbn=sb(G,nodes=[1,2])
- assert_almost_equal(sbn[1],0.85,places=2)
- assert_almost_equal(sbn[2],0.77,places=2)
-
- G=nx.complete_bipartite_graph(2,3)
- G.add_edge(0,1)
- sbn=sb(G,nodes=[1,2])
- assert_almost_equal(sbn[1],0.73,places=2)
- assert_almost_equal(sbn[2],0.82,places=2)
-
+ G = nx.complete_bipartite_graph(2, 3)
+ G.add_edge(2, 4)
+ sbn = sb(G, nodes=[1, 2])
+ assert_almost_equal(sbn[1], 0.85, places=2)
+ assert_almost_equal(sbn[2], 0.77, places=2)
+
+ G = nx.complete_bipartite_graph(2, 3)
+ G.add_edge(0, 1)
+ sbn = sb(G, nodes=[1, 2])
+ assert_almost_equal(sbn[1], 0.73, places=2)
+ assert_almost_equal(sbn[2], 0.82, places=2)
diff --git a/networkx/algorithms/centrality/betweenness_subset.py b/networkx/algorithms/centrality/betweenness_subset.py
index b5ad5ba8..c85e2dfa 100644
--- a/networkx/algorithms/centrality/betweenness_subset.py
+++ b/networkx/algorithms/centrality/betweenness_subset.py
@@ -233,7 +233,7 @@ def _rescale(betweenness, n, normalized, directed=False):
if n <= 2:
scale = None # no normalization b=0 for all nodes
else:
- scale = 1.0 / ((n-1) * (n-2))
+ scale = 1.0 / ((n - 1) * (n - 2))
else: # rescale by 2 for undirected graphs
if not directed:
scale = 0.5
@@ -251,7 +251,7 @@ def _rescale_e(betweenness, n, normalized, directed=False):
if n <= 1:
scale = None # no normalization b=0 for all nodes
else:
- scale = 1.0 / (n*(n-1))
+ scale = 1.0 / (n * (n - 1))
else: # rescale by 2 for undirected graphs
if not directed:
scale = 0.5
diff --git a/networkx/algorithms/centrality/degree_alg.py b/networkx/algorithms/centrality/degree_alg.py
index 7cb5be07..47d52ba5 100644
--- a/networkx/algorithms/centrality/degree_alg.py
+++ b/networkx/algorithms/centrality/degree_alg.py
@@ -47,8 +47,8 @@ def degree_centrality(G):
are possible.
"""
centrality = {}
- s = 1.0/(len(G)-1.0)
- centrality = {n: d*s for n, d in G.degree()}
+ s = 1.0 / (len(G) - 1.0)
+ centrality = {n: d * s for n, d in G.degree()}
return centrality
@@ -88,8 +88,8 @@ def in_degree_centrality(G):
are possible.
"""
centrality = {}
- s = 1.0/(len(G)-1.0)
- centrality = {n: d*s for n, d in G.in_degree()}
+ s = 1.0 / (len(G) - 1.0)
+ centrality = {n: d * s for n, d in G.in_degree()}
return centrality
@@ -129,6 +129,6 @@ def out_degree_centrality(G):
are possible.
"""
centrality = {}
- s = 1.0/(len(G)-1.0)
- centrality = {n: d*s for n, d in G.out_degree()}
+ s = 1.0 / (len(G) - 1.0)
+ centrality = {n: d * s for n, d in G.out_degree()}
return centrality
diff --git a/networkx/algorithms/centrality/flow_matrix.py b/networkx/algorithms/centrality/flow_matrix.py
index 9c81adca..8e24e38f 100644
--- a/networkx/algorithms/centrality/flow_matrix.py
+++ b/networkx/algorithms/centrality/flow_matrix.py
@@ -20,9 +20,9 @@ def flow_matrix_row(G, weight=None, dtype=float, solver='lu'):
for u, v in sorted(sorted((u, v)) for u, v in G.edges()):
B = np.zeros(w, dtype=dtype)
c = G[u][v].get(weight, 1.0)
- B[u%w] = c
- B[v%w] = -c
- # get only the rows needed in the inverse laplacian
+ B[u % w] = c
+ B[v % w] = -c
+ # get only the rows needed in the inverse laplacian
# and multiply to get the flow matrix row
row = np.dot(B, C.get_rows(u, v))
yield row, (u, v)
@@ -56,7 +56,7 @@ class InverseLaplacian(object):
raise("Implement solver")
def get_rows(self, r1, r2):
- for r in range(r1, r2+1):
+ for r in range(r1, r2 + 1):
self.C[r % self.w, 1:] = self.solve_inverse(r)
return self.C
@@ -70,8 +70,8 @@ class InverseLaplacian(object):
w = 0
x, y = np.nonzero(row)
if len(y) > 0:
- v = y-i
- w = v.max()-v.min()+1
+ v = y - i
+ w = v.max() - v.min() + 1
m = max(w, m)
return m
@@ -111,7 +111,7 @@ class CGInverseLaplacian(InverseLaplacian):
global linalg
from scipy.sparse import linalg
ilu = linalg.spilu(self.L1.tocsc())
- n = self.n-1
+ n = self.n - 1
self.M = linalg.LinearOperator(shape=(n, n), matvec=ilu.solve)
def solve(self, rhs):
diff --git a/networkx/algorithms/centrality/harmonic.py b/networkx/algorithms/centrality/harmonic.py
index 571dca4a..da98aa73 100644
--- a/networkx/algorithms/centrality/harmonic.py
+++ b/networkx/algorithms/centrality/harmonic.py
@@ -32,7 +32,7 @@ def harmonic_centrality(G, nbunch=None, distance=None):
----------
G : graph
A NetworkX graph
-
+
nbunch : container
Container of nodes. If provided harmonic centrality will be computed
only over the nodes in nbunch.
diff --git a/networkx/algorithms/centrality/katz.py b/networkx/algorithms/centrality/katz.py
index fd309e8b..1cc58c7c 100644
--- a/networkx/algorithms/centrality/katz.py
+++ b/networkx/algorithms/centrality/katz.py
@@ -175,15 +175,15 @@ def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6,
for nbr in G[n]:
x[nbr] += xlast[n] * G[n][nbr].get(weight, 1)
for n in x:
- x[n] = alpha*x[n] + b[n]
+ x[n] = alpha * x[n] + b[n]
# check convergence
- err = sum([abs(x[n]-xlast[n]) for n in x])
- if err < nnodes*tol:
+ err = sum([abs(x[n] - xlast[n]) for n in x])
+ if err < nnodes * tol:
if normalized:
# normalize vector
try:
- s = 1.0/sqrt(sum(v**2 for v in x.values()))
+ s = 1.0 / sqrt(sum(v**2 for v in x.values()))
# this should never be zero?
except ZeroDivisionError:
s = 1.0
@@ -327,12 +327,12 @@ def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True,
A = nx.adj_matrix(G, nodelist=nodelist, weight=weight).todense().T
n = A.shape[0]
- centrality = np.linalg.solve(np.eye(n, n) - (alpha*A), b)
+ centrality = np.linalg.solve(np.eye(n, n) - (alpha * A), b)
if normalized:
norm = np.sign(sum(centrality)) * np.linalg.norm(centrality)
else:
norm = 1.0
- centrality = dict(zip(nodelist, map(float, centrality/norm)))
+ centrality = dict(zip(nodelist, map(float, centrality / norm)))
return centrality
diff --git a/networkx/algorithms/centrality/load.py b/networkx/algorithms/centrality/load.py
index 4a4f277b..191d2447 100644
--- a/networkx/algorithms/centrality/load.py
+++ b/networkx/algorithms/centrality/load.py
@@ -76,7 +76,7 @@ def newman_betweenness_centrality(G, v=None, cutoff=None,
order = G.order()
if order <= 2:
return betweenness # no normalization b=0 for all nodes
- betweenness *= 1.0 / ((order-1) * (order-2))
+ betweenness *= 1.0 / ((order - 1) * (order - 2))
return betweenness
else:
betweenness = {}.fromkeys(G, 0.0)
@@ -88,7 +88,7 @@ def newman_betweenness_centrality(G, v=None, cutoff=None,
order = G.order()
if order <= 2:
return betweenness # no normalization b=0 for all nodes
- scale = 1.0 / ((order-1) * (order-2))
+ scale = 1.0 / ((order - 1) * (order - 2))
for v in betweenness:
betweenness[v] *= scale
return betweenness # all nodes
diff --git a/networkx/algorithms/centrality/reaching.py b/networkx/algorithms/centrality/reaching.py
index c8c7d1db..ebbb4057 100644
--- a/networkx/algorithms/centrality/reaching.py
+++ b/networkx/algorithms/centrality/reaching.py
@@ -109,7 +109,7 @@ def global_reaching_centrality(G, weight=None, normalized=True):
# If weight is None, we leave it as-is so that the shortest path
# algorithm can use a faster, unweighted algorithm.
if weight is not None:
- as_distance = lambda u, v, d: total_weight / d.get(weight, 1)
+ def as_distance(u, v, d): return total_weight / d.get(weight, 1)
shortest_paths = nx.shortest_path(G, weight=as_distance)
else:
shortest_paths = nx.shortest_path(G)
@@ -191,7 +191,7 @@ def local_reaching_centrality(G, v, paths=None, weight=None, normalized=True):
raise nx.NetworkXError('Size of G must be positive')
if weight is not None:
# Interpret weights as lengths.
- as_distance = lambda u, v, d: total_weight / d.get(weight, 1)
+ def as_distance(u, v, d): return total_weight / d.get(weight, 1)
paths = nx.shortest_path(G, source=v, weight=as_distance)
else:
paths = nx.shortest_path(G, source=v)
diff --git a/networkx/algorithms/centrality/tests/test_betweenness_centrality.py b/networkx/algorithms/centrality/tests/test_betweenness_centrality.py
index 2435da7c..b2d35cdd 100644
--- a/networkx/algorithms/centrality/tests/test_betweenness_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_betweenness_centrality.py
@@ -2,461 +2,447 @@
from nose.tools import *
import networkx as nx
+
def weighted_G():
- G=nx.Graph()
- G.add_edge(0,1,weight=3)
- G.add_edge(0,2,weight=2)
- G.add_edge(0,3,weight=6)
- G.add_edge(0,4,weight=4)
- G.add_edge(1,3,weight=5)
- G.add_edge(1,5,weight=5)
- G.add_edge(2,4,weight=1)
- G.add_edge(3,4,weight=2)
- G.add_edge(3,5,weight=1)
- G.add_edge(4,5,weight=4)
+ G = nx.Graph()
+ G.add_edge(0, 1, weight=3)
+ G.add_edge(0, 2, weight=2)
+ G.add_edge(0, 3, weight=6)
+ G.add_edge(0, 4, weight=4)
+ G.add_edge(1, 3, weight=5)
+ G.add_edge(1, 5, weight=5)
+ G.add_edge(2, 4, weight=1)
+ G.add_edge(3, 4, weight=2)
+ G.add_edge(3, 5, weight=1)
+ G.add_edge(4, 5, weight=4)
return G
class TestBetweennessCentrality(object):
-
+
def test_K5(self):
"""Betweenness centrality: K5"""
- G=nx.complete_graph(5)
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=False)
- b_answer={0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0}
+ G = nx.complete_graph(5)
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=False)
+ b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0}
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_K5_endpoints(self):
"""Betweenness centrality: K5 endpoints"""
- G=nx.complete_graph(5)
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=False,
- endpoints=True)
- b_answer={0: 4.0, 1: 4.0, 2: 4.0, 3: 4.0, 4: 4.0}
+ G = nx.complete_graph(5)
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=False,
+ endpoints=True)
+ b_answer = {0: 4.0, 1: 4.0, 2: 4.0, 3: 4.0, 4: 4.0}
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
def test_P3_normalized(self):
"""Betweenness centrality: P3 normalized"""
- G=nx.path_graph(3)
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=True)
- b_answer={0: 0.0, 1: 1.0, 2: 0.0}
+ G = nx.path_graph(3)
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=True)
+ b_answer = {0: 0.0, 1: 1.0, 2: 0.0}
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
def test_P3(self):
"""Betweenness centrality: P3"""
- G=nx.path_graph(3)
- b_answer={0: 0.0, 1: 1.0, 2: 0.0}
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=False)
+ G = nx.path_graph(3)
+ b_answer = {0: 0.0, 1: 1.0, 2: 0.0}
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=False)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_P3_endpoints(self):
"""Betweenness centrality: P3 endpoints"""
- G=nx.path_graph(3)
- b_answer={0: 2.0, 1: 3.0, 2: 2.0}
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=False,
- endpoints=True)
+ G = nx.path_graph(3)
+ b_answer = {0: 2.0, 1: 3.0, 2: 2.0}
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=False,
+ endpoints=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
def test_krackhardt_kite_graph(self):
"""Betweenness centrality: Krackhardt kite graph"""
- G=nx.krackhardt_kite_graph()
- b_answer={0: 1.667,1: 1.667,2: 0.000,3: 7.333,4: 0.000,
- 5: 16.667,6: 16.667,7: 28.000,8: 16.000,9: 0.000}
+ G = nx.krackhardt_kite_graph()
+ b_answer = {0: 1.667, 1: 1.667, 2: 0.000, 3: 7.333, 4: 0.000,
+ 5: 16.667, 6: 16.667, 7: 28.000, 8: 16.000, 9: 0.000}
for b in b_answer:
- b_answer[b]/=2.0
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=False)
+ b_answer[b] /= 2.0
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=False)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=3)
-
+ assert_almost_equal(b[n], b_answer[n], places=3)
def test_krackhardt_kite_graph_normalized(self):
"""Betweenness centrality: Krackhardt kite graph normalized"""
- G=nx.krackhardt_kite_graph()
- b_answer={0:0.023,1:0.023,2:0.000,3:0.102,4:0.000,
- 5:0.231,6:0.231,7:0.389,8:0.222,9:0.000}
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=True)
+ G = nx.krackhardt_kite_graph()
+ b_answer = {0: 0.023, 1: 0.023, 2: 0.000, 3: 0.102, 4: 0.000,
+ 5: 0.231, 6: 0.231, 7: 0.389, 8: 0.222, 9: 0.000}
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=3)
-
+ assert_almost_equal(b[n], b_answer[n], places=3)
def test_florentine_families_graph(self):
"""Betweenness centrality: Florentine families graph"""
- G=nx.florentine_families_graph()
- b_answer=\
- {'Acciaiuoli': 0.000,
- 'Albizzi': 0.212,
- 'Barbadori': 0.093,
- 'Bischeri': 0.104,
- 'Castellani': 0.055,
- 'Ginori': 0.000,
- 'Guadagni': 0.255,
- 'Lamberteschi': 0.000,
- 'Medici': 0.522,
- 'Pazzi': 0.000,
- 'Peruzzi': 0.022,
- 'Ridolfi': 0.114,
- 'Salviati': 0.143,
- 'Strozzi': 0.103,
- 'Tornabuoni': 0.092}
-
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=True)
+ G = nx.florentine_families_graph()
+ b_answer =\
+ {'Acciaiuoli': 0.000,
+ 'Albizzi': 0.212,
+ 'Barbadori': 0.093,
+ 'Bischeri': 0.104,
+ 'Castellani': 0.055,
+ 'Ginori': 0.000,
+ 'Guadagni': 0.255,
+ 'Lamberteschi': 0.000,
+ 'Medici': 0.522,
+ 'Pazzi': 0.000,
+ 'Peruzzi': 0.022,
+ 'Ridolfi': 0.114,
+ 'Salviati': 0.143,
+ 'Strozzi': 0.103,
+ 'Tornabuoni': 0.092}
+
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=3)
-
+ assert_almost_equal(b[n], b_answer[n], places=3)
def test_ladder_graph(self):
"""Betweenness centrality: Ladder graph"""
- G = nx.Graph() # ladder_graph(3)
- G.add_edges_from([(0,1), (0,2), (1,3), (2,3),
- (2,4), (4,5), (3,5)])
- b_answer={0:1.667,1: 1.667,2: 6.667,
- 3: 6.667,4: 1.667,5: 1.667}
+ G = nx.Graph() # ladder_graph(3)
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3),
+ (2, 4), (4, 5), (3, 5)])
+ b_answer = {0: 1.667, 1: 1.667, 2: 6.667,
+ 3: 6.667, 4: 1.667, 5: 1.667}
for b in b_answer:
- b_answer[b]/=2.0
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=False)
+ b_answer[b] /= 2.0
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=False)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=3)
+ assert_almost_equal(b[n], b_answer[n], places=3)
def test_disconnected_path(self):
"""Betweenness centrality: disconnected path"""
- G=nx.Graph()
+ G = nx.Graph()
nx.add_path(G, [0, 1, 2])
nx.add_path(G, [3, 4, 5, 6])
- b_answer={0:0,1:1,2:0,3:0,4:2,5:2,6:0}
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=False)
+ b_answer = {0: 0, 1: 1, 2: 0, 3: 0, 4: 2, 5: 2, 6: 0}
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=False)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_disconnected_path_endpoints(self):
"""Betweenness centrality: disconnected path endpoints"""
- G=nx.Graph()
+ G = nx.Graph()
nx.add_path(G, [0, 1, 2])
nx.add_path(G, [3, 4, 5, 6])
- b_answer={0:2,1:3,2:2,3:3,4:5,5:5,6:3}
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=False,
- endpoints=True)
+ b_answer = {0: 2, 1: 3, 2: 2, 3: 3, 4: 5, 5: 5, 6: 3}
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=False,
+ endpoints=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
def test_directed_path(self):
"""Betweenness centrality: directed path"""
- G=nx.DiGraph()
+ G = nx.DiGraph()
nx.add_path(G, [0, 1, 2])
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=False)
- b_answer={0: 0.0, 1: 1.0, 2: 0.0}
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=False)
+ b_answer = {0: 0.0, 1: 1.0, 2: 0.0}
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_directed_path_normalized(self):
"""Betweenness centrality: directed path normalized"""
- G=nx.DiGraph()
+ G = nx.DiGraph()
nx.add_path(G, [0, 1, 2])
- b=nx.betweenness_centrality(G,
- weight=None,
- normalized=True)
- b_answer={0: 0.0, 1: 0.5, 2: 0.0}
+ b = nx.betweenness_centrality(G,
+ weight=None,
+ normalized=True)
+ b_answer = {0: 0.0, 1: 0.5, 2: 0.0}
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
class TestWeightedBetweennessCentrality(object):
-
+
def test_K5(self):
"""Weighted betweenness centrality: K5"""
- G=nx.complete_graph(5)
- b=nx.betweenness_centrality(G,
- weight='weight',
- normalized=False)
- b_answer={0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0}
+ G = nx.complete_graph(5)
+ b = nx.betweenness_centrality(G,
+ weight='weight',
+ normalized=False)
+ b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0}
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_P3_normalized(self):
"""Weighted betweenness centrality: P3 normalized"""
- G=nx.path_graph(3)
- b=nx.betweenness_centrality(G,
- weight='weight',
- normalized=True)
- b_answer={0: 0.0, 1: 1.0, 2: 0.0}
+ G = nx.path_graph(3)
+ b = nx.betweenness_centrality(G,
+ weight='weight',
+ normalized=True)
+ b_answer = {0: 0.0, 1: 1.0, 2: 0.0}
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
def test_P3(self):
"""Weighted betweenness centrality: P3"""
- G=nx.path_graph(3)
- b_answer={0: 0.0, 1: 1.0, 2: 0.0}
- b=nx.betweenness_centrality(G,
- weight='weight',
- normalized=False)
+ G = nx.path_graph(3)
+ b_answer = {0: 0.0, 1: 1.0, 2: 0.0}
+ b = nx.betweenness_centrality(G,
+ weight='weight',
+ normalized=False)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_krackhardt_kite_graph(self):
"""Weighted betweenness centrality: Krackhardt kite graph"""
- G=nx.krackhardt_kite_graph()
- b_answer={0: 1.667,1: 1.667,2: 0.000,3: 7.333,4: 0.000,
- 5: 16.667,6: 16.667,7: 28.000,8: 16.000,9: 0.000}
+ G = nx.krackhardt_kite_graph()
+ b_answer = {0: 1.667, 1: 1.667, 2: 0.000, 3: 7.333, 4: 0.000,
+ 5: 16.667, 6: 16.667, 7: 28.000, 8: 16.000, 9: 0.000}
for b in b_answer:
- b_answer[b]/=2.0
+ b_answer[b] /= 2.0
- b=nx.betweenness_centrality(G,
- weight='weight',
- normalized=False)
+ b = nx.betweenness_centrality(G,
+ weight='weight',
+ normalized=False)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=3)
-
+ assert_almost_equal(b[n], b_answer[n], places=3)
def test_krackhardt_kite_graph_normalized(self):
"""Weighted betweenness centrality:
Krackhardt kite graph normalized
"""
- G=nx.krackhardt_kite_graph()
- b_answer={0:0.023,1:0.023,2:0.000,3:0.102,4:0.000,
- 5:0.231,6:0.231,7:0.389,8:0.222,9:0.000}
- b=nx.betweenness_centrality(G,
- weight='weight',
- normalized=True)
+ G = nx.krackhardt_kite_graph()
+ b_answer = {0: 0.023, 1: 0.023, 2: 0.000, 3: 0.102, 4: 0.000,
+ 5: 0.231, 6: 0.231, 7: 0.389, 8: 0.222, 9: 0.000}
+ b = nx.betweenness_centrality(G,
+ weight='weight',
+ normalized=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=3)
-
+ assert_almost_equal(b[n], b_answer[n], places=3)
def test_florentine_families_graph(self):
"""Weighted betweenness centrality:
Florentine families graph"""
- G=nx.florentine_families_graph()
- b_answer=\
- {'Acciaiuoli': 0.000,
- 'Albizzi': 0.212,
- 'Barbadori': 0.093,
- 'Bischeri': 0.104,
- 'Castellani': 0.055,
- 'Ginori': 0.000,
- 'Guadagni': 0.255,
- 'Lamberteschi': 0.000,
- 'Medici': 0.522,
- 'Pazzi': 0.000,
- 'Peruzzi': 0.022,
- 'Ridolfi': 0.114,
- 'Salviati': 0.143,
- 'Strozzi': 0.103,
- 'Tornabuoni': 0.092}
-
- b=nx.betweenness_centrality(G,
- weight='weight',
- normalized=True)
+ G = nx.florentine_families_graph()
+ b_answer =\
+ {'Acciaiuoli': 0.000,
+ 'Albizzi': 0.212,
+ 'Barbadori': 0.093,
+ 'Bischeri': 0.104,
+ 'Castellani': 0.055,
+ 'Ginori': 0.000,
+ 'Guadagni': 0.255,
+ 'Lamberteschi': 0.000,
+ 'Medici': 0.522,
+ 'Pazzi': 0.000,
+ 'Peruzzi': 0.022,
+ 'Ridolfi': 0.114,
+ 'Salviati': 0.143,
+ 'Strozzi': 0.103,
+ 'Tornabuoni': 0.092}
+
+ b = nx.betweenness_centrality(G,
+ weight='weight',
+ normalized=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=3)
-
+ assert_almost_equal(b[n], b_answer[n], places=3)
def test_ladder_graph(self):
"""Weighted betweenness centrality: Ladder graph"""
- G = nx.Graph() # ladder_graph(3)
- G.add_edges_from([(0,1), (0,2), (1,3), (2,3),
- (2,4), (4,5), (3,5)])
- b_answer={0:1.667,1: 1.667,2: 6.667,
- 3: 6.667,4: 1.667,5: 1.667}
+ G = nx.Graph() # ladder_graph(3)
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3),
+ (2, 4), (4, 5), (3, 5)])
+ b_answer = {0: 1.667, 1: 1.667, 2: 6.667,
+ 3: 6.667, 4: 1.667, 5: 1.667}
for b in b_answer:
- b_answer[b]/=2.0
- b=nx.betweenness_centrality(G,
- weight='weight',
- normalized=False)
+ b_answer[b] /= 2.0
+ b = nx.betweenness_centrality(G,
+ weight='weight',
+ normalized=False)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=3)
+ assert_almost_equal(b[n], b_answer[n], places=3)
def test_G(self):
- """Weighted betweenness centrality: G"""
- G = weighted_G()
- b_answer={0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0}
- b=nx.betweenness_centrality(G,
- weight='weight',
- normalized=False)
+ """Weighted betweenness centrality: G"""
+ G = weighted_G()
+ b_answer = {0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0}
+ b = nx.betweenness_centrality(G,
+ weight='weight',
+ normalized=False)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_G2(self):
- """Weighted betweenness centrality: G2"""
- G=nx.DiGraph()
- G.add_weighted_edges_from([('s','u',10) ,('s','x',5) ,
- ('u','v',1) ,('u','x',2) ,
- ('v','y',1) ,('x','u',3) ,
- ('x','v',5) ,('x','y',2) ,
- ('y','s',7) ,('y','v',6)])
-
- b_answer={'y':5.0,'x':5.0,'s':4.0,'u':2.0,'v':2.0}
-
- b=nx.betweenness_centrality(G,
- weight='weight',
- normalized=False)
+ """Weighted betweenness centrality: G2"""
+ G = nx.DiGraph()
+ G.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5),
+ ('u', 'v', 1), ('u', 'x', 2),
+ ('v', 'y', 1), ('x', 'u', 3),
+ ('x', 'v', 5), ('x', 'y', 2),
+ ('y', 's', 7), ('y', 'v', 6)])
+
+ b_answer = {'y': 5.0, 'x': 5.0, 's': 4.0, 'u': 2.0, 'v': 2.0}
+
+ b = nx.betweenness_centrality(G,
+ weight='weight',
+ normalized=False)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
class TestEdgeBetweennessCentrality(object):
-
+
def test_K5(self):
"""Edge betweenness centrality: K5"""
- G=nx.complete_graph(5)
- b=nx.edge_betweenness_centrality(G, weight=None, normalized=False)
- b_answer=dict.fromkeys(G.edges(),1)
+ G = nx.complete_graph(5)
+ b = nx.edge_betweenness_centrality(G, weight=None, normalized=False)
+ b_answer = dict.fromkeys(G.edges(), 1)
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_normalized_K5(self):
"""Edge betweenness centrality: K5"""
- G=nx.complete_graph(5)
- b=nx.edge_betweenness_centrality(G, weight=None, normalized=True)
- b_answer=dict.fromkeys(G.edges(),1/10.0)
+ G = nx.complete_graph(5)
+ b = nx.edge_betweenness_centrality(G, weight=None, normalized=True)
+ b_answer = dict.fromkeys(G.edges(), 1 / 10.0)
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
def test_C4(self):
"""Edge betweenness centrality: C4"""
- G=nx.cycle_graph(4)
- b=nx.edge_betweenness_centrality(G, weight=None, normalized=True)
- b_answer={(0, 1):2,(0, 3):2, (1, 2):2, (2, 3): 2}
+ G = nx.cycle_graph(4)
+ b = nx.edge_betweenness_centrality(G, weight=None, normalized=True)
+ b_answer = {(0, 1): 2, (0, 3): 2, (1, 2): 2, (2, 3): 2}
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n]/6.0)
+ assert_almost_equal(b[n], b_answer[n] / 6.0)
def test_P4(self):
"""Edge betweenness centrality: P4"""
- G=nx.path_graph(4)
- b=nx.edge_betweenness_centrality(G, weight=None, normalized=False)
- b_answer={(0, 1):3,(1, 2):4, (2, 3):3}
+ G = nx.path_graph(4)
+ b = nx.edge_betweenness_centrality(G, weight=None, normalized=False)
+ b_answer = {(0, 1): 3, (1, 2): 4, (2, 3): 3}
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_normalized_P4(self):
"""Edge betweenness centrality: P4"""
- G=nx.path_graph(4)
- b=nx.edge_betweenness_centrality(G, weight=None, normalized=True)
- b_answer={(0, 1):3,(1, 2):4, (2, 3):3}
+ G = nx.path_graph(4)
+ b = nx.edge_betweenness_centrality(G, weight=None, normalized=True)
+ b_answer = {(0, 1): 3, (1, 2): 4, (2, 3): 3}
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n]/6.0)
-
+ assert_almost_equal(b[n], b_answer[n] / 6.0)
def test_balanced_tree(self):
"""Edge betweenness centrality: balanced tree"""
- G=nx.balanced_tree(r=2,h=2)
- b=nx.edge_betweenness_centrality(G, weight=None, normalized=False)
- b_answer={(0, 1):12,(0, 2):12,
- (1, 3):6,(1, 4):6,(2, 5):6,(2,6):6}
+ G = nx.balanced_tree(r=2, h=2)
+ b = nx.edge_betweenness_centrality(G, weight=None, normalized=False)
+ b_answer = {(0, 1): 12, (0, 2): 12,
+ (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6}
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
+
class TestWeightedEdgeBetweennessCentrality(object):
-
+
def test_K5(self):
"""Edge betweenness centrality: K5"""
- G=nx.complete_graph(5)
- b=nx.edge_betweenness_centrality(G, weight='weight', normalized=False)
- b_answer=dict.fromkeys(G.edges(),1)
+ G = nx.complete_graph(5)
+ b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False)
+ b_answer = dict.fromkeys(G.edges(), 1)
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_C4(self):
"""Edge betweenness centrality: C4"""
- G=nx.cycle_graph(4)
- b=nx.edge_betweenness_centrality(G, weight='weight', normalized=False)
- b_answer={(0, 1):2,(0, 3):2, (1, 2):2, (2, 3): 2}
+ G = nx.cycle_graph(4)
+ b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False)
+ b_answer = {(0, 1): 2, (0, 3): 2, (1, 2): 2, (2, 3): 2}
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_P4(self):
"""Edge betweenness centrality: P4"""
- G=nx.path_graph(4)
- b=nx.edge_betweenness_centrality(G, weight='weight', normalized=False)
- b_answer={(0, 1):3,(1, 2):4, (2, 3):3}
+ G = nx.path_graph(4)
+ b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False)
+ b_answer = {(0, 1): 3, (1, 2): 4, (2, 3): 3}
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
def test_balanced_tree(self):
"""Edge betweenness centrality: balanced tree"""
- G=nx.balanced_tree(r=2,h=2)
- b=nx.edge_betweenness_centrality(G, weight='weight', normalized=False)
- b_answer={(0, 1):12,(0, 2):12,
- (1, 3):6,(1, 4):6,(2, 5):6,(2,6):6}
+ G = nx.balanced_tree(r=2, h=2)
+ b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False)
+ b_answer = {(0, 1): 12, (0, 2): 12,
+ (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6}
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_weighted_graph(self):
- eList = [(0, 1, 5), (0, 2, 4), (0, 3, 3),
- (0, 4, 2), (1, 2, 4), (1, 3, 1),
+ eList = [(0, 1, 5), (0, 2, 4), (0, 3, 3),
+ (0, 4, 2), (1, 2, 4), (1, 3, 1),
(1, 4, 3), (2, 4, 5), (3, 4, 4)]
G = nx.Graph()
G.add_weighted_edges_from(eList)
b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False)
- b_answer={(0, 1):0.0,
- (0, 2):1.0,
- (0, 3):2.0,
- (0, 4):1.0,
- (1, 2):2.0,
- (1, 3):3.5,
- (1, 4):1.5,
- (2, 4):1.0,
- (3, 4):0.5}
+ b_answer = {(0, 1): 0.0,
+ (0, 2): 1.0,
+ (0, 3): 2.0,
+ (0, 4): 1.0,
+ (1, 2): 2.0,
+ (1, 3): 3.5,
+ (1, 4): 1.5,
+ (2, 4): 1.0,
+ (3, 4): 0.5}
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_normalized_weighted_graph(self):
- eList = [(0, 1, 5), (0, 2, 4), (0, 3, 3),
- (0, 4, 2), (1, 2, 4), (1, 3, 1),
+ eList = [(0, 1, 5), (0, 2, 4), (0, 3, 3),
+ (0, 4, 2), (1, 2, 4), (1, 3, 1),
(1, 4, 3), (2, 4, 5), (3, 4, 4)]
G = nx.Graph()
G.add_weighted_edges_from(eList)
b = nx.edge_betweenness_centrality(G, weight='weight', normalized=True)
- b_answer={(0, 1):0.0,
- (0, 2):1.0,
- (0, 3):2.0,
- (0, 4):1.0,
- (1, 2):2.0,
- (1, 3):3.5,
- (1, 4):1.5,
- (2, 4):1.0,
- (3, 4):0.5}
-
- norm = len(G)*(len(G)-1)/2.0
+ b_answer = {(0, 1): 0.0,
+ (0, 2): 1.0,
+ (0, 3): 2.0,
+ (0, 4): 1.0,
+ (1, 2): 2.0,
+ (1, 3): 3.5,
+ (1, 4): 1.5,
+ (2, 4): 1.0,
+ (3, 4): 0.5}
+
+ norm = len(G) * (len(G) - 1) / 2.0
for n in sorted(G.edges()):
- assert_almost_equal(b[n],b_answer[n]/norm)
-
+ assert_almost_equal(b[n], b_answer[n] / norm)
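The normalized cases in the hunk above all divide by the same constant, spelled out once as norm = len(G) * (len(G) - 1) / 2.0. A minimal sketch (not part of the commit, using only the public edge_betweenness_centrality call) that makes the factor explicit for P4, where it equals 6.0:

    import networkx as nx

    G = nx.path_graph(4)
    raw = nx.edge_betweenness_centrality(G, weight=None, normalized=False)
    scaled = nx.edge_betweenness_centrality(G, weight=None, normalized=True)
    norm = len(G) * (len(G) - 1) / 2.0   # 4 * 3 / 2 = 6.0 node pairs
    for e, v in raw.items():
        # normalized score == raw score divided by the number of node pairs
        assert abs(scaled[e] - v / norm) < 1e-12
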
diff --git a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
index 35b56be7..d4b0e83e 100644
--- a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
@@ -55,7 +55,7 @@ class TestFlowBetweennessCentrality(object):
"""Betweenness centrality: P4 normalized"""
G = nx.path_graph(4)
b = nx.current_flow_betweenness_centrality(G, normalized=True)
- b_answer = {0: 0, 1: 2./3, 2: 2./3, 3: 0}
+ b_answer = {0: 0, 1: 2. / 3, 2: 2. / 3, 3: 0}
for n in sorted(G):
assert_almost_equal(b[n], b_answer[n])
@@ -106,7 +106,7 @@ class TestApproximateFlowBetweennessCentrality(object):
G = nx.complete_graph(4)
b = nx.current_flow_betweenness_centrality(G, normalized=True)
epsilon = 0.1
- ba = approximate_cfbc(G, normalized=True, epsilon=0.5*epsilon)
+ ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
for n in sorted(G):
assert_allclose(b[n], ba[n], atol=epsilon)
@@ -115,9 +115,9 @@ class TestApproximateFlowBetweennessCentrality(object):
G = nx.complete_graph(4)
b = nx.current_flow_betweenness_centrality(G, normalized=False)
epsilon = 0.1
- ba = approximate_cfbc(G, normalized=False, epsilon=0.5*epsilon)
+ ba = approximate_cfbc(G, normalized=False, epsilon=0.5 * epsilon)
for n in sorted(G):
- assert_allclose(b[n], ba[n], atol=epsilon*len(G)**2)
+ assert_allclose(b[n], ba[n], atol=epsilon * len(G)**2)
def test_star(self):
"Approximate current-flow betweenness centrality: star"
@@ -125,7 +125,7 @@ class TestApproximateFlowBetweennessCentrality(object):
nx.add_star(G, ['a', 'b', 'c', 'd'])
b = nx.current_flow_betweenness_centrality(G, normalized=True)
epsilon = 0.1
- ba = approximate_cfbc(G, normalized=True, epsilon=0.5*epsilon)
+ ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
for n in sorted(G):
assert_allclose(b[n], ba[n], atol=epsilon)
@@ -134,7 +134,7 @@ class TestApproximateFlowBetweennessCentrality(object):
G = nx.grid_2d_graph(4, 4)
b = nx.current_flow_betweenness_centrality(G, normalized=True)
epsilon = 0.1
- ba = approximate_cfbc(G, normalized=True, epsilon=0.5*epsilon)
+ ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
for n in sorted(G):
assert_allclose(b[n], ba[n], atol=epsilon)
@@ -144,7 +144,7 @@ class TestApproximateFlowBetweennessCentrality(object):
epsilon = 0.1
for solver in ['full', 'lu', 'cg']:
b = approximate_cfbc(G, normalized=False, solver=solver,
- epsilon=0.5*epsilon)
+ epsilon=0.5 * epsilon)
b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
for n in sorted(G):
assert_allclose(b[n], b_answer[n], atol=epsilon)
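Each approximation test above asks the randomized estimator for half of the tolerance it then allows, and the unnormalized case widens the window by len(G)**2 because unnormalized scores grow roughly with the number of node pairs (that scaling argument is an inference, not something stated in the diff). A sketch of the same convention, assuming NumPy is installed as the test's setupClass already requires:

    import networkx as nx

    G = nx.complete_graph(4)
    epsilon = 0.1
    exact = nx.current_flow_betweenness_centrality(G, normalized=False)
    # Request half the target tolerance from the randomized estimator.
    approx = nx.approximate_current_flow_betweenness_centrality(
        G, normalized=False, epsilon=0.5 * epsilon)
    for n in G:
        # Unnormalized comparison: tolerance rescaled by n**2.
        assert abs(exact[n] - approx[n]) <= epsilon * len(G) ** 2
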
diff --git a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py
index bcb600aa..7379bd6e 100644
--- a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py
+++ b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py
@@ -10,8 +10,10 @@ from networkx import edge_current_flow_betweenness_centrality \
from networkx import edge_current_flow_betweenness_centrality_subset \
as edge_current_flow_subset
+
class TestFlowBetweennessCentrality(object):
- numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+ numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+
@classmethod
def setupClass(cls):
global np
@@ -21,90 +23,85 @@ class TestFlowBetweennessCentrality(object):
except ImportError:
raise SkipTest('NumPy not available.')
-
def test_K4_normalized(self):
"""Betweenness centrality: K4"""
- G=nx.complete_graph(4)
- b=nx.current_flow_betweenness_centrality_subset(G,
- list(G),
- list(G),
- normalized=True)
- b_answer=nx.current_flow_betweenness_centrality(G,normalized=True)
+ G = nx.complete_graph(4)
+ b = nx.current_flow_betweenness_centrality_subset(G,
+ list(G),
+ list(G),
+ normalized=True)
+ b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
def test_K4(self):
"""Betweenness centrality: K4"""
- G=nx.complete_graph(4)
- b=nx.current_flow_betweenness_centrality_subset(G,
- list(G),
- list(G),
- normalized=True)
- b_answer=nx.current_flow_betweenness_centrality(G,normalized=True)
+ G = nx.complete_graph(4)
+ b = nx.current_flow_betweenness_centrality_subset(G,
+ list(G),
+ list(G),
+ normalized=True)
+ b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
# test weighted network
- G.add_edge(0,1,weight=0.5,other=0.3)
- b=nx.current_flow_betweenness_centrality_subset(G,
- list(G),
- list(G),
- normalized=True,
- weight=None)
+ G.add_edge(0, 1, weight=0.5, other=0.3)
+ b = nx.current_flow_betweenness_centrality_subset(G,
+ list(G),
+ list(G),
+ normalized=True,
+ weight=None)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
- b=nx.current_flow_betweenness_centrality_subset(G,
- list(G),
- list(G),
- normalized=True)
- b_answer=nx.current_flow_betweenness_centrality(G,normalized=True)
+ assert_almost_equal(b[n], b_answer[n])
+ b = nx.current_flow_betweenness_centrality_subset(G,
+ list(G),
+ list(G),
+ normalized=True)
+ b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
- b=nx.current_flow_betweenness_centrality_subset(G,
- list(G),
- list(G),
- normalized=True,
- weight='other')
- b_answer=nx.current_flow_betweenness_centrality(G,normalized=True,weight='other')
+ assert_almost_equal(b[n], b_answer[n])
+ b = nx.current_flow_betweenness_centrality_subset(G,
+ list(G),
+ list(G),
+ normalized=True,
+ weight='other')
+ b_answer = nx.current_flow_betweenness_centrality(G, normalized=True, weight='other')
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
def test_P4_normalized(self):
"""Betweenness centrality: P4 normalized"""
- G=nx.path_graph(4)
- b=nx.current_flow_betweenness_centrality_subset(G,
- list(G),
- list(G),
- normalized=True)
- b_answer=nx.current_flow_betweenness_centrality(G,normalized=True)
+ G = nx.path_graph(4)
+ b = nx.current_flow_betweenness_centrality_subset(G,
+ list(G),
+ list(G),
+ normalized=True)
+ b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
def test_P4(self):
"""Betweenness centrality: P4"""
- G=nx.path_graph(4)
- b=nx.current_flow_betweenness_centrality_subset(G,
- list(G),
- list(G),
- normalized=True)
- b_answer=nx.current_flow_betweenness_centrality(G,normalized=True)
+ G = nx.path_graph(4)
+ b = nx.current_flow_betweenness_centrality_subset(G,
+ list(G),
+ list(G),
+ normalized=True)
+ b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_star(self):
"""Betweenness centrality: star """
- G=nx.Graph()
+ G = nx.Graph()
nx.add_star(G, ['a', 'b', 'c', 'd'])
- b=nx.current_flow_betweenness_centrality_subset(G,
- list(G),
- list(G),
- normalized=True)
- b_answer=nx.current_flow_betweenness_centrality(G,normalized=True)
+ b = nx.current_flow_betweenness_centrality_subset(G,
+ list(G),
+ list(G),
+ normalized=True)
+ b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
# class TestWeightedFlowBetweennessCentrality():
@@ -112,7 +109,8 @@ class TestFlowBetweennessCentrality(object):
class TestEdgeFlowBetweennessCentrality(object):
- numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+ numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+
@classmethod
def setupClass(cls):
global np
@@ -121,61 +119,58 @@ class TestEdgeFlowBetweennessCentrality(object):
import scipy
except ImportError:
raise SkipTest('NumPy not available.')
-
+
def test_K4_normalized(self):
"""Betweenness centrality: K4"""
- G=nx.complete_graph(4)
- b=edge_current_flow_subset(G,list(G),list(G),normalized=True)
- b_answer=edge_current_flow(G,normalized=True)
- for (s,t),v1 in b_answer.items():
- v2=b.get((s,t),b.get((t,s)))
- assert_almost_equal(v1,v2)
+ G = nx.complete_graph(4)
+ b = edge_current_flow_subset(G, list(G), list(G), normalized=True)
+ b_answer = edge_current_flow(G, normalized=True)
+ for (s, t), v1 in b_answer.items():
+ v2 = b.get((s, t), b.get((t, s)))
+ assert_almost_equal(v1, v2)
def test_K4(self):
"""Betweenness centrality: K4"""
- G=nx.complete_graph(4)
- b=edge_current_flow_subset(G,list(G),list(G),normalized=False)
- b_answer=edge_current_flow(G,normalized=False)
- for (s,t),v1 in b_answer.items():
- v2=b.get((s,t),b.get((t,s)))
- assert_almost_equal(v1,v2)
+ G = nx.complete_graph(4)
+ b = edge_current_flow_subset(G, list(G), list(G), normalized=False)
+ b_answer = edge_current_flow(G, normalized=False)
+ for (s, t), v1 in b_answer.items():
+ v2 = b.get((s, t), b.get((t, s)))
+ assert_almost_equal(v1, v2)
# test weighted network
- G.add_edge(0,1,weight=0.5,other=0.3)
- b=edge_current_flow_subset(G,list(G),list(G),normalized=False,weight=None)
+ G.add_edge(0, 1, weight=0.5, other=0.3)
+ b = edge_current_flow_subset(G, list(G), list(G), normalized=False, weight=None)
# weight is None => same as unweighted network
- for (s,t),v1 in b_answer.items():
- v2=b.get((s,t),b.get((t,s)))
- assert_almost_equal(v1,v2)
-
- b=edge_current_flow_subset(G,list(G),list(G),normalized=False)
- b_answer=edge_current_flow(G,normalized=False)
- for (s,t),v1 in b_answer.items():
- v2=b.get((s,t),b.get((t,s)))
- assert_almost_equal(v1,v2)
-
- b=edge_current_flow_subset(G,list(G),list(G),normalized=False,weight='other')
- b_answer=edge_current_flow(G,normalized=False,weight='other')
- for (s,t),v1 in b_answer.items():
- v2=b.get((s,t),b.get((t,s)))
- assert_almost_equal(v1,v2)
-
+ for (s, t), v1 in b_answer.items():
+ v2 = b.get((s, t), b.get((t, s)))
+ assert_almost_equal(v1, v2)
+
+ b = edge_current_flow_subset(G, list(G), list(G), normalized=False)
+ b_answer = edge_current_flow(G, normalized=False)
+ for (s, t), v1 in b_answer.items():
+ v2 = b.get((s, t), b.get((t, s)))
+ assert_almost_equal(v1, v2)
+
+ b = edge_current_flow_subset(G, list(G), list(G), normalized=False, weight='other')
+ b_answer = edge_current_flow(G, normalized=False, weight='other')
+ for (s, t), v1 in b_answer.items():
+ v2 = b.get((s, t), b.get((t, s)))
+ assert_almost_equal(v1, v2)
def test_C4(self):
"""Edge betweenness centrality: C4"""
- G=nx.cycle_graph(4)
- b=edge_current_flow_subset(G,list(G),list(G),normalized=True)
- b_answer=edge_current_flow(G,normalized=True)
- for (s,t),v1 in b_answer.items():
- v2=b.get((s,t),b.get((t,s)))
- assert_almost_equal(v1,v2)
-
+ G = nx.cycle_graph(4)
+ b = edge_current_flow_subset(G, list(G), list(G), normalized=True)
+ b_answer = edge_current_flow(G, normalized=True)
+ for (s, t), v1 in b_answer.items():
+ v2 = b.get((s, t), b.get((t, s)))
+ assert_almost_equal(v1, v2)
def test_P4(self):
"""Edge betweenness centrality: P4"""
- G=nx.path_graph(4)
- b=edge_current_flow_subset(G, list(G), list(G), normalized=True)
- b_answer=edge_current_flow(G,normalized=True)
- for (s,t),v1 in b_answer.items():
- v2=b.get((s,t),b.get((t,s)))
- assert_almost_equal(v1,v2)
-
+ G = nx.path_graph(4)
+ b = edge_current_flow_subset(G, list(G), list(G), normalized=True)
+ b_answer = edge_current_flow(G, normalized=True)
+ for (s, t), v1 in b_answer.items():
+ v2 = b.get((s, t), b.get((t, s)))
+ assert_almost_equal(v1, v2)
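Every comparison loop above looks an edge up under both orientations because the returned dictionaries may key an undirected edge as either (s, t) or (t, s). The pattern in isolation (sym_get is a hypothetical helper name, not something the module provides):

    def sym_get(d, u, v):
        """Return d[(u, v)] if that key exists, otherwise d[(v, u)]."""
        return d.get((u, v), d.get((v, u)))

    flows = {(0, 1): 2.5}
    assert sym_get(flows, 1, 0) == 2.5
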
diff --git a/networkx/algorithms/centrality/tests/test_current_flow_closeness.py b/networkx/algorithms/centrality/tests/test_current_flow_closeness.py
index 41ec0302..0ec8bfef 100644
--- a/networkx/algorithms/centrality/tests/test_current_flow_closeness.py
+++ b/networkx/algorithms/centrality/tests/test_current_flow_closeness.py
@@ -3,8 +3,10 @@ from nose.tools import *
from nose import SkipTest
import networkx as nx
+
class TestFlowClosenessCentrality(object):
- numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+ numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+
@classmethod
def setupClass(cls):
global np
@@ -13,34 +15,31 @@ class TestFlowClosenessCentrality(object):
import scipy
except ImportError:
raise SkipTest('NumPy not available.')
-
-
+
def test_K4(self):
"""Closeness centrality: K4"""
- G=nx.complete_graph(4)
- b=nx.current_flow_closeness_centrality(G)
- b_answer={0: 2.0/3, 1: 2.0/3, 2: 2.0/3, 3: 2.0/3}
+ G = nx.complete_graph(4)
+ b = nx.current_flow_closeness_centrality(G)
+ b_answer = {0: 2.0 / 3, 1: 2.0 / 3, 2: 2.0 / 3, 3: 2.0 / 3}
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
def test_P4(self):
"""Closeness centrality: P4"""
- G=nx.path_graph(4)
- b=nx.current_flow_closeness_centrality(G)
- b_answer={0: 1.0/6, 1: 1.0/4, 2: 1.0/4, 3:1.0/6}
+ G = nx.path_graph(4)
+ b = nx.current_flow_closeness_centrality(G)
+ b_answer = {0: 1.0 / 6, 1: 1.0 / 4, 2: 1.0 / 4, 3: 1.0 / 6}
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
def test_star(self):
"""Closeness centrality: star """
- G=nx.Graph()
+ G = nx.Graph()
nx.add_star(G, ['a', 'b', 'c', 'd'])
- b=nx.current_flow_closeness_centrality(G)
- b_answer={'a': 1.0/3, 'b': 0.6/3, 'c': 0.6/3, 'd':0.6/3}
+ b = nx.current_flow_closeness_centrality(G)
+ b_answer = {'a': 1.0 / 3, 'b': 0.6 / 3, 'c': 0.6 / 3, 'd': 0.6 / 3}
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
-
+ assert_almost_equal(b[n], b_answer[n])
class TestWeightedFlowClosenessCentrality(object):
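The fractions expected above are consistent with reading current-flow closeness as the reciprocal of a node's summed effective resistances (that reading is inferred from the expected values, not stated in the diff). On a path, unit resistors in series make the effective resistance equal to the hop distance, and in a star a leaf reaches another leaf through the hub:

    # P4: node 0 sees the others at resistances 1, 2, 3; node 1 at 1, 1, 2.
    assert 1.0 / (1 + 2 + 3) == 1.0 / 6          # b_answer[0]
    assert 1.0 / (1 + 1 + 2) == 1.0 / 4          # b_answer[1]
    # Star: hub 'a' is at resistance 1 from each leaf; leaf 'b' is at
    # 1 from the hub and 2 from each of the other two leaves.
    assert 1.0 / (1 + 1 + 1) == 1.0 / 3          # b_answer['a']
    assert abs(1.0 / (1 + 2 + 2) - 0.6 / 3) < 1e-12   # b_answer['b']
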
diff --git a/networkx/algorithms/centrality/tests/test_degree_centrality.py b/networkx/algorithms/centrality/tests/test_degree_centrality.py
index 9109ddc9..d80b7125 100644
--- a/networkx/algorithms/centrality/tests/test_degree_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_degree_centrality.py
@@ -14,79 +14,78 @@ class TestDegreeCentrality:
self.P3 = nx.path_graph(3)
self.K5 = nx.complete_graph(5)
- F = nx.Graph() # Florentine families
- F.add_edge('Acciaiuoli','Medici')
- F.add_edge('Castellani','Peruzzi')
- F.add_edge('Castellani','Strozzi')
- F.add_edge('Castellani','Barbadori')
- F.add_edge('Medici','Barbadori')
- F.add_edge('Medici','Ridolfi')
- F.add_edge('Medici','Tornabuoni')
- F.add_edge('Medici','Albizzi')
- F.add_edge('Medici','Salviati')
- F.add_edge('Salviati','Pazzi')
- F.add_edge('Peruzzi','Strozzi')
- F.add_edge('Peruzzi','Bischeri')
- F.add_edge('Strozzi','Ridolfi')
- F.add_edge('Strozzi','Bischeri')
- F.add_edge('Ridolfi','Tornabuoni')
- F.add_edge('Tornabuoni','Guadagni')
- F.add_edge('Albizzi','Ginori')
- F.add_edge('Albizzi','Guadagni')
- F.add_edge('Bischeri','Guadagni')
- F.add_edge('Guadagni','Lamberteschi')
+ F = nx.Graph() # Florentine families
+ F.add_edge('Acciaiuoli', 'Medici')
+ F.add_edge('Castellani', 'Peruzzi')
+ F.add_edge('Castellani', 'Strozzi')
+ F.add_edge('Castellani', 'Barbadori')
+ F.add_edge('Medici', 'Barbadori')
+ F.add_edge('Medici', 'Ridolfi')
+ F.add_edge('Medici', 'Tornabuoni')
+ F.add_edge('Medici', 'Albizzi')
+ F.add_edge('Medici', 'Salviati')
+ F.add_edge('Salviati', 'Pazzi')
+ F.add_edge('Peruzzi', 'Strozzi')
+ F.add_edge('Peruzzi', 'Bischeri')
+ F.add_edge('Strozzi', 'Ridolfi')
+ F.add_edge('Strozzi', 'Bischeri')
+ F.add_edge('Ridolfi', 'Tornabuoni')
+ F.add_edge('Tornabuoni', 'Guadagni')
+ F.add_edge('Albizzi', 'Ginori')
+ F.add_edge('Albizzi', 'Guadagni')
+ F.add_edge('Bischeri', 'Guadagni')
+ F.add_edge('Guadagni', 'Lamberteschi')
self.F = F
G = nx.DiGraph()
- G.add_edge(0,5)
- G.add_edge(1,5)
- G.add_edge(2,5)
- G.add_edge(3,5)
- G.add_edge(4,5)
- G.add_edge(5,6)
- G.add_edge(5,7)
- G.add_edge(5,8)
+ G.add_edge(0, 5)
+ G.add_edge(1, 5)
+ G.add_edge(2, 5)
+ G.add_edge(3, 5)
+ G.add_edge(4, 5)
+ G.add_edge(5, 6)
+ G.add_edge(5, 7)
+ G.add_edge(5, 8)
self.G = G
def test_degree_centrality_1(self):
d = nx.degree_centrality(self.K5)
- exact = dict(zip(range(5), [1]*5))
- for n,dc in d.items():
+ exact = dict(zip(range(5), [1] * 5))
+ for n, dc in d.items():
assert_almost_equal(exact[n], dc)
def test_degree_centrality_2(self):
d = nx.degree_centrality(self.P3)
- exact = {0:0.5, 1:1, 2:0.5}
- for n,dc in d.items():
+ exact = {0: 0.5, 1: 1, 2: 0.5}
+ for n, dc in d.items():
assert_almost_equal(exact[n], dc)
def test_degree_centrality_3(self):
d = nx.degree_centrality(self.K)
- exact = {0:.444, 1:.444, 2:.333, 3:.667, 4:.333,
- 5:.556, 6:.556, 7:.333, 8:.222, 9:.111}
- for n,dc in d.items():
+ exact = {0: .444, 1: .444, 2: .333, 3: .667, 4: .333,
+ 5: .556, 6: .556, 7: .333, 8: .222, 9: .111}
+ for n, dc in d.items():
assert_almost_equal(exact[n], float("%5.3f" % dc))
def test_degree_centrality_4(self):
d = nx.degree_centrality(self.F)
names = sorted(self.F.nodes())
- dcs = [0.071, 0.214, 0.143, 0.214, 0.214, 0.071, 0.286,
+ dcs = [0.071, 0.214, 0.143, 0.214, 0.214, 0.071, 0.286,
0.071, 0.429, 0.071, 0.214, 0.214, 0.143, 0.286, 0.214]
exact = dict(zip(names, dcs))
- for n,dc in d.items():
+ for n, dc in d.items():
assert_almost_equal(exact[n], float("%5.3f" % dc))
def test_indegree_centrality(self):
d = nx.in_degree_centrality(self.G)
- exact = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0,
+ exact = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0,
5: 0.625, 6: 0.125, 7: 0.125, 8: 0.125}
- for n,dc in d.items():
+ for n, dc in d.items():
assert_almost_equal(exact[n], dc)
def test_outdegree_centrality(self):
d = nx.out_degree_centrality(self.G)
- exact = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125,
+ exact = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125,
4: 0.125, 5: 0.375, 6: 0.0, 7: 0.0, 8: 0.0}
- for n,dc in d.items():
+ for n, dc in d.items():
assert_almost_equal(exact[n], dc)
-
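The degree-centrality expectations above all come from dividing a node's degree by n - 1. For the 9-node directed example this pins down every entry (plain arithmetic, nothing beyond what the diff already encodes):

    n = 9                          # nodes 0..8 in the DiGraph above
    assert 5 / (n - 1) == 0.625    # node 5: in-degree 5
    assert 3 / (n - 1) == 0.375    # node 5: out-degree 3
    assert 1 / (n - 1) == 0.125    # every other non-zero entry
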
diff --git a/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py b/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py
index babe0039..0d8c10df 100644
--- a/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py
@@ -4,8 +4,10 @@ from nose import SkipTest
from nose.tools import *
import networkx as nx
+
class TestEigenvectorCentrality(object):
- numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+ numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+
@classmethod
def setupClass(cls):
global np
@@ -17,52 +19,49 @@ class TestEigenvectorCentrality(object):
def test_K5(self):
"""Eigenvector centrality: K5"""
- G=nx.complete_graph(5)
- b=nx.eigenvector_centrality(G)
- v=math.sqrt(1/5.0)
- b_answer=dict.fromkeys(G,v)
+ G = nx.complete_graph(5)
+ b = nx.eigenvector_centrality(G)
+ v = math.sqrt(1 / 5.0)
+ b_answer = dict.fromkeys(G, v)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
- nstart = dict([(n,1) for n in G])
- b=nx.eigenvector_centrality(G,nstart=nstart)
+ assert_almost_equal(b[n], b_answer[n])
+ nstart = dict([(n, 1) for n in G])
+ b = nx.eigenvector_centrality(G, nstart=nstart)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n])
+ assert_almost_equal(b[n], b_answer[n])
-
- b=nx.eigenvector_centrality_numpy(G)
+ b = nx.eigenvector_centrality_numpy(G)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=3)
-
+ assert_almost_equal(b[n], b_answer[n], places=3)
def test_P3(self):
"""Eigenvector centrality: P3"""
- G=nx.path_graph(3)
- b_answer={0: 0.5, 1: 0.7071, 2: 0.5}
- b=nx.eigenvector_centrality_numpy(G)
+ G = nx.path_graph(3)
+ b_answer = {0: 0.5, 1: 0.7071, 2: 0.5}
+ b = nx.eigenvector_centrality_numpy(G)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=4)
- b=nx.eigenvector_centrality(G)
+ assert_almost_equal(b[n], b_answer[n], places=4)
+ b = nx.eigenvector_centrality(G)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=4)
-
+ assert_almost_equal(b[n], b_answer[n], places=4)
def test_P3_unweighted(self):
"""Eigenvector centrality: P3"""
- G=nx.path_graph(3)
- b_answer={0: 0.5, 1: 0.7071, 2: 0.5}
- b=nx.eigenvector_centrality_numpy(G, weight=None)
+ G = nx.path_graph(3)
+ b_answer = {0: 0.5, 1: 0.7071, 2: 0.5}
+ b = nx.eigenvector_centrality_numpy(G, weight=None)
for n in sorted(G):
- assert_almost_equal(b[n],b_answer[n],places=4)
-
-
+ assert_almost_equal(b[n], b_answer[n], places=4)
@raises(nx.PowerIterationFailedConvergence)
def test_maxiter(self):
- G=nx.path_graph(3)
- b=nx.eigenvector_centrality(G,max_iter=0)
+ G = nx.path_graph(3)
+ b = nx.eigenvector_centrality(G, max_iter=0)
+
class TestEigenvectorCentralityDirected(object):
- numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+ numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+
@classmethod
def setupClass(cls):
global np
@@ -74,57 +73,56 @@ class TestEigenvectorCentralityDirected(object):
def setUp(self):
- G=nx.DiGraph()
+ G = nx.DiGraph()
- edges=[(1,2),(1,3),(2,4),(3,2),(3,5),(4,2),(4,5),(4,6),\
- (5,6),(5,7),(5,8),(6,8),(7,1),(7,5),\
- (7,8),(8,6),(8,7)]
+ edges = [(1, 2), (1, 3), (2, 4), (3, 2), (3, 5), (4, 2), (4, 5), (4, 6),
+ (5, 6), (5, 7), (5, 8), (6, 8), (7, 1), (7, 5),
+ (7, 8), (8, 6), (8, 7)]
- G.add_edges_from(edges,weight=2.0)
- self.G=G.reverse()
- self.G.evc=[0.25368793, 0.19576478, 0.32817092, 0.40430835,
- 0.48199885, 0.15724483, 0.51346196, 0.32475403]
+ G.add_edges_from(edges, weight=2.0)
+ self.G = G.reverse()
+ self.G.evc = [0.25368793, 0.19576478, 0.32817092, 0.40430835,
+ 0.48199885, 0.15724483, 0.51346196, 0.32475403]
- H=nx.DiGraph()
+ H = nx.DiGraph()
- edges=[(1,2),(1,3),(2,4),(3,2),(3,5),(4,2),(4,5),(4,6),\
- (5,6),(5,7),(5,8),(6,8),(7,1),(7,5),\
- (7,8),(8,6),(8,7)]
+ edges = [(1, 2), (1, 3), (2, 4), (3, 2), (3, 5), (4, 2), (4, 5), (4, 6),
+ (5, 6), (5, 7), (5, 8), (6, 8), (7, 1), (7, 5),
+ (7, 8), (8, 6), (8, 7)]
G.add_edges_from(edges)
- self.H=G.reverse()
- self.H.evc=[0.25368793, 0.19576478, 0.32817092, 0.40430835,
- 0.48199885, 0.15724483, 0.51346196, 0.32475403]
-
+ self.H = G.reverse()
+ self.H.evc = [0.25368793, 0.19576478, 0.32817092, 0.40430835,
+ 0.48199885, 0.15724483, 0.51346196, 0.32475403]
def test_eigenvector_centrality_weighted(self):
- G=self.G
- p=nx.eigenvector_centrality(G)
- for (a,b) in zip(list(p.values()),self.G.evc):
- assert_almost_equal(a,b,places=4)
+ G = self.G
+ p = nx.eigenvector_centrality(G)
+ for (a, b) in zip(list(p.values()), self.G.evc):
+ assert_almost_equal(a, b, places=4)
def test_eigenvector_centrality_weighted_numpy(self):
- G=self.G
- p=nx.eigenvector_centrality_numpy(G)
- for (a,b) in zip(list(p.values()),self.G.evc):
- assert_almost_equal(a,b)
-
+ G = self.G
+ p = nx.eigenvector_centrality_numpy(G)
+ for (a, b) in zip(list(p.values()), self.G.evc):
+ assert_almost_equal(a, b)
def test_eigenvector_centrality_unweighted(self):
- G=self.H
- p=nx.eigenvector_centrality(G)
- for (a,b) in zip(list(p.values()),self.G.evc):
- assert_almost_equal(a,b,places=4)
-
+ G = self.H
+ p = nx.eigenvector_centrality(G)
+ for (a, b) in zip(list(p.values()), self.G.evc):
+ assert_almost_equal(a, b, places=4)
def test_eigenvector_centrality_unweighted_numpy(self):
- G=self.H
- p=nx.eigenvector_centrality_numpy(G)
- for (a,b) in zip(list(p.values()),self.G.evc):
- assert_almost_equal(a,b)
+ G = self.H
+ p = nx.eigenvector_centrality_numpy(G)
+ for (a, b) in zip(list(p.values()), self.G.evc):
+ assert_almost_equal(a, b)
+
class TestEigenvectorCentralityExceptions(object):
- numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+ numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+
@classmethod
def setupClass(cls):
global np
@@ -133,7 +131,8 @@ class TestEigenvectorCentralityExceptions(object):
import scipy
except ImportError:
raise SkipTest('SciPy not available.')
- numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+ numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+
@raises(nx.NetworkXException)
def test_multigraph(self):
e = nx.eigenvector_centrality(nx.MultiGraph())
@@ -142,7 +141,6 @@ class TestEigenvectorCentralityExceptions(object):
def test_multigraph_numpy(self):
e = nx.eigenvector_centrality_numpy(nx.MultiGraph())
-
@raises(nx.NetworkXException)
def test_empty(self):
e = nx.eigenvector_centrality(nx.Graph())
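The constants in the undirected eigenvector tests above are unit-Euclidean-norm eigenvectors written out by hand: on K5 every node scores sqrt(1/5), and on P3 the leading adjacency eigenvector is (1, sqrt(2), 1) scaled to norm 1, which is where 0.7071 comes from. A quick numeric check:

    import math

    # K5: uniform vector of length 5 with unit Euclidean norm.
    v = math.sqrt(1 / 5.0)
    assert abs(5 * v * v - 1.0) < 1e-12

    # P3: A(1, sqrt(2), 1) = sqrt(2)(1, sqrt(2), 1); dividing by the
    # norm 2 gives (0.5, sqrt(2)/2, 0.5), i.e. the 0.7071 in the test.
    assert abs(math.sqrt(2) / 2 - 0.7071) < 1e-4
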
diff --git a/networkx/algorithms/centrality/tests/test_harmonic_centrality.py b/networkx/algorithms/centrality/tests/test_harmonic_centrality.py
index fc45e609..d870dd96 100644
--- a/networkx/algorithms/centrality/tests/test_harmonic_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_harmonic_centrality.py
@@ -5,6 +5,7 @@ from nose.tools import *
import networkx as nx
from networkx.algorithms.centrality import harmonic_centrality
+
class TestClosenessCentrality:
def setUp(self):
self.P3 = nx.path_graph(3)
@@ -14,14 +15,12 @@ class TestClosenessCentrality:
self.C4 = nx.cycle_graph(4)
self.C5 = nx.cycle_graph(5)
-
self.T = nx.balanced_tree(r=2, h=2)
self.Gb = nx.DiGraph()
self.Gb.add_edges_from([(0, 1), (0, 2), (0, 4), (2, 1),
(2, 3), (4, 3)])
-
def test_p3_harmonic(self):
c = harmonic_centrality(self.P3)
d = {0: 1.5,
@@ -30,7 +29,6 @@ class TestClosenessCentrality:
for n in sorted(self.P3):
assert_almost_equal(c[n], d[n], places=3)
-
def test_p4_harmonic(self):
c = harmonic_centrality(self.P4)
d = {0: 1.8333333,
@@ -40,7 +38,6 @@ class TestClosenessCentrality:
for n in sorted(self.P4):
assert_almost_equal(c[n], d[n], places=3)
-
def test_clique_complete(self):
c = harmonic_centrality(self.K5)
d = {0: 4,
@@ -49,31 +46,28 @@ class TestClosenessCentrality:
3: 4,
4: 4}
for n in sorted(self.P3):
- assert_almost_equal(c[n], d[n],places=3)
-
+ assert_almost_equal(c[n], d[n], places=3)
def test_cycle_C4(self):
c = harmonic_centrality(self.C4)
d = {0: 2.5,
1: 2.5,
2: 2.5,
- 3: 2.5,}
+ 3: 2.5, }
for n in sorted(self.C4):
assert_almost_equal(c[n], d[n], places=3)
-
def test_cycle_C5(self):
c = harmonic_centrality(self.C5)
- d={0: 3,
- 1: 3,
- 2: 3,
- 3: 3,
- 4: 3,
- 5: 4}
+ d = {0: 3,
+ 1: 3,
+ 2: 3,
+ 3: 3,
+ 4: 3,
+ 5: 4}
for n in sorted(self.C5):
assert_almost_equal(c[n], d[n], places=3)
-
def test_bal_tree(self):
c = harmonic_centrality(self.T)
d = {0: 4.0,
@@ -86,7 +80,6 @@ class TestClosenessCentrality:
for n in sorted(self.T):
assert_almost_equal(c[n], d[n], places=3)
-
def test_exampleGraph(self):
c = harmonic_centrality(self.Gb)
d = {0: 0,
@@ -97,11 +90,10 @@ class TestClosenessCentrality:
for n in sorted(self.Gb):
assert_almost_equal(c[n], d[n], places=3)
-
def test_weighted_harmonic(self):
XG = nx.DiGraph()
- XG.add_weighted_edges_from([('a','b',10), ('d','c',5), ('a','c',1),
- ('e','f',2), ('f','c',1), ('a','f',3),
+ XG.add_weighted_edges_from([('a', 'b', 10), ('d', 'c', 5), ('a', 'c', 1),
+ ('e', 'f', 2), ('f', 'c', 1), ('a', 'f', 3),
])
c = harmonic_centrality(XG, distance='weight')
d = {'a': 0,
@@ -113,14 +105,12 @@ class TestClosenessCentrality:
for n in sorted(XG):
assert_almost_equal(c[n], d[n], places=3)
-
def test_empty(self):
G = nx.DiGraph()
c = harmonic_centrality(G, distance='weight')
d = {}
assert_equal(c, d)
-
def test_singleton(self):
G = nx.DiGraph()
G.add_node(0)
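The expected dictionaries above are sums of reciprocal shortest-path distances; spelling one of them out for P3 keeps the 1.5 from looking arbitrary:

    # Endpoint of P3: the other nodes sit at distances 1 and 2.
    assert 1 / 1 + 1 / 2 == 1.5
    # Middle node of P3: both ends at distance 1.
    assert 1 / 1 + 1 / 1 == 2.0
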
diff --git a/networkx/algorithms/centrality/tests/test_katz_centrality.py b/networkx/algorithms/centrality/tests/test_katz_centrality.py
index 4b210ed7..f6661f38 100644
--- a/networkx/algorithms/centrality/tests/test_katz_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_katz_centrality.py
@@ -241,7 +241,7 @@ class TestKatzCentralityDirected(object):
0.272257430756461,
0.4201989685435462,
0.34229059218038554,
- ]
+ ]
H = nx.DiGraph(edges)
self.H = G.reverse()
@@ -255,7 +255,7 @@ class TestKatzCentralityDirected(object):
0.272257430756461,
0.4201989685435462,
0.34229059218038554,
- ]
+ ]
def test_katz_centrality_weighted(self):
G = self.G
@@ -317,6 +317,6 @@ class TestKatzEigenvectorVKatz(object):
G = nx.gnp_random_graph(10, 0.5, seed=1234)
l = float(max(eigvals(nx.adjacency_matrix(G).todense())))
e = nx.eigenvector_centrality_numpy(G)
- k = nx.katz_centrality_numpy(G, 1.0/l)
+ k = nx.katz_centrality_numpy(G, 1.0 / l)
for n in G:
assert_almost_equal(e[n], k[n])
diff --git a/networkx/algorithms/centrality/tests/test_subgraph.py b/networkx/algorithms/centrality/tests/test_subgraph.py
index 572a5f37..edd6ba84 100644
--- a/networkx/algorithms/centrality/tests/test_subgraph.py
+++ b/networkx/algorithms/centrality/tests/test_subgraph.py
@@ -4,6 +4,7 @@ from nose import SkipTest
import networkx as nx
from networkx.algorithms.centrality.subgraph_alg import *
+
class TestSubgraph:
@classmethod
def setupClass(cls):
@@ -12,31 +13,31 @@ class TestSubgraph:
try:
import numpy
except ImportError:
- raise SkipTest('NumPy not available.')
+ raise SkipTest('NumPy not available.')
try:
import scipy
except ImportError:
- raise SkipTest('SciPy not available.')
+ raise SkipTest('SciPy not available.')
def test_subgraph_centrality(self):
- answer={0: 1.5430806348152433, 1: 1.5430806348152433}
- result=subgraph_centrality(nx.path_graph(2))
- for k,v in result.items():
- assert_almost_equal(answer[k],result[k],places=7)
+ answer = {0: 1.5430806348152433, 1: 1.5430806348152433}
+ result = subgraph_centrality(nx.path_graph(2))
+ for k, v in result.items():
+ assert_almost_equal(answer[k], result[k], places=7)
- answer1={'1': 1.6445956054135658,
- 'Albert': 2.4368257358712189,
- 'Aric': 2.4368257358712193,
- 'Dan':3.1306328496328168,
- 'Franck': 2.3876142275231915}
- G1=nx.Graph([('Franck','Aric'),('Aric','Dan'),('Dan','Albert'),
- ('Albert','Franck'),('Dan','1'),('Franck','Albert')])
- result1=subgraph_centrality(G1)
- for k,v in result1.items():
- assert_almost_equal(answer1[k],result1[k],places=7)
- result1=subgraph_centrality_exp(G1)
- for k,v in result1.items():
- assert_almost_equal(answer1[k],result1[k],places=7)
+ answer1 = {'1': 1.6445956054135658,
+ 'Albert': 2.4368257358712189,
+ 'Aric': 2.4368257358712193,
+ 'Dan': 3.1306328496328168,
+ 'Franck': 2.3876142275231915}
+ G1 = nx.Graph([('Franck', 'Aric'), ('Aric', 'Dan'), ('Dan', 'Albert'),
+ ('Albert', 'Franck'), ('Dan', '1'), ('Franck', 'Albert')])
+ result1 = subgraph_centrality(G1)
+ for k, v in result1.items():
+ assert_almost_equal(answer1[k], result1[k], places=7)
+ result1 = subgraph_centrality_exp(G1)
+ for k, v in result1.items():
+ assert_almost_equal(answer1[k], result1[k], places=7)
def test_subgraph_centrality_big_graph(self):
g199 = nx.complete_graph(199)
@@ -49,25 +50,25 @@ class TestSubgraph:
comm200_exp = nx.subgraph_centrality_exp(g200)
def test_communicability_betweenness_centrality(self):
- answer={0: 0.07017447951484615, 1: 0.71565598701107991,
- 2: 0.71565598701107991, 3: 0.07017447951484615}
- result=communicability_betweenness_centrality(nx.path_graph(4))
- for k,v in result.items():
- assert_almost_equal(answer[k],result[k],places=7)
+ answer = {0: 0.07017447951484615, 1: 0.71565598701107991,
+ 2: 0.71565598701107991, 3: 0.07017447951484615}
+ result = communicability_betweenness_centrality(nx.path_graph(4))
+ for k, v in result.items():
+ assert_almost_equal(answer[k], result[k], places=7)
- answer1={'1': 0.060039074193949521,
- 'Albert': 0.315470761661372,
- 'Aric': 0.31547076166137211,
- 'Dan': 0.68297778678316201,
- 'Franck': 0.21977926617449497}
- G1=nx.Graph([('Franck','Aric'),
- ('Aric','Dan'),('Dan','Albert'),('Albert','Franck'),
- ('Dan','1'),('Franck','Albert')])
- result1=communicability_betweenness_centrality(G1)
- for k,v in result1.items():
- assert_almost_equal(answer1[k],result1[k],places=7)
+ answer1 = {'1': 0.060039074193949521,
+ 'Albert': 0.315470761661372,
+ 'Aric': 0.31547076166137211,
+ 'Dan': 0.68297778678316201,
+ 'Franck': 0.21977926617449497}
+ G1 = nx.Graph([('Franck', 'Aric'),
+ ('Aric', 'Dan'), ('Dan', 'Albert'), ('Albert', 'Franck'),
+ ('Dan', '1'), ('Franck', 'Albert')])
+ result1 = communicability_betweenness_centrality(G1)
+ for k, v in result1.items():
+ assert_almost_equal(answer1[k], result1[k], places=7)
def test_estrada_index(self):
- answer=1041.2470334195475
- result=estrada_index(nx.karate_club_graph())
- assert_almost_equal(answer,result,places=7)
+ answer = 1041.2470334195475
+ result = estrada_index(nx.karate_club_graph())
+ assert_almost_equal(answer, result, places=7)
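The 1.5430806348152433 expected for path_graph(2) above is cosh(1): subgraph centrality reads the diagonal of expm(A), and for a single edge expm(A) has cosh(1) on the diagonal and sinh(1) off it; the Estrada index checked at the end is the trace of the same matrix exponential. Quick check:

    import math

    assert abs(math.cosh(1) - 1.5430806348152433) < 1e-12
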
diff --git a/networkx/algorithms/coloring/tests/test_coloring.py b/networkx/algorithms/coloring/tests/test_coloring.py
index 2e6d43f3..87be286e 100644
--- a/networkx/algorithms/coloring/tests/test_coloring.py
+++ b/networkx/algorithms/coloring/tests/test_coloring.py
@@ -30,7 +30,7 @@ INTERCHANGE_INVALID = [
'saturation_largest_first',
'DSATUR'
]
-
+
class TestColoring:
def test_basic_cases(self):
@@ -61,13 +61,14 @@ class TestColoring:
assert_true(any(verify_length(coloring, n_colors)
for n_colors in colors))
assert_true(verify_coloring(graph, coloring))
-
+
for strategy, arglist in SPECIAL_TEST_CASES.items():
for args in arglist:
yield (check_special_case, strategy, args[0], args[1], args[2])
def test_interchange_invalid(self):
graph = one_node_graph()
+
def check_raises(strategy):
assert_raises(nx.NetworkXPointlessConcept,
nx.coloring.greedy_color,
@@ -103,10 +104,12 @@ def verify_coloring(graph, coloring):
return True
+
def verify_length(coloring, expected):
coloring = dict_to_sets(coloring)
return len(coloring) == expected
+
def dict_to_sets(colors):
if len(colors) == 0:
return []
@@ -120,26 +123,32 @@ def dict_to_sets(colors):
return sets
############################## Graph Generation ##############################
+
+
def empty_graph():
return nx.Graph()
+
def one_node_graph():
graph = nx.Graph()
graph.add_nodes_from([1])
return graph
+
def two_node_graph():
graph = nx.Graph()
- graph.add_nodes_from([1,2])
- graph.add_edges_from([(1,2)])
+ graph.add_nodes_from([1, 2])
+ graph.add_edges_from([(1, 2)])
return graph
+
def three_node_clique():
graph = nx.Graph()
- graph.add_nodes_from([1,2, 3])
- graph.add_edges_from([(1,2), (1,3), (2,3)])
+ graph.add_nodes_from([1, 2, 3])
+ graph.add_edges_from([(1, 2), (1, 3), (2, 3)])
return graph
+
def disconnected():
graph = nx.Graph()
graph.add_edges_from([
@@ -150,51 +159,55 @@ def disconnected():
])
return graph
+
def rs_shc():
graph = nx.Graph()
- graph.add_nodes_from([1,2,3,4])
+ graph.add_nodes_from([1, 2, 3, 4])
graph.add_edges_from([
- (1,2),
- (2,3),
- (3,4)
+ (1, 2),
+ (2, 3),
+ (3, 4)
])
return graph
+
def slf_shc():
graph = nx.Graph()
- graph.add_nodes_from([1,2,3,4,5,6,7])
+ graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
graph.add_edges_from([
- (1,2),
- (1,5),
- (1,6),
- (2,3),
- (2,7),
- (3,4),
- (3,7),
- (4,5),
- (4,6),
- (5,6)
+ (1, 2),
+ (1, 5),
+ (1, 6),
+ (2, 3),
+ (2, 7),
+ (3, 4),
+ (3, 7),
+ (4, 5),
+ (4, 6),
+ (5, 6)
])
return graph
+
def slf_hc():
graph = nx.Graph()
- graph.add_nodes_from([1,2,3,4,5,6,7,8])
+ graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
graph.add_edges_from([
- (1,2),
- (1,3),
- (1,4),
- (1,5),
- (2,3),
- (2,4),
- (2,6),
- (5,7),
- (5,8),
- (6,7),
- (6,8),
- (7,8)
+ (1, 2),
+ (1, 3),
+ (1, 4),
+ (1, 5),
+ (2, 3),
+ (2, 4),
+ (2, 6),
+ (5, 7),
+ (5, 8),
+ (6, 7),
+ (6, 8),
+ (7, 8)
])
- return graph
+ return graph
+
def lf_shc():
graph = nx.Graph()
@@ -208,6 +221,7 @@ def lf_shc():
])
return graph
+
def lf_hc():
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
@@ -225,7 +239,8 @@ def lf_hc():
(4, 3)
])
return graph
-
+
+
def sl_shc():
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4, 5, 6])
@@ -242,6 +257,7 @@ def sl_shc():
])
return graph
+
def sl_hc():
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
@@ -265,6 +281,7 @@ def sl_hc():
])
return graph
+
def gis_shc():
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4])
@@ -275,6 +292,7 @@ def gis_shc():
])
return graph
+
def gis_hc():
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4, 5, 6])
@@ -287,6 +305,7 @@ def gis_hc():
])
return graph
+
def cs_shc():
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4, 5])
@@ -301,6 +320,7 @@ def cs_shc():
])
return graph
+
def rsi_shc():
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4, 5, 6])
@@ -316,6 +336,7 @@ def rsi_shc():
])
return graph
+
def lfi_shc():
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
@@ -333,6 +354,7 @@ def lfi_shc():
])
return graph
+
def lfi_hc():
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9])
@@ -354,6 +376,7 @@ def lfi_hc():
])
return graph
+
def sli_shc():
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
@@ -372,6 +395,7 @@ def sli_shc():
])
return graph
+
def sli_hc():
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9])
diff --git a/networkx/algorithms/community/community_generators.py b/networkx/algorithms/community/community_generators.py
index 93854741..be433ce0 100644
--- a/networkx/algorithms/community/community_generators.py
+++ b/networkx/algorithms/community/community_generators.py
@@ -359,8 +359,10 @@ def LFR_benchmark_graph(n, tau1, tau2, mu, average_degree=None,
# Generate a degree sequence with a power law distribution.
low, high = min_degree, max_degree
- condition = lambda seq: sum(seq) % 2 == 0
- length = lambda seq: len(seq) >= n
+
+ def condition(seq): return sum(seq) % 2 == 0
+
+ def length(seq): return len(seq) >= n
deg_seq = _powerlaw_sequence(tau1, low, high, condition, length, max_iters)
# Validate parameters for generating the community size sequence.
@@ -378,8 +380,10 @@ def LFR_benchmark_graph(n, tau1, tau2, mu, average_degree=None,
# that one. As a result, this code is allowed many more chances to
# generate a valid community size sequence.
low, high = min_community, max_community
- condition = lambda seq: sum(seq) == n
- length = lambda seq: sum(seq) >= n
+
+ def condition(seq): return sum(seq) == n
+
+ def length(seq): return sum(seq) >= n
comms = _powerlaw_sequence(tau2, low, high, condition, length, max_iters)
# Generate the communities based on the given degree sequence and
diff --git a/networkx/algorithms/community/kclique.py b/networkx/algorithms/community/kclique.py
index 42a175e4..34ade574 100644
--- a/networkx/algorithms/community/kclique.py
+++ b/networkx/algorithms/community/kclique.py
@@ -1,5 +1,5 @@
#-*- coding: utf-8 -*-
-# Copyright (C) 2011 by
+# Copyright (C) 2011 by
# Conrad Lee <conradlee@gmail.com>
# Aric Hagberg <hagberg@lanl.gov>
# All rights reserved.
@@ -10,6 +10,7 @@ __author__ = """\n""".join(['Conrad Lee <conradlee@gmail.com>',
'Aric Hagberg <aric.hagberg@gmail.com>'])
__all__ = ['k_clique_communities']
+
def k_clique_communities(G, k, cliques=None):
"""Find k-clique communities in graph using the percolation method.
@@ -50,7 +51,7 @@ def k_clique_communities(G, k, cliques=None):
doi:10.1038/nature03607
"""
if k < 2:
- raise nx.NetworkXError("k=%d, k must be greater than 1."%k)
+ raise nx.NetworkXError("k=%d, k must be greater than 1." % k)
if cliques is None:
cliques = nx.find_cliques(G)
cliques = [frozenset(c) for c in cliques if len(c) >= k]
@@ -74,6 +75,7 @@ def k_clique_communities(G, k, cliques=None):
for component in nx.connected_components(perc_graph):
yield(frozenset.union(*component))
+
def _get_adjacent_cliques(clique, membership_dict):
adjacent_cliques = set()
for n in clique:
diff --git a/networkx/algorithms/community/label_propagation.py b/networkx/algorithms/community/label_propagation.py
index 8b32954c..01a908a4 100644
--- a/networkx/algorithms/community/label_propagation.py
+++ b/networkx/algorithms/community/label_propagation.py
@@ -80,7 +80,7 @@ def asyn_lpa_communities(G, weight=None):
label_freq = Counter()
for v in G[node]:
label_freq.update({labels[v]: G.edges[v, node][weight]
- if weight else 1})
+ if weight else 1})
        # Choose the label with the highest frequency. If more than 1 label
        # has the highest frequency choose one randomly.
max_freq = max(label_freq.values())
diff --git a/networkx/algorithms/community/tests/test_asyn_fluidc.py b/networkx/algorithms/community/tests/test_asyn_fluidc.py
index 514014af..417da12e 100644
--- a/networkx/algorithms/community/tests/test_asyn_fluidc.py
+++ b/networkx/algorithms/community/tests/test_asyn_fluidc.py
@@ -3,6 +3,7 @@ from networkx import Graph
from networkx.algorithms.community.asyn_fluidc import *
import random
+
def test_single_node():
test = Graph()
@@ -117,4 +118,3 @@ def five_clique_ring():
communities = asyn_fluidc(test, 5)
result = {frozenset(c) for c in communities}
assert_equal(result, ground_truth)
-
diff --git a/networkx/algorithms/community/tests/test_centrality.py b/networkx/algorithms/community/tests/test_centrality.py
index 05866cf5..abe1efcf 100644
--- a/networkx/algorithms/community/tests/test_centrality.py
+++ b/networkx/algorithms/community/tests/test_centrality.py
@@ -83,7 +83,8 @@ class TestGirvanNewman(object):
G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 3), (1, 2, 2), (2, 3, 1)])
# Let the most valuable edge be the one with the highest weight.
- heaviest = lambda G: max(G.edges(data='weight'), key=itemgetter(2))[:2]
+
+ def heaviest(G): return max(G.edges(data='weight'), key=itemgetter(2))[:2]
communities = list(girvan_newman(G, heaviest))
assert_equal(len(communities), 3)
validate_communities(communities[0], [{0}, {1, 2, 3}])
diff --git a/networkx/algorithms/community/tests/test_label_propagation.py b/networkx/algorithms/community/tests/test_label_propagation.py
index ce46e4fd..84321c50 100644
--- a/networkx/algorithms/community/tests/test_label_propagation.py
+++ b/networkx/algorithms/community/tests/test_label_propagation.py
@@ -88,7 +88,7 @@ def test_connected_communities():
frozenset(['x', 'y']),
frozenset(['z'])])
ground_truth2 = set([frozenset(['a', 'b', 'c', 'd', 'e',
- '1', '2', '3', '4', '5']),
+ '1', '2', '3', '4', '5']),
frozenset(['x', 'y']),
frozenset(['z'])])
ground_truth = (ground_truth1, ground_truth2)
diff --git a/networkx/algorithms/community/tests/test_quality.py b/networkx/algorithms/community/tests/test_quality.py
index 79ce7e7f..a941311b 100644
--- a/networkx/algorithms/community/tests/test_quality.py
+++ b/networkx/algorithms/community/tests/test_quality.py
@@ -66,14 +66,14 @@ def test_modularity():
def test_inter_community_edges_with_digraphs():
- G = nx.complete_graph(2, create_using = nx.DiGraph())
+ G = nx.complete_graph(2, create_using=nx.DiGraph())
partition = [{0}, {1}]
assert_equal(inter_community_edges(G, partition), 2)
- G = nx.complete_graph(10, create_using = nx.DiGraph())
+ G = nx.complete_graph(10, create_using=nx.DiGraph())
partition = [{0}, {1, 2}, {3, 4, 5}, {6, 7, 8, 9}]
assert_equal(inter_community_edges(G, partition), 70)
- G = nx.cycle_graph(4, create_using = nx.DiGraph())
+ G = nx.cycle_graph(4, create_using=nx.DiGraph())
partition = [{0, 1}, {2, 3}]
assert_equal(inter_community_edges(G, partition), 2)
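The 70 asserted in the middle case above is direct counting: K10 as a DiGraph has 10 * 9 = 90 arcs, the parts of sizes 1, 2, 3 and 4 keep s * (s - 1) arcs each inside a community, and the rest cross communities:

    sizes = [1, 2, 3, 4]
    intra = sum(s * (s - 1) for s in sizes)   # 0 + 2 + 6 + 12 = 20
    assert 10 * 9 - intra == 70
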
diff --git a/networkx/algorithms/components/tests/test_attracting.py b/networkx/algorithms/components/tests/test_attracting.py
index 953fd97f..bfeae1aa 100644
--- a/networkx/algorithms/components/tests/test_attracting.py
+++ b/networkx/algorithms/components/tests/test_attracting.py
@@ -51,7 +51,7 @@ class TestAttractingComponents(object):
assert_false(nx.is_attracting_component(self.G4))
def test_connected_raise(self):
- G=nx.Graph()
+ G = nx.Graph()
assert_raises(NetworkXNotImplemented, nx.attracting_components, G)
assert_raises(NetworkXNotImplemented, nx.number_attracting_components, G)
assert_raises(NetworkXNotImplemented, nx.is_attracting_component, G)
diff --git a/networkx/algorithms/components/tests/test_biconnected.py b/networkx/algorithms/components/tests/test_biconnected.py
index 54612bc4..ae9e1039 100644
--- a/networkx/algorithms/components/tests/test_biconnected.py
+++ b/networkx/algorithms/components/tests/test_biconnected.py
@@ -4,6 +4,7 @@ import networkx as nx
from networkx.algorithms.components import biconnected
from networkx import NetworkXNotImplemented
+
def assert_components_edges_equal(x, y):
sx = {frozenset([frozenset(e) for e in c]) for c in x}
sy = {frozenset([frozenset(e) for e in c]) for c in y}
@@ -38,62 +39,70 @@ def test_barbell():
]
assert_components_equal(list(nx.biconnected_components(G)), answer)
- G.add_edge(2,17)
+ G.add_edge(2, 17)
pts = set(nx.articulation_points(G))
assert_equal(pts, {7, 20, 21, 22})
+
def test_articulation_points_repetitions():
G = nx.Graph()
G.add_edges_from([(0, 1), (1, 2), (1, 3)])
assert_equal(list(nx.articulation_points(G)), [1])
+
def test_articulation_points_cycle():
- G=nx.cycle_graph(3)
+ G = nx.cycle_graph(3)
nx.add_cycle(G, [1, 3, 4])
- pts=set(nx.articulation_points(G))
+ pts = set(nx.articulation_points(G))
assert_equal(pts, {1})
+
def test_is_biconnected():
- G=nx.cycle_graph(3)
+ G = nx.cycle_graph(3)
assert_true(nx.is_biconnected(G))
nx.add_cycle(G, [1, 3, 4])
assert_false(nx.is_biconnected(G))
+
def test_empty_is_biconnected():
- G=nx.empty_graph(5)
+ G = nx.empty_graph(5)
assert_false(nx.is_biconnected(G))
G.add_edge(0, 1)
assert_false(nx.is_biconnected(G))
+
def test_biconnected_components_cycle():
- G=nx.cycle_graph(3)
+ G = nx.cycle_graph(3)
nx.add_cycle(G, [1, 3, 4])
answer = [{0, 1, 2}, {1, 3, 4}]
assert_components_equal(list(nx.biconnected_components(G)), answer)
# deprecated
+
+
def test_biconnected_component_subgraphs_cycle():
- G=nx.cycle_graph(3)
+ G = nx.cycle_graph(3)
nx.add_cycle(G, [1, 3, 4, 5])
Gc = set(nx.biconnected_component_subgraphs(G))
assert_equal(len(Gc), 2)
- g1, g2=Gc
+ g1, g2 = Gc
if 0 in g1:
- assert_true(nx.is_isomorphic(g1, nx.Graph([(0,1),(0,2),(1,2)])))
- assert_true(nx.is_isomorphic(g2, nx.Graph([(1,3),(1,5),(3,4),(4,5)])))
+ assert_true(nx.is_isomorphic(g1, nx.Graph([(0, 1), (0, 2), (1, 2)])))
+ assert_true(nx.is_isomorphic(g2, nx.Graph([(1, 3), (1, 5), (3, 4), (4, 5)])))
else:
- assert_true(nx.is_isomorphic(g1, nx.Graph([(1,3),(1,5),(3,4),(4,5)])))
- assert_true(nx.is_isomorphic(g2, nx.Graph([(0,1),(0,2),(1,2)])))
+ assert_true(nx.is_isomorphic(g1, nx.Graph([(1, 3), (1, 5), (3, 4), (4, 5)])))
+ assert_true(nx.is_isomorphic(g2, nx.Graph([(0, 1), (0, 2), (1, 2)])))
+
def test_biconnected_components1():
# graph example from
# http://www.ibluemojo.com/school/articul_algorithm.html
- edges=[
+ edges = [
(0, 1), (0, 5), (0, 6), (0, 14), (1, 5), (1, 6), (1, 14), (2, 4),
(2, 10), (3, 4), (3, 15), (4, 6), (4, 7), (4, 10), (5, 14), (6, 14),
(7, 9), (8, 9), (8, 12), (8, 13), (10, 15), (11, 12), (11, 13), (12, 13)
]
- G=nx.Graph(edges)
+ G = nx.Graph(edges)
pts = set(nx.articulation_points(G))
assert_equal(pts, {4, 6, 7, 8, 9})
comps = list(nx.biconnected_component_edges(G))
@@ -108,13 +117,14 @@ def test_biconnected_components1():
]
assert_components_edges_equal(comps, answer)
+
def test_biconnected_components2():
- G=nx.Graph()
+ G = nx.Graph()
nx.add_cycle(G, 'ABC')
nx.add_cycle(G, 'CDE')
nx.add_cycle(G, 'FIJHG')
nx.add_cycle(G, 'GIJ')
- G.add_edge('E','G')
+ G.add_edge('E', 'G')
comps = list(nx.biconnected_component_edges(G))
answer = [
[tuple('GF'), tuple('FI'), tuple('IG'), tuple('IJ'),
@@ -122,16 +132,18 @@ def test_biconnected_components2():
[tuple('EG')],
[tuple('CD'), tuple('DE'), tuple('CE')],
[tuple('AB'), tuple('BC'), tuple('AC')]
- ]
+ ]
assert_components_edges_equal(comps, answer)
+
def test_biconnected_davis():
D = nx.davis_southern_women_graph()
bcc = list(nx.biconnected_components(D))[0]
- assert_true(set(D) == bcc) # All nodes in a giant bicomponent
+ assert_true(set(D) == bcc) # All nodes in a giant bicomponent
# So no articulation points
assert_equal(len(list(nx.articulation_points(D))), 0)
+
def test_biconnected_karate():
K = nx.karate_club_graph()
answer = [{0, 1, 2, 3, 7, 8, 9, 12, 13, 14, 15, 17, 18, 19,
@@ -142,6 +154,7 @@ def test_biconnected_karate():
assert_components_equal(bcc, answer)
assert_equal(set(nx.articulation_points(K)), {0})
+
def test_biconnected_eppstein():
# tests from http://www.ics.uci.edu/~eppstein/PADS/Biconnectivity.py
G1 = nx.Graph({
@@ -170,6 +183,7 @@ def test_biconnected_eppstein():
bcc = list(nx.biconnected_components(G2))
assert_components_equal(bcc, answer_G2)
+
def test_null_graph():
G = nx.Graph()
assert_false(nx.is_biconnected(G))
@@ -177,6 +191,7 @@ def test_null_graph():
assert_equal(list(nx.biconnected_component_edges(G)), [])
assert_equal(list(nx.articulation_points(G)), [])
+
def test_connected_raise():
DG = nx.DiGraph()
assert_raises(NetworkXNotImplemented, nx.biconnected_components, DG)
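
For context, a minimal sketch of the articulation-point and biconnected-component behaviour these tests exercise:

    import networkx as nx

    G = nx.Graph([(0, 1), (1, 2), (1, 3)])             # a star through node 1
    assert list(nx.articulation_points(G)) == [1]       # removing 1 disconnects the graph
    assert not nx.is_biconnected(G)

    H = nx.cycle_graph(3)
    nx.add_cycle(H, [1, 3, 4])                          # two triangles sharing node 1
    assert sorted(sorted(c) for c in nx.biconnected_components(H)) == [[0, 1, 2], [1, 3, 4]]
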
diff --git a/networkx/algorithms/components/tests/test_connected.py b/networkx/algorithms/components/tests/test_connected.py
index 99f58386..594f5d0b 100644
--- a/networkx/algorithms/components/tests/test_connected.py
+++ b/networkx/algorithms/components/tests/test_connected.py
@@ -4,6 +4,7 @@ import networkx as nx
from networkx import convert_node_labels_to_integers as cnlti
from networkx import NetworkXNotImplemented
+
class TestConnected:
def setUp(self):
@@ -24,20 +25,20 @@ class TestConnected:
G = nx.DiGraph()
G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
- C = [[2, 3, 4],[1]]
+ C = [[2, 3, 4], [1]]
self.gc.append((G, C))
G = nx.DiGraph()
G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
C = [[1, 2, 3]]
- self.gc.append((G,C))
+ self.gc.append((G, C))
# Eppstein's tests
- G = nx.DiGraph({0:[1], 1:[2, 3], 2:[4, 5], 3:[4, 5], 4:[6], 5:[], 6:[]})
- C = [[0], [1], [2],[ 3], [4], [5], [6]]
- self.gc.append((G,C))
+ G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
+ C = [[0], [1], [2], [3], [4], [5], [6]]
+ self.gc.append((G, C))
- G = nx.DiGraph({0:[1], 1:[2, 3, 4], 2:[0, 3], 3:[4], 4:[3]})
+ G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
C = [[0, 1, 2], [3, 4]]
self.gc.append((G, C))
@@ -45,7 +46,6 @@ class TestConnected:
C = []
self.gc.append((G, C))
-
def test_connected_components(self):
cc = nx.connected_components
G = self.G
@@ -95,7 +95,7 @@ class TestConnected:
def test_connected_raise(self):
assert_raises(NetworkXNotImplemented, nx.connected_components, self.DG)
assert_raises(NetworkXNotImplemented, nx.number_connected_components, self.DG)
- assert_raises(NetworkXNotImplemented, nx.node_connected_component, self.DG,1)
+ assert_raises(NetworkXNotImplemented, nx.node_connected_component, self.DG, 1)
assert_raises(NetworkXNotImplemented, nx.is_connected, self.DG)
assert_raises(nx.NetworkXPointlessConcept, nx.is_connected, nx.Graph())
# deprecated
diff --git a/networkx/algorithms/components/tests/test_semiconnected.py b/networkx/algorithms/components/tests/test_semiconnected.py
index 2a4f3c22..e92a6480 100644
--- a/networkx/algorithms/components/tests/test_semiconnected.py
+++ b/networkx/algorithms/components/tests/test_semiconnected.py
@@ -2,6 +2,7 @@ from itertools import chain
import networkx as nx
from nose.tools import *
+
class TestIsSemiconnected(object):
def test_undirected(self):
diff --git a/networkx/algorithms/components/tests/test_strongly_connected.py b/networkx/algorithms/components/tests/test_strongly_connected.py
index 5468aec7..d0aecb25 100644
--- a/networkx/algorithms/components/tests/test_strongly_connected.py
+++ b/networkx/algorithms/components/tests/test_strongly_connected.py
@@ -3,6 +3,7 @@ from nose.tools import *
import networkx as nx
from networkx import NetworkXNotImplemented
+
class TestStronglyConnected:
def setUp(self):
@@ -24,7 +25,7 @@ class TestStronglyConnected:
self.gc.append((G, C))
# Eppstein's tests
- G = nx.DiGraph({0: [1], 1:[2, 3], 2:[4, 5], 3:[4, 5], 4:[6], 5:[], 6:[]})
+ G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
C = {
frozenset([0]),
frozenset([1]),
@@ -36,7 +37,7 @@ class TestStronglyConnected:
}
self.gc.append((G, C))
- G = nx.DiGraph({0:[1], 1:[2, 3, 4], 2:[0, 3], 3:[4], 4:[3]})
+ G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
C = {frozenset([0, 1, 2]), frozenset([3, 4])}
self.gc.append((G, C))
@@ -46,7 +47,7 @@ class TestStronglyConnected:
assert_equal({frozenset(g) for g in scc(G)}, C)
def test_tarjan_recursive(self):
- scc=nx.strongly_connected_components_recursive
+ scc = nx.strongly_connected_components_recursive
for G, C in self.gc:
assert_equal({frozenset(g) for g in scc(G)}, C)
@@ -87,7 +88,7 @@ class TestStronglyConnected:
# nodes
assert_equal(sorted(cG.nodes()), [0, 1, 2, 3])
# edges
- mapping={}
+ mapping = {}
for i, component in enumerate(scc):
for n in component:
mapping[n] = i
@@ -136,7 +137,7 @@ class TestStronglyConnected:
assert_equal(set(C[n]), cG.nodes[n]['members'])
def test_null_graph(self):
- G=nx.DiGraph()
+ G = nx.DiGraph()
assert_equal(list(nx.strongly_connected_components(G)), [])
assert_equal(list(nx.kosaraju_strongly_connected_components(G)), [])
assert_equal(list(nx.strongly_connected_components_recursive(G)), [])
@@ -144,7 +145,7 @@ class TestStronglyConnected:
assert_raises(nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph())
def test_connected_raise(self):
- G=nx.Graph()
+ G = nx.Graph()
assert_raises(NetworkXNotImplemented, nx.strongly_connected_components, G)
assert_raises(NetworkXNotImplemented, nx.kosaraju_strongly_connected_components, G)
assert_raises(NetworkXNotImplemented, nx.strongly_connected_components_recursive, G)
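
A small sketch of the strongly-connected-component and condensation behaviour checked above, on Eppstein's second example:

    import networkx as nx

    G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
    sccs = list(nx.strongly_connected_components(G))
    assert {frozenset(c) for c in sccs} == {frozenset({0, 1, 2}), frozenset({3, 4})}

    C = nx.condensation(G, scc=sccs)                    # one node per component, always a DAG
    assert sorted(C.nodes()) == [0, 1]
    assert nx.is_directed_acyclic_graph(C)
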
diff --git a/networkx/algorithms/components/tests/test_subgraph_copies.py b/networkx/algorithms/components/tests/test_subgraph_copies.py
index 9acedba2..fceeaa6c 100644
--- a/networkx/algorithms/components/tests/test_subgraph_copies.py
+++ b/networkx/algorithms/components/tests/test_subgraph_copies.py
@@ -20,7 +20,7 @@ class TestSubgraphAttributesDicts:
nx.attracting_component_subgraphs,
]
self.subgraph_funcs = self.undirected + self.directed
-
+
self.D = nx.DiGraph()
self.D.add_edge(1, 2, eattr='red')
self.D.add_edge(2, 1, eattr='red')
@@ -33,7 +33,7 @@ class TestSubgraphAttributesDicts:
self.G.graph['gattr'] = 'green'
def test_subgraphs_default_copy_behavior(self):
- # Test the default behavior of subgraph functions
+ # Test the default behavior of subgraph functions
# For the moment (1.10) the default is to copy
for subgraph_func in self.subgraph_funcs:
G = deepcopy(self.G if subgraph_func in self.undirected else self.D)
diff --git a/networkx/algorithms/components/tests/test_weakly_connected.py b/networkx/algorithms/components/tests/test_weakly_connected.py
index 8c993deb..764da597 100644
--- a/networkx/algorithms/components/tests/test_weakly_connected.py
+++ b/networkx/algorithms/components/tests/test_weakly_connected.py
@@ -3,6 +3,7 @@ from nose.tools import *
import networkx as nx
from networkx import NetworkXNotImplemented
+
class TestWeaklyConnected:
def setUp(self):
@@ -15,24 +16,23 @@ class TestWeaklyConnected:
G = nx.DiGraph()
G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
- C = [[2, 3, 4],[1]]
+ C = [[2, 3, 4], [1]]
self.gc.append((G, C))
G = nx.DiGraph()
G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
C = [[1, 2, 3]]
- self.gc.append((G,C))
+ self.gc.append((G, C))
# Eppstein's tests
- G = nx.DiGraph({0:[1], 1:[2, 3], 2:[4, 5], 3:[4, 5], 4:[6], 5:[], 6:[]})
- C = [[0], [1], [2],[ 3], [4], [5], [6]]
- self.gc.append((G,C))
+ G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
+ C = [[0], [1], [2], [3], [4], [5], [6]]
+ self.gc.append((G, C))
- G = nx.DiGraph({0:[1], 1:[2, 3, 4], 2:[0, 3], 3:[4], 4:[3]})
+ G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
C = [[0, 1, 2], [3, 4]]
self.gc.append((G, C))
-
def test_weakly_connected_components(self):
for G, C in self.gc:
U = G.to_undirected()
@@ -63,15 +63,15 @@ class TestWeaklyConnected:
assert_equal(nx.is_weakly_connected(G), nx.is_connected(U))
def test_null_graph(self):
- G=nx.DiGraph()
+ G = nx.DiGraph()
assert_equal(list(nx.weakly_connected_components(G)), [])
assert_equal(nx.number_weakly_connected_components(G), 0)
assert_raises(nx.NetworkXPointlessConcept, nx.is_weakly_connected, G)
def test_connected_raise(self):
- G=nx.Graph()
- assert_raises(NetworkXNotImplemented,nx.weakly_connected_components, G)
- assert_raises(NetworkXNotImplemented,nx.number_weakly_connected_components, G)
- assert_raises(NetworkXNotImplemented,nx.is_weakly_connected, G)
+ G = nx.Graph()
+ assert_raises(NetworkXNotImplemented, nx.weakly_connected_components, G)
+ assert_raises(NetworkXNotImplemented, nx.number_weakly_connected_components, G)
+ assert_raises(NetworkXNotImplemented, nx.is_weakly_connected, G)
# deprecated
- assert_raises(NetworkXNotImplemented,nx.weakly_connected_component_subgraphs, G)
+ assert_raises(NetworkXNotImplemented, nx.weakly_connected_component_subgraphs, G)
diff --git a/networkx/algorithms/connectivity/__init__.py b/networkx/algorithms/connectivity/__init__.py
index c1605f0c..fd77d4a3 100644
--- a/networkx/algorithms/connectivity/__init__.py
+++ b/networkx/algorithms/connectivity/__init__.py
@@ -19,4 +19,4 @@ __all__ = sum([connectivity.__all__,
kcutsets.__all__,
stoerwagner.__all__,
utils.__all__,
- ], [])
+ ], [])
diff --git a/networkx/algorithms/connectivity/cuts.py b/networkx/algorithms/connectivity/cuts.py
index bbcd7422..6cfb63e2 100644
--- a/networkx/algorithms/connectivity/cuts.py
+++ b/networkx/algorithms/connectivity/cuts.py
@@ -12,7 +12,7 @@ from networkx.algorithms.flow import build_residual_network
default_flow_func = edmonds_karp
from .utils import (build_auxiliary_node_connectivity,
- build_auxiliary_edge_connectivity)
+ build_auxiliary_edge_connectivity)
__author__ = '\n'.join(['Jordi Torrents <jtorrents@milnou.net>'])
@@ -147,7 +147,7 @@ def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None,
cut_value, partition = nx.minimum_cut(H, s, t, **kwargs)
reachable, non_reachable = partition
- # Any edge in the original graph linking the two sets in the
+ # Any edge in the original graph linking the two sets in the
# partition is part of the edge cutset
cutset = set()
for u, nbrs in ((n, G[n]) for n in reachable):
@@ -406,6 +406,7 @@ def minimum_node_cut(G, s=None, t=None, flow_func=None):
if not nx.is_weakly_connected(G):
raise nx.NetworkXError('Input graph is not connected')
iter_func = itertools.permutations
+
def neighbors(v):
return itertools.chain.from_iterable([G.predecessors(v),
G.successors(v)])
@@ -565,17 +566,17 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None):
n = len(nodes)
for i in range(n):
try:
- this_cut = minimum_st_edge_cut(H, nodes[i], nodes[i+1], **kwargs)
+ this_cut = minimum_st_edge_cut(H, nodes[i], nodes[i + 1], **kwargs)
if len(this_cut) <= len(min_cut):
min_cut = this_cut
- except IndexError: # Last node!
+ except IndexError: # Last node!
this_cut = minimum_st_edge_cut(H, nodes[i], nodes[0], **kwargs)
if len(this_cut) <= len(min_cut):
min_cut = this_cut
return min_cut
- else: # undirected
+ else: # undirected
# Based on algorithm 6 in [1]
if not nx.is_connected(G):
raise nx.NetworkXError('Input graph is not connected')
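
A minimal usage sketch of the global cut functions defined in this module:

    import networkx as nx

    G = nx.petersen_graph()                             # 3-connected and 3-edge-connected
    node_cut = nx.minimum_node_cut(G)
    edge_cut = nx.minimum_edge_cut(G)
    assert len(node_cut) == len(edge_cut) == 3

    H = G.copy()
    H.remove_nodes_from(node_cut)
    assert not nx.is_connected(H)                       # the cut really disconnects the graph
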
diff --git a/networkx/algorithms/connectivity/disjoint_paths.py b/networkx/algorithms/connectivity/disjoint_paths.py
index 1a223d56..d78316f6 100644
--- a/networkx/algorithms/connectivity/disjoint_paths.py
+++ b/networkx/algorithms/connectivity/disjoint_paths.py
@@ -24,7 +24,7 @@ from .utils import build_auxiliary_edge_connectivity
try:
from itertools import filterfalse as _filterfalse
-except ImportError: # Python 2
+except ImportError: # Python 2
def _filterfalse(predicate, iterable):
# https://docs.python.org/3/library/itertools.html
# filterfalse(lambda x: x%2, range(10)) --> 0 2 4 6 8
@@ -178,7 +178,7 @@ def edge_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None,
if flow_func is None:
flow_func = default_flow_func
-
+
if auxiliary is None:
H = build_auxiliary_edge_connectivity(G)
else:
@@ -249,7 +249,7 @@ def node_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None,
    Node disjoint paths are paths that only share their first and last
nodes. The number of node independent paths between two nodes is
equal to their local node connectivity.
-
+
Parameters
----------
G : NetworkX graph
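
By Menger's theorem the number of disjoint paths equals the corresponding local connectivity; a minimal sketch, importing straight from the connectivity package so no top-level alias is assumed:

    import networkx as nx
    from networkx.algorithms.connectivity import edge_disjoint_paths, node_disjoint_paths

    G = nx.petersen_graph()                             # 3-regular and 3-connected
    node_paths = list(node_disjoint_paths(G, 0, 2))
    edge_paths = list(edge_disjoint_paths(G, 0, 2))
    assert len(node_paths) == len(edge_paths) == 3      # Menger: paths == local connectivity
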
diff --git a/networkx/algorithms/connectivity/kcutsets.py b/networkx/algorithms/connectivity/kcutsets.py
index ac785c56..a647fdd1 100644
--- a/networkx/algorithms/connectivity/kcutsets.py
+++ b/networkx/algorithms/connectivity/kcutsets.py
@@ -19,6 +19,7 @@ __author__ = '\n'.join(['Jordi Torrents <jtorrents@milnou.net>'])
__all__ = ['all_node_cuts']
+
def all_node_cuts(G, k=None, flow_func=None):
r"""Returns all minimum k cutsets of an undirected graph G.
@@ -26,7 +27,7 @@ def all_node_cuts(G, k=None, flow_func=None):
minimum-size node cut-sets of an undirected graph G; ie the set (or sets)
of nodes of cardinality equal to the node connectivity of G. Thus if
removed, would break G into two or more connected components.
-
+
Parameters
----------
G : NetworkX graph
@@ -41,7 +42,7 @@ def all_node_cuts(G, k=None, flow_func=None):
edmonds_karp. This function performs better in sparse graphs with
right tailed degree distributions. shortest_augmenting_path will
perform better in denser graphs.
-
+
Returns
-------
@@ -100,13 +101,13 @@ def all_node_cuts(G, k=None, flow_func=None):
return
# For complete Graphs
if nx.density(G) == 1:
- for cut_set in combinations(G, len(G)-1):
+ for cut_set in combinations(G, len(G) - 1):
yield set(cut_set)
return
# Initialize data structures.
# Keep track of the cuts already computed so we do not repeat them.
seen = []
- # Even-Tarjan reduction is what we call auxiliary digraph
    + # The Even-Tarjan reduction is what we call the auxiliary digraph
# for node connectivity.
H = build_auxiliary_node_connectivity(G)
mapping = H.graph['mapping']
@@ -121,7 +122,7 @@ def all_node_cuts(G, k=None, flow_func=None):
# step 1: Find node connectivity k of G
if k is None:
k = nx.node_connectivity(G, flow_func=flow_func)
- # step 2:
+ # step 2:
# Find k nodes with top degree, call it X:
X = {n for n, d in sorted(G.degree(), key=itemgetter(1), reverse=True)[:k]}
# Check if X is a k-node-cutset
@@ -140,12 +141,12 @@ def all_node_cuts(G, k=None, flow_func=None):
flow_value = R.graph['flow_value']
if flow_value == k:
- ## Remove saturated edges form the residual network
+            # Remove saturated edges from the residual network
saturated_edges = [(u, w, d) for (u, w, d) in
- R.edges(data=True)
- if d['capacity'] == d['flow']]
+ R.edges(data=True)
+ if d['capacity'] == d['flow']]
R.remove_edges_from(saturated_edges)
- # step 6: shrink the strongly connected components of
+ # step 6: shrink the strongly connected components of
# residual flow network R and call it L
L = nx.condensation(R)
cmap = L.graph['mapping']
@@ -170,7 +171,7 @@ def all_node_cuts(G, k=None, flow_func=None):
seen.append(node_cut)
# Add an edge (x, v) to make sure that we do not
# find this cutset again. This is equivalent
- # of adding the edge in the input graph
+ # of adding the edge in the input graph
# G.add_edge(x, v) and then regenerate H and R:
# Add edges to the auxiliary digraph.
H.add_edge('%sB' % mapping[x], '%sA' % mapping[v],
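
The complete-graph branch above simply yields every (n-1)-subset of nodes; a minimal sketch of that case:

    import networkx as nx
    from itertools import combinations

    G = nx.complete_graph(5)                            # density == 1, so the branch above runs
    cuts = list(nx.all_node_cuts(G))
    assert len(cuts) == 5 and all(len(c) == 4 for c in cuts)
    assert {frozenset(c) for c in cuts} == {frozenset(c) for c in combinations(G, 4)}
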
diff --git a/networkx/algorithms/connectivity/tests/test_connectivity.py b/networkx/algorithms/connectivity/tests/test_connectivity.py
index 254d3572..3f3f81f1 100644
--- a/networkx/algorithms/connectivity/tests/test_connectivity.py
+++ b/networkx/algorithms/connectivity/tests/test_connectivity.py
@@ -18,6 +18,8 @@ flow_funcs = [
msg = "Assertion failed in function: {0}"
# helper functions for tests
+
+
def _generate_no_biconnected(max_attempts=50):
attempts = 0
while True:
@@ -35,13 +37,13 @@ def _generate_no_biconnected(max_attempts=50):
def test_average_connectivity():
# figure 1 from:
- # Beineke, L., O. Oellermann, and R. Pippert (2002). The average
+ # Beineke, L., O. Oellermann, and R. Pippert (2002). The average
# connectivity of a graph. Discrete mathematics 252(1-3), 31-45
# http://www.sciencedirect.com/science/article/pii/S0012365X01001807
G1 = nx.path_graph(3)
- G1.add_edges_from([(1, 3),(1, 4)])
+ G1.add_edges_from([(1, 3), (1, 4)])
G2 = nx.path_graph(3)
- G2.add_edges_from([(1, 3),(1, 4),(0, 3),(0, 4),(3, 4)])
+ G2.add_edges_from([(1, 3), (1, 4), (0, 3), (0, 4), (3, 4)])
G3 = nx.Graph()
for flow_func in flow_funcs:
kwargs = dict(flow_func=flow_func)
@@ -52,12 +54,14 @@ def test_average_connectivity():
assert_equal(nx.average_node_connectivity(G3, **kwargs), 0,
msg=msg.format(flow_func.__name__))
+
def test_average_connectivity_directed():
- G = nx.DiGraph([(1,3),(1,4),(1,5)])
+ G = nx.DiGraph([(1, 3), (1, 4), (1, 5)])
for flow_func in flow_funcs:
assert_equal(nx.average_node_connectivity(G), 0.25,
msg=msg.format(flow_func.__name__))
+
def test_articulation_points():
Ggen = _generate_no_biconnected()
for flow_func in flow_funcs:
@@ -66,6 +70,7 @@ def test_articulation_points():
assert_equal(nx.node_connectivity(G, flow_func=flow_func), 1,
msg=msg.format(flow_func.__name__))
+
def test_brandes_erlebach():
# Figure 1 chapter 7: Connectivity
# http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
@@ -83,11 +88,12 @@ def test_brandes_erlebach():
msg=msg.format(flow_func.__name__))
assert_equal(2, nx.node_connectivity(G, 1, 11, **kwargs),
msg=msg.format(flow_func.__name__))
- assert_equal(2, nx.edge_connectivity(G, **kwargs), # node 5 has degree 2
+ assert_equal(2, nx.edge_connectivity(G, **kwargs), # node 5 has degree 2
msg=msg.format(flow_func.__name__))
assert_equal(2, nx.node_connectivity(G, **kwargs),
msg=msg.format(flow_func.__name__))
+
def test_white_harary_1():
# Figure 1b white and harary (2001)
# # http://eclectic.ss.uci.edu/~drwhite/sm-w23.PDF
@@ -107,6 +113,7 @@ def test_white_harary_1():
assert_equal(3, nx.edge_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
+
def test_white_harary_2():
# Figure 8 white and harary (2001)
# # http://eclectic.ss.uci.edu/~drwhite/sm-w23.PDF
@@ -120,21 +127,23 @@ def test_white_harary_2():
assert_equal(1, nx.edge_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
+
def test_complete_graphs():
for n in range(5, 20, 5):
for flow_func in flow_funcs:
G = nx.complete_graph(n)
- assert_equal(n-1, nx.node_connectivity(G, flow_func=flow_func),
+ assert_equal(n - 1, nx.node_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
- assert_equal(n-1, nx.node_connectivity(G.to_directed(),
- flow_func=flow_func),
+ assert_equal(n - 1, nx.node_connectivity(G.to_directed(),
+ flow_func=flow_func),
msg=msg.format(flow_func.__name__))
- assert_equal(n-1, nx.edge_connectivity(G, flow_func=flow_func),
+ assert_equal(n - 1, nx.edge_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
- assert_equal(n-1, nx.edge_connectivity(G.to_directed(),
- flow_func=flow_func),
+ assert_equal(n - 1, nx.edge_connectivity(G.to_directed(),
+ flow_func=flow_func),
msg=msg.format(flow_func.__name__))
+
def test_empty_graphs():
for k in range(5, 25, 5):
G = nx.empty_graph(k)
@@ -144,6 +153,7 @@ def test_empty_graphs():
assert_equal(0, nx.edge_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
+
def test_petersen():
G = nx.petersen_graph()
for flow_func in flow_funcs:
@@ -152,6 +162,7 @@ def test_petersen():
assert_equal(3, nx.edge_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
+
def test_tutte():
G = nx.tutte_graph()
for flow_func in flow_funcs:
@@ -160,6 +171,7 @@ def test_tutte():
assert_equal(3, nx.edge_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
+
def test_dodecahedral():
G = nx.dodecahedral_graph()
for flow_func in flow_funcs:
@@ -168,46 +180,53 @@ def test_dodecahedral():
assert_equal(3, nx.edge_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
+
def test_octahedral():
- G=nx.octahedral_graph()
+ G = nx.octahedral_graph()
for flow_func in flow_funcs:
assert_equal(4, nx.node_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
assert_equal(4, nx.edge_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
+
def test_icosahedral():
- G=nx.icosahedral_graph()
+ G = nx.icosahedral_graph()
for flow_func in flow_funcs:
assert_equal(5, nx.node_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
assert_equal(5, nx.edge_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
+
def test_missing_source():
G = nx.path_graph(4)
for flow_func in flow_funcs:
assert_raises(nx.NetworkXError, nx.node_connectivity, G, 10, 1,
flow_func=flow_func)
+
def test_missing_target():
G = nx.path_graph(4)
for flow_func in flow_funcs:
assert_raises(nx.NetworkXError, nx.node_connectivity, G, 1, 10,
flow_func=flow_func)
+
def test_edge_missing_source():
G = nx.path_graph(4)
for flow_func in flow_funcs:
assert_raises(nx.NetworkXError, nx.edge_connectivity, G, 10, 1,
flow_func=flow_func)
+
def test_edge_missing_target():
G = nx.path_graph(4)
for flow_func in flow_funcs:
assert_raises(nx.NetworkXError, nx.edge_connectivity, G, 1, 10,
flow_func=flow_func)
+
def test_not_weakly_connected():
G = nx.DiGraph()
nx.add_path(G, [1, 2, 3])
@@ -218,6 +237,7 @@ def test_not_weakly_connected():
assert_equal(nx.edge_connectivity(G), 0,
msg=msg.format(flow_func.__name__))
+
def test_not_connected():
G = nx.Graph()
nx.add_path(G, [1, 2, 3])
@@ -228,9 +248,10 @@ def test_not_connected():
assert_equal(nx.edge_connectivity(G), 0,
msg=msg.format(flow_func.__name__))
+
def test_directed_edge_connectivity():
- G = nx.cycle_graph(10, create_using=nx.DiGraph()) # only one direction
- D = nx.cycle_graph(10).to_directed() # 2 reciprocal edges
+ G = nx.cycle_graph(10, create_using=nx.DiGraph()) # only one direction
+ D = nx.cycle_graph(10).to_directed() # 2 reciprocal edges
for flow_func in flow_funcs:
assert_equal(1, nx.edge_connectivity(G, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
@@ -245,6 +266,7 @@ def test_directed_edge_connectivity():
assert_equal(2, nx.edge_connectivity(D, 1, 4, flow_func=flow_func),
msg=msg.format(flow_func.__name__))
+
def test_cutoff():
G = nx.complete_graph(5)
for local_func in [local_edge_connectivity, local_node_connectivity]:
@@ -257,21 +279,25 @@ def test_cutoff():
assert_equal(cutoff, result,
msg="cutoff error in {0}".format(flow_func.__name__))
+
def test_invalid_auxiliary():
G = nx.complete_graph(5)
assert_raises(nx.NetworkXError, local_node_connectivity, G, 0, 3,
auxiliary=G)
+
def test_interface_only_source():
G = nx.complete_graph(5)
for interface_func in [nx.node_connectivity, nx.edge_connectivity]:
assert_raises(nx.NetworkXError, interface_func, G, s=0)
+
def test_interface_only_target():
G = nx.complete_graph(5)
for interface_func in [nx.node_connectivity, nx.edge_connectivity]:
assert_raises(nx.NetworkXError, interface_func, G, t=3)
+
def test_edge_connectivity_flow_vs_stoer_wagner():
graph_funcs = [
nx.icosahedral_graph,
@@ -296,8 +322,8 @@ class TestAllPairsNodeConnectivity:
self.K10 = nx.complete_graph(10)
self.K5 = nx.complete_graph(5)
self.G_list = [self.path, self.directed_path, self.cycle,
- self.directed_cycle, self.gnp, self.directed_gnp, self.K10,
- self.K5, self.K20]
+ self.directed_cycle, self.gnp, self.directed_gnp, self.K10,
+ self.K5, self.K20]
def test_cycles(self):
K_undir = nx.all_pairs_node_connectivity(self.cycle)
@@ -314,7 +340,7 @@ class TestAllPairsNodeConnectivity:
K = nx.all_pairs_node_connectivity(G)
for source in K:
for target, k in K[source].items():
- assert_true(k == len(G)-1)
+ assert_true(k == len(G) - 1)
def test_paths(self):
K_undir = nx.all_pairs_node_connectivity(self.path)
@@ -345,7 +371,7 @@ class TestAllPairsNodeConnectivity:
nodes = [0, 1, 2, 3]
nx.add_path(G, nodes)
A = {n: {} for n in G}
- for u, v in itertools.combinations(nodes,2):
+ for u, v in itertools.combinations(nodes, 2):
A[u][v] = A[v][u] = nx.node_connectivity(G, u, v)
C = nx.all_pairs_node_connectivity(G)
assert_equal(sorted((k, sorted(v)) for k, v in A.items()),
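
The pattern repeated throughout these tests, condensed to a minimal sketch:

    import networkx as nx
    from networkx.algorithms.flow import edmonds_karp, shortest_augmenting_path

    G = nx.petersen_graph()
    for flow_func in (edmonds_karp, shortest_augmenting_path):
        assert nx.node_connectivity(G, flow_func=flow_func) == 3
        assert nx.edge_connectivity(G, flow_func=flow_func) == 3
        assert nx.node_connectivity(G, 0, 2, flow_func=flow_func) == 3   # local variant
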
diff --git a/networkx/algorithms/connectivity/tests/test_cuts.py b/networkx/algorithms/connectivity/tests/test_cuts.py
index 70b9a97e..8eeb2943 100644
--- a/networkx/algorithms/connectivity/tests/test_cuts.py
+++ b/networkx/algorithms/connectivity/tests/test_cuts.py
@@ -17,20 +17,23 @@ flow_funcs = [
msg = "Assertion failed in function: {0}"
# Tests for node and edge cutsets
+
+
def _generate_no_biconnected(max_attempts=50):
attempts = 0
while True:
- G = nx.fast_gnp_random_graph(100,0.0575)
+ G = nx.fast_gnp_random_graph(100, 0.0575)
if nx.is_connected(G) and not nx.is_biconnected(G):
attempts = 0
yield G
else:
if attempts >= max_attempts:
- msg = "Tried %d times: no suitable Graph."%attempts
+                msg = "Tried %d times: no suitable Graph."
raise Exception(msg % max_attempts)
else:
attempts += 1
-
+
+
def test_articulation_points():
Ggen = _generate_no_biconnected()
for flow_func in flow_funcs:
@@ -41,6 +44,7 @@ def test_articulation_points():
assert_true(cut.pop() in set(nx.articulation_points(G)),
msg=msg.format(flow_func.__name__))
+
def test_brandes_erlebach_book():
# Figure 1 chapter 7: Connectivity
# http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
@@ -70,6 +74,7 @@ def test_brandes_erlebach_book():
H.remove_nodes_from(node_cut)
assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
+
def test_white_harary_paper():
# Figure 1b white and harary (2001)
# http://eclectic.ss.uci.edu/~drwhite/sm-w23.PDF
@@ -77,12 +82,12 @@ def test_white_harary_paper():
# (node connectivity)
G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4))
G.remove_node(7)
- for i in range(4,7):
- G.add_edge(0,i)
+ for i in range(4, 7):
+ G.add_edge(0, i)
G = nx.disjoint_union(G, nx.complete_graph(4))
- G.remove_node(G.order()-1)
- for i in range(7,10):
- G.add_edge(0,i)
+ G.remove_node(G.order() - 1)
+ for i in range(7, 10):
+ G.add_edge(0, i)
for flow_func in flow_funcs:
kwargs = dict(flow_func=flow_func)
# edge cuts
@@ -98,6 +103,7 @@ def test_white_harary_paper():
H.remove_nodes_from(node_cut)
assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
+
def test_petersen_cutset():
G = nx.petersen_graph()
for flow_func in flow_funcs:
@@ -115,8 +121,9 @@ def test_petersen_cutset():
H.remove_nodes_from(node_cut)
assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
+
def test_octahedral_cutset():
- G=nx.octahedral_graph()
+ G = nx.octahedral_graph()
for flow_func in flow_funcs:
kwargs = dict(flow_func=flow_func)
# edge cuts
@@ -132,8 +139,9 @@ def test_octahedral_cutset():
H.remove_nodes_from(node_cut)
assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
+
def test_icosahedral_cutset():
- G=nx.icosahedral_graph()
+ G = nx.icosahedral_graph()
for flow_func in flow_funcs:
kwargs = dict(flow_func=flow_func)
# edge cuts
@@ -149,12 +157,14 @@ def test_icosahedral_cutset():
H.remove_nodes_from(node_cut)
assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
+
def test_node_cutset_exception():
- G=nx.Graph()
+ G = nx.Graph()
G.add_edges_from([(1, 2), (3, 4)])
for flow_func in flow_funcs:
assert_raises(nx.NetworkXError, nx.minimum_node_cut, G, flow_func=flow_func)
+
def test_node_cutset_random_graphs():
for flow_func in flow_funcs:
for i in range(3):
@@ -169,6 +179,7 @@ def test_node_cutset_random_graphs():
G.remove_nodes_from(cutset)
assert_false(nx.is_connected(G), msg=msg.format(flow_func.__name__))
+
def test_edge_cutset_random_graphs():
for flow_func in flow_funcs:
for i in range(3):
@@ -183,6 +194,7 @@ def test_edge_cutset_random_graphs():
G.remove_edges_from(cutset)
assert_false(nx.is_connected(G), msg=msg.format(flow_func.__name__))
+
def test_empty_graphs():
G = nx.Graph()
D = nx.DiGraph()
@@ -193,11 +205,13 @@ def test_empty_graphs():
assert_raises(nx.NetworkXPointlessConcept, interface_func, D,
flow_func=flow_func)
+
def test_unbounded():
G = nx.complete_graph(5)
for flow_func in flow_funcs:
assert_equal(4, len(minimum_st_edge_cut(G, 1, 4, flow_func=flow_func)))
+
def test_missing_source():
G = nx.path_graph(4)
for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]:
@@ -205,6 +219,7 @@ def test_missing_source():
assert_raises(nx.NetworkXError, interface_func, G, 10, 1,
flow_func=flow_func)
+
def test_missing_target():
G = nx.path_graph(4)
for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]:
@@ -212,6 +227,7 @@ def test_missing_target():
assert_raises(nx.NetworkXError, interface_func, G, 1, 10,
flow_func=flow_func)
+
def test_not_weakly_connected():
G = nx.DiGraph()
nx.add_path(G, [1, 2, 3])
@@ -221,6 +237,7 @@ def test_not_weakly_connected():
assert_raises(nx.NetworkXError, interface_func, G,
flow_func=flow_func)
+
def test_not_connected():
G = nx.Graph()
nx.add_path(G, [1, 2, 3])
@@ -230,12 +247,14 @@ def test_not_connected():
assert_raises(nx.NetworkXError, interface_func, G,
flow_func=flow_func)
+
def tests_min_cut_complete():
G = nx.complete_graph(5)
for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]:
for flow_func in flow_funcs:
assert_equal(4, len(interface_func(G, flow_func=flow_func)))
+
def tests_min_cut_complete_directed():
G = nx.complete_graph(5)
G = G.to_directed()
@@ -243,6 +262,7 @@ def tests_min_cut_complete_directed():
for flow_func in flow_funcs:
assert_equal(4, len(interface_func(G, flow_func=flow_func)))
+
def tests_minimum_st_node_cut():
G = nx.Graph()
G.add_nodes_from([0, 1, 2, 3, 7, 8, 11, 12])
@@ -250,16 +270,19 @@ def tests_minimum_st_node_cut():
nodelist = minimum_st_node_cut(G, 7, 11)
assert(nodelist == [])
+
def test_invalid_auxiliary():
G = nx.complete_graph(5)
assert_raises(nx.NetworkXError, minimum_st_node_cut, G, 0, 3,
- auxiliary=G)
+ auxiliary=G)
+
def test_interface_only_source():
G = nx.complete_graph(5)
for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]:
assert_raises(nx.NetworkXError, interface_func, G, s=0)
+
def test_interface_only_target():
G = nx.complete_graph(5)
for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]:
diff --git a/networkx/algorithms/connectivity/tests/test_disjoint_paths.py b/networkx/algorithms/connectivity/tests/test_disjoint_paths.py
index 5160b900..fd46ed00 100644
--- a/networkx/algorithms/connectivity/tests/test_disjoint_paths.py
+++ b/networkx/algorithms/connectivity/tests/test_disjoint_paths.py
@@ -140,7 +140,7 @@ def test_petersen_disjoint_paths():
def test_octahedral_disjoint_paths():
- G=nx.octahedral_graph()
+ G = nx.octahedral_graph()
for flow_func in flow_funcs:
kwargs = dict(flow_func=flow_func)
# edge disjoint paths
@@ -154,7 +154,7 @@ def test_octahedral_disjoint_paths():
def test_icosahedral_disjoint_paths():
- G=nx.icosahedral_graph()
+ G = nx.icosahedral_graph()
for flow_func in flow_funcs:
kwargs = dict(flow_func=flow_func)
# edge disjoint paths
@@ -168,7 +168,7 @@ def test_icosahedral_disjoint_paths():
def test_cutoff_disjoint_paths():
- G=nx.icosahedral_graph()
+ G = nx.icosahedral_graph()
for flow_func in flow_funcs:
kwargs = dict(flow_func=flow_func)
for cutoff in [2, 4]:
diff --git a/networkx/algorithms/connectivity/tests/test_edge_augmentation.py b/networkx/algorithms/connectivity/tests/test_edge_augmentation.py
index 4cf1c5eb..356ed5dc 100644
--- a/networkx/algorithms/connectivity/tests/test_edge_augmentation.py
+++ b/networkx/algorithms/connectivity/tests/test_edge_augmentation.py
@@ -29,7 +29,7 @@ def tarjan_bridge_graph():
# doi:10.1016/0020-0190(74)90003-9.
# define 2-connected components and bridges
ccs = [(1, 2, 4, 3, 1, 4), (5, 6, 7, 5), (8, 9, 10, 8),
- (17, 18, 16, 15, 17), (11, 12, 14, 13, 11, 14)]
+ (17, 18, 16, 15, 17), (11, 12, 14, 13, 11, 14)]
bridges = [(4, 8), (3, 5), (3, 17)]
G = nx.Graph(it.chain(*(pairwise(path) for path in ccs + bridges)))
return G
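
The `bridges` list in the helper above names the only edges whose removal disconnects their component; a quick sanity sketch of the same notion on a simpler graph, assuming `nx.bridges` is available in this NetworkX generation:

    import networkx as nx

    G = nx.barbell_graph(5, 0)                          # two K5s joined by the edge (4, 5)
    assert nx.has_bridges(G)
    assert {frozenset(e) for e in nx.bridges(G)} == {frozenset((4, 5))}
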
diff --git a/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py b/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py
index 05d4866e..f1ddaef9 100644
--- a/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py
+++ b/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py
@@ -172,7 +172,7 @@ def test_not_implemented():
def test_general_k_edge_subgraph_quick_return():
- #tests quick return optimization
+ # tests quick return optimization
G = nx.Graph()
G.add_node(0)
subgraphs = list(general_k_edge_subgraphs(G, k=1))
@@ -230,7 +230,7 @@ def test_tarjan_bridge():
# doi:10.1016/0020-0190(74)90003-9.
# define 2-connected components and bridges
ccs = [(1, 2, 4, 3, 1, 4), (5, 6, 7, 5), (8, 9, 10, 8),
- (17, 18, 16, 15, 17), (11, 12, 14, 13, 11, 14)]
+ (17, 18, 16, 15, 17), (11, 12, 14, 13, 11, 14)]
bridges = [(4, 8), (3, 5), (3, 17)]
G = nx.Graph(it.chain(*(pairwise(path) for path in ccs + bridges)))
_check_edge_connectivity(G)
diff --git a/networkx/algorithms/connectivity/tests/test_stoer_wagner.py b/networkx/algorithms/connectivity/tests/test_stoer_wagner.py
index 8e9299ff..ee5d3ac2 100644
--- a/networkx/algorithms/connectivity/tests/test_stoer_wagner.py
+++ b/networkx/algorithms/connectivity/tests/test_stoer_wagner.py
@@ -33,27 +33,27 @@ def _test_stoer_wagner(G, answer, weight='weight'):
def test_graph1():
G = nx.Graph()
- G.add_edge('x','a', weight=3)
- G.add_edge('x','b', weight=1)
- G.add_edge('a','c', weight=3)
- G.add_edge('b','c', weight=5)
- G.add_edge('b','d', weight=4)
- G.add_edge('d','e', weight=2)
- G.add_edge('c','y', weight=2)
- G.add_edge('e','y', weight=3)
+ G.add_edge('x', 'a', weight=3)
+ G.add_edge('x', 'b', weight=1)
+ G.add_edge('a', 'c', weight=3)
+ G.add_edge('b', 'c', weight=5)
+ G.add_edge('b', 'd', weight=4)
+ G.add_edge('d', 'e', weight=2)
+ G.add_edge('c', 'y', weight=2)
+ G.add_edge('e', 'y', weight=3)
_test_stoer_wagner(G, 4)
def test_graph2():
G = nx.Graph()
- G.add_edge('x','a')
- G.add_edge('x','b')
- G.add_edge('a','c')
- G.add_edge('b','c')
- G.add_edge('b','d')
- G.add_edge('d','e')
- G.add_edge('c','y')
- G.add_edge('e','y')
+ G.add_edge('x', 'a')
+ G.add_edge('x', 'b')
+ G.add_edge('a', 'c')
+ G.add_edge('b', 'c')
+ G.add_edge('b', 'd')
+ G.add_edge('d', 'e')
+ G.add_edge('c', 'y')
+ G.add_edge('e', 'y')
_test_stoer_wagner(G, 2)
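
test_graph1 expects a global minimum cut of weight 4; the call it wraps looks like this:

    import networkx as nx

    G = nx.Graph()
    G.add_edge('x', 'a', weight=3)
    G.add_edge('x', 'b', weight=1)
    G.add_edge('a', 'c', weight=3)
    G.add_edge('b', 'c', weight=5)
    G.add_edge('b', 'd', weight=4)
    G.add_edge('d', 'e', weight=2)
    G.add_edge('c', 'y', weight=2)
    G.add_edge('e', 'y', weight=3)

    cut_value, (side_a, side_b) = nx.stoer_wagner(G)
    assert cut_value == 4
    assert set(side_a) | set(side_b) == set(G) and not set(side_a) & set(side_b)
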
diff --git a/networkx/algorithms/connectivity/utils.py b/networkx/algorithms/connectivity/utils.py
index 31714e85..d6582ed8 100644
--- a/networkx/algorithms/connectivity/utils.py
+++ b/networkx/algorithms/connectivity/utils.py
@@ -43,7 +43,7 @@ def build_auxiliary_node_connectivity(G):
mapping = {}
H = nx.DiGraph()
-
+
for i, node in enumerate(G):
mapping[node] = i
H.add_node('%dA' % i, id=node)
diff --git a/networkx/algorithms/covering.py b/networkx/algorithms/covering.py
index f816c3cc..2bfce3e4 100644
--- a/networkx/algorithms/covering.py
+++ b/networkx/algorithms/covering.py
@@ -73,7 +73,7 @@ def min_edge_cover(G, matching_algorithm=None):
maximum_matching = matching_algorithm(G)
# ``min_cover`` is superset of ``maximum_matching``
try:
- min_cover = set(maximum_matching.items()) # bipartite matching case returns dict
+ min_cover = set(maximum_matching.items()) # bipartite matching case returns dict
except AttributeError:
min_cover = maximum_matching
# iterate for uncovered nodes
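
A minimal usage sketch of min_edge_cover; only node coverage is asserted, since the exact edge orientations in the returned set depend on the matching used:

    import networkx as nx

    G = nx.Graph([(0, 1), (1, 2), (2, 3), (3, 4)])      # a path, no isolated nodes
    cover = nx.min_edge_cover(G)
    assert {n for e in cover for n in e} == set(G)      # every node touches a cover edge
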
diff --git a/networkx/algorithms/cycles.py b/networkx/algorithms/cycles.py
index 4929f93e..36662c61 100644
--- a/networkx/algorithms/cycles.py
+++ b/networkx/algorithms/cycles.py
@@ -26,7 +26,7 @@ __all__ = [
'cycle_basis', 'simple_cycles',
'recursive_simple_cycles', 'find_cycle',
'minimum_cycle_basis',
- ]
+]
@not_implemented_for('directed')
@@ -521,7 +521,7 @@ def _min_cycle_basis(comp, weight):
# *minimum* spanning tree. That is why we call the next function with
# weight=None. Depending on implementation, it may be faster as well
spanning_tree_edges = list(nx.minimum_spanning_edges(comp, weight=None,
- data=False))
+ data=False))
edges_excl = [frozenset(e) for e in comp.edges()
if e not in spanning_tree_edges]
N = len(edges_excl)
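
minimum_cycle_basis, whose helper is reformatted above, returns one cycle per independent cycle of the graph; a small sketch:

    import networkx as nx

    G = nx.Graph([(0, 1), (1, 2), (2, 3), (3, 0), (0, 2)])      # a 4-cycle with a chord
    basis = nx.minimum_cycle_basis(G)
    assert len(basis) == G.number_of_edges() - G.number_of_nodes() + 1   # dimension 2
    assert sorted(len(cycle) for cycle in basis) == [3, 3]               # the two triangles
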
diff --git a/networkx/algorithms/dag.py b/networkx/algorithms/dag.py
index 7264a05f..b4c66ca5 100644
--- a/networkx/algorithms/dag.py
+++ b/networkx/algorithms/dag.py
@@ -253,7 +253,7 @@ def lexicographical_topological_sort(G, key=None):
"Topological sort not defined on undirected graphs.")
if key is None:
- key = lambda x: x
+ def key(x): return x
def create_tuple(node):
return key(node), node
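
The key rewritten above only breaks ties among nodes that are simultaneously ready; a minimal sketch:

    import networkx as nx

    G = nx.DiGraph([(1, 3), (2, 3), (3, 4)])
    assert list(nx.lexicographical_topological_sort(G)) == [1, 2, 3, 4]
    # A custom key reverses the tie-break between the two sources 1 and 2.
    assert list(nx.lexicographical_topological_sort(G, key=lambda n: -n)) == [2, 1, 3, 4]
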
diff --git a/networkx/algorithms/distance_measures.py b/networkx/algorithms/distance_measures.py
index 7c2d1688..9b645523 100644
--- a/networkx/algorithms/distance_measures.py
+++ b/networkx/algorithms/distance_measures.py
@@ -154,13 +154,13 @@ def extrema_bounding(G, compute="diameter"):
# updating maxuppernode and minlowernode for selection in next round
for i in candidates:
if minlowernode is None \
- or (ecc_lower[i] == ecc_lower[minlowernode] \
+ or (ecc_lower[i] == ecc_lower[minlowernode]
and degrees[i] > degrees[minlowernode]) \
or (ecc_lower[i] < ecc_lower[minlowernode]):
minlowernode = i
if maxuppernode is None \
- or (ecc_upper[i] == ecc_upper[maxuppernode] \
+ or (ecc_upper[i] == ecc_upper[maxuppernode]
and degrees[i] > degrees[maxuppernode]) \
or (ecc_upper[i] > ecc_upper[maxuppernode]):
maxuppernode = i
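
These bounds let extrema_bounding report exact extrema without a full all-pairs sweep; a one-line check, assuming the function is exported at the top level in this release:

    import networkx as nx

    G = nx.petersen_graph()
    assert nx.extrema_bounding(G, compute="diameter") == nx.diameter(G) == 2
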
diff --git a/networkx/algorithms/flow/capacityscaling.py b/networkx/algorithms/flow/capacityscaling.py
index b184721b..3f5be9f7 100644
--- a/networkx/algorithms/flow/capacityscaling.py
+++ b/networkx/algorithms/flow/capacityscaling.py
@@ -18,6 +18,7 @@ from ...utils import generate_unique_node
from ...utils import not_implemented_for
from ...utils import arbitrary_element
+
def _detect_unboundedness(R):
"""Detect infinite-capacity negative cycles.
"""
@@ -265,7 +266,7 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight',
flow_cost = sum(
0 if e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0
else e[capacity] * e[weight]
- for u, v, e in nx.selfloop_edges(G,data=True))
+ for u, v, e in nx.selfloop_edges(G, data=True))
    # Determine the maximum edge capacity.
wmax = max(chain([-inf],
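
capacity_scaling solves the same min-cost flow instances as network_simplex; a small sketch on the textbook example used in the mincost docstrings below:

    import networkx as nx

    G = nx.DiGraph()
    G.add_node('a', demand=-5)
    G.add_node('d', demand=5)
    G.add_edge('a', 'b', weight=3, capacity=4)
    G.add_edge('a', 'c', weight=6, capacity=10)
    G.add_edge('b', 'd', weight=1, capacity=9)
    G.add_edge('c', 'd', weight=2, capacity=5)

    flow_cost, flow_dict = nx.capacity_scaling(G)
    assert flow_cost == 24                              # 4 units via a-b-d, 1 unit via a-c-d
    assert sum(flow_dict['a'].values()) == 5            # the whole supply leaves 'a'
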
diff --git a/networkx/algorithms/flow/gomory_hu.py b/networkx/algorithms/flow/gomory_hu.py
index fec99a4b..7ef3d206 100644
--- a/networkx/algorithms/flow/gomory_hu.py
+++ b/networkx/algorithms/flow/gomory_hu.py
@@ -30,7 +30,7 @@ def gomory_hu_tree(G, capacity='capacity', flow_func=None):
A Gomory-Hu tree of an undirected graph with capacities is a
weighted tree that represents the minimum s-t cuts for all s-t
pairs in the graph.
-
+
It only requires `n-1` minimum cut computations instead of the
obvious `n(n-1)/2`. The tree represents all s-t cuts as the
minimum cut value among any pair of nodes is the minimum edge
@@ -44,7 +44,7 @@ def gomory_hu_tree(G, capacity='capacity', flow_func=None):
cut.
See Examples section below for details.
-
+
Parameters
----------
G : NetworkX graph
@@ -143,7 +143,7 @@ def gomory_hu_tree(G, capacity='capacity', flow_func=None):
if flow_func is None:
flow_func = default_flow_func
- if len(G) == 0: # empty graph
+ if len(G) == 0: # empty graph
msg = 'Empty Graph does not have a Gomory-Hu tree representation'
raise nx.NetworkXError(msg)
@@ -164,8 +164,8 @@ def gomory_hu_tree(G, capacity='capacity', flow_func=None):
target = tree[source]
# compute minimum cut
cut_value, partition = nx.minimum_cut(G, source, target,
- capacity=capacity, flow_func=flow_func,
- residual=R)
+ capacity=capacity, flow_func=flow_func,
+ residual=R)
labels[(source, target)] = cut_value
# Update the tree
# Source will always be in partition[0] and target in partition[1]
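
The docstring's tree property (the s-t minimum cut equals the smallest edge weight on the tree path between s and t) can be checked directly; a minimal sketch:

    import networkx as nx

    G = nx.Graph()
    G.add_edge('x', 'a', capacity=3.0)
    G.add_edge('x', 'b', capacity=1.0)
    G.add_edge('a', 'c', capacity=3.0)
    G.add_edge('b', 'c', capacity=5.0)
    G.add_edge('b', 'd', capacity=4.0)
    G.add_edge('d', 'e', capacity=2.0)
    G.add_edge('c', 'y', capacity=2.0)
    G.add_edge('e', 'y', capacity=3.0)

    T = nx.gomory_hu_tree(G)
    path = nx.shortest_path(T, 'x', 'y')                # unique, since T is a tree
    tree_cut = min(T[u][v]['weight'] for u, v in zip(path, path[1:]))
    assert tree_cut == nx.minimum_cut_value(G, 'x', 'y')
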
diff --git a/networkx/algorithms/flow/mincost.py b/networkx/algorithms/flow/mincost.py
index ad461491..44cdc455 100644
--- a/networkx/algorithms/flow/mincost.py
+++ b/networkx/algorithms/flow/mincost.py
@@ -17,8 +17,8 @@ __all__ = ['min_cost_flow_cost',
import networkx as nx
-def min_cost_flow_cost(G, demand = 'demand', capacity = 'capacity',
- weight = 'weight'):
+def min_cost_flow_cost(G, demand='demand', capacity='capacity',
+ weight='weight'):
r"""Find the cost of a minimum cost flow satisfying all demands in digraph G.
G is a digraph with edge costs and capacities and in which nodes
@@ -105,12 +105,12 @@ def min_cost_flow_cost(G, demand = 'demand', capacity = 'capacity',
>>> flowCost
24
"""
- return nx.network_simplex(G, demand = demand, capacity = capacity,
- weight = weight)[0]
+ return nx.network_simplex(G, demand=demand, capacity=capacity,
+ weight=weight)[0]
-def min_cost_flow(G, demand = 'demand', capacity = 'capacity',
- weight = 'weight'):
+def min_cost_flow(G, demand='demand', capacity='capacity',
+ weight='weight'):
r"""Return a minimum cost flow satisfying all demands in digraph G.
G is a digraph with edge costs and capacities and in which nodes
@@ -196,11 +196,11 @@ def min_cost_flow(G, demand = 'demand', capacity = 'capacity',
>>> G.add_edge('c', 'd', weight = 2, capacity = 5)
>>> flowDict = nx.min_cost_flow(G)
"""
- return nx.network_simplex(G, demand = demand, capacity = capacity,
- weight = weight)[1]
+ return nx.network_simplex(G, demand=demand, capacity=capacity,
+ weight=weight)[1]
-def cost_of_flow(G, flowDict, weight = 'weight'):
+def cost_of_flow(G, flowDict, weight='weight'):
"""Compute the cost of the flow given by flowDict on graph G.
Note that this function does not check for the validity of the
@@ -242,10 +242,10 @@ def cost_of_flow(G, flowDict, weight = 'weight'):
constant factor (eg 100).
"""
return sum((flowDict[u][v] * d.get(weight, 0)
- for u, v, d in G.edges(data = True)))
+ for u, v, d in G.edges(data=True)))
-def max_flow_min_cost(G, s, t, capacity = 'capacity', weight = 'weight'):
+def max_flow_min_cost(G, s, t, capacity='capacity', weight='weight'):
"""Return a maximum (s, t)-flow of minimum cost.
G is a digraph with edge costs and capacities. There is a source
@@ -336,8 +336,8 @@ def max_flow_min_cost(G, s, t, capacity = 'capacity', weight = 'weight'):
True
"""
- maxFlow = nx.maximum_flow_value(G, s, t, capacity = capacity)
+ maxFlow = nx.maximum_flow_value(G, s, t, capacity=capacity)
H = nx.DiGraph(G)
- H.add_node(s, demand = -maxFlow)
- H.add_node(t, demand = maxFlow)
- return min_cost_flow(H, capacity = capacity, weight = weight)
+ H.add_node(s, demand=-maxFlow)
+ H.add_node(t, demand=maxFlow)
+ return min_cost_flow(H, capacity=capacity, weight=weight)
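
A short sketch of max_flow_min_cost on a four-edge network where both the cheap and the expensive s-t route must be saturated:

    import networkx as nx

    G = nx.DiGraph()
    G.add_edge('s', 'a', capacity=2, weight=1)
    G.add_edge('s', 'b', capacity=2, weight=3)
    G.add_edge('a', 't', capacity=2, weight=1)
    G.add_edge('b', 't', capacity=2, weight=1)

    flow = nx.max_flow_min_cost(G, 's', 't')
    assert sum(flow['s'].values()) == nx.maximum_flow_value(G, 's', 't') == 4
    assert nx.cost_of_flow(G, flow) == 12               # 2 * (1 + 1) + 2 * (3 + 1)
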
diff --git a/networkx/algorithms/flow/networksimplex.py b/networkx/algorithms/flow/networksimplex.py
index 9bfb06b0..8ac5bb7a 100644
--- a/networkx/algorithms/flow/networksimplex.py
+++ b/networkx/algorithms/flow/networksimplex.py
@@ -311,7 +311,7 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'):
B = int(ceil(sqrt(e))) # pivot block size
M = (e + B - 1) // B # number of blocks needed to cover all edges
m = 0 # number of consecutive blocks without eligible
- # entering edges
+ # entering edges
f = 0 # first edge in block
while m < M:
# Determine the next block of edges.
@@ -392,7 +392,6 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'):
We += WeR
return Wn, We
-
def residual_capacity(i, p):
"""Return the residual capacity of an edge i in the direction away
from its endpoint p.
diff --git a/networkx/algorithms/flow/tests/test_gomory_hu.py b/networkx/algorithms/flow/tests/test_gomory_hu.py
index 3b66553a..4ce2c750 100644
--- a/networkx/algorithms/flow/tests/test_gomory_hu.py
+++ b/networkx/algorithms/flow/tests/test_gomory_hu.py
@@ -16,6 +16,7 @@ flow_funcs = [
shortest_augmenting_path,
]
+
class TestGomoryHuTree:
def minimum_edge_weight(self, T, u, v):
diff --git a/networkx/algorithms/flow/tests/test_maxflow.py b/networkx/algorithms/flow/tests/test_maxflow.py
index aa32c3bf..6cc63a6d 100644
--- a/networkx/algorithms/flow/tests/test_maxflow.py
+++ b/networkx/algorithms/flow/tests/test_maxflow.py
@@ -60,7 +60,7 @@ def validate_cuts(G, s, t, solnValue, partition, capacity, flow_func):
assert_true(all(G.has_edge(u, v) for (u, v) in cutset),
msg=msg.format(flow_func.__name__))
assert_equal(solnValue, sum(G[u][v][capacity] for (u, v) in cutset),
- msg=msg.format(flow_func.__name__))
+ msg=msg.format(flow_func.__name__))
H = G.copy()
H.remove_edges_from(cutset)
if not G.is_directed():
@@ -89,7 +89,7 @@ class TestMaxflowMinCutCommon:
def test_graph1(self):
# Trivial undirected graph
G = nx.Graph()
- G.add_edge(1,2, capacity = 1.0)
+ G.add_edge(1, 2, capacity=1.0)
solnFlows = {1: {2: 1.0},
2: {1: 1.0}}
@@ -100,14 +100,14 @@ class TestMaxflowMinCutCommon:
# A more complex undirected graph
# adapted from www.topcoder.com/tc?module=Statc&d1=tutorials&d2=maxFlow
G = nx.Graph()
- G.add_edge('x','a', capacity = 3.0)
- G.add_edge('x','b', capacity = 1.0)
- G.add_edge('a','c', capacity = 3.0)
- G.add_edge('b','c', capacity = 5.0)
- G.add_edge('b','d', capacity = 4.0)
- G.add_edge('d','e', capacity = 2.0)
- G.add_edge('c','y', capacity = 2.0)
- G.add_edge('e','y', capacity = 3.0)
+ G.add_edge('x', 'a', capacity=3.0)
+ G.add_edge('x', 'b', capacity=1.0)
+ G.add_edge('a', 'c', capacity=3.0)
+ G.add_edge('b', 'c', capacity=5.0)
+ G.add_edge('b', 'd', capacity=4.0)
+ G.add_edge('d', 'e', capacity=2.0)
+ G.add_edge('c', 'y', capacity=2.0)
+ G.add_edge('e', 'y', capacity=3.0)
H = {'x': {'a': 3, 'b': 1},
'a': {'c': 3, 'x': 3},
@@ -122,11 +122,11 @@ class TestMaxflowMinCutCommon:
def test_digraph1(self):
# The classic directed graph example
G = nx.DiGraph()
- G.add_edge('a','b', capacity = 1000.0)
- G.add_edge('a','c', capacity = 1000.0)
- G.add_edge('b','c', capacity = 1.0)
- G.add_edge('b','d', capacity = 1000.0)
- G.add_edge('c','d', capacity = 1000.0)
+ G.add_edge('a', 'b', capacity=1000.0)
+ G.add_edge('a', 'c', capacity=1000.0)
+ G.add_edge('b', 'c', capacity=1.0)
+ G.add_edge('b', 'd', capacity=1000.0)
+ G.add_edge('c', 'd', capacity=1000.0)
H = {'a': {'b': 1000.0, 'c': 1000.0},
'b': {'c': 0, 'd': 1000.0},
@@ -138,12 +138,12 @@ class TestMaxflowMinCutCommon:
def test_digraph2(self):
# An example in which some edges end up with zero flow.
G = nx.DiGraph()
- G.add_edge('s', 'b', capacity = 2)
- G.add_edge('s', 'c', capacity = 1)
- G.add_edge('c', 'd', capacity = 1)
- G.add_edge('d', 'a', capacity = 1)
- G.add_edge('b', 'a', capacity = 2)
- G.add_edge('a', 't', capacity = 2)
+ G.add_edge('s', 'b', capacity=2)
+ G.add_edge('s', 'c', capacity=1)
+ G.add_edge('c', 'd', capacity=1)
+ G.add_edge('d', 'a', capacity=1)
+ G.add_edge('b', 'a', capacity=2)
+ G.add_edge('a', 't', capacity=2)
H = {'s': {'b': 2, 'c': 0},
'c': {'d': 0},
@@ -157,16 +157,16 @@ class TestMaxflowMinCutCommon:
def test_digraph3(self):
# A directed graph example from Cormen et al.
G = nx.DiGraph()
- G.add_edge('s','v1', capacity = 16.0)
- G.add_edge('s','v2', capacity = 13.0)
- G.add_edge('v1','v2', capacity = 10.0)
- G.add_edge('v2','v1', capacity = 4.0)
- G.add_edge('v1','v3', capacity = 12.0)
- G.add_edge('v3','v2', capacity = 9.0)
- G.add_edge('v2','v4', capacity = 14.0)
- G.add_edge('v4','v3', capacity = 7.0)
- G.add_edge('v3','t', capacity = 20.0)
- G.add_edge('v4','t', capacity = 4.0)
+ G.add_edge('s', 'v1', capacity=16.0)
+ G.add_edge('s', 'v2', capacity=13.0)
+ G.add_edge('v1', 'v2', capacity=10.0)
+ G.add_edge('v2', 'v1', capacity=4.0)
+ G.add_edge('v1', 'v3', capacity=12.0)
+ G.add_edge('v3', 'v2', capacity=9.0)
+ G.add_edge('v2', 'v4', capacity=14.0)
+ G.add_edge('v4', 'v3', capacity=7.0)
+ G.add_edge('v3', 't', capacity=20.0)
+ G.add_edge('v4', 't', capacity=4.0)
H = {'s': {'v1': 12.0, 'v2': 11.0},
'v2': {'v1': 0, 'v4': 11.0},
@@ -181,14 +181,14 @@ class TestMaxflowMinCutCommon:
# A more complex directed graph
# from www.topcoder.com/tc?module=Statc&d1=tutorials&d2=maxFlow
G = nx.DiGraph()
- G.add_edge('x','a', capacity = 3.0)
- G.add_edge('x','b', capacity = 1.0)
- G.add_edge('a','c', capacity = 3.0)
- G.add_edge('b','c', capacity = 5.0)
- G.add_edge('b','d', capacity = 4.0)
- G.add_edge('d','e', capacity = 2.0)
- G.add_edge('c','y', capacity = 2.0)
- G.add_edge('e','y', capacity = 3.0)
+ G.add_edge('x', 'a', capacity=3.0)
+ G.add_edge('x', 'b', capacity=1.0)
+ G.add_edge('a', 'c', capacity=3.0)
+ G.add_edge('b', 'c', capacity=5.0)
+ G.add_edge('b', 'd', capacity=4.0)
+ G.add_edge('d', 'e', capacity=2.0)
+ G.add_edge('c', 'y', capacity=2.0)
+ G.add_edge('e', 'y', capacity=3.0)
H = {'x': {'a': 2.0, 'b': 1.0},
'a': {'c': 2.0},
@@ -225,14 +225,14 @@ class TestMaxflowMinCutCommon:
def test_optional_capacity(self):
# Test optional capacity parameter.
G = nx.DiGraph()
- G.add_edge('x','a', spam = 3.0)
- G.add_edge('x','b', spam = 1.0)
- G.add_edge('a','c', spam = 3.0)
- G.add_edge('b','c', spam = 5.0)
- G.add_edge('b','d', spam = 4.0)
- G.add_edge('d','e', spam = 2.0)
- G.add_edge('c','y', spam = 2.0)
- G.add_edge('e','y', spam = 3.0)
+ G.add_edge('x', 'a', spam=3.0)
+ G.add_edge('x', 'b', spam=1.0)
+ G.add_edge('a', 'c', spam=3.0)
+ G.add_edge('b', 'c', spam=5.0)
+ G.add_edge('b', 'd', spam=4.0)
+ G.add_edge('d', 'e', spam=2.0)
+ G.add_edge('c', 'y', spam=2.0)
+ G.add_edge('e', 'y', spam=3.0)
solnFlows = {'x': {'a': 2.0, 'b': 1.0},
'a': {'c': 2.0},
@@ -245,16 +245,16 @@ class TestMaxflowMinCutCommon:
s = 'x'
t = 'y'
- compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity = 'spam')
+ compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity='spam')
def test_digraph_infcap_edges(self):
# DiGraph with infinite capacity edges
G = nx.DiGraph()
G.add_edge('s', 'a')
- G.add_edge('s', 'b', capacity = 30)
- G.add_edge('a', 'c', capacity = 25)
- G.add_edge('b', 'c', capacity = 12)
- G.add_edge('a', 't', capacity = 60)
+ G.add_edge('s', 'b', capacity=30)
+ G.add_edge('a', 'c', capacity=25)
+ G.add_edge('b', 'c', capacity=12)
+ G.add_edge('a', 't', capacity=60)
G.add_edge('c', 't')
H = {'s': {'a': 85, 'b': 12},
@@ -267,13 +267,13 @@ class TestMaxflowMinCutCommon:
# DiGraph with infinite capacity digon
G = nx.DiGraph()
- G.add_edge('s', 'a', capacity = 85)
- G.add_edge('s', 'b', capacity = 30)
+ G.add_edge('s', 'a', capacity=85)
+ G.add_edge('s', 'b', capacity=30)
G.add_edge('a', 'c')
G.add_edge('c', 'a')
- G.add_edge('b', 'c', capacity = 12)
- G.add_edge('a', 't', capacity = 60)
- G.add_edge('c', 't', capacity = 37)
+ G.add_edge('b', 'c', capacity=12)
+ G.add_edge('a', 't', capacity=60)
+ G.add_edge('c', 't', capacity=37)
H = {'s': {'a': 85, 'b': 12},
'a': {'c': 25, 't': 60},
@@ -283,15 +283,14 @@ class TestMaxflowMinCutCommon:
compare_flows_and_cuts(G, 's', 't', H, 97)
-
def test_digraph_infcap_path(self):
# Graph with infinite capacity (s, t)-path
G = nx.DiGraph()
G.add_edge('s', 'a')
- G.add_edge('s', 'b', capacity = 30)
+ G.add_edge('s', 'b', capacity=30)
G.add_edge('a', 'c')
- G.add_edge('b', 'c', capacity = 12)
- G.add_edge('a', 't', capacity = 60)
+ G.add_edge('b', 'c', capacity=12)
+ G.add_edge('a', 't', capacity=60)
G.add_edge('c', 't')
for flow_func in all_funcs:
@@ -302,10 +301,10 @@ class TestMaxflowMinCutCommon:
# Undirected graph with infinite capacity edges
G = nx.Graph()
G.add_edge('s', 'a')
- G.add_edge('s', 'b', capacity = 30)
- G.add_edge('a', 'c', capacity = 25)
- G.add_edge('b', 'c', capacity = 12)
- G.add_edge('a', 't', capacity = 60)
+ G.add_edge('s', 'b', capacity=30)
+ G.add_edge('a', 'c', capacity=25)
+ G.add_edge('b', 'c', capacity=12)
+ G.add_edge('a', 't', capacity=60)
G.add_edge('c', 't')
H = {'s': {'a': 85, 'b': 12},
@@ -319,34 +318,33 @@ class TestMaxflowMinCutCommon:
def test_digraph4(self):
# From ticket #429 by mfrasca.
G = nx.DiGraph()
- G.add_edge('s', 'a', capacity = 2)
- G.add_edge('s', 'b', capacity = 2)
- G.add_edge('a', 'b', capacity = 5)
- G.add_edge('a', 't', capacity = 1)
- G.add_edge('b', 'a', capacity = 1)
- G.add_edge('b', 't', capacity = 3)
+ G.add_edge('s', 'a', capacity=2)
+ G.add_edge('s', 'b', capacity=2)
+ G.add_edge('a', 'b', capacity=5)
+ G.add_edge('a', 't', capacity=1)
+ G.add_edge('b', 'a', capacity=1)
+ G.add_edge('b', 't', capacity=3)
flowSoln = {'a': {'b': 1, 't': 1},
'b': {'a': 0, 't': 3},
's': {'a': 2, 'b': 2},
't': {}}
compare_flows_and_cuts(G, 's', 't', flowSoln, 4)
-
def test_disconnected(self):
G = nx.Graph()
- G.add_weighted_edges_from([(0,1,1),(1,2,1),(2,3,1)],weight='capacity')
+ G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity')
G.remove_node(1)
- assert_equal(nx.maximum_flow_value(G,0,3), 0)
+ assert_equal(nx.maximum_flow_value(G, 0, 3), 0)
flowSoln = {0: {}, 2: {3: 0}, 3: {2: 0}}
compare_flows_and_cuts(G, 0, 3, flowSoln, 0)
def test_source_target_not_in_graph(self):
G = nx.Graph()
- G.add_weighted_edges_from([(0,1,1),(1,2,1),(2,3,1)],weight='capacity')
+ G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity')
G.remove_node(0)
for flow_func in all_funcs:
assert_raises(nx.NetworkXError, flow_func, G, 0, 3)
- G.add_weighted_edges_from([(0,1,1),(1,2,1),(2,3,1)],weight='capacity')
+ G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity')
G.remove_node(3)
for flow_func in all_funcs:
assert_raises(nx.NetworkXError, flow_func, G, 0, 3)
@@ -369,24 +367,24 @@ class TestMaxFlowMinCutInterface:
def setup(self):
G = nx.DiGraph()
- G.add_edge('x','a', capacity = 3.0)
- G.add_edge('x','b', capacity = 1.0)
- G.add_edge('a','c', capacity = 3.0)
- G.add_edge('b','c', capacity = 5.0)
- G.add_edge('b','d', capacity = 4.0)
- G.add_edge('d','e', capacity = 2.0)
- G.add_edge('c','y', capacity = 2.0)
- G.add_edge('e','y', capacity = 3.0)
+ G.add_edge('x', 'a', capacity=3.0)
+ G.add_edge('x', 'b', capacity=1.0)
+ G.add_edge('a', 'c', capacity=3.0)
+ G.add_edge('b', 'c', capacity=5.0)
+ G.add_edge('b', 'd', capacity=4.0)
+ G.add_edge('d', 'e', capacity=2.0)
+ G.add_edge('c', 'y', capacity=2.0)
+ G.add_edge('e', 'y', capacity=3.0)
self.G = G
H = nx.DiGraph()
- H.add_edge(0, 1, capacity = 1.0)
- H.add_edge(1, 2, capacity = 1.0)
+ H.add_edge(0, 1, capacity=1.0)
+ H.add_edge(1, 2, capacity=1.0)
self.H = H
def test_flow_func_not_callable(self):
- elements = ['this_should_be_callable', 10, set([1,2,3])]
+ elements = ['this_should_be_callable', 10, set([1, 2, 3])]
G = nx.Graph()
- G.add_weighted_edges_from([(0,1,1),(1,2,1),(2,3,1)], weight='capacity')
+ G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity')
for flow_func in interface_funcs:
for element in elements:
assert_raises(nx.NetworkXError,
@@ -403,7 +401,7 @@ class TestMaxFlowMinCutInterface:
if interface_func in max_min_funcs:
result = result[0]
assert_equal(fv, result, msg=msgi.format(flow_func.__name__,
- interface_func.__name__))
+ interface_func.__name__))
def test_minimum_cut_no_cutoff(self):
G = self.G
@@ -426,7 +424,7 @@ class TestMaxFlowMinCutInterface:
if interface_func in max_min_funcs:
result = result[0]
assert_equal(fv, result, msg=msgi.format(flow_func.__name__,
- interface_func.__name__))
+ interface_func.__name__))
def test_kwargs_default_flow_func(self):
G = self.H
@@ -460,14 +458,16 @@ def test_preflow_push_global_relabel_freq():
assert_raises(nx.NetworkXError, preflow_push, G, 1, 2,
global_relabel_freq=-1)
+
def test_preflow_push_makes_enough_space():
- #From ticket #1542
+ # From ticket #1542
G = nx.DiGraph()
nx.add_path(G, [0, 1, 3], capacity=1)
nx.add_path(G, [1, 2, 3], capacity=1)
R = preflow_push(G, 0, 3, value_only=False)
assert_equal(R.graph['flow_value'], 1)
+
def test_shortest_augmenting_path_two_phase():
k = 5
p = 1000
@@ -499,7 +499,6 @@ class TestCutoff:
R = edmonds_karp(G, 's', 't', cutoff=k)
ok_(k <= R.graph['flow_value'] <= 2 * k)
-
def test_complete_graph_cutoff(self):
G = nx.complete_graph(5)
nx.set_edge_attributes(G, {(u, v): 1 for u, v in G.edges()},
@@ -509,4 +508,4 @@ class TestCutoff:
result = nx.maximum_flow_value(G, 0, 4, flow_func=flow_func,
cutoff=cutoff)
assert_equal(cutoff, result,
- msg="cutoff error in {0}".format(flow_func.__name__))
+ msg="cutoff error in {0}".format(flow_func.__name__))
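
The hunks above exercise the generic max-flow/min-cut interface, where any flow algorithm can be plugged in through flow_func. A minimal sketch of that interface (assuming NetworkX 2.x; the graph here is illustrative, not taken from the tests):

    import networkx as nx
    from networkx.algorithms.flow import shortest_augmenting_path

    G = nx.DiGraph()
    G.add_edge('s', 'a', capacity=2)
    G.add_edge('s', 'b', capacity=2)
    G.add_edge('a', 't', capacity=1)
    G.add_edge('b', 't', capacity=3)

    # Default algorithm and an explicitly chosen one agree on the flow value.
    flow_value, flow_dict = nx.maximum_flow(G, 's', 't')
    same_value = nx.maximum_flow_value(G, 's', 't', flow_func=shortest_augmenting_path)

    # Max-flow equals min-cut; minimum_cut also returns the node partition.
    cut_value, (reachable, non_reachable) = nx.minimum_cut(G, 's', 't')
    assert flow_value == same_value == cut_value
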
diff --git a/networkx/algorithms/flow/tests/test_maxflow_large_graph.py b/networkx/algorithms/flow/tests/test_maxflow_large_graph.py
index 46b09520..818a9e07 100644
--- a/networkx/algorithms/flow/tests/test_maxflow_large_graph.py
+++ b/networkx/algorithms/flow/tests/test_maxflow_large_graph.py
@@ -27,26 +27,27 @@ flow_funcs = [
msg = "Assertion failed in function: {0}"
+
def gen_pyramid(N):
- # This graph admits a flow of value 1 for which every arc is at
- # capacity (except the arcs incident to the sink which have
- # infinite capacity).
- G = nx.DiGraph()
+ # This graph admits a flow of value 1 for which every arc is at
+ # capacity (except the arcs incident to the sink which have
+ # infinite capacity).
+ G = nx.DiGraph()
- for i in range(N - 1):
- cap = 1. / (i + 2)
- for j in range(i + 1):
- G.add_edge((i, j), (i + 1, j),
- capacity = cap)
- cap = 1. / (i + 1) - cap
- G.add_edge((i, j), (i + 1, j + 1),
- capacity = cap)
- cap = 1. / (i + 2) - cap
+ for i in range(N - 1):
+ cap = 1. / (i + 2)
+ for j in range(i + 1):
+ G.add_edge((i, j), (i + 1, j),
+ capacity=cap)
+ cap = 1. / (i + 1) - cap
+ G.add_edge((i, j), (i + 1, j + 1),
+ capacity=cap)
+ cap = 1. / (i + 2) - cap
- for j in range(N):
- G.add_edge((N - 1, j), 't')
+ for j in range(N):
+ G.add_edge((N - 1, j), 't')
- return G
+ return G
def read_graph(name):
@@ -97,7 +98,7 @@ class TestMaxflowLargeGraph:
def test_pyramid(self):
N = 10
- #N = 100 # this gives a graph with 5051 nodes
+ # N = 100 # this gives a graph with 5051 nodes
G = gen_pyramid(N)
R = build_residual_network(G, 'capacity')
kwargs = dict(residual=R)
@@ -118,7 +119,7 @@ class TestMaxflowLargeGraph:
# do one flow_func to save time
flow_func = flow_funcs[0]
validate_flows(G, s, t, 156545, flow_func(G, s, t, **kwargs),
- flow_func)
+ flow_func)
# for flow_func in flow_funcs:
# validate_flows(G, s, t, 156545, flow_func(G, s, t, **kwargs),
# flow_func)
@@ -144,7 +145,7 @@ class TestMaxflowLargeGraph:
# do one flow_func to save time
flow_func = flow_funcs[0]
validate_flows(G, s, t, 11875108, flow_func(G, s, t, **kwargs),
- flow_func)
+ flow_func)
# for flow_func in flow_funcs:
# validate_flows(G, s, t, 11875108, flow_func(G, s, t, **kwargs),
# flow_func)
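
These large-graph tests build the residual network once with build_residual_network and hand it to every flow function through kwargs = dict(residual=R). A short sketch of that reuse pattern (assuming NetworkX 2.x; the graph is illustrative):

    import networkx as nx
    from networkx.algorithms.flow import build_residual_network, edmonds_karp

    G = nx.complete_graph(20)
    nx.set_edge_attributes(G, 1, 'capacity')

    # Build the residual structure once; flow functions can reuse it via residual=R.
    R = build_residual_network(G, 'capacity')
    R1 = edmonds_karp(G, 0, 1, residual=R)
    print(R1.graph['flow_value'])  # the flow value is stored on the residual graph
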
diff --git a/networkx/algorithms/flow/tests/test_mincost.py b/networkx/algorithms/flow/tests/test_mincost.py
index 7ed3de76..e53fd50e 100644
--- a/networkx/algorithms/flow/tests/test_mincost.py
+++ b/networkx/algorithms/flow/tests/test_mincost.py
@@ -4,15 +4,16 @@ import networkx as nx
from nose.tools import assert_equal, assert_raises
import os
+
class TestMinCostFlow:
def test_simple_digraph(self):
G = nx.DiGraph()
- G.add_node('a', demand = -5)
- G.add_node('d', demand = 5)
- G.add_edge('a', 'b', weight = 3, capacity = 4)
- G.add_edge('a', 'c', weight = 6, capacity = 10)
- G.add_edge('b', 'd', weight = 1, capacity = 9)
- G.add_edge('c', 'd', weight = 2, capacity = 5)
+ G.add_node('a', demand=-5)
+ G.add_node('d', demand=5)
+ G.add_edge('a', 'b', weight=3, capacity=4)
+ G.add_edge('a', 'c', weight=6, capacity=10)
+ G.add_edge('b', 'd', weight=1, capacity=9)
+ G.add_edge('c', 'd', weight=2, capacity=5)
flowCost, H = nx.network_simplex(G)
soln = {'a': {'b': 4, 'c': 1},
'b': {'d': 4},
@@ -31,68 +32,68 @@ class TestMinCostFlow:
def test_negcycle_infcap(self):
G = nx.DiGraph()
- G.add_node('s', demand = -5)
- G.add_node('t', demand = 5)
- G.add_edge('s', 'a', weight = 1, capacity = 3)
- G.add_edge('a', 'b', weight = 3)
- G.add_edge('c', 'a', weight = -6)
- G.add_edge('b', 'd', weight = 1)
- G.add_edge('d', 'c', weight = -2)
- G.add_edge('d', 't', weight = 1, capacity = 3)
+ G.add_node('s', demand=-5)
+ G.add_node('t', demand=5)
+ G.add_edge('s', 'a', weight=1, capacity=3)
+ G.add_edge('a', 'b', weight=3)
+ G.add_edge('c', 'a', weight=-6)
+ G.add_edge('b', 'd', weight=1)
+ G.add_edge('d', 'c', weight=-2)
+ G.add_edge('d', 't', weight=1, capacity=3)
assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
assert_raises(nx.NetworkXUnbounded, nx.capacity_scaling, G)
def test_sum_demands_not_zero(self):
G = nx.DiGraph()
- G.add_node('s', demand = -5)
- G.add_node('t', demand = 4)
- G.add_edge('s', 'a', weight = 1, capacity = 3)
- G.add_edge('a', 'b', weight = 3)
- G.add_edge('a', 'c', weight = -6)
- G.add_edge('b', 'd', weight = 1)
- G.add_edge('c', 'd', weight = -2)
- G.add_edge('d', 't', weight = 1, capacity = 3)
+ G.add_node('s', demand=-5)
+ G.add_node('t', demand=4)
+ G.add_edge('s', 'a', weight=1, capacity=3)
+ G.add_edge('a', 'b', weight=3)
+ G.add_edge('a', 'c', weight=-6)
+ G.add_edge('b', 'd', weight=1)
+ G.add_edge('c', 'd', weight=-2)
+ G.add_edge('d', 't', weight=1, capacity=3)
assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
def test_no_flow_satisfying_demands(self):
G = nx.DiGraph()
- G.add_node('s', demand = -5)
- G.add_node('t', demand = 5)
- G.add_edge('s', 'a', weight = 1, capacity = 3)
- G.add_edge('a', 'b', weight = 3)
- G.add_edge('a', 'c', weight = -6)
- G.add_edge('b', 'd', weight = 1)
- G.add_edge('c', 'd', weight = -2)
- G.add_edge('d', 't', weight = 1, capacity = 3)
+ G.add_node('s', demand=-5)
+ G.add_node('t', demand=5)
+ G.add_edge('s', 'a', weight=1, capacity=3)
+ G.add_edge('a', 'b', weight=3)
+ G.add_edge('a', 'c', weight=-6)
+ G.add_edge('b', 'd', weight=1)
+ G.add_edge('c', 'd', weight=-2)
+ G.add_edge('d', 't', weight=1, capacity=3)
assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
def test_transshipment(self):
G = nx.DiGraph()
- G.add_node('a', demand = 1)
- G.add_node('b', demand = -2)
- G.add_node('c', demand = -2)
- G.add_node('d', demand = 3)
- G.add_node('e', demand = -4)
- G.add_node('f', demand = -4)
- G.add_node('g', demand = 3)
- G.add_node('h', demand = 2)
- G.add_node('r', demand = 3)
- G.add_edge('a', 'c', weight = 3)
- G.add_edge('r', 'a', weight = 2)
- G.add_edge('b', 'a', weight = 9)
- G.add_edge('r', 'c', weight = 0)
- G.add_edge('b', 'r', weight = -6)
- G.add_edge('c', 'd', weight = 5)
- G.add_edge('e', 'r', weight = 4)
- G.add_edge('e', 'f', weight = 3)
- G.add_edge('h', 'b', weight = 4)
- G.add_edge('f', 'd', weight = 7)
- G.add_edge('f', 'h', weight = 12)
- G.add_edge('g', 'd', weight = 12)
- G.add_edge('f', 'g', weight = -1)
- G.add_edge('h', 'g', weight = -10)
+ G.add_node('a', demand=1)
+ G.add_node('b', demand=-2)
+ G.add_node('c', demand=-2)
+ G.add_node('d', demand=3)
+ G.add_node('e', demand=-4)
+ G.add_node('f', demand=-4)
+ G.add_node('g', demand=3)
+ G.add_node('h', demand=2)
+ G.add_node('r', demand=3)
+ G.add_edge('a', 'c', weight=3)
+ G.add_edge('r', 'a', weight=2)
+ G.add_edge('b', 'a', weight=9)
+ G.add_edge('r', 'c', weight=0)
+ G.add_edge('b', 'r', weight=-6)
+ G.add_edge('c', 'd', weight=5)
+ G.add_edge('e', 'r', weight=4)
+ G.add_edge('e', 'f', weight=3)
+ G.add_edge('h', 'b', weight=4)
+ G.add_edge('f', 'd', weight=7)
+ G.add_edge('f', 'h', weight=12)
+ G.add_edge('g', 'd', weight=12)
+ G.add_edge('f', 'g', weight=-1)
+ G.add_edge('h', 'g', weight=-10)
flowCost, H = nx.network_simplex(G)
soln = {'a': {'c': 0},
'b': {'a': 0, 'r': 2},
@@ -116,40 +117,40 @@ class TestMinCostFlow:
def test_max_flow_min_cost(self):
G = nx.DiGraph()
- G.add_edge('s', 'a', bandwidth = 6)
- G.add_edge('s', 'c', bandwidth = 10, cost = 10)
- G.add_edge('a', 'b', cost = 6)
- G.add_edge('b', 'd', bandwidth = 8, cost = 7)
- G.add_edge('c', 'd', cost = 10)
- G.add_edge('d', 't', bandwidth = 5, cost = 5)
+ G.add_edge('s', 'a', bandwidth=6)
+ G.add_edge('s', 'c', bandwidth=10, cost=10)
+ G.add_edge('a', 'b', cost=6)
+ G.add_edge('b', 'd', bandwidth=8, cost=7)
+ G.add_edge('c', 'd', cost=10)
+ G.add_edge('d', 't', bandwidth=5, cost=5)
soln = {'s': {'a': 5, 'c': 0},
'a': {'b': 5},
'b': {'d': 5},
'c': {'d': 0},
'd': {'t': 5},
't': {}}
- flow = nx.max_flow_min_cost(G, 's', 't', capacity = 'bandwidth',
- weight = 'cost')
+ flow = nx.max_flow_min_cost(G, 's', 't', capacity='bandwidth',
+ weight='cost')
assert_equal(flow, soln)
- assert_equal(nx.cost_of_flow(G, flow, weight = 'cost'), 90)
+ assert_equal(nx.cost_of_flow(G, flow, weight='cost'), 90)
- G.add_edge('t', 's', cost = -100)
- flowCost, flow = nx.capacity_scaling(G, capacity = 'bandwidth',
- weight = 'cost')
+ G.add_edge('t', 's', cost=-100)
+ flowCost, flow = nx.capacity_scaling(G, capacity='bandwidth',
+ weight='cost')
G.remove_edge('t', 's')
assert_equal(flowCost, -410)
assert_equal(flow['t']['s'], 5)
del flow['t']['s']
assert_equal(flow, soln)
- assert_equal(nx.cost_of_flow(G, flow, weight = 'cost'), 90)
+ assert_equal(nx.cost_of_flow(G, flow, weight='cost'), 90)
def test_digraph1(self):
# From Bradley, S. P., Hax, A. C. and Magnanti, T. L. Applied
# Mathematical Programming. Addison-Wesley, 1977.
G = nx.DiGraph()
- G.add_node(1, demand = -20)
- G.add_node(4, demand = 5)
- G.add_node(5, demand = 15)
+ G.add_node(1, demand=-20)
+ G.add_node(4, demand=5)
+ G.add_node(5, demand=15)
G.add_edges_from([(1, 2, {'capacity': 15, 'weight': 4}),
(1, 3, {'capacity': 8, 'weight': 4}),
(2, 3, {'weight': 2}),
@@ -223,17 +224,17 @@ class TestMinCostFlow:
G = nx.DiGraph()
G.add_edge('s', 'a')
- G['s']['a'].update({0: 2, 1: 4})
+ G['s']['a'].update({0: 2, 1: 4})
G.add_edge('s', 'b')
- G['s']['b'].update({0: 2, 1: 1})
+ G['s']['b'].update({0: 2, 1: 1})
G.add_edge('a', 'b')
- G['a']['b'].update({0: 5, 1: 2})
+ G['a']['b'].update({0: 5, 1: 2})
G.add_edge('a', 't')
- G['a']['t'].update({0: 1, 1: 5})
+ G['a']['t'].update({0: 1, 1: 5})
G.add_edge('b', 'a')
- G['b']['a'].update({0: 1, 1: 3})
+ G['b']['a'].update({0: 1, 1: 3})
G.add_edge('b', 't')
- G['b']['t'].update({0: 3, 1: 2})
+ G['b']['t'].update({0: 3, 1: 2})
"PS.ex.7.1: testing main function"
sol = nx.max_flow_min_cost(G, 's', 't', capacity=0, weight=1)
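
test_mincost.py covers network_simplex, capacity_scaling, max_flow_min_cost and cost_of_flow. A compact sketch of that API on the simple digraph used above (assuming NetworkX 2.x):

    import networkx as nx

    G = nx.DiGraph()
    G.add_node('a', demand=-5)   # negative demand acts as supply
    G.add_node('d', demand=5)
    G.add_edge('a', 'b', weight=3, capacity=4)
    G.add_edge('a', 'c', weight=6, capacity=10)
    G.add_edge('b', 'd', weight=1, capacity=9)
    G.add_edge('c', 'd', weight=2, capacity=5)

    flow_cost, flow_dict = nx.network_simplex(G)
    assert flow_cost == nx.cost_of_flow(G, flow_dict) == 24
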
diff --git a/networkx/algorithms/isomorphism/__init__.py b/networkx/algorithms/isomorphism/__init__.py
index 1a6d4be5..0c441f5b 100644
--- a/networkx/algorithms/isomorphism/__init__.py
+++ b/networkx/algorithms/isomorphism/__init__.py
@@ -2,4 +2,3 @@ from networkx.algorithms.isomorphism.isomorph import *
from networkx.algorithms.isomorphism.vf2userfunc import *
from networkx.algorithms.isomorphism.matchhelpers import *
from networkx.algorithms.isomorphism.temporalisomorphvf2 import *
-
diff --git a/networkx/algorithms/isomorphism/isomorph.py b/networkx/algorithms/isomorphism/isomorph.py
index d16eb875..7c91b00f 100644
--- a/networkx/algorithms/isomorphism/isomorph.py
+++ b/networkx/algorithms/isomorphism/isomorph.py
@@ -17,7 +17,8 @@ __all__ = ['could_be_isomorphic',
'faster_could_be_isomorphic',
'is_isomorphic']
-def could_be_isomorphic(G1,G2):
+
+def could_be_isomorphic(G1, G2):
"""Returns False if graphs are definitely not isomorphic.
True does NOT guarantee isomorphism.
@@ -32,18 +33,19 @@ def could_be_isomorphic(G1,G2):
"""
# Check global properties
- if G1.order() != G2.order(): return False
+ if G1.order() != G2.order():
+ return False
# Check local properties
d1 = G1.degree()
- t1=nx.triangles(G1)
- c1=nx.number_of_cliques(G1)
+ t1 = nx.triangles(G1)
+ c1 = nx.number_of_cliques(G1)
props1 = [[d, t1[v], c1[v]] for v, d in d1]
props1.sort()
- d2=G2.degree()
- t2=nx.triangles(G2)
- c2=nx.number_of_cliques(G2)
+ d2 = G2.degree()
+ t2 = nx.triangles(G2)
+ c2 = nx.number_of_cliques(G2)
props2 = [[d, t2[v], c2[v]] for v, d in d2]
props2.sort()
@@ -53,9 +55,11 @@ def could_be_isomorphic(G1,G2):
# OK...
return True
-graph_could_be_isomorphic=could_be_isomorphic
-def fast_could_be_isomorphic(G1,G2):
+graph_could_be_isomorphic = could_be_isomorphic
+
+
+def fast_could_be_isomorphic(G1, G2):
"""Returns False if graphs are definitely not isomorphic.
True does NOT guarantee isomorphism.
@@ -70,27 +74,31 @@ def fast_could_be_isomorphic(G1,G2):
Checks for matching degree and triangle sequences.
"""
# Check global properties
- if G1.order() != G2.order(): return False
+ if G1.order() != G2.order():
+ return False
# Check local properties
d1 = G1.degree()
t1 = nx.triangles(G1)
- props1 = [ [d, t1[v]] for v, d in d1 ]
+ props1 = [[d, t1[v]] for v, d in d1]
props1.sort()
d2 = G2.degree()
- t2=nx.triangles(G2)
- props2=[ [d, t2[v]] for v, d in d2 ]
+ t2 = nx.triangles(G2)
+ props2 = [[d, t2[v]] for v, d in d2]
props2.sort()
- if props1 != props2: return False
+ if props1 != props2:
+ return False
# OK...
return True
-fast_graph_could_be_isomorphic=fast_could_be_isomorphic
-def faster_could_be_isomorphic(G1,G2):
+fast_graph_could_be_isomorphic = fast_could_be_isomorphic
+
+
+def faster_could_be_isomorphic(G1, G2):
"""Returns False if graphs are definitely not isomorphic.
True does NOT guarantee isomorphism.
@@ -105,18 +113,22 @@ def faster_could_be_isomorphic(G1,G2):
Checks for matching degree sequences.
"""
# Check global properties
- if G1.order() != G2.order(): return False
+ if G1.order() != G2.order():
+ return False
# Check local properties
d1 = sorted(d for n, d in G1.degree())
d2 = sorted(d for n, d in G2.degree())
- if d1 != d2: return False
+ if d1 != d2:
+ return False
# OK...
return True
-faster_graph_could_be_isomorphic=faster_could_be_isomorphic
+
+faster_graph_could_be_isomorphic = faster_could_be_isomorphic
+
def is_isomorphic(G1, G2, node_match=None, edge_match=None):
"""Returns True if the graphs G1 and G2 are isomorphic and False otherwise.
@@ -220,7 +232,7 @@ def is_isomorphic(G1, G2, node_match=None, edge_match=None):
elif (not G1.is_directed()) and (not G2.is_directed()):
GM = nx.algorithms.isomorphism.GraphMatcher
else:
- raise NetworkXError("Graphs G1 and G2 are not of the same type.")
+ raise NetworkXError("Graphs G1 and G2 are not of the same type.")
gm = GM(G1, G2, node_match=node_match, edge_match=edge_match)
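
could_be_isomorphic and its fast/faster variants are cheap necessary conditions (clique, triangle and degree sequences, dropped one by one); only is_isomorphic runs the full VF2 check. A quick sketch with illustrative graphs:

    import networkx as nx

    G1 = nx.path_graph(4)
    G2 = nx.path_graph(4)
    G3 = nx.star_graph(3)  # same number of edges, different degree sequence

    assert nx.faster_could_be_isomorphic(G1, G2)  # degree sequences match
    assert nx.fast_could_be_isomorphic(G1, G2)    # degrees and triangles match
    assert nx.could_be_isomorphic(G1, G2)         # degrees, triangles and cliques match
    assert nx.is_isomorphic(G1, G2)               # full VF2 check

    assert not nx.faster_could_be_isomorphic(G1, G3)
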
diff --git a/networkx/algorithms/isomorphism/isomorphvf2.py b/networkx/algorithms/isomorphism/isomorphvf2.py
index 21a7faa8..2fba448f 100644
--- a/networkx/algorithms/isomorphism/isomorphvf2.py
+++ b/networkx/algorithms/isomorphism/isomorphvf2.py
@@ -155,11 +155,13 @@ import networkx as nx
__all__ = ['GraphMatcher',
'DiGraphMatcher']
+
class GraphMatcher(object):
"""Implementation of VF2 algorithm for matching undirected graphs.
Suitable for Graph and MultiGraph instances.
"""
+
def __init__(self, G1, G2):
"""Initialize GraphMatcher.
@@ -197,15 +199,15 @@ class GraphMatcher(object):
def reset_recursion_limit(self):
"""Restores the recursion limit."""
- ### TODO:
- ### Currently, we use recursion and set the recursion level higher.
- ### It would be nice to restore the level, but because the
- ### (Di)GraphMatcher classes make use of cyclic references, garbage
- ### collection will never happen when we define __del__() to
- ### restore the recursion level. The result is a memory leak.
- ### So for now, we do not automatically restore the recursion level,
- ### and instead provide a method to do this manually. Eventually,
- ### we should turn this into a non-recursive implementation.
+ # TODO:
+ # Currently, we use recursion and set the recursion level higher.
+ # It would be nice to restore the level, but because the
+ # (Di)GraphMatcher classes make use of cyclic references, garbage
+ # collection will never happen when we define __del__() to
+ # restore the recursion level. The result is a memory leak.
+ # So for now, we do not automatically restore the recursion level,
+ # and instead provide a method to do this manually. Eventually,
+ # we should turn this into a non-recursive implementation.
sys.setrecursionlimit(self.old_recursion_limit)
def candidate_pairs_iter(self):
@@ -229,7 +231,7 @@ class GraphMatcher(object):
else:
# If T1_inout and T2_inout were both empty....
# P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
- ##if not (T1_inout or T2_inout): # as suggested by [2], incorrect
+ # if not (T1_inout or T2_inout): # as suggested by [2], incorrect
if 1: # as inferred from [1], correct
# First we determine the candidate node for G2
other_node = min(G2_nodes - set(self.core_2))
@@ -278,12 +280,14 @@ class GraphMatcher(object):
# For now, I just copy the code.
# Check global properties
- if self.G1.order() != self.G2.order(): return False
+ if self.G1.order() != self.G2.order():
+ return False
# Check local properties
- d1 = sorted(d for n,d in self.G1.degree())
- d2 = sorted(d for n,d in self.G2.degree())
- if d1 != d2: return False
+ d1 = sorted(d for n, d in self.G1.degree())
+ d2 = sorted(d for n, d in self.G2.degree())
+ if d1 != d2:
+ return False
try:
x = next(self.isomorphisms_iter())
@@ -404,12 +408,11 @@ class GraphMatcher(object):
# singlet for Graph instances. For MultiGraphs, the value in the
# innermost dictionary is a list.
-
###
- ### Test at each step to get a return value as soon as possible.
+ # Test at each step to get a return value as soon as possible.
###
- ### Look ahead 0
+ # Look ahead 0
# R_self
@@ -417,10 +420,9 @@ class GraphMatcher(object):
# self-loops for G2_node. Without this check, we would fail on
# R_neighbor at the next recursion level. But it is good to prune the
# search tree now.
- if self.G1.number_of_edges(G1_node,G1_node) != self.G2.number_of_edges(G2_node,G2_node):
+ if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(G2_node, G2_node):
return False
-
# R_neighbor
# For each neighbor n' of n in the partial mapping, the corresponding
@@ -439,7 +441,7 @@ class GraphMatcher(object):
elif self.G1.number_of_edges(self.core_2[neighbor], G1_node) != self.G2.number_of_edges(neighbor, G2_node):
return False
- ### Look ahead 1
+ # Look ahead 1
# R_terminout
# The number of neighbors of n that are in T_1^{inout} is equal to the
@@ -455,12 +457,11 @@ class GraphMatcher(object):
if self.test == 'graph':
if not (num1 == num2):
return False
- else: # self.test == 'subgraph'
+ else: # self.test == 'subgraph'
if not (num1 >= num2):
return False
-
- ### Look ahead 2
+ # Look ahead 2
# R_new
@@ -478,7 +479,7 @@ class GraphMatcher(object):
if self.test == 'graph':
if not (num1 == num2):
return False
- else: # self.test == 'subgraph'
+ else: # self.test == 'subgraph'
if not (num1 >= num2):
return False
@@ -531,7 +532,7 @@ class DiGraphMatcher(GraphMatcher):
# If T1_out and T2_out were both empty....
# We compute the in-terminal sets.
- ##elif not (T1_out or T2_out): # as suggested by [2], incorrect
+ # elif not (T1_out or T2_out): # as suggested by [2], incorrect
else: # as suggested by [1], correct
T1_in = [node for node in G1_nodes if (node in self.in_1) and (node not in self.core_1)]
T2_in = [node for node in G2_nodes if (node in self.in_2) and (node not in self.core_2)]
@@ -546,7 +547,7 @@ class DiGraphMatcher(GraphMatcher):
# If all terminal sets are empty...
# P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
- ##elif not (T1_in or T2_in): # as suggested by [2], incorrect
+ # elif not (T1_in or T2_in): # as suggested by [2], incorrect
else: # as inferred from [1], correct
node_2 = min(G2_nodes - set(self.core_2))
for node_1 in G1_nodes:
@@ -612,14 +613,11 @@ class DiGraphMatcher(GraphMatcher):
# dictionary is a singlet for DiGraph instances. For MultiDiGraphs,
# the value in the innermost dictionary is a list.
-
###
- ### Test at each step to get a return value as soon as possible.
+ # Test at each step to get a return value as soon as possible.
###
-
-
- ### Look ahead 0
+ # Look ahead 0
# R_self
@@ -627,10 +625,9 @@ class DiGraphMatcher(GraphMatcher):
# self-loops for G2_node. Without this check, we would fail on R_pred
# at the next recursion level. This should prune the tree even further.
- if self.G1.number_of_edges(G1_node,G1_node) != self.G2.number_of_edges(G2_node,G2_node):
+ if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(G2_node, G2_node):
return False
-
# R_pred
# For each predecessor n' of n in the partial mapping, the
@@ -650,7 +647,6 @@ class DiGraphMatcher(GraphMatcher):
elif self.G1.number_of_edges(self.core_2[predecessor], G1_node) != self.G2.number_of_edges(predecessor, G2_node):
return False
-
# R_succ
# For each successor n' of n in the partial mapping, the corresponding
@@ -670,8 +666,7 @@ class DiGraphMatcher(GraphMatcher):
elif self.G1.number_of_edges(G1_node, self.core_2[successor]) != self.G2.number_of_edges(G2_node, successor):
return False
-
- ### Look ahead 1
+ # Look ahead 1
# R_termin
# The number of predecessors of n that are in T_1^{in} is equal to the
@@ -687,7 +682,7 @@ class DiGraphMatcher(GraphMatcher):
if self.test == 'graph':
if not (num1 == num2):
return False
- else: # self.test == 'subgraph'
+ else: # self.test == 'subgraph'
if not (num1 >= num2):
return False
@@ -704,7 +699,7 @@ class DiGraphMatcher(GraphMatcher):
if self.test == 'graph':
if not (num1 == num2):
return False
- else: # self.test == 'subgraph'
+ else: # self.test == 'subgraph'
if not (num1 >= num2):
return False
@@ -723,7 +718,7 @@ class DiGraphMatcher(GraphMatcher):
if self.test == 'graph':
if not (num1 == num2):
return False
- else: # self.test == 'subgraph'
+ else: # self.test == 'subgraph'
if not (num1 >= num2):
return False
@@ -740,11 +735,11 @@ class DiGraphMatcher(GraphMatcher):
if self.test == 'graph':
if not (num1 == num2):
return False
- else: # self.test == 'subgraph'
+ else: # self.test == 'subgraph'
if not (num1 >= num2):
return False
- ### Look ahead 2
+ # Look ahead 2
# R_new
@@ -762,7 +757,7 @@ class DiGraphMatcher(GraphMatcher):
if self.test == 'graph':
if not (num1 == num2):
return False
- else: # self.test == 'subgraph'
+ else: # self.test == 'subgraph'
if not (num1 >= num2):
return False
@@ -780,7 +775,7 @@ class DiGraphMatcher(GraphMatcher):
if self.test == 'graph':
if not (num1 == num2):
return False
- else: # self.test == 'subgraph'
+ else: # self.test == 'subgraph'
if not (num1 >= num2):
return False
@@ -796,6 +791,7 @@ class GMState(object):
these objects in memory at a time, due to the depth-first search
strategy employed by the VF2 algorithm.
"""
+
def __init__(self, GM, G1_node=None, G2_node=None):
"""Initializes GMState object.
@@ -835,7 +831,7 @@ class GMState(object):
if G1_node not in GM.inout_1:
GM.inout_1[G1_node] = self.depth
if G2_node not in GM.inout_2:
- GM.inout_2[G2_node] = self.depth
+ GM.inout_2[G2_node] = self.depth
# Now we add every other node...
@@ -880,6 +876,7 @@ class DiGMState(object):
strategy employed by the VF2 algorithm.
"""
+
def __init__(self, GM, G1_node=None, G2_node=None):
"""Initializes DiGMState object.
@@ -930,7 +927,8 @@ class DiGMState(object):
# Updates for T_1^{in}
new_nodes = set([])
for node in GM.core_1:
- new_nodes.update([predecessor for predecessor in GM.G1.predecessors(node) if predecessor not in GM.core_1])
+ new_nodes.update([predecessor for predecessor in GM.G1.predecessors(node)
+ if predecessor not in GM.core_1])
for node in new_nodes:
if node not in GM.in_1:
GM.in_1[node] = self.depth
@@ -938,7 +936,8 @@ class DiGMState(object):
# Updates for T_2^{in}
new_nodes = set([])
for node in GM.core_2:
- new_nodes.update([predecessor for predecessor in GM.G2.predecessors(node) if predecessor not in GM.core_2])
+ new_nodes.update([predecessor for predecessor in GM.G2.predecessors(node)
+ if predecessor not in GM.core_2])
for node in new_nodes:
if node not in GM.in_2:
GM.in_2[node] = self.depth
@@ -974,4 +973,3 @@ class DiGMState(object):
for node in list(vector.keys()):
if vector[node] == self.depth:
del vector[node]
-
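
The GraphMatcher and DiGraphMatcher classes patched above expose the VF2 state machine directly, which is useful when the actual node mapping or an induced-subgraph check is needed rather than a yes/no answer. A brief sketch (graphs are illustrative):

    import networkx as nx
    from networkx.algorithms import isomorphism

    GM = isomorphism.GraphMatcher(nx.cycle_graph(4), nx.cycle_graph(4))
    if GM.is_isomorphic():
        print(GM.mapping)  # one concrete node-to-node mapping

    # Induced-subgraph test: a 3-node path is an induced subgraph of a 4-cycle.
    SM = isomorphism.GraphMatcher(nx.cycle_graph(4), nx.path_graph(3))
    print(SM.subgraph_is_isomorphic())                 # True
    print(len(list(SM.subgraph_isomorphisms_iter())))  # number of embeddings
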
diff --git a/networkx/algorithms/isomorphism/temporalisomorphvf2.py b/networkx/algorithms/isomorphism/temporalisomorphvf2.py
index 23228cb5..1b288122 100644
--- a/networkx/algorithms/isomorphism/temporalisomorphvf2.py
+++ b/networkx/algorithms/isomorphism/temporalisomorphvf2.py
@@ -258,7 +258,8 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher):
maintain the self.tests if needed, to keep the match() method
functional. Implementations should consider multigraphs.
"""
- pred, succ = [n for n in self.G1.predecessors(G1_node) if n in self.core_1], [n for n in self.G1.successors(G1_node) if n in self.core_1]
+ pred, succ = [n for n in self.G1.predecessors(G1_node) if n in self.core_1], [
+ n for n in self.G1.successors(G1_node) if n in self.core_1]
if not self.one_hop(self.G1, G1_node, self.core_1, pred, succ): # Fail fast on first node.
return False
if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred):
diff --git a/networkx/algorithms/isomorphism/tests/test_isomorphism.py b/networkx/algorithms/isomorphism/tests/test_isomorphism.py
index b9c7e638..5061129b 100644
--- a/networkx/algorithms/isomorphism/tests/test_isomorphism.py
+++ b/networkx/algorithms/isomorphism/tests/test_isomorphism.py
@@ -3,30 +3,31 @@ from nose.tools import *
import networkx as nx
from networkx.algorithms import isomorphism as iso
+
class TestIsomorph:
def setUp(self):
- self.G1=nx.Graph()
- self.G2=nx.Graph()
- self.G3=nx.Graph()
- self.G4=nx.Graph()
- self.G1.add_edges_from([ [1,2],[1,3],[1,5],[2,3] ])
- self.G2.add_edges_from([ [10,20],[20,30],[10,30],[10,50] ])
- self.G3.add_edges_from([ [1,2],[1,3],[1,5],[2,5] ])
- self.G4.add_edges_from([ [1,2],[1,3],[1,5],[2,4] ])
+ self.G1 = nx.Graph()
+ self.G2 = nx.Graph()
+ self.G3 = nx.Graph()
+ self.G4 = nx.Graph()
+ self.G1.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 3]])
+ self.G2.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50]])
+ self.G3.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 5]])
+ self.G4.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 4]])
def test_could_be_isomorphic(self):
- assert_true(iso.could_be_isomorphic(self.G1,self.G2))
- assert_true(iso.could_be_isomorphic(self.G1,self.G3))
- assert_false(iso.could_be_isomorphic(self.G1,self.G4))
- assert_true(iso.could_be_isomorphic(self.G3,self.G2))
+ assert_true(iso.could_be_isomorphic(self.G1, self.G2))
+ assert_true(iso.could_be_isomorphic(self.G1, self.G3))
+ assert_false(iso.could_be_isomorphic(self.G1, self.G4))
+ assert_true(iso.could_be_isomorphic(self.G3, self.G2))
def test_fast_could_be_isomorphic(self):
- assert_true(iso.fast_could_be_isomorphic(self.G3,self.G2))
+ assert_true(iso.fast_could_be_isomorphic(self.G3, self.G2))
def test_faster_could_be_isomorphic(self):
- assert_true(iso.faster_could_be_isomorphic(self.G3,self.G2))
+ assert_true(iso.faster_could_be_isomorphic(self.G3, self.G2))
def test_is_isomorphic(self):
- assert_true(iso.is_isomorphic(self.G1,self.G2))
- assert_false(iso.is_isomorphic(self.G1,self.G4))
+ assert_true(iso.is_isomorphic(self.G1, self.G2))
+ assert_false(iso.is_isomorphic(self.G1, self.G4))
diff --git a/networkx/algorithms/isomorphism/tests/test_match_helpers.py b/networkx/algorithms/isomorphism/tests/test_match_helpers.py
index 81053544..69d58e6c 100644
--- a/networkx/algorithms/isomorphism/tests/test_match_helpers.py
+++ b/networkx/algorithms/isomorphism/tests/test_match_helpers.py
@@ -5,13 +5,13 @@ from networkx.algorithms import isomorphism as iso
def test_categorical_node_match():
- nm = iso.categorical_node_match(['x', 'y', 'z'], [None]*3)
+ nm = iso.categorical_node_match(['x', 'y', 'z'], [None] * 3)
assert_true(nm(dict(x=1, y=2, z=3), dict(x=1, y=2, z=3)))
assert_true(not nm(dict(x=1, y=2, z=2), dict(x=1, y=2, z=1)))
-
+
class TestGenericMultiEdgeMatch:
-
+
def setup(self):
self.G1 = nx.MultiDiGraph()
self.G2 = nx.MultiDiGraph()
@@ -24,17 +24,17 @@ class TestGenericMultiEdgeMatch:
attr_dict5 = {'id': 'edge5', 'minFlow': 8, 'maxFlow': 12}
attr_dict6 = {'id': 'edge6', 'minFlow': 8, 'maxFlow': 12}
for attr_dict in [attr_dict1, attr_dict2, attr_dict3, attr_dict4, attr_dict5, attr_dict6]:
- self.G1.add_edge(1 ,2, **attr_dict)
+ self.G1.add_edge(1, 2, **attr_dict)
for attr_dict in [attr_dict5, attr_dict3, attr_dict6, attr_dict1, attr_dict4, attr_dict2]:
self.G2.add_edge(2, 3, **attr_dict)
for attr_dict in [attr_dict3, attr_dict5]:
self.G3.add_edge(3, 4, **attr_dict)
for attr_dict in [attr_dict6, attr_dict4]:
self.G4.add_edge(4, 5, **attr_dict)
-
+
def test_generic_multiedge_match(self):
- full_match = iso.generic_multiedge_match(['id', 'flowMin', 'flowMax'], [None]*3, [eq]*3)
- flow_match = iso.generic_multiedge_match(['flowMin', 'flowMax'], [None]*2, [eq]*2)
+ full_match = iso.generic_multiedge_match(['id', 'flowMin', 'flowMax'], [None] * 3, [eq] * 3)
+ flow_match = iso.generic_multiedge_match(['flowMin', 'flowMax'], [None] * 2, [eq] * 2)
min_flow_match = iso.generic_multiedge_match('flowMin', None, eq)
id_match = iso.generic_multiedge_match('id', None, eq)
assert_true(flow_match(self.G1[1][2], self.G2[2][3]))
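
The match-helper factories tested here build node_match/edge_match callables for the semantic matchers. A small sketch of how they plug into nx.is_isomorphic (assuming NetworkX 2.x; the attribute values are illustrative):

    import networkx as nx
    from networkx.algorithms import isomorphism as iso

    g1 = nx.Graph()
    g2 = nx.Graph()
    g1.add_edge('A', 'B', weight=1)
    g2.add_edge('C', 'D', weight=2)
    g1.nodes['A']['color'] = 'red'
    g2.nodes['C']['color'] = 'red'

    nm = iso.categorical_node_match('color', None)  # compare 'color', default None
    em = iso.numerical_edge_match('weight', 1)      # compare 'weight', default 1

    print(nx.is_isomorphic(g1, g2, node_match=nm))                 # True: colors agree
    print(nx.is_isomorphic(g1, g2, node_match=nm, edge_match=em))  # False: weights differ
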
diff --git a/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py b/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py
index 67a1bb19..8257a8ae 100644
--- a/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py
+++ b/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py
@@ -6,19 +6,23 @@ import networkx as nx
from networkx.algorithms import isomorphism as iso
from datetime import date, datetime, timedelta
+
def provide_g1_edgelist():
return [(0, 1), (0, 2), (1, 2), (2, 4), (1, 3), (3, 4), (4, 5)]
+
def put_same_time(G, att_name):
for e in G.edges(data=True):
e[2][att_name] = date(2015, 1, 1)
return G
+
def put_same_datetime(G, att_name):
for e in G.edges(data=True):
e[2][att_name] = datetime(2015, 1, 1)
return G
+
def put_sequence_time(G, att_name):
current_date = date(2015, 1, 1)
for e in G.edges(data=True):
@@ -26,6 +30,7 @@ def put_sequence_time(G, att_name):
e[2][att_name] = current_date
return G
+
def put_time_config_0(G, att_name):
G[0][1][att_name] = date(2015, 1, 2)
G[0][2][att_name] = date(2015, 1, 2)
@@ -36,6 +41,7 @@ def put_time_config_0(G, att_name):
G[4][5][att_name] = date(2015, 1, 3)
return G
+
def put_time_config_1(G, att_name):
G[0][1][att_name] = date(2015, 1, 2)
G[0][2][att_name] = date(2015, 1, 1)
@@ -46,6 +52,7 @@ def put_time_config_1(G, att_name):
G[4][5][att_name] = date(2015, 1, 3)
return G
+
def put_time_config_2(G, att_name):
G[0][1][att_name] = date(2015, 1, 1)
G[0][2][att_name] = date(2015, 1, 1)
@@ -127,7 +134,7 @@ class TestTimeRespectingGraphMatcher(object):
gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
count_match = len(list(gm.subgraph_isomorphisms_iter()))
assert_true(count_match == 4)
-
+
def test_timdelta_one_config2_returns_ten_embeddings(self):
G1 = self.provide_g1_topology()
temporal_name = 'date'
@@ -138,7 +145,7 @@ class TestTimeRespectingGraphMatcher(object):
L = list(gm.subgraph_isomorphisms_iter())
count_match = len(list(gm.subgraph_isomorphisms_iter()))
assert_true(count_match == 10)
-
+
class TestDiTimeRespectingGraphMatcher(object):
"""
@@ -192,7 +199,7 @@ class TestDiTimeRespectingGraphMatcher(object):
gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
count_match = len(list(gm.subgraph_isomorphisms_iter()))
assert_true(count_match == 1)
-
+
def test_timdelta_one_config2_returns_two_embeddings(self):
G1 = self.provide_g1_topology()
temporal_name = 'date'
@@ -202,4 +209,3 @@ class TestDiTimeRespectingGraphMatcher(object):
gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
count_match = len(list(gm.subgraph_isomorphisms_iter()))
assert_true(count_match == 2)
-
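
The time-respecting matchers add a temporal constraint on top of VF2: edges in an embedding must carry timestamps within a given timedelta of one another. A sketch that mirrors the fixtures above, assuming the TimeRespectingGraphMatcher(G1, G2, temporal_attribute_name, delta) signature used in these tests:

    import networkx as nx
    from datetime import date, timedelta
    from networkx.algorithms import isomorphism as iso

    G1 = nx.Graph([(0, 1), (1, 2)])
    for u, v in G1.edges():
        G1[u][v]['date'] = date(2015, 1, 1)

    G2 = nx.Graph([('a', 'b')])
    G2['a']['b']['date'] = date(2015, 1, 2)

    gm = iso.TimeRespectingGraphMatcher(G1, G2, 'date', timedelta(days=1))
    print(len(list(gm.subgraph_isomorphisms_iter())))
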
diff --git a/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py b/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py
index b48f1bca..97e0502a 100644
--- a/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py
+++ b/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py
@@ -25,7 +25,7 @@ def test_simple():
em = iso.numerical_multiedge_match('weight', 1)
else:
em = iso.numerical_edge_match('weight', 1)
- assert_true( nx.is_isomorphic(g1, g2, edge_match=em) )
+ assert_true(nx.is_isomorphic(g1, g2, edge_match=em))
for mod1, mod2 in [(False, True), (True, False), (True, True)]:
# mod1 tests a regular edge
@@ -63,18 +63,19 @@ def test_weightkey():
g1 = nx.DiGraph()
g2 = nx.DiGraph()
- g1.add_edge('A','B', weight=1)
- g2.add_edge('C','D', weight=0)
+ g1.add_edge('A', 'B', weight=1)
+ g2.add_edge('C', 'D', weight=0)
- assert_true( nx.is_isomorphic(g1, g2) )
+ assert_true(nx.is_isomorphic(g1, g2))
em = iso.numerical_edge_match('nonexistent attribute', 1)
- assert_true( nx.is_isomorphic(g1, g2, edge_match=em) )
+ assert_true(nx.is_isomorphic(g1, g2, edge_match=em))
em = iso.numerical_edge_match('weight', 1)
- assert_false( nx.is_isomorphic(g1, g2, edge_match=em) )
+ assert_false(nx.is_isomorphic(g1, g2, edge_match=em))
g2 = nx.DiGraph()
- g2.add_edge('C','D')
- assert_true( nx.is_isomorphic(g1, g2, edge_match=em) )
+ g2.add_edge('C', 'D')
+ assert_true(nx.is_isomorphic(g1, g2, edge_match=em))
+
class TestNodeMatch_Graph(object):
def setUp(self):
@@ -90,25 +91,25 @@ class TestNodeMatch_Graph(object):
self.g1.add_node('A', color='red')
self.g2.add_node('C', color='blue')
- self.g1.add_edge('A','B', weight=1)
- self.g2.add_edge('C','D', weight=1)
+ self.g1.add_edge('A', 'B', weight=1)
+ self.g2.add_edge('C', 'D', weight=1)
def test_noweight_nocolor(self):
- assert_true( nx.is_isomorphic(self.g1, self.g2) )
+ assert_true(nx.is_isomorphic(self.g1, self.g2))
def test_color1(self):
- assert_false( nx.is_isomorphic(self.g1, self.g2, node_match=self.nm) )
+ assert_false(nx.is_isomorphic(self.g1, self.g2, node_match=self.nm))
def test_color2(self):
self.g1.nodes['A']['color'] = 'blue'
- assert_true( nx.is_isomorphic(self.g1, self.g2, node_match=self.nm) )
+ assert_true(nx.is_isomorphic(self.g1, self.g2, node_match=self.nm))
def test_weight1(self):
- assert_true( nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) )
+ assert_true(nx.is_isomorphic(self.g1, self.g2, edge_match=self.em))
def test_weight2(self):
self.g1.add_edge('A', 'B', weight=2)
- assert_false( nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) )
+ assert_false(nx.is_isomorphic(self.g1, self.g2, edge_match=self.em))
def test_colorsandweights1(self):
iso = nx.is_isomorphic(self.g1, self.g2,
@@ -124,8 +125,9 @@ class TestNodeMatch_Graph(object):
def test_colorsandweights3(self):
# make the weights disagree
self.g1.add_edge('A', 'B', weight=2)
- assert_false( nx.is_isomorphic(self.g1, self.g2,
- node_match=self.nm, edge_match=self.em) )
+ assert_false(nx.is_isomorphic(self.g1, self.g2,
+ node_match=self.nm, edge_match=self.em))
+
class TestEdgeMatch_MultiGraph(object):
def setUp(self):
@@ -152,32 +154,34 @@ class TestEdgeMatch_MultiGraph(object):
self.emc = iso.categorical_multiedge_match('color', '')
self.emcm = iso.categorical_multiedge_match(['color', 'weight'], ['', 1])
self.emg1 = iso.generic_multiedge_match('color', 'red', eq)
- self.emg2 = iso.generic_multiedge_match(['color', 'weight', 'size'], ['red', 1, .5], [eq, eq, iso.matchhelpers.close])
+ self.emg2 = iso.generic_multiedge_match(['color', 'weight', 'size'], ['red', 1, .5], [
+ eq, eq, iso.matchhelpers.close])
else:
self.em = iso.numerical_edge_match('weight', 1)
self.emc = iso.categorical_edge_match('color', '')
self.emcm = iso.categorical_edge_match(['color', 'weight'], ['', 1])
self.emg1 = iso.generic_multiedge_match('color', 'red', eq)
- self.emg2 = iso.generic_edge_match(['color', 'weight', 'size'], ['red', 1, .5], [eq, eq, iso.matchhelpers.close])
+ self.emg2 = iso.generic_edge_match(['color', 'weight', 'size'], ['red', 1, .5], [
+ eq, eq, iso.matchhelpers.close])
def test_weights_only(self):
- assert_true( nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) )
+ assert_true(nx.is_isomorphic(self.g1, self.g2, edge_match=self.em))
def test_colors_only(self):
gm = self.GM(self.g1, self.g2, edge_match=self.emc)
- assert_true( gm.is_isomorphic() )
+ assert_true(gm.is_isomorphic())
def test_colorsandweights(self):
gm = self.GM(self.g1, self.g2, edge_match=self.emcm)
- assert_false( gm.is_isomorphic() )
+ assert_false(gm.is_isomorphic())
def test_generic1(self):
gm = self.GM(self.g1, self.g2, edge_match=self.emg1)
- assert_true( gm.is_isomorphic() )
+ assert_true(gm.is_isomorphic())
def test_generic2(self):
gm = self.GM(self.g1, self.g2, edge_match=self.emg2)
- assert_false( gm.is_isomorphic() )
+ assert_false(gm.is_isomorphic())
class TestEdgeMatch_DiGraph(TestNodeMatch_Graph):
@@ -186,10 +190,10 @@ class TestEdgeMatch_DiGraph(TestNodeMatch_Graph):
self.g2 = nx.DiGraph()
self.build()
+
class TestEdgeMatch_MultiDiGraph(TestEdgeMatch_MultiGraph):
def setUp(self):
self.g1 = nx.MultiDiGraph()
self.g2 = nx.MultiDiGraph()
self.GM = iso.MultiDiGraphMatcher
self.build()
-
diff --git a/networkx/algorithms/isomorphism/vf2userfunc.py b/networkx/algorithms/isomorphism/vf2userfunc.py
index ce75402d..708880f4 100644
--- a/networkx/algorithms/isomorphism/vf2userfunc.py
+++ b/networkx/algorithms/isomorphism/vf2userfunc.py
@@ -38,7 +38,7 @@ __all__ = ['GraphMatcher',
'DiGraphMatcher',
'MultiGraphMatcher',
'MultiDiGraphMatcher',
- ]
+ ]
def _semantic_feasibility(self, G1_node, G2_node):
@@ -77,6 +77,7 @@ def _semantic_feasibility(self, G1_node, G2_node):
class GraphMatcher(vf2.GraphMatcher):
"""VF2 isomorphism checker for undirected graphs.
"""
+
def __init__(self, G1, G2, node_match=None, edge_match=None):
"""Initialize graph matcher.
@@ -124,6 +125,7 @@ class GraphMatcher(vf2.GraphMatcher):
class DiGraphMatcher(vf2.DiGraphMatcher):
"""VF2 isomorphism checker for directed graphs.
"""
+
def __init__(self, G1, G2, node_match=None, edge_match=None):
"""Initialize graph matcher.
@@ -165,7 +167,6 @@ class DiGraphMatcher(vf2.DiGraphMatcher):
self.G1_adj = self.G1.adj
self.G2_adj = self.G2.adj
-
def semantic_feasibility(self, G1_node, G2_node):
"""Returns True if mapping G1_node to G2_node is semantically feasible."""
@@ -183,15 +184,16 @@ class DiGraphMatcher(vf2.DiGraphMatcher):
return feasible
-## The "semantics" of edge_match are different for multi(di)graphs, but
-## the implementation is the same. So, technically we do not need to
-## provide "multi" versions, but we do so to match NetworkX's base classes.
+# The "semantics" of edge_match are different for multi(di)graphs, but
+# the implementation is the same. So, technically we do not need to
+# provide "multi" versions, but we do so to match NetworkX's base classes.
+
class MultiGraphMatcher(GraphMatcher):
"""VF2 isomorphism checker for undirected multigraphs. """
pass
+
class MultiDiGraphMatcher(DiGraphMatcher):
"""VF2 isomorphism checker for directed multigraphs. """
pass
-
diff --git a/networkx/algorithms/link_analysis/pagerank_alg.py b/networkx/algorithms/link_analysis/pagerank_alg.py
index b1d83a11..87283998 100644
--- a/networkx/algorithms/link_analysis/pagerank_alg.py
+++ b/networkx/algorithms/link_analysis/pagerank_alg.py
@@ -137,7 +137,7 @@ def pagerank(G, alpha=0.85, personalization=None,
dangling_weights = p
else:
s = float(sum(dangling.values()))
- dangling_weights = dict((k, v/s) for k, v in dangling.items())
+ dangling_weights = dict((k, v / s) for k, v in dangling.items())
dangling_nodes = [n for n in W if W.out_degree(n, weight=weight) == 0.0]
# power iteration: make up to max_iter iterations
@@ -150,10 +150,10 @@ def pagerank(G, alpha=0.85, personalization=None,
# doing a left multiply x^T=xlast^T*W
for nbr in W[n]:
x[nbr] += alpha * xlast[n] * W[n][nbr][weight]
- x[n] += danglesum * dangling_weights.get(n,0) + (1.0 - alpha) * p.get(n,0)
+ x[n] += danglesum * dangling_weights.get(n, 0) + (1.0 - alpha) * p.get(n, 0)
# check convergence, l1 norm
err = sum([abs(x[n] - xlast[n]) for n in x])
- if err < N*tol:
+ if err < N * tol:
return x
raise nx.PowerIterationFailedConvergence(max_iter)
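
The patched lines above are the heart of the power iteration: mass from dangling nodes is redistributed according to dangling_weights, and iteration stops once the L1 change falls below N * tol. A short usage sketch (illustrative graph):

    import networkx as nx

    G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4)])  # node 4 dangles (no out-edges)

    pr = nx.pagerank(G, alpha=0.85)
    print(sum(pr.values()))  # approximately 1.0

    # personalization and dangling are both dicts keyed by nodes.
    pr_biased = nx.pagerank(G, alpha=0.85,
                            personalization={1: 1, 2: 1, 3: 1, 4: 1},
                            dangling={1: 1})  # send dangling mass back to node 1
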
diff --git a/networkx/algorithms/link_analysis/tests/test_hits.py b/networkx/algorithms/link_analysis/tests/test_hits.py
index b49a2904..80dbd54d 100644
--- a/networkx/algorithms/link_analysis/tests/test_hits.py
+++ b/networkx/algorithms/link_analysis/tests/test_hits.py
@@ -12,35 +12,34 @@ import networkx
class TestHITS:
def setUp(self):
-
- G=networkx.DiGraph()
-
- edges=[(1,3),(1,5),\
- (2,1),\
- (3,5),\
- (5,4),(5,3),\
- (6,5)]
-
- G.add_edges_from(edges,weight=1)
- self.G=G
- self.G.a=dict(zip(sorted(G),[0.000000, 0.000000, 0.366025,
- 0.133975, 0.500000, 0.000000]))
- self.G.h=dict(zip(sorted(G),[ 0.366025, 0.000000, 0.211325,
- 0.000000, 0.211325, 0.211325]))
+ G = networkx.DiGraph()
+
+ edges = [(1, 3), (1, 5),
+ (2, 1),
+ (3, 5),
+ (5, 4), (5, 3),
+ (6, 5)]
+
+ G.add_edges_from(edges, weight=1)
+ self.G = G
+ self.G.a = dict(zip(sorted(G), [0.000000, 0.000000, 0.366025,
+ 0.133975, 0.500000, 0.000000]))
+ self.G.h = dict(zip(sorted(G), [0.366025, 0.000000, 0.211325,
+ 0.000000, 0.211325, 0.211325]))
def test_hits(self):
- G=self.G
- h,a=networkx.hits(G,tol=1.e-08)
+ G = self.G
+ h, a = networkx.hits(G, tol=1.e-08)
for n in G:
- assert_almost_equal(h[n],G.h[n],places=4)
+ assert_almost_equal(h[n], G.h[n], places=4)
for n in G:
- assert_almost_equal(a[n],G.a[n],places=4)
+ assert_almost_equal(a[n], G.a[n], places=4)
def test_hits_nstart(self):
G = self.G
- nstart = dict([(i, 1./2) for i in G])
- h, a = networkx.hits(G, nstart = nstart)
+ nstart = dict([(i, 1. / 2) for i in G])
+ h, a = networkx.hits(G, nstart=nstart)
@attr('numpy')
def test_hits_numpy(self):
@@ -49,14 +48,12 @@ class TestHITS:
except ImportError:
raise SkipTest('NumPy not available.')
-
- G=self.G
- h,a=networkx.hits_numpy(G)
+ G = self.G
+ h, a = networkx.hits_numpy(G)
for n in G:
- assert_almost_equal(h[n],G.h[n],places=4)
+ assert_almost_equal(h[n], G.h[n], places=4)
for n in G:
- assert_almost_equal(a[n],G.a[n],places=4)
-
+ assert_almost_equal(a[n], G.a[n], places=4)
def test_hits_scipy(self):
try:
@@ -64,13 +61,12 @@ class TestHITS:
except ImportError:
raise SkipTest('SciPy not available.')
- G=self.G
- h,a=networkx.hits_scipy(G,tol=1.e-08)
+ G = self.G
+ h, a = networkx.hits_scipy(G, tol=1.e-08)
for n in G:
- assert_almost_equal(h[n],G.h[n],places=4)
+ assert_almost_equal(h[n], G.h[n], places=4)
for n in G:
- assert_almost_equal(a[n],G.a[n],places=4)
-
+ assert_almost_equal(a[n], G.a[n], places=4)
@attr('numpy')
def test_empty(self):
@@ -78,20 +74,19 @@ class TestHITS:
import numpy
except ImportError:
raise SkipTest('numpy not available.')
- G=networkx.Graph()
- assert_equal(networkx.hits(G),({},{}))
- assert_equal(networkx.hits_numpy(G),({},{}))
- assert_equal(networkx.authority_matrix(G).shape,(0,0))
- assert_equal(networkx.hub_matrix(G).shape,(0,0))
+ G = networkx.Graph()
+ assert_equal(networkx.hits(G), ({}, {}))
+ assert_equal(networkx.hits_numpy(G), ({}, {}))
+ assert_equal(networkx.authority_matrix(G).shape, (0, 0))
+ assert_equal(networkx.hub_matrix(G).shape, (0, 0))
def test_empty_scipy(self):
try:
import scipy
except ImportError:
raise SkipTest('scipy not available.')
- G=networkx.Graph()
- assert_equal(networkx.hits_scipy(G),({},{}))
-
+ G = networkx.Graph()
+ assert_equal(networkx.hits_scipy(G), ({}, {}))
@raises(networkx.PowerIterationFailedConvergence)
def test_hits_not_convergent(self):
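
nx.hits returns a pair of dicts (hub scores, authority scores), which is what the fixture above compares against. A minimal sketch on the same edge list:

    import networkx as nx

    G = nx.DiGraph([(1, 3), (1, 5), (2, 1), (3, 5), (5, 4), (5, 3), (6, 5)])
    hubs, authorities = nx.hits(G, tol=1e-8)
    print(max(authorities, key=authorities.get))  # node 5: most heavily pointed to
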
diff --git a/networkx/algorithms/link_analysis/tests/test_pagerank.py b/networkx/algorithms/link_analysis/tests/test_pagerank.py
index 01b815c9..d0f2a9d9 100644
--- a/networkx/algorithms/link_analysis/tests/test_pagerank.py
+++ b/networkx/algorithms/link_analysis/tests/test_pagerank.py
@@ -70,7 +70,7 @@ class TestPageRank(object):
p = numpy.array(ev[:, 0] / ev[:, 0].sum())[:, 0]
for (a, b) in zip(p, self.G.pagerank.values()):
assert_almost_equal(a, b)
-
+
def test_personalization(self):
G = networkx.complete_graph(4)
personalize = {0: 1, 1: 1, 2: 4, 3: 4}
@@ -83,7 +83,7 @@ class TestPageRank(object):
G = networkx.complete_graph(4)
personalize = {0: 0, 1: 0, 2: 0, 3: 0}
assert_raises(ZeroDivisionError, networkx.pagerank, G,
- personalization=personalize)
+ personalization=personalize)
def test_one_nonzero_personalization_value(self):
G = networkx.complete_graph(4)
@@ -92,7 +92,7 @@ class TestPageRank(object):
p = networkx.pagerank(G, alpha=0.85, personalization=personalize)
for n in G:
assert_almost_equal(p[n], answer[n], places=4)
-
+
def test_incomplete_personalization(self):
G = networkx.complete_graph(4)
personalize = {3: 1}
@@ -100,7 +100,7 @@ class TestPageRank(object):
p = networkx.pagerank(G, alpha=0.85, personalization=personalize)
for n in G:
assert_almost_equal(p[n], answer[n], places=4)
-
+
def test_dangling_matrix(self):
"""
Tests that the google_matrix doesn't change except for the dangling
@@ -131,8 +131,6 @@ class TestPageRank(object):
for n in self.G:
assert_almost_equal(pr[n], self.G.dangling_pagerank[n], places=4)
-
-
def test_empty(self):
G = networkx.Graph()
assert_equal(networkx.pagerank(G), {})
@@ -140,7 +138,6 @@ class TestPageRank(object):
assert_equal(networkx.google_matrix(G).shape, (0, 0))
-
class TestPageRankScipy(TestPageRank):
@classmethod
diff --git a/networkx/algorithms/lowest_common_ancestors.py b/networkx/algorithms/lowest_common_ancestors.py
index dc915ed2..4169a827 100644
--- a/networkx/algorithms/lowest_common_ancestors.py
+++ b/networkx/algorithms/lowest_common_ancestors.py
@@ -15,7 +15,7 @@ from itertools import chain, count
import networkx as nx
from networkx.utils import arbitrary_element, not_implemented_for, \
- UnionFind, generate_unique_node
+ UnionFind, generate_unique_node
__all__ = ["all_pairs_lowest_common_ancestor",
"tree_all_pairs_lowest_common_ancestor",
@@ -332,7 +332,7 @@ def all_pairs_lowest_common_ancestor(G, pairs=None):
else:
ans = tree_lca[tree_node2, tree_node1]
if not dry_run and (best is None or
- root_distance[ans] > best_root_distance):
+ root_distance[ans] > best_root_distance):
best_root_distance = root_distance[ans]
best = ans
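
Both LCA routines touched here operate on directed graphs (the tree variant on a rooted tree, the general one on a DAG) and yield ((u, v), ancestor) pairs. A sketch with an illustrative tree:

    import networkx as nx

    T = nx.DiGraph([(0, 1), (0, 2), (1, 3), (1, 4)])  # edges point from parent to child

    tree_lca = dict(nx.tree_all_pairs_lowest_common_ancestor(T, root=0,
                                                             pairs=[(3, 4), (3, 2)]))
    print(tree_lca)  # LCA(3, 4) is 1; LCA(3, 2) is the root 0

    dag_lca = dict(nx.all_pairs_lowest_common_ancestor(T, pairs=[(3, 4)]))
    print(dag_lca)
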
diff --git a/networkx/algorithms/matching.py b/networkx/algorithms/matching.py
index 3fa6be9c..ac9cde23 100644
--- a/networkx/algorithms/matching.py
+++ b/networkx/algorithms/matching.py
@@ -70,7 +70,7 @@ def matching_dict_to_set(matching):
# only the (frozen)set `{u, v}` appears as an element in the
# returned set.
- return set((u,v) for (u,v) in set(map(frozenset, matching.items())))
+ return set((u, v) for (u, v) in set(map(frozenset, matching.items())))
def is_matching(G, matching):
@@ -247,7 +247,7 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'):
# Get a list of vertices.
gnodes = list(G)
if not gnodes:
- return set( ) # don't bother with empty graphs
+ return set() # don't bother with empty graphs
# Find the maximum edge weight.
maxweight = 0
@@ -640,7 +640,7 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'):
bs = inblossom[s]
assert label[bs] == 1
assert (
- labeledge[bs] is None and blossombase[bs] not in mate)\
+ labeledge[bs] is None and blossombase[bs] not in mate)\
or (labeledge[bs][0] == mate[blossombase[bs]])
# Augment through the S-blossom from s to base.
if isinstance(bs, Blossom):
@@ -802,14 +802,14 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'):
# keep track of the least-slack non-allowable edge to
# a different S-blossom.
if bestedge.get(bv) is None or \
- kslack < slack(*bestedge[bv]):
+ kslack < slack(*bestedge[bv]):
bestedge[bv] = (v, w)
elif label.get(w) is None:
# w is a free vertex (or an unreached vertex inside
# a T-blossom) but we can not reach it yet;
# keep track of the least-slack edge that reaches w.
if bestedge.get(w) is None or \
- kslack < slack(*bestedge[w]):
+ kslack < slack(*bestedge[w]):
bestedge[w] = (v, w)
if augmented:
@@ -831,7 +831,7 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'):
# an S-vertex and a free vertex.
for v in G.nodes():
if label.get(inblossom[v]) is None and \
- bestedge.get(v) is not None:
+ bestedge.get(v) is not None:
d = slack(*bestedge[v])
if deltatype == -1 or d < delta:
delta = d
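
max_weight_matching returns a set of matched endpoint pairs (each matched edge appears once, in arbitrary orientation); the blossom bookkeeping patched above tracks labels and edge slack during augmentation. A quick sketch:

    import networkx as nx

    G = nx.Graph()
    G.add_edge('a', 'b', weight=6)
    G.add_edge('b', 'c', weight=2)
    G.add_edge('c', 'd', weight=5)

    M = nx.max_weight_matching(G)  # {('a', 'b'), ('c', 'd')} up to pair orientation
    print(nx.is_matching(G, M))    # True
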
diff --git a/networkx/algorithms/minors.py b/networkx/algorithms/minors.py
index 3f9700b4..9cd3ee83 100644
--- a/networkx/algorithms/minors.py
+++ b/networkx/algorithms/minors.py
@@ -382,6 +382,7 @@ def contracted_nodes(G, u, v, self_loops=True):
H.nodes[u]['contraction'] = {v: v_data}
return H
+
identified_nodes = contracted_nodes
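
contracted_nodes (aliased as identified_nodes just above) merges node v into node u, optionally keeping the resulting self-loop. A one-line illustration:

    import networkx as nx

    # Contracting one edge of a 5-cycle yields a 4-cycle when self-loops are dropped.
    C4 = nx.contracted_nodes(nx.cycle_graph(5), 0, 1, self_loops=False)
    assert nx.is_isomorphic(C4, nx.cycle_graph(4))
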
diff --git a/networkx/algorithms/mis.py b/networkx/algorithms/mis.py
index ad48a537..7811e745 100644
--- a/networkx/algorithms/mis.py
+++ b/networkx/algorithms/mis.py
@@ -71,11 +71,11 @@ def maximal_independent_set(G, nodes=None):
nodes = set(nodes)
if not nodes.issubset(G):
raise nx.NetworkXUnfeasible(
- "%s is not a subset of the nodes of G" % nodes)
+ "%s is not a subset of the nodes of G" % nodes)
neighbors = set.union(*[set(G.adj[v]) for v in nodes])
if set.intersection(neighbors, nodes):
raise nx.NetworkXUnfeasible(
- "%s is not an independent set of G" % nodes)
+ "%s is not an independent set of G" % nodes)
indep_nodes = list(nodes)
available_nodes = set(G.nodes()).difference(neighbors.union(nodes))
while available_nodes:
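
maximal_independent_set grows a random maximal (not necessarily maximum) independent set, optionally seeded with nodes that must be included; the patched lines are the error paths for invalid seeds. A sketch:

    import networkx as nx

    G = nx.cycle_graph(6)
    I = nx.maximal_independent_set(G)       # random maximal independent set
    J = nx.maximal_independent_set(G, [0])  # must contain node 0

    assert 0 in J
    assert all(not G.has_edge(u, v) for u in J for v in J if u != v)
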
diff --git a/networkx/algorithms/operators/tests/test_all.py b/networkx/algorithms/operators/tests/test_all.py
index c63b47de..d8ce7976 100644
--- a/networkx/algorithms/operators/tests/test_all.py
+++ b/networkx/algorithms/operators/tests/test_all.py
@@ -2,6 +2,7 @@ from nose.tools import *
import networkx as nx
from networkx.testing import *
+
def test_union_all_attributes():
g = nx.Graph()
g.add_node(0, x=4)
@@ -20,32 +21,31 @@ def test_union_all_attributes():
j.nodes[0]['x'] = 7
ghj = nx.union_all([g, h, j], rename=('g', 'h', 'j'))
- assert_equal( set(ghj.nodes()) , set(['h0', 'h1', 'g0', 'g1', 'j0', 'j1']) )
+ assert_equal(set(ghj.nodes()), set(['h0', 'h1', 'g0', 'g1', 'j0', 'j1']))
for n in ghj:
graph, node = n
- assert_equal( ghj.nodes[n], eval(graph).nodes[int(node)] )
-
- assert_equal(ghj.graph['attr'],'attr')
- assert_equal(ghj.graph['name'],'j') # j graph attributes take precendent
+ assert_equal(ghj.nodes[n], eval(graph).nodes[int(node)])
+ assert_equal(ghj.graph['attr'], 'attr')
+    assert_equal(ghj.graph['name'], 'j')  # j graph attributes take precedence
def test_intersection_all():
- G=nx.Graph()
- H=nx.Graph()
- R=nx.Graph()
- G.add_nodes_from([1,2,3,4])
- G.add_edge(1,2)
- G.add_edge(2,3)
- H.add_nodes_from([1,2,3,4])
- H.add_edge(2,3)
- H.add_edge(3,4)
- R.add_nodes_from([1,2,3,4])
- R.add_edge(2,3)
- R.add_edge(4,1)
- I=nx.intersection_all([G,H,R])
- assert_equal( set(I.nodes()) , set([1,2,3,4]) )
- assert_equal( sorted(I.edges()) , [(2,3)] )
+ G = nx.Graph()
+ H = nx.Graph()
+ R = nx.Graph()
+ G.add_nodes_from([1, 2, 3, 4])
+ G.add_edge(1, 2)
+ G.add_edge(2, 3)
+ H.add_nodes_from([1, 2, 3, 4])
+ H.add_edge(2, 3)
+ H.add_edge(3, 4)
+ R.add_nodes_from([1, 2, 3, 4])
+ R.add_edge(2, 3)
+ R.add_edge(4, 1)
+ I = nx.intersection_all([G, H, R])
+ assert_equal(set(I.nodes()), set([1, 2, 3, 4]))
+ assert_equal(sorted(I.edges()), [(2, 3)])
def test_intersection_all_attributes():
@@ -61,13 +61,14 @@ def test_intersection_all_attributes():
h.nodes[0]['x'] = 7
gh = nx.intersection_all([g, h])
- assert_equal( set(gh.nodes()) , set(g.nodes()) )
- assert_equal( set(gh.nodes()) , set(h.nodes()) )
- assert_equal( sorted(gh.edges()) , sorted(g.edges()) )
+ assert_equal(set(gh.nodes()), set(g.nodes()))
+ assert_equal(set(gh.nodes()), set(h.nodes()))
+ assert_equal(sorted(gh.edges()), sorted(g.edges()))
h.remove_node(0)
assert_raises(nx.NetworkXError, nx.intersection, g, h)
+
def test_intersection_all_multigraph_attributes():
g = nx.MultiGraph()
g.add_edge(0, 1, key=0)
@@ -77,94 +78,95 @@ def test_intersection_all_multigraph_attributes():
h.add_edge(0, 1, key=0)
h.add_edge(0, 1, key=3)
gh = nx.intersection_all([g, h])
- assert_equal( set(gh.nodes()) , set(g.nodes()) )
- assert_equal( set(gh.nodes()) , set(h.nodes()) )
- assert_equal( sorted(gh.edges()) , [(0,1)] )
- assert_equal( sorted(gh.edges(keys=True)) , [(0,1,0)] )
+ assert_equal(set(gh.nodes()), set(g.nodes()))
+ assert_equal(set(gh.nodes()), set(h.nodes()))
+ assert_equal(sorted(gh.edges()), [(0, 1)])
+ assert_equal(sorted(gh.edges(keys=True)), [(0, 1, 0)])
+
def test_union_all_and_compose_all():
- K3=nx.complete_graph(3)
- P3=nx.path_graph(3)
-
- G1=nx.DiGraph()
- G1.add_edge('A','B')
- G1.add_edge('A','C')
- G1.add_edge('A','D')
- G2=nx.DiGraph()
- G2.add_edge('1','2')
- G2.add_edge('1','3')
- G2.add_edge('1','4')
-
- G=nx.union_all([G1,G2])
- H=nx.compose_all([G1,G2])
- assert_edges_equal(G.edges(),H.edges())
- assert_false(G.has_edge('A','1'))
+ K3 = nx.complete_graph(3)
+ P3 = nx.path_graph(3)
+
+ G1 = nx.DiGraph()
+ G1.add_edge('A', 'B')
+ G1.add_edge('A', 'C')
+ G1.add_edge('A', 'D')
+ G2 = nx.DiGraph()
+ G2.add_edge('1', '2')
+ G2.add_edge('1', '3')
+ G2.add_edge('1', '4')
+
+ G = nx.union_all([G1, G2])
+ H = nx.compose_all([G1, G2])
+ assert_edges_equal(G.edges(), H.edges())
+ assert_false(G.has_edge('A', '1'))
assert_raises(nx.NetworkXError, nx.union, K3, P3)
- H1=nx.union_all([H,G1],rename=('H','G1'))
+ H1 = nx.union_all([H, G1], rename=('H', 'G1'))
assert_equal(sorted(H1.nodes()),
- ['G1A', 'G1B', 'G1C', 'G1D',
- 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD'])
+ ['G1A', 'G1B', 'G1C', 'G1D',
+ 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD'])
- H2=nx.union_all([H,G2],rename=("H",""))
+ H2 = nx.union_all([H, G2], rename=("H", ""))
assert_equal(sorted(H2.nodes()),
- ['1', '2', '3', '4',
- 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD'])
+ ['1', '2', '3', '4',
+ 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD'])
- assert_false(H1.has_edge('NB','NA'))
+ assert_false(H1.has_edge('NB', 'NA'))
- G=nx.compose_all([G,G])
- assert_edges_equal(G.edges(),H.edges())
+ G = nx.compose_all([G, G])
+ assert_edges_equal(G.edges(), H.edges())
- G2=nx.union_all([G2,G2],rename=('','copy'))
+ G2 = nx.union_all([G2, G2], rename=('', 'copy'))
assert_equal(sorted(G2.nodes()),
- ['1', '2', '3', '4', 'copy1', 'copy2', 'copy3', 'copy4'])
-
- assert_equal(sorted(G2.neighbors('copy4')),[])
- assert_equal(sorted(G2.neighbors('copy1')),['copy2', 'copy3', 'copy4'])
- assert_equal(len(G),8)
- assert_equal(nx.number_of_edges(G),6)
-
- E=nx.disjoint_union_all([G,G])
- assert_equal(len(E),16)
- assert_equal(nx.number_of_edges(E),12)
-
- E=nx.disjoint_union_all([G1,G2])
- assert_equal(sorted(E.nodes()),[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
-
- G1=nx.DiGraph()
- G1.add_edge('A','B')
- G2=nx.DiGraph()
- G2.add_edge(1,2)
- G3=nx.DiGraph()
- G3.add_edge(11,22)
- G4=nx.union_all([G1,G2,G3],rename=("G1","G2","G3"))
+ ['1', '2', '3', '4', 'copy1', 'copy2', 'copy3', 'copy4'])
+
+ assert_equal(sorted(G2.neighbors('copy4')), [])
+ assert_equal(sorted(G2.neighbors('copy1')), ['copy2', 'copy3', 'copy4'])
+ assert_equal(len(G), 8)
+ assert_equal(nx.number_of_edges(G), 6)
+
+ E = nx.disjoint_union_all([G, G])
+ assert_equal(len(E), 16)
+ assert_equal(nx.number_of_edges(E), 12)
+
+ E = nx.disjoint_union_all([G1, G2])
+ assert_equal(sorted(E.nodes()), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
+
+ G1 = nx.DiGraph()
+ G1.add_edge('A', 'B')
+ G2 = nx.DiGraph()
+ G2.add_edge(1, 2)
+ G3 = nx.DiGraph()
+ G3.add_edge(11, 22)
+ G4 = nx.union_all([G1, G2, G3], rename=("G1", "G2", "G3"))
assert_equal(sorted(G4.nodes()),
- ['G1A', 'G1B', 'G21', 'G22',
- 'G311', 'G322'])
+ ['G1A', 'G1B', 'G21', 'G22',
+ 'G311', 'G322'])
def test_union_all_multigraph():
- G=nx.MultiGraph()
- G.add_edge(1,2,key=0)
- G.add_edge(1,2,key=1)
- H=nx.MultiGraph()
- H.add_edge(3,4,key=0)
- H.add_edge(3,4,key=1)
- GH=nx.union_all([G,H])
- assert_equal( set(GH) , set(G)|set(H))
- assert_equal( set(GH.edges(keys=True)) ,
- set(G.edges(keys=True))|set(H.edges(keys=True)))
+ G = nx.MultiGraph()
+ G.add_edge(1, 2, key=0)
+ G.add_edge(1, 2, key=1)
+ H = nx.MultiGraph()
+ H.add_edge(3, 4, key=0)
+ H.add_edge(3, 4, key=1)
+ GH = nx.union_all([G, H])
+ assert_equal(set(GH), set(G) | set(H))
+ assert_equal(set(GH.edges(keys=True)),
+ set(G.edges(keys=True)) | set(H.edges(keys=True)))
def test_input_output():
- l = [nx.Graph([(1,2)]),nx.Graph([(3,4)])]
+ l = [nx.Graph([(1, 2)]), nx.Graph([(3, 4)])]
U = nx.disjoint_union_all(l)
- assert_equal(len(l),2)
+ assert_equal(len(l), 2)
C = nx.compose_all(l)
- assert_equal(len(l),2)
- l = [nx.Graph([(1,2)]),nx.Graph([(1,2)])]
+ assert_equal(len(l), 2)
+ l = [nx.Graph([(1, 2)]), nx.Graph([(1, 2)])]
R = nx.intersection_all(l)
- assert_equal(len(l),2)
+ assert_equal(len(l), 2)
@raises(nx.NetworkXError)
@@ -172,25 +174,28 @@ def test_mixed_type_union():
G = nx.Graph()
H = nx.MultiGraph()
I = nx.Graph()
- U = nx.union_all([G,H,I])
+ U = nx.union_all([G, H, I])
+
@raises(nx.NetworkXError)
def test_mixed_type_disjoint_union():
G = nx.Graph()
H = nx.MultiGraph()
I = nx.Graph()
- U = nx.disjoint_union_all([G,H,I])
+ U = nx.disjoint_union_all([G, H, I])
+
@raises(nx.NetworkXError)
def test_mixed_type_intersection():
G = nx.Graph()
H = nx.MultiGraph()
I = nx.Graph()
- U = nx.intersection_all([G,H,I])
+ U = nx.intersection_all([G, H, I])
+
@raises(nx.NetworkXError)
def test_mixed_type_compose():
G = nx.Graph()
H = nx.MultiGraph()
I = nx.Graph()
- U = nx.compose_all([G,H,I])
+ U = nx.compose_all([G, H, I])
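
The operator tests above distinguish union_all (disjoint node sets, optional rename prefixes), disjoint_union_all (relabels nodes to integers), compose_all (overlapping nodes allowed) and intersection_all (same node sets required). A sketch with illustrative graphs:

    import networkx as nx

    G = nx.Graph([(1, 2)])
    H = nx.Graph([(2, 3)])

    U = nx.union_all([G, H], rename=('G', 'H'))  # nodes 'G1', 'G2', 'H2', 'H3'
    D = nx.disjoint_union_all([G, H])            # nodes 0, 1, 2, 3
    C = nx.compose_all([G, H])                   # nodes 1, 2, 3 with both edges

    A = nx.Graph([(1, 2), (2, 3)]); A.add_node(4)
    B = nx.Graph([(2, 3), (3, 4)]); B.add_node(1)
    print(sorted(nx.intersection_all([A, B]).edges()))  # [(2, 3)]
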
diff --git a/networkx/algorithms/operators/tests/test_binary.py b/networkx/algorithms/operators/tests/test_binary.py
index 1dfd396f..d8590745 100644
--- a/networkx/algorithms/operators/tests/test_binary.py
+++ b/networkx/algorithms/operators/tests/test_binary.py
@@ -3,6 +3,7 @@ import networkx as nx
from networkx import *
from networkx.testing import *
+
def test_union_attributes():
g = nx.Graph()
g.add_node(0, x=4)
@@ -16,26 +17,27 @@ def test_union_attributes():
h.nodes[0]['x'] = 7
gh = nx.union(g, h, rename=('g', 'h'))
- assert_equal( set(gh.nodes()) , set(['h0', 'h1', 'g0', 'g1']) )
+ assert_equal(set(gh.nodes()), set(['h0', 'h1', 'g0', 'g1']))
for n in gh:
graph, node = n
- assert_equal( gh.nodes[n], eval(graph).nodes[int(node)] )
+ assert_equal(gh.nodes[n], eval(graph).nodes[int(node)])
+
+ assert_equal(gh.graph['attr'], 'attr')
+ assert_equal(gh.graph['name'], 'h')  # h graph attributes take precedence
- assert_equal(gh.graph['attr'],'attr')
- assert_equal(gh.graph['name'],'h') # h graph attributes take precedence
def test_intersection():
- G=nx.Graph()
- H=nx.Graph()
- G.add_nodes_from([1,2,3,4])
- G.add_edge(1,2)
- G.add_edge(2,3)
- H.add_nodes_from([1,2,3,4])
- H.add_edge(2,3)
- H.add_edge(3,4)
- I=nx.intersection(G,H)
- assert_equal( set(I.nodes()) , set([1,2,3,4]) )
- assert_equal( sorted(I.edges()) , [(2,3)] )
+ G = nx.Graph()
+ H = nx.Graph()
+ G.add_nodes_from([1, 2, 3, 4])
+ G.add_edge(1, 2)
+ G.add_edge(2, 3)
+ H.add_nodes_from([1, 2, 3, 4])
+ H.add_edge(2, 3)
+ H.add_edge(3, 4)
+ I = nx.intersection(G, H)
+ assert_equal(set(I.nodes()), set([1, 2, 3, 4]))
+ assert_equal(sorted(I.edges()), [(2, 3)])
def test_intersection_attributes():
@@ -51,15 +53,14 @@ def test_intersection_attributes():
h.nodes[0]['x'] = 7
gh = nx.intersection(g, h)
- assert_equal( set(gh.nodes()) , set(g.nodes()) )
- assert_equal( set(gh.nodes()) , set(h.nodes()) )
- assert_equal( sorted(gh.edges()) , sorted(g.edges()) )
+ assert_equal(set(gh.nodes()), set(g.nodes()))
+ assert_equal(set(gh.nodes()), set(h.nodes()))
+ assert_equal(sorted(gh.edges()), sorted(g.edges()))
h.remove_node(0)
assert_raises(nx.NetworkXError, nx.intersection, g, h)
-
def test_intersection_multigraph_attributes():
g = nx.MultiGraph()
g.add_edge(0, 1, key=0)
@@ -69,50 +70,50 @@ def test_intersection_multigraph_attributes():
h.add_edge(0, 1, key=0)
h.add_edge(0, 1, key=3)
gh = nx.intersection(g, h)
- assert_equal( set(gh.nodes()) , set(g.nodes()) )
- assert_equal( set(gh.nodes()) , set(h.nodes()) )
- assert_equal( sorted(gh.edges()) , [(0,1)] )
- assert_equal( sorted(gh.edges(keys=True)) , [(0,1,0)] )
+ assert_equal(set(gh.nodes()), set(g.nodes()))
+ assert_equal(set(gh.nodes()), set(h.nodes()))
+ assert_equal(sorted(gh.edges()), [(0, 1)])
+ assert_equal(sorted(gh.edges(keys=True)), [(0, 1, 0)])
def test_difference():
- G=nx.Graph()
- H=nx.Graph()
- G.add_nodes_from([1,2,3,4])
- G.add_edge(1,2)
- G.add_edge(2,3)
- H.add_nodes_from([1,2,3,4])
- H.add_edge(2,3)
- H.add_edge(3,4)
- D=nx.difference(G,H)
- assert_equal( set(D.nodes()) , set([1,2,3,4]) )
- assert_equal( sorted(D.edges()) , [(1,2)] )
- D=nx.difference(H,G)
- assert_equal( set(D.nodes()) , set([1,2,3,4]) )
- assert_equal( sorted(D.edges()) , [(3,4)] )
- D=nx.symmetric_difference(G,H)
- assert_equal( set(D.nodes()) , set([1,2,3,4]) )
- assert_equal( sorted(D.edges()) , [(1,2),(3,4)] )
+ G = nx.Graph()
+ H = nx.Graph()
+ G.add_nodes_from([1, 2, 3, 4])
+ G.add_edge(1, 2)
+ G.add_edge(2, 3)
+ H.add_nodes_from([1, 2, 3, 4])
+ H.add_edge(2, 3)
+ H.add_edge(3, 4)
+ D = nx.difference(G, H)
+ assert_equal(set(D.nodes()), set([1, 2, 3, 4]))
+ assert_equal(sorted(D.edges()), [(1, 2)])
+ D = nx.difference(H, G)
+ assert_equal(set(D.nodes()), set([1, 2, 3, 4]))
+ assert_equal(sorted(D.edges()), [(3, 4)])
+ D = nx.symmetric_difference(G, H)
+ assert_equal(set(D.nodes()), set([1, 2, 3, 4]))
+ assert_equal(sorted(D.edges()), [(1, 2), (3, 4)])
def test_difference2():
- G=nx.Graph()
- H=nx.Graph()
- G.add_nodes_from([1,2,3,4])
- H.add_nodes_from([1,2,3,4])
- G.add_edge(1,2)
- H.add_edge(1,2)
- G.add_edge(2,3)
- D=nx.difference(G,H)
- assert_equal( set(D.nodes()) , set([1,2,3,4]) )
- assert_equal( sorted(D.edges()) , [(2,3)] )
- D=nx.difference(H,G)
- assert_equal( set(D.nodes()) , set([1,2,3,4]) )
- assert_equal( sorted(D.edges()) , [] )
- H.add_edge(3,4)
- D=nx.difference(H,G)
- assert_equal( set(D.nodes()) , set([1,2,3,4]) )
- assert_equal( sorted(D.edges()) , [(3,4)] )
+ G = nx.Graph()
+ H = nx.Graph()
+ G.add_nodes_from([1, 2, 3, 4])
+ H.add_nodes_from([1, 2, 3, 4])
+ G.add_edge(1, 2)
+ H.add_edge(1, 2)
+ G.add_edge(2, 3)
+ D = nx.difference(G, H)
+ assert_equal(set(D.nodes()), set([1, 2, 3, 4]))
+ assert_equal(sorted(D.edges()), [(2, 3)])
+ D = nx.difference(H, G)
+ assert_equal(set(D.nodes()), set([1, 2, 3, 4]))
+ assert_equal(sorted(D.edges()), [])
+ H.add_edge(3, 4)
+ D = nx.difference(H, G)
+ assert_equal(set(D.nodes()), set([1, 2, 3, 4]))
+ assert_equal(sorted(D.edges()), [(3, 4)])
def test_difference_attributes():
@@ -128,13 +129,14 @@ def test_difference_attributes():
h.nodes[0]['x'] = 7
gh = nx.difference(g, h)
- assert_equal( set(gh.nodes()) , set(g.nodes()) )
- assert_equal( set(gh.nodes()) , set(h.nodes()) )
- assert_equal( sorted(gh.edges()) , [])
+ assert_equal(set(gh.nodes()), set(g.nodes()))
+ assert_equal(set(gh.nodes()), set(h.nodes()))
+ assert_equal(sorted(gh.edges()), [])
h.remove_node(0)
assert_raises(nx.NetworkXError, nx.intersection, g, h)
+
def test_difference_multigraph_attributes():
g = nx.MultiGraph()
g.add_edge(0, 1, key=0)
@@ -144,10 +146,10 @@ def test_difference_multigraph_attributes():
h.add_edge(0, 1, key=0)
h.add_edge(0, 1, key=3)
gh = nx.difference(g, h)
- assert_equal( set(gh.nodes()) , set(g.nodes()) )
- assert_equal( set(gh.nodes()) , set(h.nodes()) )
- assert_equal( sorted(gh.edges()) , [(0,1),(0,1)] )
- assert_equal( sorted(gh.edges(keys=True)) , [(0,1,1),(0,1,2)] )
+ assert_equal(set(gh.nodes()), set(g.nodes()))
+ assert_equal(set(gh.nodes()), set(h.nodes()))
+ assert_equal(sorted(gh.edges()), [(0, 1), (0, 1)])
+ assert_equal(sorted(gh.edges(keys=True)), [(0, 1, 1), (0, 1, 2)])
@raises(nx.NetworkXError)
@@ -156,6 +158,7 @@ def test_difference_raise():
H = nx.path_graph(3)
GH = nx.difference(G, H)
+
def test_symmetric_difference_multigraph():
g = nx.MultiGraph()
g.add_edge(0, 1, key=0)
@@ -165,11 +168,12 @@ def test_symmetric_difference_multigraph():
h.add_edge(0, 1, key=0)
h.add_edge(0, 1, key=3)
gh = nx.symmetric_difference(g, h)
- assert_equal( set(gh.nodes()) , set(g.nodes()) )
- assert_equal( set(gh.nodes()) , set(h.nodes()) )
- assert_equal( sorted(gh.edges()) , 3*[(0,1)] )
- assert_equal( sorted(sorted(e) for e in gh.edges(keys=True)),
- [[0,1,1],[0,1,2],[0,1,3]] )
+ assert_equal(set(gh.nodes()), set(g.nodes()))
+ assert_equal(set(gh.nodes()), set(h.nodes()))
+ assert_equal(sorted(gh.edges()), 3 * [(0, 1)])
+ assert_equal(sorted(sorted(e) for e in gh.edges(keys=True)),
+ [[0, 1, 1], [0, 1, 2], [0, 1, 3]])
+
@raises(nx.NetworkXError)
def test_symmetric_difference_raise():
@@ -177,54 +181,55 @@ def test_symmetric_difference_raise():
H = nx.path_graph(3)
GH = nx.symmetric_difference(G, H)
+
def test_union_and_compose():
- K3=complete_graph(3)
- P3=path_graph(3)
-
- G1=nx.DiGraph()
- G1.add_edge('A','B')
- G1.add_edge('A','C')
- G1.add_edge('A','D')
- G2=nx.DiGraph()
- G2.add_edge('1','2')
- G2.add_edge('1','3')
- G2.add_edge('1','4')
-
- G=union(G1,G2)
- H=compose(G1,G2)
- assert_edges_equal(G.edges(),H.edges())
- assert_false(G.has_edge('A',1))
+ K3 = complete_graph(3)
+ P3 = path_graph(3)
+
+ G1 = nx.DiGraph()
+ G1.add_edge('A', 'B')
+ G1.add_edge('A', 'C')
+ G1.add_edge('A', 'D')
+ G2 = nx.DiGraph()
+ G2.add_edge('1', '2')
+ G2.add_edge('1', '3')
+ G2.add_edge('1', '4')
+
+ G = union(G1, G2)
+ H = compose(G1, G2)
+ assert_edges_equal(G.edges(), H.edges())
+ assert_false(G.has_edge('A', 1))
assert_raises(nx.NetworkXError, nx.union, K3, P3)
- H1=union(H,G1,rename=('H','G1'))
+ H1 = union(H, G1, rename=('H', 'G1'))
assert_equal(sorted(H1.nodes()),
- ['G1A', 'G1B', 'G1C', 'G1D',
+ ['G1A', 'G1B', 'G1C', 'G1D',
'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD'])
- H2=union(H,G2,rename=("H",""))
+ H2 = union(H, G2, rename=("H", ""))
assert_equal(sorted(H2.nodes()),
- ['1', '2', '3', '4',
+ ['1', '2', '3', '4',
'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD'])
- assert_false(H1.has_edge('NB','NA'))
+ assert_false(H1.has_edge('NB', 'NA'))
- G=compose(G,G)
- assert_edges_equal(G.edges(),H.edges())
+ G = compose(G, G)
+ assert_edges_equal(G.edges(), H.edges())
- G2=union(G2,G2,rename=('','copy'))
+ G2 = union(G2, G2, rename=('', 'copy'))
assert_equal(sorted(G2.nodes()),
['1', '2', '3', '4', 'copy1', 'copy2', 'copy3', 'copy4'])
- assert_equal(sorted(G2.neighbors('copy4')),[])
- assert_equal(sorted(G2.neighbors('copy1')),['copy2', 'copy3', 'copy4'])
- assert_equal(len(G),8)
- assert_equal(number_of_edges(G),6)
+ assert_equal(sorted(G2.neighbors('copy4')), [])
+ assert_equal(sorted(G2.neighbors('copy1')), ['copy2', 'copy3', 'copy4'])
+ assert_equal(len(G), 8)
+ assert_equal(number_of_edges(G), 6)
- E=disjoint_union(G,G)
- assert_equal(len(E),16)
- assert_equal(number_of_edges(E),12)
+ E = disjoint_union(G, G)
+ assert_equal(len(E), 16)
+ assert_equal(number_of_edges(E), 12)
- E=disjoint_union(G1,G2)
- assert_equal(sorted(E.nodes()),[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
+ E = disjoint_union(G1, G2)
+ assert_equal(sorted(E.nodes()), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
G = nx.Graph()
H = nx.Graph()
@@ -235,81 +240,86 @@ def test_union_and_compose():
def test_union_multigraph():
- G=nx.MultiGraph()
- G.add_edge(1,2,key=0)
- G.add_edge(1,2,key=1)
- H=nx.MultiGraph()
- H.add_edge(3,4,key=0)
- H.add_edge(3,4,key=1)
- GH=nx.union(G,H)
- assert_equal( set(GH) , set(G)|set(H))
- assert_equal( set(GH.edges(keys=True)) ,
- set(G.edges(keys=True))|set(H.edges(keys=True)))
+ G = nx.MultiGraph()
+ G.add_edge(1, 2, key=0)
+ G.add_edge(1, 2, key=1)
+ H = nx.MultiGraph()
+ H.add_edge(3, 4, key=0)
+ H.add_edge(3, 4, key=1)
+ GH = nx.union(G, H)
+ assert_equal(set(GH), set(G) | set(H))
+ assert_equal(set(GH.edges(keys=True)),
+ set(G.edges(keys=True)) | set(H.edges(keys=True)))
+
def test_disjoint_union_multigraph():
- G=nx.MultiGraph()
- G.add_edge(0,1,key=0)
- G.add_edge(0,1,key=1)
- H=nx.MultiGraph()
- H.add_edge(2,3,key=0)
- H.add_edge(2,3,key=1)
- GH=nx.disjoint_union(G,H)
- assert_equal( set(GH) , set(G)|set(H))
- assert_equal( set(GH.edges(keys=True)) ,
- set(G.edges(keys=True))|set(H.edges(keys=True)))
+ G = nx.MultiGraph()
+ G.add_edge(0, 1, key=0)
+ G.add_edge(0, 1, key=1)
+ H = nx.MultiGraph()
+ H.add_edge(2, 3, key=0)
+ H.add_edge(2, 3, key=1)
+ GH = nx.disjoint_union(G, H)
+ assert_equal(set(GH), set(G) | set(H))
+ assert_equal(set(GH.edges(keys=True)),
+ set(G.edges(keys=True)) | set(H.edges(keys=True)))
def test_compose_multigraph():
- G=nx.MultiGraph()
- G.add_edge(1,2,key=0)
- G.add_edge(1,2,key=1)
- H=nx.MultiGraph()
- H.add_edge(3,4,key=0)
- H.add_edge(3,4,key=1)
- GH=nx.compose(G,H)
- assert_equal( set(GH) , set(G)|set(H))
- assert_equal( set(GH.edges(keys=True)) ,
- set(G.edges(keys=True))|set(H.edges(keys=True)))
- H.add_edge(1,2,key=2)
- GH=nx.compose(G,H)
- assert_equal( set(GH) , set(G)|set(H))
- assert_equal( set(GH.edges(keys=True)) ,
- set(G.edges(keys=True))|set(H.edges(keys=True)))
+ G = nx.MultiGraph()
+ G.add_edge(1, 2, key=0)
+ G.add_edge(1, 2, key=1)
+ H = nx.MultiGraph()
+ H.add_edge(3, 4, key=0)
+ H.add_edge(3, 4, key=1)
+ GH = nx.compose(G, H)
+ assert_equal(set(GH), set(G) | set(H))
+ assert_equal(set(GH.edges(keys=True)),
+ set(G.edges(keys=True)) | set(H.edges(keys=True)))
+ H.add_edge(1, 2, key=2)
+ GH = nx.compose(G, H)
+ assert_equal(set(GH), set(G) | set(H))
+ assert_equal(set(GH.edges(keys=True)),
+ set(G.edges(keys=True)) | set(H.edges(keys=True)))
@raises(nx.NetworkXError)
def test_mixed_type_union():
G = nx.Graph()
H = nx.MultiGraph()
- U = nx.union(G,H)
+ U = nx.union(G, H)
+
@raises(nx.NetworkXError)
def test_mixed_type_disjoint_union():
G = nx.Graph()
H = nx.MultiGraph()
- U = nx.disjoint_union(G,H)
+ U = nx.disjoint_union(G, H)
+
@raises(nx.NetworkXError)
def test_mixed_type_intersection():
G = nx.Graph()
H = nx.MultiGraph()
- U = nx.intersection(G,H)
+ U = nx.intersection(G, H)
+
@raises(nx.NetworkXError)
def test_mixed_type_difference():
G = nx.Graph()
H = nx.MultiGraph()
- U = nx.difference(G,H)
+ U = nx.difference(G, H)
@raises(nx.NetworkXError)
def test_mixed_type_symmetric_difference():
G = nx.Graph()
H = nx.MultiGraph()
- U = nx.symmetric_difference(G,H)
+ U = nx.symmetric_difference(G, H)
+
@raises(nx.NetworkXError)
def test_mixed_type_compose():
G = nx.Graph()
H = nx.MultiGraph()
- U = nx.compose(G,H)
+ U = nx.compose(G, H)
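For the difference tests reformatted above, note that nx.difference and nx.symmetric_difference compare edge sets only and require both operands to share the same node set (removing a node raises NetworkXError, as the tests check). A minimal sketch with hypothetical toy graphs:

    import networkx as nx

    G = nx.Graph([(1, 2), (2, 3)])
    H = nx.Graph([(2, 3), (3, 4)])
    # Both operands must have identical node sets, so copy the nodes over first.
    G.add_nodes_from(H)
    H.add_nodes_from(G)

    print(sorted(nx.difference(G, H).edges()))            # [(1, 2)]
    print(sorted(nx.difference(H, G).edges()))            # [(3, 4)]
    print(sorted(nx.symmetric_difference(G, H).edges()))  # [(1, 2), (3, 4)]
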
diff --git a/networkx/algorithms/operators/tests/test_unary.py b/networkx/algorithms/operators/tests/test_unary.py
index ea10d75c..de76bc81 100644
--- a/networkx/algorithms/operators/tests/test_unary.py
+++ b/networkx/algorithms/operators/tests/test_unary.py
@@ -4,44 +4,45 @@ from networkx import *
def test_complement():
- null=null_graph()
- empty1=empty_graph(1)
- empty10=empty_graph(10)
- K3=complete_graph(3)
- K5=complete_graph(5)
- K10=complete_graph(10)
- P2=path_graph(2)
- P3=path_graph(3)
- P5=path_graph(5)
- P10=path_graph(10)
- #complement of the complete graph is empty
+ null = null_graph()
+ empty1 = empty_graph(1)
+ empty10 = empty_graph(10)
+ K3 = complete_graph(3)
+ K5 = complete_graph(5)
+ K10 = complete_graph(10)
+ P2 = path_graph(2)
+ P3 = path_graph(3)
+ P5 = path_graph(5)
+ P10 = path_graph(10)
+ # complement of the complete graph is empty
- G=complement(K3)
- assert_true(is_isomorphic(G,empty_graph(3)))
- G=complement(K5)
- assert_true(is_isomorphic(G,empty_graph(5)))
+ G = complement(K3)
+ assert_true(is_isomorphic(G, empty_graph(3)))
+ G = complement(K5)
+ assert_true(is_isomorphic(G, empty_graph(5)))
# for any G, G=complement(complement(G))
- P3cc=complement(complement(P3))
- assert_true(is_isomorphic(P3,P3cc))
- nullcc=complement(complement(null))
- assert_true(is_isomorphic(null,nullcc))
- b=bull_graph()
- bcc=complement(complement(b))
- assert_true(is_isomorphic(b,bcc))
+ P3cc = complement(complement(P3))
+ assert_true(is_isomorphic(P3, P3cc))
+ nullcc = complement(complement(null))
+ assert_true(is_isomorphic(null, nullcc))
+ b = bull_graph()
+ bcc = complement(complement(b))
+ assert_true(is_isomorphic(b, bcc))
+
def test_complement_2():
- G1=nx.DiGraph()
- G1.add_edge('A','B')
- G1.add_edge('A','C')
- G1.add_edge('A','D')
- G1C=complement(G1)
+ G1 = nx.DiGraph()
+ G1.add_edge('A', 'B')
+ G1.add_edge('A', 'C')
+ G1.add_edge('A', 'D')
+ G1C = complement(G1)
assert_equal(sorted(G1C.edges()),
[('B', 'A'), ('B', 'C'),
('B', 'D'), ('C', 'A'), ('C', 'B'),
('C', 'D'), ('D', 'A'), ('D', 'B'), ('D', 'C')])
+
def test_reverse1():
# Other tests for reverse are done by the DiGraph and MultiDigraph.
- G1=nx.Graph()
+ G1 = nx.Graph()
assert_raises(nx.NetworkXError, nx.reverse, G1)
-
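The complement tests above lean on two properties of nx.complement: it preserves the node set while flipping the edge set, and applying it twice yields a graph isomorphic to the original. A small sketch:

    import networkx as nx

    K3 = nx.complete_graph(3)

    # The complement of a complete graph keeps all nodes but has no edges.
    C = nx.complement(K3)
    print(C.number_of_nodes(), C.number_of_edges())   # 3 0

    # Complementing twice gives back an isomorphic copy of the original.
    print(nx.is_isomorphic(K3, nx.complement(C)))     # True
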
diff --git a/networkx/algorithms/operators/unary.py b/networkx/algorithms/operators/unary.py
index 069f0d84..71a6303f 100644
--- a/networkx/algorithms/operators/unary.py
+++ b/networkx/algorithms/operators/unary.py
@@ -8,8 +8,8 @@
import networkx as nx
from networkx.utils import not_implemented_for
__author__ = """\n""".join(['Aric Hagberg <aric.hagberg@gmail.com>',
- 'Pieter Swart (swart@lanl.gov)',
- 'Dan Schult(dschult@colgate.edu)'])
+ 'Pieter Swart (swart@lanl.gov)',
+ 'Dan Schult(dschult@colgate.edu)'])
__all__ = ['complement', 'reverse']
diff --git a/networkx/algorithms/shortest_paths/__init__.py b/networkx/algorithms/shortest_paths/__init__.py
index 64846eb4..eb0d91ce 100644
--- a/networkx/algorithms/shortest_paths/__init__.py
+++ b/networkx/algorithms/shortest_paths/__init__.py
@@ -3,4 +3,3 @@ from networkx.algorithms.shortest_paths.unweighted import *
from networkx.algorithms.shortest_paths.weighted import *
from networkx.algorithms.shortest_paths.astar import *
from networkx.algorithms.shortest_paths.dense import *
-
diff --git a/networkx/algorithms/shortest_paths/generic.py b/networkx/algorithms/shortest_paths/generic.py
index 069e9c0f..0c62bed7 100644
--- a/networkx/algorithms/shortest_paths/generic.py
+++ b/networkx/algorithms/shortest_paths/generic.py
@@ -241,7 +241,7 @@ def shortest_path_length(G, source=None, target=None, weight=None):
paths = path_length(G, target, weight=weight)
else:
if source not in G:
- raise nx.NodeNotFound("Source {} not in G".format(source));
+ raise nx.NodeNotFound("Source {} not in G".format(source))
if target is None:
# Find paths to all nodes accessible from the source.
@@ -254,7 +254,7 @@ def shortest_path_length(G, source=None, target=None, weight=None):
# Find shortest source-target path.
if weight is None:
p = nx.bidirectional_shortest_path(G, source, target)
- paths = len(p)-1
+ paths = len(p) - 1
else:
paths = nx.dijkstra_path_length(G, source, target, weight)
return paths
@@ -324,10 +324,11 @@ def average_shortest_path_length(G, weight=None):
raise nx.NetworkXError("Graph is not connected.")
# Compute all-pairs shortest paths.
if weight is None:
- path_length = lambda v: nx.single_source_shortest_path_length(G, v)
+ def path_length(v): return nx.single_source_shortest_path_length(G, v)
else:
ssdpl = nx.single_source_dijkstra_path_length
- path_length = lambda v: ssdpl(G, v, weight=weight)
+
+ def path_length(v): return ssdpl(G, v, weight=weight)
# Sum the distances for each (ordered) pair of source and target node.
s = sum(l for u in G for l in path_length(u).values())
return s / (n * (n - 1))
@@ -380,7 +381,7 @@ def all_shortest_paths(G, source, target, weight=None):
else:
pred = nx.predecessor(G, source)
- if source not in G :
+ if source not in G:
raise nx.NodeNotFound('Source {} is not in G'.format(source))
if target not in pred:
@@ -391,7 +392,7 @@ def all_shortest_paths(G, source, target, weight=None):
while top >= 0:
node, i = stack[top]
if node == source:
- yield [p for p, n in reversed(stack[:top+1])]
+ yield [p for p, n in reversed(stack[:top + 1])]
if len(pred[node]) > i:
top += 1
if top == len(stack):
@@ -399,5 +400,5 @@ def all_shortest_paths(G, source, target, weight=None):
else:
stack[top] = [pred[node][i], 0]
else:
- stack[top-1][1] += 1
+ stack[top - 1][1] += 1
top -= 1
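The non-whitespace change in generic.py above swaps assigned lambdas for small def statements, the usual fix for pycodestyle's E731 ("do not assign a lambda expression, use a def"). A minimal before/after sketch of the pattern, independent of networkx:

    # Flagged by E731: a lambda bound to a name gains nothing over a def
    # and shows up as '<lambda>' in tracebacks.
    square = lambda x: x * x  # noqa: E731

    # Preferred equivalent: identical behaviour, a real function name.
    def square_fn(x):
        return x * x

    assert square(3) == square_fn(3) == 9
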
diff --git a/networkx/algorithms/shortest_paths/tests/test_dense.py b/networkx/algorithms/shortest_paths/tests/test_dense.py
index 97eb2561..b4628fd4 100644
--- a/networkx/algorithms/shortest_paths/tests/test_dense.py
+++ b/networkx/algorithms/shortest_paths/tests/test_dense.py
@@ -2,20 +2,21 @@
from nose.tools import *
import networkx as nx
+
class TestFloyd:
def setUp(self):
pass
def test_floyd_warshall_predecessor_and_distance(self):
- XG=nx.DiGraph()
- XG.add_weighted_edges_from([('s','u',10) ,('s','x',5) ,
- ('u','v',1) ,('u','x',2) ,
- ('v','y',1) ,('x','u',3) ,
- ('x','v',5) ,('x','y',2) ,
- ('y','s',7) ,('y','v',6)])
- path, dist =nx.floyd_warshall_predecessor_and_distance(XG)
- assert_equal(dist['s']['v'],9)
- assert_equal(path['s']['v'],'u')
+ XG = nx.DiGraph()
+ XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5),
+ ('u', 'v', 1), ('u', 'x', 2),
+ ('v', 'y', 1), ('x', 'u', 3),
+ ('x', 'v', 5), ('x', 'y', 2),
+ ('y', 's', 7), ('y', 'v', 6)])
+ path, dist = nx.floyd_warshall_predecessor_and_distance(XG)
+ assert_equal(dist['s']['v'], 9)
+ assert_equal(path['s']['v'], 'u')
assert_equal(dist,
{'y': {'y': 0, 'x': 12, 's': 7, 'u': 15, 'v': 6},
'x': {'y': 2, 'x': 0, 's': 9, 'u': 3, 'v': 4},
@@ -23,81 +24,80 @@ class TestFloyd:
'u': {'y': 2, 'x': 2, 's': 9, 'u': 0, 'v': 1},
'v': {'y': 1, 'x': 13, 's': 8, 'u': 16, 'v': 0}})
-
- GG=XG.to_undirected()
+ GG = XG.to_undirected()
# make sure we get lower weight
# to_undirected might choose either edge with weight 2 or weight 3
- GG['u']['x']['weight']=2
+ GG['u']['x']['weight'] = 2
path, dist = nx.floyd_warshall_predecessor_and_distance(GG)
- assert_equal(dist['s']['v'],8)
+ assert_equal(dist['s']['v'], 8)
# skip this test, could be alternate path s-u-v
# assert_equal(path['s']['v'],'y')
- G=nx.DiGraph() # no weights
- G.add_edges_from([('s','u'), ('s','x'),
- ('u','v'), ('u','x'),
- ('v','y'), ('x','u'),
- ('x','v'), ('x','y'),
- ('y','s'), ('y','v')])
+ G = nx.DiGraph() # no weights
+ G.add_edges_from([('s', 'u'), ('s', 'x'),
+ ('u', 'v'), ('u', 'x'),
+ ('v', 'y'), ('x', 'u'),
+ ('x', 'v'), ('x', 'y'),
+ ('y', 's'), ('y', 'v')])
path, dist = nx.floyd_warshall_predecessor_and_distance(G)
- assert_equal(dist['s']['v'],2)
+ assert_equal(dist['s']['v'], 2)
# skip this test, could be alternate path s-u-v
# assert_equal(path['s']['v'],'x')
# alternate interface
dist = nx.floyd_warshall(G)
- assert_equal(dist['s']['v'],2)
+ assert_equal(dist['s']['v'], 2)
def test_cycle(self):
path, dist = nx.floyd_warshall_predecessor_and_distance(nx.cycle_graph(7))
- assert_equal(dist[0][3],3)
- assert_equal(path[0][3],2)
- assert_equal(dist[0][4],3)
+ assert_equal(dist[0][3], 3)
+ assert_equal(path[0][3], 2)
+ assert_equal(dist[0][4], 3)
def test_weighted(self):
- XG3=nx.Graph()
- XG3.add_weighted_edges_from([ [0,1,2],[1,2,12],[2,3,1],
- [3,4,5],[4,5,1],[5,0,10] ])
+ XG3 = nx.Graph()
+ XG3.add_weighted_edges_from([[0, 1, 2], [1, 2, 12], [2, 3, 1],
+ [3, 4, 5], [4, 5, 1], [5, 0, 10]])
path, dist = nx.floyd_warshall_predecessor_and_distance(XG3)
- assert_equal(dist[0][3],15)
- assert_equal(path[0][3],2)
+ assert_equal(dist[0][3], 15)
+ assert_equal(path[0][3], 2)
def test_weighted2(self):
- XG4=nx.Graph()
- XG4.add_weighted_edges_from([ [0,1,2],[1,2,2],[2,3,1],
- [3,4,1],[4,5,1],[5,6,1],
- [6,7,1],[7,0,1] ])
+ XG4 = nx.Graph()
+ XG4.add_weighted_edges_from([[0, 1, 2], [1, 2, 2], [2, 3, 1],
+ [3, 4, 1], [4, 5, 1], [5, 6, 1],
+ [6, 7, 1], [7, 0, 1]])
path, dist = nx.floyd_warshall_predecessor_and_distance(XG4)
- assert_equal(dist[0][2],4)
- assert_equal(path[0][2],1)
+ assert_equal(dist[0][2], 4)
+ assert_equal(path[0][2], 1)
def test_weight_parameter(self):
XG4 = nx.Graph()
- XG4.add_edges_from([ (0, 1, {'heavy': 2}), (1, 2, {'heavy': 2}),
- (2, 3, {'heavy': 1}), (3, 4, {'heavy': 1}),
- (4, 5, {'heavy': 1}), (5, 6, {'heavy': 1}),
- (6, 7, {'heavy': 1}), (7, 0, {'heavy': 1}) ])
+ XG4.add_edges_from([(0, 1, {'heavy': 2}), (1, 2, {'heavy': 2}),
+ (2, 3, {'heavy': 1}), (3, 4, {'heavy': 1}),
+ (4, 5, {'heavy': 1}), (5, 6, {'heavy': 1}),
+ (6, 7, {'heavy': 1}), (7, 0, {'heavy': 1})])
path, dist = nx.floyd_warshall_predecessor_and_distance(XG4,
- weight='heavy')
+ weight='heavy')
assert_equal(dist[0][2], 4)
assert_equal(path[0][2], 1)
def test_zero_distance(self):
- XG=nx.DiGraph()
- XG.add_weighted_edges_from([('s','u',10) ,('s','x',5) ,
- ('u','v',1) ,('u','x',2) ,
- ('v','y',1) ,('x','u',3) ,
- ('x','v',5) ,('x','y',2) ,
- ('y','s',7) ,('y','v',6)])
- path, dist =nx.floyd_warshall_predecessor_and_distance(XG)
+ XG = nx.DiGraph()
+ XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5),
+ ('u', 'v', 1), ('u', 'x', 2),
+ ('v', 'y', 1), ('x', 'u', 3),
+ ('x', 'v', 5), ('x', 'y', 2),
+ ('y', 's', 7), ('y', 'v', 6)])
+ path, dist = nx.floyd_warshall_predecessor_and_distance(XG)
for u in XG:
assert_equal(dist[u][u], 0)
- GG=XG.to_undirected()
+ GG = XG.to_undirected()
# make sure we get lower weight
# to_undirected might choose either edge with weight 2 or weight 3
- GG['u']['x']['weight']=2
+ GG['u']['x']['weight'] = 2
path, dist = nx.floyd_warshall_predecessor_and_distance(GG)
for u in GG:
@@ -105,13 +105,13 @@ class TestFloyd:
def test_zero_weight(self):
G = nx.DiGraph()
- edges = [(1,2,-2), (2,3,-4), (1,5,1), (5,4,0), (4,3,-5), (2,5,-7)]
+ edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), (5, 4, 0), (4, 3, -5), (2, 5, -7)]
G.add_weighted_edges_from(edges)
dist = nx.floyd_warshall(G)
assert_equal(dist[1][3], -14)
G = nx.MultiDiGraph()
- edges.append( (2,5,-7) )
+ edges.append((2, 5, -7))
G.add_weighted_edges_from(edges)
dist = nx.floyd_warshall(G)
assert_equal(dist[1][3], -14)
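The Floyd-Warshall tests above use both return styles of the dense shortest-path API; for orientation, a minimal sketch:

    import networkx as nx

    G = nx.cycle_graph(5)

    # dict-of-dicts of all-pairs shortest path lengths
    dist = nx.floyd_warshall(G)
    print(dist[0][2])               # -> 2

    # the predecessor dict lets the actual paths be reconstructed
    pred, dist = nx.floyd_warshall_predecessor_and_distance(G)
    print(pred[0][2], dist[0][2])   # -> 1 2
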
diff --git a/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py b/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py
index 9628b7eb..96aefbcf 100644
--- a/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py
+++ b/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py
@@ -3,8 +3,10 @@ from nose.tools import *
from nose import SkipTest
import networkx as nx
+
class TestFloydNumpy(object):
- numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+ numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
+
@classmethod
def setupClass(cls):
global numpy
@@ -12,56 +14,55 @@ class TestFloydNumpy(object):
global assert_almost_equal
try:
import numpy
- from numpy.testing import assert_equal,assert_almost_equal
+ from numpy.testing import assert_equal, assert_almost_equal
except ImportError:
- raise SkipTest('NumPy not available.')
+ raise SkipTest('NumPy not available.')
def test_cycle_numpy(self):
dist = nx.floyd_warshall_numpy(nx.cycle_graph(7))
- assert_equal(dist[0,3],3)
- assert_equal(dist[0,4],3)
+ assert_equal(dist[0, 3], 3)
+ assert_equal(dist[0, 4], 3)
def test_weighted_numpy(self):
- XG3=nx.Graph()
- XG3.add_weighted_edges_from([ [0,1,2],[1,2,12],[2,3,1],
- [3,4,5],[4,5,1],[5,0,10] ])
+ XG3 = nx.Graph()
+ XG3.add_weighted_edges_from([[0, 1, 2], [1, 2, 12], [2, 3, 1],
+ [3, 4, 5], [4, 5, 1], [5, 0, 10]])
dist = nx.floyd_warshall_numpy(XG3)
- assert_equal(dist[0,3],15)
+ assert_equal(dist[0, 3], 15)
def test_weighted_numpy(self):
- XG4=nx.Graph()
- XG4.add_weighted_edges_from([ [0,1,2],[1,2,2],[2,3,1],
- [3,4,1],[4,5,1],[5,6,1],
- [6,7,1],[7,0,1] ])
+ XG4 = nx.Graph()
+ XG4.add_weighted_edges_from([[0, 1, 2], [1, 2, 2], [2, 3, 1],
+ [3, 4, 1], [4, 5, 1], [5, 6, 1],
+ [6, 7, 1], [7, 0, 1]])
dist = nx.floyd_warshall_numpy(XG4)
- assert_equal(dist[0,2],4)
+ assert_equal(dist[0, 2], 4)
def test_weight_parameter_numpy(self):
XG4 = nx.Graph()
- XG4.add_edges_from([ (0, 1, {'heavy': 2}), (1, 2, {'heavy': 2}),
- (2, 3, {'heavy': 1}), (3, 4, {'heavy': 1}),
- (4, 5, {'heavy': 1}), (5, 6, {'heavy': 1}),
- (6, 7, {'heavy': 1}), (7, 0, {'heavy': 1}) ])
+ XG4.add_edges_from([(0, 1, {'heavy': 2}), (1, 2, {'heavy': 2}),
+ (2, 3, {'heavy': 1}), (3, 4, {'heavy': 1}),
+ (4, 5, {'heavy': 1}), (5, 6, {'heavy': 1}),
+ (6, 7, {'heavy': 1}), (7, 0, {'heavy': 1})])
dist = nx.floyd_warshall_numpy(XG4, weight='heavy')
assert_equal(dist[0, 2], 4)
def test_directed_cycle_numpy(self):
G = nx.DiGraph()
nx.add_cycle(G, [0, 1, 2, 3])
- pred,dist = nx.floyd_warshall_predecessor_and_distance(G)
+ pred, dist = nx.floyd_warshall_predecessor_and_distance(G)
D = nx.utils.dict_to_numpy_array(dist)
- assert_equal(nx.floyd_warshall_numpy(G),D)
+ assert_equal(nx.floyd_warshall_numpy(G), D)
def test_zero_weight(self):
G = nx.DiGraph()
- edges = [(1,2,-2), (2,3,-4), (1,5,1), (5,4,0), (4,3,-5), (2,5,-7)]
+ edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), (5, 4, 0), (4, 3, -5), (2, 5, -7)]
G.add_weighted_edges_from(edges)
dist = nx.floyd_warshall_numpy(G)
assert_equal(int(numpy.min(dist)), -14)
G = nx.MultiDiGraph()
- edges.append( (2,5,-7) )
+ edges.append((2, 5, -7))
G.add_weighted_edges_from(edges)
dist = nx.floyd_warshall_numpy(G)
assert_equal(int(numpy.min(dist)), -14)
-
diff --git a/networkx/algorithms/shortest_paths/tests/test_generic.py b/networkx/algorithms/shortest_paths/tests/test_generic.py
index 1d992412..e8f36126 100644
--- a/networkx/algorithms/shortest_paths/tests/test_generic.py
+++ b/networkx/algorithms/shortest_paths/tests/test_generic.py
@@ -25,25 +25,25 @@ def validate_grid_path(r, c, s, t, p):
for u, v in zip(p[:-1], p[1:]):
ok_((abs(v[0] - u[0]), abs(v[1] - u[1])) in [(0, 1), (1, 0)])
+
class TestGenericPath:
def setUp(self):
from networkx import convert_node_labels_to_integers as cnlti
- self.grid=cnlti(nx.grid_2d_graph(4,4),first_label=1,ordering="sorted")
- self.cycle=nx.cycle_graph(7)
- self.directed_cycle=nx.cycle_graph(7,create_using=nx.DiGraph())
-
+ self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted")
+ self.cycle = nx.cycle_graph(7)
+ self.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
def test_shortest_path(self):
- assert_equal(nx.shortest_path(self.cycle,0,3),[0, 1, 2, 3])
- assert_equal(nx.shortest_path(self.cycle,0,4),[0, 6, 5, 4])
- validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid,1,12))
- assert_equal(nx.shortest_path(self.directed_cycle,0,3),[0, 1, 2, 3])
+ assert_equal(nx.shortest_path(self.cycle, 0, 3), [0, 1, 2, 3])
+ assert_equal(nx.shortest_path(self.cycle, 0, 4), [0, 6, 5, 4])
+ validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12))
+ assert_equal(nx.shortest_path(self.directed_cycle, 0, 3), [0, 1, 2, 3])
# now with weights
- assert_equal(nx.shortest_path(self.cycle,0,3,weight='weight'),[0, 1, 2, 3])
- assert_equal(nx.shortest_path(self.cycle,0,4,weight='weight'),[0, 6, 5, 4])
- validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid,1,12,weight='weight'))
- assert_equal(nx.shortest_path(self.directed_cycle,0,3,weight='weight'),
+ assert_equal(nx.shortest_path(self.cycle, 0, 3, weight='weight'), [0, 1, 2, 3])
+ assert_equal(nx.shortest_path(self.cycle, 0, 4, weight='weight'), [0, 6, 5, 4])
+ validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12, weight='weight'))
+ assert_equal(nx.shortest_path(self.directed_cycle, 0, 3, weight='weight'),
[0, 1, 2, 3])
def test_shortest_path_target(self):
@@ -51,13 +51,13 @@ class TestGenericPath:
assert_equal(sp, {0: [0, 1], 1: [1], 2: [2, 1]})
def test_shortest_path_length(self):
- assert_equal(nx.shortest_path_length(self.cycle,0,3),3)
- assert_equal(nx.shortest_path_length(self.grid,1,12),5)
- assert_equal(nx.shortest_path_length(self.directed_cycle,0,4),4)
+ assert_equal(nx.shortest_path_length(self.cycle, 0, 3), 3)
+ assert_equal(nx.shortest_path_length(self.grid, 1, 12), 5)
+ assert_equal(nx.shortest_path_length(self.directed_cycle, 0, 4), 4)
# now with weights
- assert_equal(nx.shortest_path_length(self.cycle,0,3,weight='weight'),3)
- assert_equal(nx.shortest_path_length(self.grid,1,12,weight='weight'),5)
- assert_equal(nx.shortest_path_length(self.directed_cycle,0,4,weight='weight'),4)
+ assert_equal(nx.shortest_path_length(self.cycle, 0, 3, weight='weight'), 3)
+ assert_equal(nx.shortest_path_length(self.grid, 1, 12, weight='weight'), 5)
+ assert_equal(nx.shortest_path_length(self.directed_cycle, 0, 4, weight='weight'), 4)
def test_shortest_path_length_target(self):
sp = dict(nx.shortest_path_length(nx.path_graph(3), target=1))
@@ -66,25 +66,24 @@ class TestGenericPath:
assert_equal(sp[2], 1)
def test_single_source_shortest_path(self):
- p=nx.shortest_path(self.cycle,0)
- assert_equal(p[3],[0,1,2,3])
- assert_equal(p,nx.single_source_shortest_path(self.cycle,0))
- p=nx.shortest_path(self.grid,1)
+ p = nx.shortest_path(self.cycle, 0)
+ assert_equal(p[3], [0, 1, 2, 3])
+ assert_equal(p, nx.single_source_shortest_path(self.cycle, 0))
+ p = nx.shortest_path(self.grid, 1)
validate_grid_path(4, 4, 1, 12, p[12])
# now with weights
- p=nx.shortest_path(self.cycle,0,weight='weight')
- assert_equal(p[3],[0,1,2,3])
- assert_equal(p,nx.single_source_dijkstra_path(self.cycle,0))
- p=nx.shortest_path(self.grid,1,weight='weight')
+ p = nx.shortest_path(self.cycle, 0, weight='weight')
+ assert_equal(p[3], [0, 1, 2, 3])
+ assert_equal(p, nx.single_source_dijkstra_path(self.cycle, 0))
+ p = nx.shortest_path(self.grid, 1, weight='weight')
validate_grid_path(4, 4, 1, 12, p[12])
-
def test_single_source_shortest_path_length(self):
- l = dict(nx.shortest_path_length(self.cycle,0))
- assert_equal(l,{0:0,1:1,2:2,3:3,4:3,5:2,6:1})
- assert_equal(l, dict(nx.single_source_shortest_path_length(self.cycle,0)))
- l = dict(nx.shortest_path_length(self.grid,1))
- assert_equal(l[16],6)
+ l = dict(nx.shortest_path_length(self.cycle, 0))
+ assert_equal(l, {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1})
+ assert_equal(l, dict(nx.single_source_shortest_path_length(self.cycle, 0)))
+ l = dict(nx.shortest_path_length(self.grid, 1))
+ assert_equal(l[16], 6)
# now with weights
l = dict(nx.shortest_path_length(self.cycle, 0, weight='weight'))
assert_equal(l, {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1})
@@ -93,27 +92,25 @@ class TestGenericPath:
l = dict(nx.shortest_path_length(self.grid, 1, weight='weight'))
assert_equal(l[16], 6)
-
def test_all_pairs_shortest_path(self):
- p=nx.shortest_path(self.cycle)
+ p = nx.shortest_path(self.cycle)
assert_equal(p[0][3], [0, 1, 2, 3])
assert_equal(p, dict(nx.all_pairs_shortest_path(self.cycle)))
- p=nx.shortest_path(self.grid)
+ p = nx.shortest_path(self.grid)
validate_grid_path(4, 4, 1, 12, p[1][12])
# now with weights
- p=nx.shortest_path(self.cycle,weight='weight')
- assert_equal(p[0][3],[0, 1, 2, 3])
+ p = nx.shortest_path(self.cycle, weight='weight')
+ assert_equal(p[0][3], [0, 1, 2, 3])
assert_equal(p, dict(nx.all_pairs_dijkstra_path(self.cycle)))
- p=nx.shortest_path(self.grid,weight='weight')
+ p = nx.shortest_path(self.grid, weight='weight')
validate_grid_path(4, 4, 1, 12, p[1][12])
-
def test_all_pairs_shortest_path_length(self):
- l=dict(nx.shortest_path_length(self.cycle))
- assert_equal(l[0],{0:0,1:1,2:2,3:3,4:3,5:2,6:1})
+ l = dict(nx.shortest_path_length(self.cycle))
+ assert_equal(l[0], {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1})
assert_equal(l, dict(nx.all_pairs_shortest_path_length(self.cycle)))
- l=dict(nx.shortest_path_length(self.grid))
- assert_equal(l[1][16],6)
+ l = dict(nx.shortest_path_length(self.grid))
+ assert_equal(l[1][16], 6)
# now with weights
l = dict(nx.shortest_path_length(self.cycle, weight='weight'))
assert_equal(l[0], {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1})
@@ -125,21 +122,21 @@ class TestGenericPath:
G = nx.Graph()
nx.add_path(G, range(3))
nx.add_path(G, range(3, 5))
- assert_true(nx.has_path(G,0,2))
- assert_false(nx.has_path(G,0,4))
+ assert_true(nx.has_path(G, 0, 2))
+ assert_false(nx.has_path(G, 0, 4))
def test_all_shortest_paths(self):
G = nx.Graph()
nx.add_path(G, [0, 1, 2, 3])
nx.add_path(G, [0, 10, 20, 3])
- assert_equal([[0,1,2,3],[0,10,20,3]],
- sorted(nx.all_shortest_paths(G,0,3)))
+ assert_equal([[0, 1, 2, 3], [0, 10, 20, 3]],
+ sorted(nx.all_shortest_paths(G, 0, 3)))
@raises(nx.NetworkXNoPath)
def test_all_shortest_paths_raise(self):
G = nx.path_graph(4)
G.add_node(4)
- paths = list(nx.all_shortest_paths(G,0,4))
+ paths = list(nx.all_shortest_paths(G, 0, 4))
class TestAverageShortestPathLength(object):
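The generic shortest-path tests above rely on nx.shortest_path switching between BFS and Dijkstra depending on whether a weight key is supplied; a minimal sketch mirroring the cycle-graph cases:

    import networkx as nx

    G = nx.cycle_graph(7)

    # Unweighted: BFS, so the 3-hop route wins.
    print(nx.shortest_path(G, 0, 3))                   # [0, 1, 2, 3]

    # weight='weight' switches to Dijkstra; making one edge heavy
    # pushes the search the long way round the cycle.
    G[1][2]['weight'] = 10
    print(nx.shortest_path(G, 0, 3, weight='weight'))  # [0, 6, 5, 4, 3]
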
diff --git a/networkx/algorithms/shortest_paths/tests/test_unweighted.py b/networkx/algorithms/shortest_paths/tests/test_unweighted.py
index 0b876c21..bef300e8 100644
--- a/networkx/algorithms/shortest_paths/tests/test_unweighted.py
+++ b/networkx/algorithms/shortest_paths/tests/test_unweighted.py
@@ -2,6 +2,7 @@
from nose.tools import *
import networkx as nx
+
def validate_grid_path(r, c, s, t, p):
ok_(isinstance(p, list))
assert_equal(p[0], s)
@@ -16,33 +17,32 @@ def validate_grid_path(r, c, s, t, p):
for u, v in zip(p[:-1], p[1:]):
ok_((abs(v[0] - u[0]), abs(v[1] - u[1])) in [(0, 1), (1, 0)])
+
class TestUnweightedPath:
def setUp(self):
from networkx import convert_node_labels_to_integers as cnlti
- self.grid=cnlti(nx.grid_2d_graph(4,4),first_label=1,ordering="sorted")
- self.cycle=nx.cycle_graph(7)
- self.directed_cycle=nx.cycle_graph(7,create_using=nx.DiGraph())
-
+ self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted")
+ self.cycle = nx.cycle_graph(7)
+ self.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
def test_bidirectional_shortest_path(self):
- assert_equal(nx.bidirectional_shortest_path(self.cycle,0,3),
+ assert_equal(nx.bidirectional_shortest_path(self.cycle, 0, 3),
[0, 1, 2, 3])
- assert_equal(nx.bidirectional_shortest_path(self.cycle,0,4),
+ assert_equal(nx.bidirectional_shortest_path(self.cycle, 0, 4),
[0, 6, 5, 4])
- validate_grid_path(4, 4, 1, 12, nx.bidirectional_shortest_path(self.grid,1,12))
- assert_equal(nx.bidirectional_shortest_path(self.directed_cycle,0,3),
+ validate_grid_path(4, 4, 1, 12, nx.bidirectional_shortest_path(self.grid, 1, 12))
+ assert_equal(nx.bidirectional_shortest_path(self.directed_cycle, 0, 3),
[0, 1, 2, 3])
def test_shortest_path_length(self):
- assert_equal(nx.shortest_path_length(self.cycle,0,3),3)
- assert_equal(nx.shortest_path_length(self.grid,1,12),5)
- assert_equal(nx.shortest_path_length(self.directed_cycle,0,4),4)
+ assert_equal(nx.shortest_path_length(self.cycle, 0, 3), 3)
+ assert_equal(nx.shortest_path_length(self.grid, 1, 12), 5)
+ assert_equal(nx.shortest_path_length(self.directed_cycle, 0, 4), 4)
# now with weights
- assert_equal(nx.shortest_path_length(self.cycle,0,3,weight=True),3)
- assert_equal(nx.shortest_path_length(self.grid,1,12,weight=True),5)
- assert_equal(nx.shortest_path_length(self.directed_cycle,0,4,weight=True),4)
-
+ assert_equal(nx.shortest_path_length(self.cycle, 0, 3, weight=True), 3)
+ assert_equal(nx.shortest_path_length(self.grid, 1, 12, weight=True), 5)
+ assert_equal(nx.shortest_path_length(self.directed_cycle, 0, 4, weight=True), 4)
def test_single_source_shortest_path(self):
p = nx.single_source_shortest_path(self.directed_cycle, 3)
@@ -50,14 +50,14 @@ class TestUnweightedPath:
p = nx.single_source_shortest_path(self.cycle, 0)
assert_equal(p[3], [0, 1, 2, 3])
p = nx.single_source_shortest_path(self.cycle, 0, cutoff=0)
- assert_equal(p,{0 : [0]})
+ assert_equal(p, {0: [0]})
def test_single_source_shortest_path_length(self):
pl = nx.single_source_shortest_path_length
lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
- assert_equal(dict(pl(self.cycle,0)), lengths)
+ assert_equal(dict(pl(self.cycle, 0)), lengths)
lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6}
- assert_equal(dict(pl(self.directed_cycle,0)), lengths)
+ assert_equal(dict(pl(self.directed_cycle, 0)), lengths)
def test_single_target_shortest_path(self):
p = nx.single_target_shortest_path(self.directed_cycle, 0)
@@ -65,7 +65,7 @@ class TestUnweightedPath:
p = nx.single_target_shortest_path(self.cycle, 0)
assert_equal(p[3], [3, 2, 1, 0])
p = nx.single_target_shortest_path(self.cycle, 0, cutoff=0)
- assert_equal(p,{0 : [0]})
+ assert_equal(p, {0: [0]})
def test_single_target_shortest_path_length(self):
pl = nx.single_target_shortest_path_length
@@ -75,44 +75,44 @@ class TestUnweightedPath:
assert_equal(dict(pl(self.directed_cycle, 0)), lengths)
def test_all_pairs_shortest_path(self):
- p=dict(nx.all_pairs_shortest_path(self.cycle))
- assert_equal(p[0][3],[0,1,2,3])
- p=dict(nx.all_pairs_shortest_path(self.grid))
+ p = dict(nx.all_pairs_shortest_path(self.cycle))
+ assert_equal(p[0][3], [0, 1, 2, 3])
+ p = dict(nx.all_pairs_shortest_path(self.grid))
validate_grid_path(4, 4, 1, 12, p[1][12])
def test_all_pairs_shortest_path_length(self):
l = dict(nx.all_pairs_shortest_path_length(self.cycle))
- assert_equal(l[0],{0:0,1:1,2:2,3:3,4:3,5:2,6:1})
+ assert_equal(l[0], {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1})
l = dict(nx.all_pairs_shortest_path_length(self.grid))
- assert_equal(l[1][16],6)
+ assert_equal(l[1][16], 6)
def test_predecessor_path(self):
G = nx.path_graph(4)
- assert_equal(nx.predecessor(G,0),{0: [], 1: [0], 2: [1], 3: [2]})
- assert_equal(nx.predecessor(G,0,3),[2])
+ assert_equal(nx.predecessor(G, 0), {0: [], 1: [0], 2: [1], 3: [2]})
+ assert_equal(nx.predecessor(G, 0, 3), [2])
def test_predecessor_cycle(self):
G = nx.cycle_graph(4)
- pred = nx.predecessor(G,0)
- assert_equal(pred[0],[])
- assert_equal(pred[1],[0])
- assert_true(pred[2] in [[1,3],[3,1]])
- assert_equal(pred[3],[0])
+ pred = nx.predecessor(G, 0)
+ assert_equal(pred[0], [])
+ assert_equal(pred[1], [0])
+ assert_true(pred[2] in [[1, 3], [3, 1]])
+ assert_equal(pred[3], [0])
def test_predecessor_cutoff(self):
- G=nx.path_graph(4)
- p = nx.predecessor(G,0,3)
+ G = nx.path_graph(4)
+ p = nx.predecessor(G, 0, 3)
assert_false(4 in p)
def test_predecessor_target(self):
- G=nx.path_graph(4)
- p = nx.predecessor(G,0,3)
- assert_equal(p,[2])
- p = nx.predecessor(G,0,3,cutoff=2)
- assert_equal(p,[])
- p,s = nx.predecessor(G,0,3,return_seen=True)
- assert_equal(p,[2])
- assert_equal(s,3)
- p,s = nx.predecessor(G,0,3,cutoff=2,return_seen=True)
- assert_equal(p,[])
- assert_equal(s,-1)
+ G = nx.path_graph(4)
+ p = nx.predecessor(G, 0, 3)
+ assert_equal(p, [2])
+ p = nx.predecessor(G, 0, 3, cutoff=2)
+ assert_equal(p, [])
+ p, s = nx.predecessor(G, 0, 3, return_seen=True)
+ assert_equal(p, [2])
+ assert_equal(s, 3)
+ p, s = nx.predecessor(G, 0, 3, cutoff=2, return_seen=True)
+ assert_equal(p, [])
+ assert_equal(s, -1)
diff --git a/networkx/algorithms/shortest_paths/tests/test_weighted.py b/networkx/algorithms/shortest_paths/tests/test_weighted.py
index 0ba63cc5..b4f732c2 100644
--- a/networkx/algorithms/shortest_paths/tests/test_weighted.py
+++ b/networkx/algorithms/shortest_paths/tests/test_weighted.py
@@ -155,14 +155,15 @@ class TestWeightedPath(WeightedTestBase):
G = nx.path_graph(4)
assert_equal(nx.dijkstra_predecessor_and_distance(G, 0),
({0: [], 1: [0], 2: [1], 3: [2]}, {0: 0, 1: 1, 2: 2, 3: 3}))
+
def test_dijkstra_predecessor2(self):
# 4-cycle
- G = nx.Graph([(0,1),(1,2),(2,3),(3,0)])
+ G = nx.Graph([(0, 1), (1, 2), (2, 3), (3, 0)])
pred, dist = nx.dijkstra_predecessor_and_distance(G, (0))
- assert_equal(pred[0],[])
- assert_equal(pred[1],[0])
- assert_true(pred[2] in [[1,3],[3,1]])
- assert_equal(pred[3],[0])
+ assert_equal(pred[0], [])
+ assert_equal(pred[1], [0])
+ assert_true(pred[2] in [[1, 3], [3, 1]])
+ assert_equal(pred[3], [0])
assert_equal(dist, {0: 0, 1: 1, 2: 2, 3: 1})
def test_dijkstra_predecessor3(self):
@@ -227,7 +228,8 @@ class TestWeightedPath(WeightedTestBase):
# The weight function will take the multiplicative inverse of
# the weights on the edges. This way, weights that were large
# before now become small and vice versa.
- weight = lambda u, v, d: 1 / d['weight']
+
+ def weight(u, v, d): return 1 / d['weight']
# The shortest path from 0 to 2 using the actual weights on the
# edges should be [0, 1, 2].
distance, path = nx.single_source_dijkstra(G, 0, 2)
@@ -240,16 +242,16 @@ class TestWeightedPath(WeightedTestBase):
assert_equal(path, [0, 2])
def test_all_pairs_dijkstra_path(self):
- cycle=nx.cycle_graph(7)
- p=dict(nx.all_pairs_dijkstra_path(cycle))
+ cycle = nx.cycle_graph(7)
+ p = dict(nx.all_pairs_dijkstra_path(cycle))
assert_equal(p[0][3], [0, 1, 2, 3])
cycle[1][2]['weight'] = 10
- p=dict(nx.all_pairs_dijkstra_path(cycle))
+ p = dict(nx.all_pairs_dijkstra_path(cycle))
assert_equal(p[0][3], [0, 6, 5, 4, 3])
def test_all_pairs_dijkstra_path_length(self):
- cycle=nx.cycle_graph(7)
+ cycle = nx.cycle_graph(7)
pl = dict(nx.all_pairs_dijkstra_path_length(cycle))
assert_equal(pl[0], {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1})
@@ -258,7 +260,7 @@ class TestWeightedPath(WeightedTestBase):
assert_equal(pl[0], {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1})
def test_all_pairs_dijkstra(self):
- cycle=nx.cycle_graph(7)
+ cycle = nx.cycle_graph(7)
out = dict(nx.all_pairs_dijkstra(cycle))
assert_equal(out[0][0], {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1})
assert_equal(out[0][1][3], [0, 1, 2, 3])
@@ -289,7 +291,8 @@ class TestDijkstraPathLength(object):
# The weight function will take the multiplicative inverse of
# the weights on the edges. This way, weights that were large
# before now become small and vice versa.
- weight = lambda u, v, d: 1 / d['weight']
+
+ def weight(u, v, d): return 1 / d['weight']
# The shortest path from 0 to 2 using the actual weights on the
# edges should be [0, 1, 2]. However, with the above weight
# function, the shortest path should be [0, 2], since that has a
@@ -332,7 +335,7 @@ class TestMultiSourceDijkstra(object):
lengths = nx.multi_source_dijkstra_path_length(G, [0])
assert_equal(lengths, {n: n for n in G})
paths = nx.multi_source_dijkstra_path(G, [0])
- assert_equal(paths, {n: list(range(n+1)) for n in G})
+ assert_equal(paths, {n: list(range(n + 1)) for n in G})
class TestBellmanFordAndGoldbergRadzik(WeightedTestBase):
@@ -494,30 +497,29 @@ class TestBellmanFordAndGoldbergRadzik(WeightedTestBase):
def test_4_cycle(self):
# 4-cycle
- G = nx.Graph([(0,1),(1,2),(2,3),(3,0)])
+ G = nx.Graph([(0, 1), (1, 2), (2, 3), (3, 0)])
dist, path = nx.single_source_bellman_ford(G, 0)
assert_equal(dist, {0: 0, 1: 1, 2: 2, 3: 1})
- assert_equal(path[0],[0])
- assert_equal(path[1],[0,1])
- assert_true(path[2] in [[0,1,2],[0,3,2]])
- assert_equal(path[3],[0,3])
+ assert_equal(path[0], [0])
+ assert_equal(path[1], [0, 1])
+ assert_true(path[2] in [[0, 1, 2], [0, 3, 2]])
+ assert_equal(path[3], [0, 3])
pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0)
- assert_equal(pred[0],[None])
- assert_equal(pred[1],[0])
- assert_true(pred[2] in [[1,3],[3,1]])
- assert_equal(pred[3],[0])
+ assert_equal(pred[0], [None])
+ assert_equal(pred[1], [0])
+ assert_true(pred[2] in [[1, 3], [3, 1]])
+ assert_equal(pred[3], [0])
assert_equal(dist, {0: 0, 1: 1, 2: 2, 3: 1})
pred, dist = nx.goldberg_radzik(G, 0)
- assert_equal(pred[0],None)
- assert_equal(pred[1],0)
- assert_true(pred[2] in [1,3])
- assert_equal(pred[3],0)
+ assert_equal(pred[0], None)
+ assert_equal(pred[1], 0)
+ assert_true(pred[2] in [1, 3])
+ assert_equal(pred[3], 0)
assert_equal(dist, {0: 0, 1: 1, 2: 2, 3: 1})
-
class TestJohnsonAlgorithm(WeightedTestBase):
@raises(nx.NetworkXError)
@@ -529,26 +531,25 @@ class TestJohnsonAlgorithm(WeightedTestBase):
def test_negative_cycle(self):
G = nx.DiGraph()
G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), ('1', '0', -5),
- ('0', '2', 2), ('1', '2', 4),
- ('2', '3', 1)])
+ ('0', '2', 2), ('1', '2', 4),
+ ('2', '3', 1)])
assert_raises(nx.NetworkXUnbounded, nx.johnson, G)
G = nx.Graph()
G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), ('1', '0', -5),
- ('0', '2', 2), ('1', '2', 4),
- ('2', '3', 1)])
+ ('0', '2', 2), ('1', '2', 4),
+ ('2', '3', 1)])
assert_raises(nx.NetworkXUnbounded, nx.johnson, G)
-
def test_negative_weights(self):
G = nx.DiGraph()
G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
- ('0', '2', 2), ('1', '2', 4),
- ('2', '3', 1)])
+ ('0', '2', 2), ('1', '2', 4),
+ ('2', '3', 1)])
paths = nx.johnson(G)
assert_equal(paths, {'1': {'1': ['1'], '3': ['1', '2', '3'],
- '2': ['1', '2']}, '0': {'1': ['0', '1'],
- '0': ['0'], '3': ['0', '1', '2', '3'],
- '2': ['0', '1', '2']}, '3': {'3': ['3']},
+ '2': ['1', '2']}, '0': {'1': ['0', '1'],
+ '0': ['0'], '3': ['0', '1', '2', '3'],
+ '2': ['0', '1', '2']}, '3': {'3': ['3']},
'2': {'3': ['2', '3'], '2': ['2']}})
@raises(nx.NetworkXError)
@@ -563,4 +564,3 @@ class TestJohnsonAlgorithm(WeightedTestBase):
validate_path(self.XG3, 0, 3, 15, nx.johnson(self.XG3)[0][3])
validate_path(self.XG4, 0, 2, 4, nx.johnson(self.XG4)[0][2])
validate_path(self.MXG4, 0, 2, 4, nx.johnson(self.MXG4)[0][2])
-
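Several hunks above turn the assigned lambda weight functions into defs; either way, the weighted shortest-path routines accept a callable taking the two endpoints and the edge-data dict. A hedged sketch of that hook, on an illustrative three-node graph:

    import networkx as nx

    G = nx.Graph()
    G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (0, 2, 10)])

    # A weight callable receives (u, v, edge_data); inverting the stored
    # weight makes the formerly expensive edge the cheapest one.
    def inverse_weight(u, v, d):
        return 1 / d['weight']

    print(nx.dijkstra_path(G, 0, 2))                         # [0, 1, 2]
    print(nx.dijkstra_path(G, 0, 2, weight=inverse_weight))  # [0, 2]
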
diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py
index a1fd1a72..77141d97 100644
--- a/networkx/algorithms/similarity.py
+++ b/networkx/algorithms/similarity.py
@@ -14,9 +14,11 @@ __all__ = [
'optimize_edit_paths'
]
+
def debug_print(*args, **kwargs):
print(*args, **kwargs)
+
def graph_edit_distance(G1, G2, node_match=None, edge_match=None,
node_subst_cost=None, node_del_cost=None, node_ins_cost=None,
edge_subst_cost=None, edge_del_cost=None, edge_ins_cost=None,
@@ -589,13 +591,13 @@ def optimize_edit_paths(G1, G2, node_match=None, edge_match=None,
#assert(C.shape == (m + n, m + n))
row_ind = [k in i or k - m in j for k in range(m + n)]
col_ind = [k in j or k - n in i for k in range(m + n)]
- return C[row_ind,:][:,col_ind]
+ return C[row_ind, :][:, col_ind]
def reduce_C(C, i, j, m, n):
#assert(C.shape == (m + n, m + n))
row_ind = [k not in i and k - m not in j for k in range(m + n)]
col_ind = [k not in j and k - n not in i for k in range(m + n)]
- return C[row_ind,:][:,col_ind]
+ return C[row_ind, :][:, col_ind]
def reduce_ind(ind, i):
#assert set(ind) == set(range(len(ind)))
@@ -639,7 +641,7 @@ def optimize_edit_paths(G1, G2, node_match=None, edge_match=None,
#assert C.shape == (m + n, m + n)
# Forbid structurally invalid matches
- inf = min(min(Ce.C.sum(axis = 0)), min(Ce.C.sum(axis = 1))) + 1
+ inf = min(min(Ce.C.sum(axis=0)), min(Ce.C.sum(axis=1))) + 1
for k, i in zip(range(m), g_ind):
g = pending_g[i]
for l, j in zip(range(n), h_ind):
@@ -744,7 +746,7 @@ def optimize_edit_paths(G1, G2, node_match=None, edge_match=None,
other.append(((i, j), Cv_ij, xy, Ce_xy, Cv.C[i, j] + localCe.ls))
# yield from
- for t in sorted(other, key = lambda t: t[4] + t[1].ls + t[3].ls):
+ for t in sorted(other, key=lambda t: t[4] + t[1].ls + t[3].ls):
yield t
def get_edit_paths(matched_uv, pending_u, pending_v, Cv,
@@ -779,15 +781,15 @@ def optimize_edit_paths(G1, G2, node_match=None, edge_match=None,
#debug_print('matched-cost:', matched_cost)
#debug_print('pending-u:', pending_u)
#debug_print('pending-v:', pending_v)
- #debug_print(Cv.C)
+ # debug_print(Cv.C)
#assert list(sorted(G1.nodes)) == list(sorted(list(u for u, v in matched_uv if u is not None) + pending_u))
#assert list(sorted(G2.nodes)) == list(sorted(list(v for u, v in matched_uv if v is not None) + pending_v))
#debug_print('pending-g:', pending_g)
#debug_print('pending-h:', pending_h)
- #debug_print(Ce.C)
+ # debug_print(Ce.C)
#assert list(sorted(G1.edges)) == list(sorted(list(g for g, h in matched_gh if g is not None) + pending_g))
#assert list(sorted(G2.edges)) == list(sorted(list(h for g, h in matched_gh if h is not None) + pending_h))
- #debug_print()
+ # debug_print()
if prune(matched_cost + Cv.ls + Ce.ls):
return
@@ -802,7 +804,7 @@ def optimize_edit_paths(G1, G2, node_match=None, edge_match=None,
else:
edit_ops = get_edit_ops(matched_uv, pending_u, pending_v, Cv,
- pending_g, pending_h, Ce, matched_cost)
+ pending_g, pending_h, Ce, matched_cost)
for ij, Cv_ij, xy, Ce_xy, edit_cost in edit_ops:
i, j = ij
#assert Cv.C[i, j] + sum(Ce.C[t] for t in xy) == edit_cost
@@ -844,7 +846,6 @@ def optimize_edit_paths(G1, G2, node_match=None, edge_match=None,
for t in xy:
matched_gh.pop()
-
# Initialization
pending_u = list(G1.nodes)
@@ -867,21 +868,21 @@ def optimize_edit_paths(G1, G2, node_match=None, edge_match=None,
if node_del_cost:
del_costs = [node_del_cost(G1.nodes[u]) for u in pending_u]
else:
- del_costs = [1]*len(pending_u)
+ del_costs = [1] * len(pending_u)
#assert not m or min(del_costs) >= 0
if node_ins_cost:
ins_costs = [node_ins_cost(G2.nodes[v]) for v in pending_v]
else:
- ins_costs = [1]*len(pending_v)
+ ins_costs = [1] * len(pending_v)
#assert not n or min(ins_costs) >= 0
inf = C[0:m, 0:n].sum() + sum(del_costs) + sum(ins_costs) + 1
- C[0:m, n:n+m] = np.array([del_costs[i] if i == j else inf
- for i in range(m) for j in range(m)]).reshape(m, m)
- C[m:m+n, 0:n] = np.array([ins_costs[i] if i == j else inf
- for i in range(n) for j in range(n)]).reshape(n, n)
+ C[0:m, n:n + m] = np.array([del_costs[i] if i == j else inf
+ for i in range(m) for j in range(m)]).reshape(m, m)
+ C[m:m + n, 0:n] = np.array([ins_costs[i] if i == j else inf
+ for i in range(n) for j in range(n)]).reshape(n, n)
Cv = make_CostMatrix(C, m, n)
#debug_print('Cv: {} x {}'.format(m, n))
- #debug_print(Cv.C)
+ # debug_print(Cv.C)
pending_g = list(G1.edges)
pending_h = list(G2.edges)
@@ -903,22 +904,22 @@ def optimize_edit_paths(G1, G2, node_match=None, edge_match=None,
if edge_del_cost:
del_costs = [edge_del_cost(G1.edges[g]) for g in pending_g]
else:
- del_costs = [1]*len(pending_g)
+ del_costs = [1] * len(pending_g)
#assert not m or min(del_costs) >= 0
if edge_ins_cost:
ins_costs = [edge_ins_cost(G2.edges[h]) for h in pending_h]
else:
- ins_costs = [1]*len(pending_h)
+ ins_costs = [1] * len(pending_h)
#assert not n or min(ins_costs) >= 0
inf = C[0:m, 0:n].sum() + sum(del_costs) + sum(ins_costs) + 1
- C[0:m, n:n+m] = np.array([del_costs[i] if i == j else inf
- for i in range(m) for j in range(m)]).reshape(m, m)
- C[m:m+n, 0:n] = np.array([ins_costs[i] if i == j else inf
- for i in range(n) for j in range(n)]).reshape(n, n)
+ C[0:m, n:n + m] = np.array([del_costs[i] if i == j else inf
+ for i in range(m) for j in range(m)]).reshape(m, m)
+ C[m:m + n, 0:n] = np.array([ins_costs[i] if i == j else inf
+ for i in range(n) for j in range(n)]).reshape(n, n)
Ce = make_CostMatrix(C, m, n)
#debug_print('Ce: {} x {}'.format(m, n))
- #debug_print(Ce.C)
- #debug_print()
+ # debug_print(Ce.C)
+ # debug_print()
class MaxCost:
def __init__(self):
@@ -936,7 +937,6 @@ def optimize_edit_paths(G1, G2, node_match=None, edge_match=None,
elif strictly_decreasing and cost >= maxcost.value:
return True
-
# Now go!
for vertex_path, edge_path, cost in \
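The similarity.py hunks only reflow optimize_edit_paths; as a pointer to the public entry point it backs, here is a minimal use of graph_edit_distance (assuming SciPy is installed, which the assignment step needs):

    import networkx as nx

    G1 = nx.cycle_graph(4)
    G2 = nx.path_graph(4)

    # With the default unit costs, turning C4 into P4 takes one edge deletion.
    print(nx.graph_edit_distance(G1, G2))   # -> 1.0
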
diff --git a/networkx/algorithms/tests/test_cycles.py b/networkx/algorithms/tests/test_cycles.py
index b914d489..dabe0eb9 100644
--- a/networkx/algorithms/tests/test_cycles.py
+++ b/networkx/algorithms/tests/test_cycles.py
@@ -24,7 +24,7 @@ class TestCycles:
if len(b) != n:
return False
l = a + a
- return any(l[i:i+n] == b for i in range(2 * n - n + 1))
+ return any(l[i:i + n] == b for i in range(2 * n - n + 1))
def test_cycle_basis(self):
G = self.G
@@ -42,7 +42,7 @@ class TestCycles:
cy = networkx.cycle_basis(G, 9)
sort_cy = sorted(sorted(c) for c in cy[:-1]) + [sorted(cy[-1])]
assert_equal(sort_cy, [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5],
- ['A', 'B', 'C']])
+ ['A', 'B', 'C']])
@raises(nx.NetworkXNotImplemented)
def test_cycle_basis(self):
@@ -100,18 +100,18 @@ class TestCycles:
# see figure 1 in Johnson's paper
# this graph has exactly 3k simple cycles
G = nx.DiGraph()
- for n in range(2, k+2):
+ for n in range(2, k + 2):
G.add_edge(1, n)
- G.add_edge(n, k+2)
- G.add_edge(2*k+1, 1)
- for n in range(k+2, 2*k+2):
- G.add_edge(n, 2*k+2)
- G.add_edge(n, n+1)
- G.add_edge(2*k+3, k+2)
- for n in range(2*k+3, 3*k+3):
- G.add_edge(2*k+2, n)
- G.add_edge(n, 3*k+3)
- G.add_edge(3*k+3, 2*k+2)
+ G.add_edge(n, k + 2)
+ G.add_edge(2 * k + 1, 1)
+ for n in range(k + 2, 2 * k + 2):
+ G.add_edge(n, 2 * k + 2)
+ G.add_edge(n, n + 1)
+ G.add_edge(2 * k + 3, k + 2)
+ for n in range(2 * k + 3, 3 * k + 3):
+ G.add_edge(2 * k + 2, n)
+ G.add_edge(n, 3 * k + 3)
+ G.add_edge(3 * k + 3, 2 * k + 2)
return G
def test_worst_case_graph(self):
@@ -119,7 +119,7 @@ class TestCycles:
for k in range(3, 10):
G = self.worst_case_graph(k)
l = len(list(nx.simple_cycles(G)))
- assert_equal(l, 3*k)
+ assert_equal(l, 3 * k)
def test_recursive_simple_and_not(self):
for k in range(2, 10):
diff --git a/networkx/algorithms/tests/test_dag.py b/networkx/algorithms/tests/test_dag.py
index f247bee6..d184d755 100644
--- a/networkx/algorithms/tests/test_dag.py
+++ b/networkx/algorithms/tests/test_dag.py
@@ -17,6 +17,7 @@ from networkx.utils import pairwise
class TestDagLongestPath(object):
"""Unit tests computing the longest path in a directed acyclic graph."""
+
def test_empty(self):
G = nx.DiGraph()
assert_equal(nx.dag_longest_path(G), [])
@@ -251,7 +252,7 @@ class TestDAG:
assert_equal(G.get_edge_data(u, v), H.get_edge_data(u, v))
k = 10
- G = nx.DiGraph((i, i+1, {"foo": "bar", "weight": i}) for i in range(k))
+ G = nx.DiGraph((i, i + 1, {"foo": "bar", "weight": i}) for i in range(k))
H = transitive_closure(G)
for u, v in G.edges():
assert_equal(G.get_edge_data(u, v), H.get_edge_data(u, v))
diff --git a/networkx/algorithms/tests/test_link_prediction.py b/networkx/algorithms/tests/test_link_prediction.py
index 63239dcb..e725738e 100644
--- a/networkx/algorithms/tests/test_link_prediction.py
+++ b/networkx/algorithms/tests/test_link_prediction.py
@@ -161,7 +161,7 @@ class TestAdamicAdarIndex():
G = nx.Graph()
G.add_edges_from([(0, 1), (0, 2), (2, 3)])
self.test(G, None, [(0, 3, 1 / math.log(2)), (1, 2, 1 / math.log(2)),
- (1, 3, 0)])
+ (1, 3, 0)])
class TestPreferentialAttachment():
diff --git a/networkx/algorithms/tests/test_matching.py b/networkx/algorithms/tests/test_matching.py
index 0eb0f1c3..e7da3532 100644
--- a/networkx/algorithms/tests/test_matching.py
+++ b/networkx/algorithms/tests/test_matching.py
@@ -8,6 +8,7 @@ import networkx as nx
from networkx.algorithms.matching import matching_dict_to_set
from networkx.testing import assert_edges_equal
+
class TestMaxWeightMatching(object):
"""Unit tests for the
:func:`~networkx.algorithms.matching.max_weight_matching` function.
@@ -30,7 +31,7 @@ class TestMaxWeightMatching(object):
G = nx.Graph()
G.add_edge(0, 1)
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({0: 1, 1: 0}))
+ matching_dict_to_set({0: 1, 1: 0}))
def test_trivial4(self):
"""Small graph"""
@@ -38,7 +39,7 @@ class TestMaxWeightMatching(object):
G.add_edge('one', 'two', weight=10)
G.add_edge('two', 'three', weight=11)
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({'three': 'two', 'two': 'three'}))
+ matching_dict_to_set({'three': 'two', 'two': 'three'}))
def test_trivial5(self):
"""Path"""
@@ -47,9 +48,9 @@ class TestMaxWeightMatching(object):
G.add_edge(2, 3, weight=11)
G.add_edge(3, 4, weight=5)
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({2: 3, 3: 2}))
+ matching_dict_to_set({2: 3, 3: 2}))
assert_edges_equal(nx.max_weight_matching(G, 1),
- matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3}))
+ matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3}))
def test_trivial6(self):
"""Small graph with arbitrary weight attribute"""
@@ -57,7 +58,7 @@ class TestMaxWeightMatching(object):
G.add_edge('one', 'two', weight=10, abcd=11)
G.add_edge('two', 'three', weight=11, abcd=10)
assert_edges_equal(nx.max_weight_matching(G, weight='abcd'),
- matching_dict_to_set({'one': 'two', 'two': 'one'}))
+ matching_dict_to_set({'one': 'two', 'two': 'one'}))
def test_floating_point_weights(self):
"""Floating point weights"""
@@ -67,7 +68,7 @@ class TestMaxWeightMatching(object):
G.add_edge(1, 3, weight=3.0)
G.add_edge(1, 4, weight=math.sqrt(2.0))
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 4, 2: 3, 3: 2, 4: 1}))
+ matching_dict_to_set({1: 4, 2: 3, 3: 2, 4: 1}))
def test_negative_weights(self):
"""Negative weights"""
@@ -78,9 +79,9 @@ class TestMaxWeightMatching(object):
G.add_edge(2, 4, weight=-1)
G.add_edge(3, 4, weight=-6)
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 2, 2: 1}))
+ matching_dict_to_set({1: 2, 2: 1}))
assert_edges_equal(nx.max_weight_matching(G, 1),
- matching_dict_to_set({1: 3, 2: 4, 3: 1, 4: 2}))
+ matching_dict_to_set({1: 3, 2: 4, 3: 1, 4: 2}))
def test_s_blossom(self):
"""Create S-blossom and use it for augmentation:"""
@@ -88,11 +89,11 @@ class TestMaxWeightMatching(object):
G.add_weighted_edges_from([(1, 2, 8), (1, 3, 9),
(2, 3, 10), (3, 4, 7)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3}))
+ matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3}))
G.add_weighted_edges_from([(1, 6, 5), (4, 5, 6)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}))
+ matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}))
def test_s_t_blossom(self):
"""Create S-blossom, relabel as T-blossom, use for augmentation:"""
@@ -100,15 +101,15 @@ class TestMaxWeightMatching(object):
G.add_weighted_edges_from([(1, 2, 9), (1, 3, 8), (2, 3, 10),
(1, 4, 5), (4, 5, 4), (1, 6, 3)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}))
+ matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}))
G.add_edge(4, 5, weight=3)
G.add_edge(1, 6, weight=4)
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}))
+ matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}))
G.remove_edge(1, 6)
G.add_edge(3, 6, weight=4)
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 2, 2: 1, 3: 6, 4: 5, 5: 4, 6: 3}))
+ matching_dict_to_set({1: 2, 2: 1, 3: 6, 4: 5, 5: 4, 6: 3}))
def test_nested_s_blossom(self):
"""Create nested S-blossom, use for augmentation:"""
@@ -127,7 +128,7 @@ class TestMaxWeightMatching(object):
(3, 4, 20), (3, 5, 20), (4, 5, 25),
(5, 6, 10), (6, 7, 10), (7, 8, 8)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3, 5: 6, 6: 5, 7: 8, 8: 7}))
+ matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3, 5: 6, 6: 5, 7: 8, 8: 7}))
def test_nested_s_blossom_expand(self):
"""Create nested S-blossom, augment, expand recursively:"""
@@ -137,7 +138,7 @@ class TestMaxWeightMatching(object):
(4, 6, 12), (5, 7, 12), (6, 7, 14),
(7, 8, 12)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 6, 5: 3, 6: 4, 7: 8, 8: 7}))
+ matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 6, 5: 3, 6: 4, 7: 8, 8: 7}))
def test_s_blossom_relabel_expand(self):
"""Create S-blossom, relabel as T, expand:"""
@@ -146,7 +147,7 @@ class TestMaxWeightMatching(object):
(2, 3, 25), (3, 4, 22), (4, 5, 25),
(4, 8, 14), (5, 7, 13)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4}))
+ matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4}))
def test_nested_s_blossom_relabel_expand(self):
"""Create nested S-blossom, relabel as T, expand:"""
@@ -155,7 +156,7 @@ class TestMaxWeightMatching(object):
(2, 3, 25), (2, 4, 18), (3, 5, 18),
(4, 5, 13), (4, 7, 7), (5, 6, 7)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 7, 5: 6, 6: 5, 7: 4, 8: 1}))
+ matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 7, 5: 6, 6: 5, 7: 4, 8: 1}))
def test_nasty_blossom1(self):
"""Create blossom, relabel as T in more than one way, expand,
@@ -167,8 +168,8 @@ class TestMaxWeightMatching(object):
(3, 9, 35), (4, 8, 35), (5, 7, 26),
(9, 10, 5)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7,
- 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}))
+ matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7,
+ 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}))
def test_nasty_blossom2(self):
"""Again but slightly different:"""
@@ -178,8 +179,8 @@ class TestMaxWeightMatching(object):
(3, 9, 35), (4, 8, 26), (5, 7, 40),
(9, 10, 5)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7,
- 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}))
+ matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7,
+ 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}))
def test_nasty_blossom_least_slack(self):
"""Create blossom, relabel as T, expand such that a new
@@ -191,8 +192,8 @@ class TestMaxWeightMatching(object):
(3, 9, 35), (4, 8, 28), (5, 7, 26),
(9, 10, 5)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7,
- 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}))
+ matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7,
+ 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}))
def test_nasty_blossom_augmenting(self):
"""Create nested blossom, relabel as T in more than one way"""
@@ -205,8 +206,8 @@ class TestMaxWeightMatching(object):
(3, 11, 35), (5, 9, 36), (7, 10, 26),
(11, 12, 5)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 6, 5: 9, 6: 4,
- 7: 10, 8: 1, 9: 5, 10: 7, 11: 12, 12: 11}))
+ matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 6, 5: 9, 6: 4,
+ 7: 10, 8: 1, 9: 5, 10: 7, 11: 12, 12: 11}))
def test_nasty_blossom_expand_recursively(self):
"""Create nested S-blossom, relabel as S, expand recursively:"""
@@ -216,8 +217,8 @@ class TestMaxWeightMatching(object):
(1, 8, 15), (5, 7, 30), (7, 6, 10),
(8, 10, 10), (4, 9, 30)])
assert_edges_equal(nx.max_weight_matching(G),
- matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 9, 5: 3,
- 6: 7, 7: 6, 8: 10, 9: 4, 10: 8}))
+ matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 9, 5: 3,
+ 6: 7, 7: 6, 8: 10, 9: 4, 10: 8}))
class TestIsMatching(object):
diff --git a/networkx/algorithms/tests/test_mis.py b/networkx/algorithms/tests/test_mis.py
index 51c3aed8..d787899c 100644
--- a/networkx/algorithms/tests/test_mis.py
+++ b/networkx/algorithms/tests/test_mis.py
@@ -84,7 +84,7 @@ class TestMaximalIndependantSet(object):
"""Generate 50 random graphs of different types and sizes and
make sure that all sets are independent and maximal."""
for i in range(0, 50, 10):
- G = nx.random_graphs.erdos_renyi_graph(i*10+1, random.random())
+ G = nx.random_graphs.erdos_renyi_graph(i * 10 + 1, random.random())
IS = nx.maximal_independent_set(G)
assert_false(list(G.subgraph(IS).edges()))
neighbors_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS))
diff --git a/networkx/algorithms/tests/test_similarity.py b/networkx/algorithms/tests/test_similarity.py
index 8d31e794..17f25d6f 100644
--- a/networkx/algorithms/tests/test_similarity.py
+++ b/networkx/algorithms/tests/test_similarity.py
@@ -5,6 +5,7 @@ import networkx as nx
from networkx.algorithms.similarity import *
from networkx.generators.classic import *
+
class TestSimilarity:
@classmethod
@@ -54,7 +55,7 @@ class TestSimilarity:
for n, attr in G2.nodes.items():
attr['color'] = 'red' if n % 2 == 1 else 'blue'
assert_equal(graph_edit_distance(G1, G2), 0)
- assert_equal(graph_edit_distance(G1, G2, node_match = lambda n1, n2: n1['color'] == n2['color']), 1)
+ assert_equal(graph_edit_distance(G1, G2, node_match=lambda n1, n2: n1['color'] == n2['color']), 1)
def test_graph_edit_distance_edge_match(self):
G1 = path_graph(6)
@@ -64,7 +65,7 @@ class TestSimilarity:
for e, attr in G2.edges.items():
attr['color'] = 'red' if min(e) // 3 == 0 else 'blue'
assert_equal(graph_edit_distance(G1, G2), 0)
- assert_equal(graph_edit_distance(G1, G2, edge_match = lambda e1, e2: e1['color'] == e2['color']), 2)
+ assert_equal(graph_edit_distance(G1, G2, edge_match=lambda e1, e2: e1['color'] == e2['color']), 2)
def test_graph_edit_distance_node_cost(self):
G1 = path_graph(6)
@@ -93,9 +94,9 @@ class TestSimilarity:
return 100
assert_equal(graph_edit_distance(G1, G2,
- node_subst_cost = node_subst_cost,
- node_del_cost = node_del_cost,
- node_ins_cost = node_ins_cost), 6)
+ node_subst_cost=node_subst_cost,
+ node_del_cost=node_del_cost,
+ node_ins_cost=node_ins_cost), 6)
def test_graph_edit_distance_edge_cost(self):
G1 = path_graph(6)
@@ -124,15 +125,15 @@ class TestSimilarity:
return 1.0
assert_equal(graph_edit_distance(G1, G2,
- edge_subst_cost = edge_subst_cost,
- edge_del_cost = edge_del_cost,
- edge_ins_cost = edge_ins_cost), 0.23)
+ edge_subst_cost=edge_subst_cost,
+ edge_del_cost=edge_del_cost,
+ edge_ins_cost=edge_ins_cost), 0.23)
def test_graph_edit_distance_upper_bound(self):
G1 = circular_ladder_graph(2)
G2 = circular_ladder_graph(6)
- assert_equal(graph_edit_distance(G1, G2, upper_bound = 5), None)
- assert_equal(graph_edit_distance(G1, G2, upper_bound = 24), 22)
+ assert_equal(graph_edit_distance(G1, G2, upper_bound=5), None)
+ assert_equal(graph_edit_distance(G1, G2, upper_bound=24), 22)
assert_equal(graph_edit_distance(G1, G2), 22)
def test_optimal_edit_paths(self):
@@ -143,7 +144,7 @@ class TestSimilarity:
assert_equal(len(paths), 6)
def canonical(vertex_path, edge_path):
- return tuple(sorted(vertex_path)), tuple(sorted(edge_path, key = lambda x: (None in x, x)))
+ return tuple(sorted(vertex_path)), tuple(sorted(edge_path, key=lambda x: (None in x, x)))
expected_paths = [([(0, 0), (1, 1), (2, 2)], [((0, 1), (0, 1)), ((1, 2), (1, 2)), (None, (0, 2))]),
([(0, 0), (1, 2), (2, 1)], [((0, 1), (0, 2)), ((1, 2), (1, 2)), (None, (0, 1))]),
diff --git a/networkx/algorithms/tests/test_simple_paths.py b/networkx/algorithms/tests/test_simple_paths.py
index 9463842b..92de987d 100644
--- a/networkx/algorithms/tests/test_simple_paths.py
+++ b/networkx/algorithms/tests/test_simple_paths.py
@@ -228,35 +228,35 @@ def test_directed_weighted_shortest_simple_path():
def test_weighted_shortest_simple_path_issue2427():
G = nx.Graph()
- G.add_edge('IN', 'OUT', weight = 2)
- G.add_edge('IN', 'A', weight = 1)
- G.add_edge('IN', 'B', weight = 2)
- G.add_edge('B', 'OUT', weight = 2)
- assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight = "weight")),
+ G.add_edge('IN', 'OUT', weight=2)
+ G.add_edge('IN', 'A', weight=1)
+ G.add_edge('IN', 'B', weight=2)
+ G.add_edge('B', 'OUT', weight=2)
+ assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight="weight")),
[['IN', 'OUT'], ['IN', 'B', 'OUT']])
G = nx.Graph()
- G.add_edge('IN', 'OUT', weight = 10)
- G.add_edge('IN', 'A', weight = 1)
- G.add_edge('IN', 'B', weight = 1)
- G.add_edge('B', 'OUT', weight = 1)
- assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight = "weight")),
+ G.add_edge('IN', 'OUT', weight=10)
+ G.add_edge('IN', 'A', weight=1)
+ G.add_edge('IN', 'B', weight=1)
+ G.add_edge('B', 'OUT', weight=1)
+ assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight="weight")),
[['IN', 'B', 'OUT'], ['IN', 'OUT']])
def test_directed_weighted_shortest_simple_path_issue2427():
G = nx.DiGraph()
- G.add_edge('IN', 'OUT', weight = 2)
- G.add_edge('IN', 'A', weight = 1)
- G.add_edge('IN', 'B', weight = 2)
- G.add_edge('B', 'OUT', weight = 2)
- assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight = "weight")),
+ G.add_edge('IN', 'OUT', weight=2)
+ G.add_edge('IN', 'A', weight=1)
+ G.add_edge('IN', 'B', weight=2)
+ G.add_edge('B', 'OUT', weight=2)
+ assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight="weight")),
[['IN', 'OUT'], ['IN', 'B', 'OUT']])
G = nx.DiGraph()
- G.add_edge('IN', 'OUT', weight = 10)
- G.add_edge('IN', 'A', weight = 1)
- G.add_edge('IN', 'B', weight = 1)
- G.add_edge('B', 'OUT', weight = 1)
- assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight = "weight")),
+ G.add_edge('IN', 'OUT', weight=10)
+ G.add_edge('IN', 'A', weight=1)
+ G.add_edge('IN', 'B', weight=1)
+ G.add_edge('B', 'OUT', weight=1)
+ assert_equal(list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight="weight")),
[['IN', 'B', 'OUT'], ['IN', 'OUT']])
diff --git a/networkx/algorithms/traversal/depth_first_search.py b/networkx/algorithms/traversal/depth_first_search.py
index 4c8cbc4d..75cc2613 100644
--- a/networkx/algorithms/traversal/depth_first_search.py
+++ b/networkx/algorithms/traversal/depth_first_search.py
@@ -73,7 +73,7 @@ def dfs_edges(G, source=None, depth_limit=None):
nodes = [source]
visited = set()
if depth_limit is None:
- depth_limit = len(G)
+ depth_limit = len(G)
for start in nodes:
if start in visited:
continue
@@ -87,7 +87,7 @@ def dfs_edges(G, source=None, depth_limit=None):
yield parent, child
visited.add(child)
if depth_now > 1:
- stack.append((child, depth_now-1, iter(G[child])))
+ stack.append((child, depth_now - 1, iter(G[child])))
except StopIteration:
stack.pop()
@@ -392,7 +392,7 @@ def dfs_labeled_edges(G, source=None, depth_limit=None):
nodes = [source]
visited = set()
if depth_limit is None:
- depth_limit = len(G)
+ depth_limit = len(G)
for start in nodes:
if start in visited:
continue
@@ -409,7 +409,7 @@ def dfs_labeled_edges(G, source=None, depth_limit=None):
yield parent, child, 'forward'
visited.add(child)
if depth_now > 1:
- stack.append((child, depth_now-1, iter(G[child])))
+ stack.append((child, depth_now - 1, iter(G[child])))
except StopIteration:
stack.pop()
if stack:
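The depth-limited DFS touched above keeps a stack of (node, remaining depth, neighbor iterator) triples and only pushes a child while depth_now > 1. A minimal standalone sketch of that pattern, using a plain dict-of-lists adjacency instead of the NetworkX graph API (names here are illustrative, not part of the library):

    def dfs_edges_limited(adj, source, depth_limit):
        # adj: dict mapping each node to an iterable of its neighbors
        visited = {source}
        stack = [(source, depth_limit, iter(adj[source]))]
        while stack:
            parent, depth_now, children = stack[-1]
            try:
                child = next(children)
                if child not in visited:
                    yield parent, child
                    visited.add(child)
                    if depth_now > 1:
                        stack.append((child, depth_now - 1, iter(adj[child])))
            except StopIteration:
                stack.pop()

With depth_limit=1 only edges incident to the source are yielded, which is the behavior the depth_limit parameter is meant to give.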
diff --git a/networkx/algorithms/traversal/edgedfs.py b/networkx/algorithms/traversal/edgedfs.py
index d12d28bb..5434057d 100644
--- a/networkx/algorithms/traversal/edgedfs.py
+++ b/networkx/algorithms/traversal/edgedfs.py
@@ -12,6 +12,7 @@ REVERSE = 'reverse'
__all__ = ['edge_dfs']
+
def helper_funcs(G, orientation):
"""
These are various G-specific functions that help us implement the algorithm
@@ -73,6 +74,7 @@ def helper_funcs(G, orientation):
return out_edges, key, traversed_tailhead
+
def edge_dfs(G, source=None, orientation='original'):
"""
A directed, depth-first traversal of edges in `G`, beginning at `source`.
@@ -182,4 +184,3 @@ def edge_dfs(G, source=None, orientation='original'):
# Mark the traversed "to" node as to-be-explored.
stack.append(tailhead(edge)[1])
yield edge
-
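For reference, edge_dfs reports every edge exactly once (including edges a node-based DFS tree would skip), and with orientation='reverse' it walks in-edges and tags each yielded edge with the REVERSE marker, as the tests later in this diff expect. A small illustrative call on an example graph, not the test fixture:

    import networkx as nx
    from networkx.algorithms.traversal.edgedfs import edge_dfs

    G = nx.DiGraph([(0, 1), (1, 0), (2, 1), (3, 1)])
    list(edge_dfs(G, [0, 1, 2, 3]))
    # [(0, 1), (1, 0), (2, 1), (3, 1)]
    list(edge_dfs(G, [0, 1, 2, 3], orientation='reverse'))
    # each edge comes back as (u, v, 'reverse'), traversed against its direction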
diff --git a/networkx/algorithms/traversal/tests/test_dfs.py b/networkx/algorithms/traversal/tests/test_dfs.py
index b9c9bde4..7d7bc427 100644
--- a/networkx/algorithms/traversal/tests/test_dfs.py
+++ b/networkx/algorithms/traversal/tests/test_dfs.py
@@ -2,36 +2,36 @@
from nose.tools import *
import networkx as nx
+
class TestDFS:
def setUp(self):
# simple graph
- G=nx.Graph()
- G.add_edges_from([(0,1),(1,2),(1,3),(2,4),(3,4)])
- self.G=G
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)])
+ self.G = G
# simple graph, disconnected
- D=nx.Graph()
- D.add_edges_from([(0,1),(2,3)])
- self.D=D
-
+ D = nx.Graph()
+ D.add_edges_from([(0, 1), (2, 3)])
+ self.D = D
def test_preorder_nodes(self):
- assert_equal(list(nx.dfs_preorder_nodes(self.G,source=0)),
+ assert_equal(list(nx.dfs_preorder_nodes(self.G, source=0)),
[0, 1, 2, 4, 3])
- assert_equal(list(nx.dfs_preorder_nodes(self.D)),[0, 1, 2, 3])
+ assert_equal(list(nx.dfs_preorder_nodes(self.D)), [0, 1, 2, 3])
def test_postorder_nodes(self):
- assert_equal(list(nx.dfs_postorder_nodes(self.G,source=0)),
+ assert_equal(list(nx.dfs_postorder_nodes(self.G, source=0)),
[3, 4, 2, 1, 0])
- assert_equal(list(nx.dfs_postorder_nodes(self.D)),[1, 0, 3, 2])
+ assert_equal(list(nx.dfs_postorder_nodes(self.D)), [1, 0, 3, 2])
def test_successor(self):
- assert_equal(nx.dfs_successors(self.G,source=0),
+ assert_equal(nx.dfs_successors(self.G, source=0),
{0: [1], 1: [2], 2: [4], 4: [3]})
assert_equal(nx.dfs_successors(self.D), {0: [1], 2: [3]})
def test_predecessor(self):
- assert_equal(nx.dfs_predecessors(self.G,source=0),
+ assert_equal(nx.dfs_predecessors(self.G, source=0),
{1: 0, 2: 1, 3: 4, 4: 2})
assert_equal(nx.dfs_predecessors(self.D), {1: 0, 3: 2})
@@ -39,7 +39,7 @@ class TestDFS:
exp_nodes = sorted(self.G.nodes())
exp_edges = [(0, 1), (1, 2), (2, 4), (4, 3)]
# Search from first node
- T=nx.dfs_tree(self.G,source=0)
+ T = nx.dfs_tree(self.G, source=0)
assert_equal(sorted(T.nodes()), exp_nodes)
assert_equal(sorted(T.edges()), exp_edges)
# Check source=None
@@ -52,31 +52,31 @@ class TestDFS:
assert_equal(sorted(T.edges()), exp_edges)
def test_dfs_edges(self):
- edges=nx.dfs_edges(self.G,source=0)
- assert_equal(list(edges),[(0, 1), (1, 2), (2, 4), (4, 3)])
- edges=nx.dfs_edges(self.D)
- assert_equal(list(edges),[(0, 1), (2, 3)])
+ edges = nx.dfs_edges(self.G, source=0)
+ assert_equal(list(edges), [(0, 1), (1, 2), (2, 4), (4, 3)])
+ edges = nx.dfs_edges(self.D)
+ assert_equal(list(edges), [(0, 1), (2, 3)])
def test_dfs_labeled_edges(self):
- edges=list(nx.dfs_labeled_edges(self.G,source=0))
- forward=[(u,v) for (u,v,d) in edges if d == 'forward']
- assert_equal(forward,[(0,0), (0, 1), (1, 2), (2, 4), (4, 3)])
+ edges = list(nx.dfs_labeled_edges(self.G, source=0))
+ forward = [(u, v) for (u, v, d) in edges if d == 'forward']
+ assert_equal(forward, [(0, 0), (0, 1), (1, 2), (2, 4), (4, 3)])
def test_dfs_labeled_disconnected_edges(self):
- edges=list(nx.dfs_labeled_edges(self.D))
- forward=[(u,v) for (u,v,d) in edges if d == 'forward']
- assert_equal(forward,[(0, 0), (0, 1), (2, 2), (2, 3)])
+ edges = list(nx.dfs_labeled_edges(self.D))
+ forward = [(u, v) for (u, v, d) in edges if d == 'forward']
+ assert_equal(forward, [(0, 0), (0, 1), (2, 2), (2, 3)])
def test_dfs_tree_isolates(self):
G = nx.Graph()
G.add_node(1)
G.add_node(2)
- T=nx.dfs_tree(G,source=1)
- assert_equal(sorted(T.nodes()),[1])
- assert_equal(sorted(T.edges()),[])
- T=nx.dfs_tree(G,source=None)
- assert_equal(sorted(T.nodes()),[1, 2])
- assert_equal(sorted(T.edges()),[])
+ T = nx.dfs_tree(G, source=1)
+ assert_equal(sorted(T.nodes()), [1])
+ assert_equal(sorted(T.edges()), [])
+ T = nx.dfs_tree(G, source=None)
+ assert_equal(sorted(T.nodes()), [1, 2])
+ assert_equal(sorted(T.edges()), [])
class TestDepthLimitedSearch:
@@ -95,15 +95,15 @@ class TestDepthLimitedSearch:
def dls_test_preorder_nodes(self):
assert_equal(list(nx.dfs_preorder_nodes(self.G, source=0,
- depth_limit=2)), [0, 1, 2])
+ depth_limit=2)), [0, 1, 2])
assert_equal(list(nx.dfs_preorder_nodes(self.D, source=1,
- depth_limit=2)), ([1, 0]))
+ depth_limit=2)), ([1, 0]))
def dls_test_postorder_nodes(self):
assert_equal(list(nx.dfs_postorder_nodes(self.G,
- source=3, depth_limit=3)), [1, 7, 2, 5, 4, 3])
+ source=3, depth_limit=3)), [1, 7, 2, 5, 4, 3])
assert_equal(list(nx.dfs_postorder_nodes(self.D,
- source=2, depth_limit=2)),([3, 7, 2]))
+ source=2, depth_limit=2)), ([3, 7, 2]))
def dls_test_successor(self):
result = nx.dfs_successors(self.G, source=4, depth_limit=3)
@@ -125,8 +125,8 @@ class TestDepthLimitedSearch:
def test_dls_edges(self):
edges = nx.dfs_edges(self.G, source=9, depth_limit=4)
- assert_equal(list(edges),[(9, 8), (8, 7),
- (7, 2), (2, 1), (2, 3), (9, 10)])
+ assert_equal(list(edges), [(9, 8), (8, 7),
+ (7, 2), (2, 1), (2, 3), (9, 10)])
def test_dls_labeled_edges(self):
edges = list(nx.dfs_labeled_edges(self.G, source=5, depth_limit=1))
diff --git a/networkx/algorithms/traversal/tests/test_edgedfs.py b/networkx/algorithms/traversal/tests/test_edgedfs.py
index 2e3a1afa..2d92eaa5 100644
--- a/networkx/algorithms/traversal/tests/test_edgedfs.py
+++ b/networkx/algorithms/traversal/tests/test_edgedfs.py
@@ -15,6 +15,7 @@ REVERSE = nx.algorithms.edgedfs.REVERSE
# failures due to hash randomization will not occur. For an example of how
# this can fail, see TestEdgeDFS.test_multigraph.
+
class TestEdgeDFS(object):
def setUp(self):
self.nodes = [0, 1, 2, 3]
@@ -34,7 +35,7 @@ class TestEdgeDFS(object):
def test_digraph(self):
G = nx.DiGraph(self.edges)
x = list(edge_dfs(G, self.nodes))
- x_= [(0, 1), (1, 0), (2, 1), (3, 1)]
+ x_ = [(0, 1), (1, 0), (2, 1), (3, 1)]
assert_equal(x, x_)
def test_digraph2(self):
@@ -47,8 +48,8 @@ class TestEdgeDFS(object):
def test_digraph_rev(self):
G = nx.DiGraph(self.edges)
x = list(edge_dfs(G, self.nodes, orientation='reverse'))
- x_= [(1, 0, REVERSE), (0, 1, REVERSE),
- (2, 1, REVERSE), (3, 1, REVERSE)]
+ x_ = [(1, 0, REVERSE), (0, 1, REVERSE),
+ (2, 1, REVERSE), (3, 1, REVERSE)]
assert_equal(x, x_)
def test_digraph_rev2(self):
@@ -107,5 +108,3 @@ class TestEdgeDFS(object):
(1, 0, 1, REVERSE), (2, 1, 0, REVERSE),
(3, 1, 0, REVERSE)]
assert_equal(x, x_)
-
-
diff --git a/networkx/algorithms/tree/branchings.py b/networkx/algorithms/tree/branchings.py
index 433c53a0..bee23a0e 100644
--- a/networkx/algorithms/tree/branchings.py
+++ b/networkx/algorithms/tree/branchings.py
@@ -56,16 +56,20 @@ STYLES = {
INF = float('inf')
+
def random_string(L=15, seed=None):
random.seed(seed)
return ''.join([random.choice(string.ascii_letters) for n in range(L)])
+
def _min_weight(weight):
return -weight
+
def _max_weight(weight):
return weight
+
def branching_weight(G, attr='weight', default=1):
"""
Returns the total weight of a branching.
@@ -73,6 +77,7 @@ def branching_weight(G, attr='weight', default=1):
"""
return sum(edge[2].get(attr, default) for edge in G.edges(data=True))
+
def greedy_branching(G, attr='weight', default=1, kind='max'):
"""
Returns a branching obtained through a greedy algorithm.
@@ -149,6 +154,7 @@ def greedy_branching(G, attr='weight', default=1, kind='max'):
return B
+
class MultiDiGraph_EdgeKey(nx.MultiDiGraph):
"""
MultiDiGraph which assigns unique keys to every edge.
@@ -165,6 +171,7 @@ class MultiDiGraph_EdgeKey(nx.MultiDiGraph):
of edges. We must reliably track edges across graph mutations.
"""
+
def __init__(self, incoming_graph_data=None, **attr):
cls = super(MultiDiGraph_EdgeKey, self)
cls.__init__(incoming_graph_data=incoming_graph_data, **attr)
@@ -212,7 +219,7 @@ class MultiDiGraph_EdgeKey(nx.MultiDiGraph):
def remove_edge_with_key(self, key):
try:
- u, v, _ = self.edge_index[key]
+ u, v, _ = self.edge_index[key]
except KeyError:
raise KeyError('Invalid edge key {0!r}'.format(key))
else:
@@ -245,11 +252,13 @@ def get_path(G, u, v):
edges = [first_key(i, vv) for i, vv in enumerate(nodes[1:])]
return nodes, edges
+
class Edmonds(object):
"""
Edmonds algorithm for finding optimal branchings and spanning arborescences.
"""
+
def __init__(self, G, seed=None):
self.G_original = G
@@ -372,15 +381,15 @@ class Edmonds(object):
while True:
# (I1): Choose a node v in G^i not in D^i.
try:
- v = next(nodes)
+ v = next(nodes)
except StopIteration:
# If there are no more new nodes to consider, then we *should*
# meet the break condition (b) from the paper:
# (b) every node of G^i is in D^i and E^i is a branching
# Construction guarantees that it's a branching.
- assert( len(G) == len(B) )
+ assert(len(G) == len(B))
if len(B):
- assert( is_branching(B) )
+ assert(is_branching(B))
if self.store:
self.graphs.append(G.copy())
@@ -454,7 +463,7 @@ class Edmonds(object):
u, v, data = B.edge_index[edge_key]
w = data[attr]
Q_incoming_weight[v] = w
- if w < minweight:
+ if w < minweight:
minweight = w
minedge = edge_key
@@ -509,10 +518,8 @@ class Edmonds(object):
nodes = iter(list(G.nodes()))
self.level += 1
-
-
# (I3) Branch construction.
- #print(self.level)
+ # print(self.level)
H = self.G_original.fresh_copy()
def is_root(G, u, edgekeys):
@@ -549,7 +556,7 @@ class Edmonds(object):
# The circuit at level i that was merged as a node the graph
# at level i+1.
circuit = self.circuits[self.level]
- #print
+ # print
#print(merged_node, self.level, circuit)
#print("before", edges)
# Note, we ask if it is a root in the full graph, not the branching.
@@ -575,7 +582,7 @@ class Edmonds(object):
#print("circuit is: ", circuit)
# The branching at level i
G = self.graphs[self.level]
- #print(G.edge_index)
+ # print(G.edge_index)
target = G.edge_index[edgekey][1]
for edgekey in circuit:
u, v, data = G.edge_index[edgekey]
@@ -599,16 +606,19 @@ class Edmonds(object):
return H
+
def maximum_branching(G, attr='weight', default=1):
ed = Edmonds(G)
B = ed.find_optimum(attr, default, kind='max', style='branching')
return B
+
def minimum_branching(G, attr='weight', default=1):
ed = Edmonds(G)
B = ed.find_optimum(attr, default, kind='min', style='branching')
return B
+
def maximum_spanning_arborescence(G, attr='weight', default=1):
ed = Edmonds(G)
B = ed.find_optimum(attr, default, kind='max', style='arborescence')
@@ -617,6 +627,7 @@ def maximum_spanning_arborescence(G, attr='weight', default=1):
raise nx.exception.NetworkXException(msg)
return B
+
def minimum_spanning_arborescence(G, attr='weight', default=1):
ed = Edmonds(G)
B = ed.find_optimum(attr, default, kind='min', style='arborescence')
@@ -625,6 +636,7 @@ def minimum_spanning_arborescence(G, attr='weight', default=1):
raise nx.exception.NetworkXException(msg)
return B
+
docstring_branching = """
Returns a {kind} {style} from G.
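The wrappers defined above (maximum_branching, minimum_branching, maximum_spanning_arborescence, minimum_spanning_arborescence) are thin front ends over Edmonds.find_optimum, differing only in the kind/style arguments they pass. A short usage sketch, with toy weights chosen purely for illustration:

    import networkx as nx
    from networkx.algorithms.tree.branchings import maximum_spanning_arborescence

    G = nx.DiGraph()
    G.add_weighted_edges_from([(0, 1, 5), (0, 2, 1), (2, 1, 4), (1, 3, 2)])
    B = maximum_spanning_arborescence(G, attr='weight')
    sorted(B.edges())
    # expected: [(0, 1), (0, 2), (1, 3)] -- each non-root node keeps exactly one
    # incoming edge, and 5 + 1 + 2 = 8 beats the alternative through (2, 1),
    # which totals 1 + 4 + 2 = 7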
diff --git a/networkx/algorithms/tree/recognition.py b/networkx/algorithms/tree/recognition.py
index 78c7a8b1..e9d9dc58 100644
--- a/networkx/algorithms/tree/recognition.py
+++ b/networkx/algorithms/tree/recognition.py
@@ -84,6 +84,7 @@ __author__ = """\n""".join([
__all__ = ['is_arborescence', 'is_branching', 'is_forest', 'is_tree']
+
@nx.utils.not_implemented_for('undirected')
def is_arborescence(G):
"""
diff --git a/networkx/algorithms/tree/tests/test_operations.py b/networkx/algorithms/tree/tests/test_operations.py
index 5caad622..3d99835a 100644
--- a/networkx/algorithms/tree/tests/test_operations.py
+++ b/networkx/algorithms/tree/tests/test_operations.py
@@ -16,6 +16,7 @@ import networkx as nx
from networkx.testing import assert_nodes_equal
from networkx.testing import assert_edges_equal
+
class TestJoin(object):
"""Unit tests for the :func:`networkx.tree.join` function."""
diff --git a/networkx/algorithms/tree/tests/test_recognition.py b/networkx/algorithms/tree/tests/test_recognition.py
index 39ab0db5..08d289ec 100644
--- a/networkx/algorithms/tree/tests/test_recognition.py
+++ b/networkx/algorithms/tree/tests/test_recognition.py
@@ -17,32 +17,32 @@ class TestTreeRecognition(object):
self.T3 = self.graph()
self.T3.add_nodes_from(range(5))
- edges = [(i,i+1) for i in range(4)]
+ edges = [(i, i + 1) for i in range(4)]
self.T3.add_edges_from(edges)
self.T5 = self.multigraph()
self.T5.add_nodes_from(range(5))
- edges = [(i,i+1) for i in range(4)]
+ edges = [(i, i + 1) for i in range(4)]
self.T5.add_edges_from(edges)
self.T6 = self.graph()
- self.T6.add_nodes_from([6,7])
- self.T6.add_edge(6,7)
+ self.T6.add_nodes_from([6, 7])
+ self.T6.add_edge(6, 7)
self.F1 = nx.compose(self.T6, self.T3)
self.N4 = self.graph()
self.N4.add_node(1)
- self.N4.add_edge(1,1)
+ self.N4.add_edge(1, 1)
self.N5 = self.graph()
self.N5.add_nodes_from(range(5))
self.N6 = self.graph()
self.N6.add_nodes_from(range(3))
- self.N6.add_edges_from([(0,1),(1,2),(2,0)])
+ self.N6.add_edges_from([(0, 1), (1, 2), (2, 0)])
- self.NF1 = nx.compose(self.T6,self.N6)
+ self.NF1 = nx.compose(self.T6, self.N6)
@raises(nx.NetworkXPointlessConcept)
def test_null_tree(self):
@@ -76,10 +76,12 @@ class TestTreeRecognition(object):
assert_false(nx.is_forest(self.N6))
assert_false(nx.is_forest(self.NF1))
+
class TestDirectedTreeRecognition(TestTreeRecognition):
graph = nx.DiGraph
multigraph = nx.MultiDiGraph
+
def test_disconnected_graph():
# https://github.com/networkx/networkx/issues/1144
G = nx.Graph()
@@ -90,54 +92,62 @@ def test_disconnected_graph():
G.add_edges_from([(0, 1), (1, 2), (2, 0), (3, 4)])
assert_false(nx.is_tree(G))
+
def test_dag_nontree():
G = nx.DiGraph()
- G.add_edges_from([(0,1), (0,2), (1,2)])
+ G.add_edges_from([(0, 1), (0, 2), (1, 2)])
assert_false(nx.is_tree(G))
assert_true(nx.is_directed_acyclic_graph(G))
+
def test_multicycle():
G = nx.MultiDiGraph()
- G.add_edges_from([(0,1), (0,1)])
+ G.add_edges_from([(0, 1), (0, 1)])
assert_false(nx.is_tree(G))
assert_true(nx.is_directed_acyclic_graph(G))
+
def test_emptybranch():
G = nx.DiGraph()
G.add_nodes_from(range(10))
assert_true(nx.is_branching(G))
assert_false(nx.is_arborescence(G))
+
def test_path():
G = nx.DiGraph()
nx.add_path(G, range(5))
assert_true(nx.is_branching(G))
assert_true(nx.is_arborescence(G))
+
def test_notbranching1():
# Acyclic violation.
G = nx.MultiDiGraph()
G.add_nodes_from(range(10))
- G.add_edges_from([(0,1),(1,0)])
+ G.add_edges_from([(0, 1), (1, 0)])
assert_false(nx.is_branching(G))
assert_false(nx.is_arborescence(G))
+
def test_notbranching2():
# In-degree violation.
G = nx.MultiDiGraph()
G.add_nodes_from(range(10))
- G.add_edges_from([(0,1),(0,2),(3,2)])
+ G.add_edges_from([(0, 1), (0, 2), (3, 2)])
assert_false(nx.is_branching(G))
assert_false(nx.is_arborescence(G))
+
def test_notarborescence1():
# Not an arborescence due to not spanning.
G = nx.MultiDiGraph()
G.add_nodes_from(range(10))
- G.add_edges_from([(0,1),(0,2),(1,3),(5,6)])
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (5, 6)])
assert_true(nx.is_branching(G))
assert_false(nx.is_arborescence(G))
+
def test_notarborescence2():
# Not an arborescence due to in-degree violation.
G = nx.MultiDiGraph()
diff --git a/networkx/classes/coreviews.py b/networkx/classes/coreviews.py
index 798ced88..6c49f08a 100644
--- a/networkx/classes/coreviews.py
+++ b/networkx/classes/coreviews.py
@@ -337,7 +337,7 @@ class FilterAdjacency(Mapping): # edgedict
def copy(self):
if hasattr(self.NODE_OK, 'nodes'):
return {u: {v: d for v, d in self._atlas[u].items()
- if self.NODE_OK(v) if self.EDGE_OK(u,v)}
+ if self.NODE_OK(v) if self.EDGE_OK(u, v)}
for u in self.NODE_OK.nodes if u in self._atlas}
return {u: {v: d for v, d in nbrs.items() if self.NODE_OK(v)
if self.EDGE_OK(u, v)}
diff --git a/networkx/classes/graphviews.py b/networkx/classes/graphviews.py
index c49b2b09..6dfea2e1 100644
--- a/networkx/classes/graphviews.py
+++ b/networkx/classes/graphviews.py
@@ -49,9 +49,9 @@ from collections import Mapping
from networkx.classes import Graph, DiGraph, MultiGraph, MultiDiGraph
from networkx.classes.coreviews import ReadOnlyGraph, \
- AtlasView, AdjacencyView, MultiAdjacencyView, \
- FilterAtlas, FilterAdjacency, FilterMultiAdjacency, \
- UnionAdjacency, UnionMultiAdjacency
+ AtlasView, AdjacencyView, MultiAdjacencyView, \
+ FilterAtlas, FilterAdjacency, FilterMultiAdjacency, \
+ UnionAdjacency, UnionMultiAdjacency
from networkx.classes.filters import no_filter, show_nodes, show_edges
from networkx.exception import NetworkXError, NetworkXNotImplemented
from networkx.utils import not_implemented_for
diff --git a/networkx/classes/reportviews.py b/networkx/classes/reportviews.py
index 0218b388..d19769ce 100644
--- a/networkx/classes/reportviews.py
+++ b/networkx/classes/reportviews.py
@@ -330,6 +330,7 @@ class DiDegreeView(object):
>>> DVnbunch = G.degree(nbunch=(1, 2))
>>> assert(len(list(DVnbunch)) == 2) # iteration over nbunch only
"""
+
def __init__(self, G, nbunch=None, weight=None):
self._graph = G
self._succ = G._succ if hasattr(G, "_succ") else G._adj
@@ -429,6 +430,7 @@ class DegreeView(DiDegreeView):
>>> DVnbunch = G.degree(nbunch=(1, 2))
>>> assert(len(list(DVnbunch)) == 2) # iteration over nbunch only
"""
+
def __getitem__(self, n):
weight = self._weight
nbrs = self._succ[n]
@@ -453,6 +455,7 @@ class DegreeView(DiDegreeView):
class OutDegreeView(DiDegreeView):
"""A DegreeView class to report out_degree for a DiGraph; See DegreeView"""
+
def __getitem__(self, n):
weight = self._weight
nbrs = self._succ[n]
@@ -475,6 +478,7 @@ class OutDegreeView(DiDegreeView):
class InDegreeView(DiDegreeView):
"""A DegreeView class to report in_degree for a DiGraph; See DegreeView"""
+
def __getitem__(self, n):
weight = self._weight
nbrs = self._pred[n]
@@ -497,12 +501,13 @@ class InDegreeView(DiDegreeView):
class MultiDegreeView(DiDegreeView):
"""A DegreeView class for undirected multigraphs; See DegreeView"""
+
def __getitem__(self, n):
weight = self._weight
nbrs = self._succ[n]
if weight is None:
return sum(len(keys) for keys in nbrs.values()) + \
- (n in nbrs and len(nbrs[n]))
+ (n in nbrs and len(nbrs[n]))
# edge weighted graph - degree is sum of nbr edge weights
deg = sum(d.get(weight, 1) for key_dict in nbrs.values()
for d in key_dict.values())
@@ -530,6 +535,7 @@ class MultiDegreeView(DiDegreeView):
class DiMultiDegreeView(DiDegreeView):
"""A DegreeView class for MultiDiGraph; See DegreeView"""
+
def __getitem__(self, n):
weight = self._weight
succs = self._succ[n]
@@ -566,6 +572,7 @@ class DiMultiDegreeView(DiDegreeView):
class InMultiDegreeView(DiDegreeView):
"""A DegreeView class for inward degree of MultiDiGraph; See DegreeView"""
+
def __getitem__(self, n):
weight = self._weight
nbrs = self._pred[n]
@@ -592,6 +599,7 @@ class InMultiDegreeView(DiDegreeView):
class OutMultiDegreeView(DiDegreeView):
"""A DegreeView class for outward degree of MultiDiGraph; See DegreeView"""
+
def __getitem__(self, n):
weight = self._weight
nbrs = self._succ[n]
@@ -649,7 +657,7 @@ class OutEdgeDataView(object):
self._report = lambda n, nbr, dd: (n, nbr)
else: # data is attribute name
self._report = lambda n, nbr, dd: \
- (n, nbr, dd[data]) if data in dd else (n, nbr, default)
+ (n, nbr, dd[data]) if data in dd else (n, nbr, default)
def __len__(self):
return sum(len(nbrs) for n, nbrs in self._nodes_nbrs())
@@ -785,10 +793,10 @@ class OutMultiEdgeDataView(OutEdgeDataView):
else: # data is attribute name
if keys is True:
self._report = lambda n, nbr, k, dd: (n, nbr, k, dd[data]) \
- if data in dd else (n, nbr, k, default)
+ if data in dd else (n, nbr, k, default)
else:
self._report = lambda n, nbr, k, dd: (n, nbr, dd[data]) \
- if data in dd else (n, nbr, default)
+ if data in dd else (n, nbr, default)
def __len__(self):
return sum(1 for e in self)
diff --git a/networkx/classes/tests/test_multigraph.py b/networkx/classes/tests/test_multigraph.py
index 5523e998..e103d2c5 100644
--- a/networkx/classes/tests/test_multigraph.py
+++ b/networkx/classes/tests/test_multigraph.py
@@ -143,7 +143,7 @@ class BaseMultiGraphTester(BaseAttrGraphTester):
G.adj[1][2][0]['weight'] = 20
assert_edges_equal(G.edges(data=True),
[(1, 2, {'data': 21, 'spam': 'bar', 'bar': 'foo',
- 'listdata': [20, 200], 'weight':20})])
+ 'listdata': [20, 200], 'weight':20})])
class TestMultiGraph(BaseMultiGraphTester, TestGraph):
diff --git a/networkx/classes/tests/test_reportviews.py b/networkx/classes/tests/test_reportviews.py
index 7fd96d4d..ce07bfd1 100644
--- a/networkx/classes/tests/test_reportviews.py
+++ b/networkx/classes/tests/test_reportviews.py
@@ -250,7 +250,7 @@ class TestEdgeDataView(object):
def test_str(self):
ev = self.eview(self.G)(data=True)
- rep = str([(n, n+1, {}) for n in range(8)])
+ rep = str([(n, n + 1, {}) for n in range(8)])
assert_equal(str(ev), rep)
def test_repr(self):
@@ -444,7 +444,7 @@ class TestEdgeView(object):
def test_str(self):
ev = self.eview(self.G)
- rep = str([(n, n+1) for n in range(8)])
+ rep = str([(n, n + 1) for n in range(8)])
assert_equal(str(ev), rep)
def test_repr(self):
@@ -587,7 +587,7 @@ class TestMultiEdgeView(TestEdgeView):
def test_str(self):
ev = self.eview(self.G)
- replist = [(n, n+1, 0) for n in range(8)]
+ replist = [(n, n + 1, 0) for n in range(8)]
replist.insert(2, (1, 2, 3))
rep = str(replist)
assert_equal(str(ev), rep)
diff --git a/networkx/classes/tests/test_subgraphviews.py b/networkx/classes/tests/test_subgraphviews.py
index a469ccdf..d63f59b8 100644
--- a/networkx/classes/tests/test_subgraphviews.py
+++ b/networkx/classes/tests/test_subgraphviews.py
@@ -1,5 +1,5 @@
from nose.tools import assert_equal, assert_not_equal, \
- assert_is, assert_true, assert_raises
+ assert_is, assert_true, assert_raises
import networkx as nx
diff --git a/networkx/drawing/layout.py b/networkx/drawing/layout.py
index c5c00394..8febb0ff 100644
--- a/networkx/drawing/layout.py
+++ b/networkx/drawing/layout.py
@@ -397,15 +397,15 @@ def _fruchterman_reingold(A, k=None, pos=None, fixed=None, iterations=50,
# optimal distance between nodes
if k is None:
- k = np.sqrt(1.0/nnodes)
+ k = np.sqrt(1.0 / nnodes)
# the initial "temperature" is about .1 of domain area (=1x1)
# this is the largest step allowed in the dynamics.
# We need to calculate this in case our fixed positions force our domain
# to be much bigger than 1x1
- t = max(max(pos.T[0]) - min(pos.T[0]), max(pos.T[1]) - min(pos.T[1]))*0.1
+ t = max(max(pos.T[0]) - min(pos.T[0]), max(pos.T[1]) - min(pos.T[1])) * 0.1
# simple cooling scheme.
# linearly step down by dt on each iteration so last iteration is size dt.
- dt = t/float(iterations+1)
+ dt = t / float(iterations + 1)
delta = np.zeros((pos.shape[0], pos.shape[0], pos.shape[1]), dtype=A.dtype)
# the inscrutable (but fast) version
# this is still O(V^2)
@@ -431,7 +431,7 @@ def _fruchterman_reingold(A, k=None, pos=None, fixed=None, iterations=50,
pos += delta_pos
# cool temperature
t -= dt
- err = np.linalg.norm(delta_pos)/nnodes
+ err = np.linalg.norm(delta_pos) / nnodes
if err < threshold:
break
return pos
@@ -478,13 +478,13 @@ def _sparse_fruchterman_reingold(A, k=None, pos=None, fixed=None,
# optimal distance between nodes
if k is None:
- k = np.sqrt(1.0/nnodes)
+ k = np.sqrt(1.0 / nnodes)
# the initial "temperature" is about .1 of domain area (=1x1)
# this is the largest step allowed in the dynamics.
t = 0.1
# simple cooling scheme.
# linearly step down by dt on each iteration so last iteration is size dt.
- dt = t / float(iterations+1)
+ dt = t / float(iterations + 1)
displacement = np.zeros((dim, nnodes))
for iteration in range(iterations):
@@ -511,7 +511,7 @@ def _sparse_fruchterman_reingold(A, k=None, pos=None, fixed=None,
pos += delta_pos
# cool temperature
t -= dt
- err = np.linalg.norm(delta_pos)/nnodes
+ err = np.linalg.norm(delta_pos) / nnodes
if err < threshold:
break
return pos
@@ -692,7 +692,7 @@ def spectral_layout(G, weight='weight', scale=1, center=None, dim=2):
elif len(G) == 1:
pos = np.array([center])
else:
- pos = np.array([np.zeros(dim), np.array(center)*2.0])
+ pos = np.array([np.zeros(dim), np.array(center) * 2.0])
return dict(zip(G, pos))
try:
# Sparse matrix
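The spacing fixes in _fruchterman_reingold above sit around its linear cooling schedule: the temperature t caps the per-iteration displacement, starts at roughly a tenth of the domain extent (a fixed 0.1 in the sparse variant), and is reduced by dt = t / (iterations + 1) after every step, with an early exit once the average displacement per node drops below threshold. The schedule in isolation, as a small sketch:

    iterations = 50
    t = 0.1                         # initial temperature: the largest allowed step
    dt = t / float(iterations + 1)  # linear decrement applied after each iteration
    for i in range(iterations):
        # ...compute repulsive/attractive displacements, clip their length to t,
        # move the positions, then cool...
        t -= dt
    # after the loop t has shrunk to (essentially) dt, so the last moves are tiny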
diff --git a/networkx/drawing/nx_agraph.py b/networkx/drawing/nx_agraph.py
index f38a31f2..d58952bc 100644
--- a/networkx/drawing/nx_agraph.py
+++ b/networkx/drawing/nx_agraph.py
@@ -160,19 +160,17 @@ def to_agraph(N):
str_edgedata = {k: str(v) for k, v in edgedata.items() if k != 'key'}
A.add_edge(u, v, key=str(key))
if edgedata is not None:
- a = A.get_edge(u,v)
+ a = A.get_edge(u, v)
a.attr.update(str_edgedata)
-
else:
for u, v, edgedata in N.edges(data=True):
str_edgedata = {k: str(v) for k, v in edgedata.items()}
A.add_edge(u, v)
if edgedata is not None:
- a = A.get_edge(u,v)
+ a = A.get_edge(u, v)
a.attr.update(str_edgedata)
-
return A
diff --git a/networkx/drawing/nx_pylab.py b/networkx/drawing/nx_pylab.py
index b4e71ff9..fe8c14d4 100644
--- a/networkx/drawing/nx_pylab.py
+++ b/networkx/drawing/nx_pylab.py
@@ -576,7 +576,7 @@ def draw_networkx_edges(G, pos,
elif np.alltrue([not is_string_like(c) for c in edge_color]):
# If color specs are given as (rgb) or (rgba) tuples, we're OK
if np.alltrue([cb.iterable(c) and len(c) in (3, 4)
- for c in edge_color]):
+ for c in edge_color]):
edge_colors = tuple(edge_color)
else:
# numbers (which are going to be mapped with a colormap)
diff --git a/networkx/drawing/tests/test_agraph.py b/networkx/drawing/tests/test_agraph.py
index 9d23a74e..b406fec9 100644
--- a/networkx/drawing/tests/test_agraph.py
+++ b/networkx/drawing/tests/test_agraph.py
@@ -24,7 +24,6 @@ class TestAGraph(object):
G.graph['metal'] = 'bronze'
return G
-
def assert_equal(self, G1, G2):
assert_nodes_equal(G1.nodes(), G2.nodes())
assert_edges_equal(G1.edges(), G2.edges())
@@ -87,14 +86,13 @@ class TestAGraph(object):
H = nx.nx_agraph.from_agraph(A)
assert_equal(G.name, 'test')
-
def test_graph_with_reserved_keywords(self):
# test attribute/keyword clash case for #1582
# node: n
# edges: u,v
G = nx.Graph()
G = self.build_graph(G)
- G.node['E']['n']='keyword'
- G.edges[('A','B')]['u']='keyword'
- G.edges[('A','B')]['v']='keyword'
+ G.node['E']['n'] = 'keyword'
+ G.edges[('A', 'B')]['u'] = 'keyword'
+ G.edges[('A', 'B')]['v'] = 'keyword'
A = nx.nx_agraph.to_agraph(G)
diff --git a/networkx/drawing/tests/test_pylab.py b/networkx/drawing/tests/test_pylab.py
index c8b68c9c..57354173 100644
--- a/networkx/drawing/tests/test_pylab.py
+++ b/networkx/drawing/tests/test_pylab.py
@@ -31,10 +31,10 @@ class TestPylab(object):
nx.draw_spring,
nx.draw_shell]
options = [{
- 'node_color': 'black',
- 'node_size': 100,
- 'width': 3,
- }]
+ 'node_color': 'black',
+ 'node_size': 100,
+ 'width': 3,
+ }]
for function, option in itertools.product(functions, options):
function(self.G, **option)
plt.savefig('test.ps')
diff --git a/networkx/generators/geometric.py b/networkx/generators/geometric.py
index 2a9d8901..f9467db9 100644
--- a/networkx/generators/geometric.py
+++ b/networkx/generators/geometric.py
@@ -168,6 +168,7 @@ def random_geometric_graph(n, radius, dim=2, pos=None, p=2):
return G
+
@nodes_or_number(0)
def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None):
"""Returns a soft random geometric graph in the unit cube of dimensions `dim`.
@@ -274,8 +275,8 @@ def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None):
pos = {v: [random.random() for i in range(dim)] for v in nodes}
nx.set_node_attributes(G, pos, 'pos')
- #if p_dist function not supplied the default function is an exponential
- #distribution with rate parameter :math:`\lambda=1`.
+    # if a p_dist function is not supplied, the default is an exponential
+ # distribution with rate parameter :math:`\lambda=1`.
if p_dist is None:
def p_dist(dist):
@@ -284,10 +285,10 @@ def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None):
def should_join(pair):
u, v = pair
u_pos, v_pos = pos[u], pos[v]
- dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos)))**(1/p)
- #Check if dist is <= radius parameter. This check is redundant if scipy
- #is availible and _fast_edges routine is used, but provides the check incase
- #scipy is not availible and all edge combinations need to be checked
+ dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos)))**(1 / p)
+ # Check if dist is <= radius parameter. This check is redundant if scipy
+        # is available and the _fast_edges routine is used, but provides the check in case
+        # scipy is not available and all edge combinations need to be checked
if dist <= radius:
return random.random() < p_dist(dist)
else:
@@ -437,7 +438,7 @@ def geographical_threshold_graph(n, theta, dim=2, pos=None,
nx.set_node_attributes(G, weight, 'weight')
nx.set_node_attributes(G, pos, 'pos')
- #if p_dist is not supplied, use default r^-2
+ # if p_dist is not supplied, use default r^-2
if p_dist == None:
def p_dist(r):
return r**-2
@@ -449,7 +450,7 @@ def geographical_threshold_graph(n, theta, dim=2, pos=None,
u, v = pair
u_pos, v_pos = pos[u], pos[v]
u_weight, v_weight = weight[u], weight[v]
- return (u_weight + v_weight)*p_dist(metric(u_pos, v_pos)) >= theta
+ return (u_weight + v_weight) * p_dist(metric(u_pos, v_pos)) >= theta
G.add_edges_from(filter(should_join, combinations(G, 2)))
return G
@@ -556,9 +557,10 @@ def waxman_graph(n, beta=0.4, alpha=0.1, L=None, domain=(0, 0, 1, 1),
# the Waxman-2 model, join randomly based on random l.
if L is None:
L = max(metric(x, y) for x, y in combinations(pos.values(), 2))
- dist = lambda u, v: metric(pos[u], pos[v])
+
+ def dist(u, v): return metric(pos[u], pos[v])
else:
- dist = lambda u, v: random.random() * L
+ def dist(u, v): return random.random() * L
# `pair` is the pair of nodes to decide whether to join.
def should_join(pair):
@@ -638,6 +640,7 @@ def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None):
G.add_edge(p1, target)
return G
+
@nodes_or_number(0)
def thresholded_random_geometric_graph(n, radius, theta, dim=2, pos=None, weight=None, p=2):
"""Returns a thresholded random geometric graph in the unit cube of dimensions `dim`.
@@ -758,14 +761,14 @@ def thresholded_random_geometric_graph(n, radius, theta, dim=2, pos=None, weight
u, v = pair
u_weight, v_weight = weight[u], weight[v]
u_pos, v_pos = pos[u], pos[v]
- dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos)))**(1/p)
- #Check if dist is <= radius parameter. This check is redundant if scipy
- #is availible and _fast_edges routine is used, but provides the check incase
- #scipy is not availible and all edge combinations need to be checked
+ dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos)))**(1 / p)
+ # Check if dist is <= radius parameter. This check is redundant if scipy
+        # is available and the _fast_edges routine is used, but provides the check in case
+        # scipy is not available and all edge combinations need to be checked
if dist <= radius:
return theta <= u_weight + v_weight
else:
- return False
+ return False
if _is_scipy_available:
edges = _fast_edges(G, radius, p)
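Both soft_random_geometric_graph and thresholded_random_geometric_graph (edited above) use the same screen-then-decide rule: compute the Minkowski distance dist = (sum |a - b|**p)**(1/p), reject the pair outright if dist exceeds radius, and otherwise either sample against p_dist(dist) or compare the summed node weights with theta. A standalone sketch of the soft variant's rule, assuming the documented exponential fallback behaves like exp(-dist):

    import math
    import random

    def accept_soft(u_pos, v_pos, radius, p=2, p_dist=None):
        """Decide whether to join one candidate pair (simplified sketch)."""
        if p_dist is None:
            # documented fallback: exponential distribution with rate parameter 1
            p_dist = lambda d: math.exp(-d)
        dist = sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos)) ** (1.0 / p)
        if dist > radius:
            # redundant when the scipy _fast_edges path pre-filters pairs,
            # but required when every combination must be checked
            return False
        return random.random() < p_dist(dist)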
diff --git a/networkx/generators/lattice.py b/networkx/generators/lattice.py
index 42c8fb8a..933e1c19 100644
--- a/networkx/generators/lattice.py
+++ b/networkx/generators/lattice.py
@@ -266,7 +266,7 @@ def triangular_lattice_graph(m, n, periodic=False, with_positions=True,
ii = (i for i in cols for j in rows)
jj = (j for i in cols for j in rows)
xx = (0.5 * (j % 2) + i for i in cols for j in rows)
- h = sqrt(3)/2
+ h = sqrt(3) / 2
if periodic:
yy = (h * j + .01 * i * i for i in cols for j in rows)
else:
@@ -363,9 +363,9 @@ def hexagonal_lattice_graph(m, n, periodic=False, with_positions=True,
jj = (j for i in cols for j in rows)
xx = (0.5 + i + i // 2 + (j % 2) * ((i % 2) - .5)
for i in cols for j in rows)
- h = sqrt(3)/2
+ h = sqrt(3) / 2
if periodic:
- yy = (h * j + .01*i*i for i in cols for j in rows)
+ yy = (h * j + .01 * i * i for i in cols for j in rows)
else:
yy = (h * j for i in cols for j in rows)
# exclude nodes not in G
diff --git a/networkx/generators/line.py b/networkx/generators/line.py
index 5640e559..348335b6 100644
--- a/networkx/generators/line.py
+++ b/networkx/generators/line.py
@@ -478,7 +478,7 @@ def _select_starting_cell(G, starting_edge=None):
if r == 2 and s == 0:
# in this case either triangle works, so just use T
starting_cell = T
- elif r-1 <= s <= r:
+ elif r - 1 <= s <= r:
# check if odd triangles containing e form complete subgraph
# there must be exactly s+2 of them
# and they must all be connected
@@ -486,7 +486,7 @@ def _select_starting_cell(G, starting_edge=None):
for T in odd_triangles:
for x in T:
triangle_nodes.add(x)
- if len(triangle_nodes) == s+2:
+ if len(triangle_nodes) == s + 2:
for u in triangle_nodes:
for v in triangle_nodes:
if u != v and (v not in G.neighbors(u)):
diff --git a/networkx/generators/mycielski.py b/networkx/generators/mycielski.py
index eed5f8b9..10c68270 100644
--- a/networkx/generators/mycielski.py
+++ b/networkx/generators/mycielski.py
@@ -65,12 +65,12 @@ def mycielskian(G, iterations=1):
for i in range(iterations):
n = M.number_of_nodes()
- M.add_nodes_from(range(n, 2*n))
+ M.add_nodes_from(range(n, 2 * n))
old_edges = list(M.edges())
- M.add_edges_from((u, v+n) for u, v in old_edges)
- M.add_edges_from((u+n, v) for u, v in old_edges)
- M.add_node(2*n)
- M.add_edges_from((u+n, 2*n) for u in range(n))
+ M.add_edges_from((u, v + n) for u, v in old_edges)
+ M.add_edges_from((u + n, v) for u, v in old_edges)
+ M.add_node(2 * n)
+ M.add_edges_from((u + n, 2 * n) for u in range(n))
return M
@@ -113,4 +113,4 @@ def mycielski_graph(n):
return nx.empty_graph(1)
else:
- return mycielskian(nx.path_graph(2), n-2)
+ return mycielskian(nx.path_graph(2), n - 2)
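The rewritten lines make the Mycielskian construction easier to follow: copy the n existing nodes as n..2n-1, add the cross edges (u, v+n) and (u+n, v) for every original edge, then attach a new apex node 2n to the entire mirror layer, so one iteration turns an n-node graph into one with 2n + 1 nodes and 3|E| + n edges. A quick check of those counts (the isomorphism assertions in the tests below are the stronger guarantee):

    import networkx as nx
    from networkx.generators.mycielski import mycielskian

    G = nx.cycle_graph(5)
    M = mycielskian(G)
    M.number_of_nodes(), M.number_of_edges()
    # (11, 20): 2*5 + 1 nodes and 3*5 + 5 edges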
diff --git a/networkx/generators/tests/test_expanders.py b/networkx/generators/tests/test_expanders.py
index b2235a25..a71a1a70 100644
--- a/networkx/generators/tests/test_expanders.py
+++ b/networkx/generators/tests/test_expanders.py
@@ -35,7 +35,7 @@ def test_margulis_gabber_galil_graph():
has_scipy = False
for n in 2, 3, 5, 6, 10:
g = margulis_gabber_galil_graph(n)
- assert_equal(number_of_nodes(g), n*n)
+ assert_equal(number_of_nodes(g), n * n)
for node in g:
assert_equal(g.degree(node), 8)
assert_equal(len(node), 2)
@@ -46,7 +46,7 @@ def test_margulis_gabber_galil_graph():
# Eigenvalues are already sorted using the scipy eigvalsh,
# but the implementation in numpy does not guarantee order.
w = sorted(scipy.linalg.eigvalsh(adjacency_matrix(g).A))
- assert_less(w[-2], 5*np.sqrt(2))
+ assert_less(w[-2], 5 * np.sqrt(2))
def test_chordal_cycle_graph():
diff --git a/networkx/generators/tests/test_geometric.py b/networkx/generators/tests/test_geometric.py
index d661cd00..8dd2a0a9 100644
--- a/networkx/generators/tests/test_geometric.py
+++ b/networkx/generators/tests/test_geometric.py
@@ -19,6 +19,7 @@ class TestRandomGeometricGraph(object):
function.
"""
+
def test_number_of_nodes(self):
G = nx.random_geometric_graph(50, 0.25)
assert_equal(len(G), 50)
@@ -76,6 +77,7 @@ class TestRandomGeometricGraph(object):
else:
assert_false(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25)
+
class TestSoftRandomGeometricGraph(object):
"""Unit tests for the :func:`~networkx.soft_random_geometric_graph`
function.
@@ -95,21 +97,20 @@ class TestSoftRandomGeometricGraph(object):
"""
# Use the Euclidean metric, the default according to the
# documentation.
- dist = lambda x, y: sqrt(sum((a - b) ** 2 for a, b in zip(x, y)))
+ def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y)))
G = nx.soft_random_geometric_graph(50, 0.25)
for u, v in combinations(G, 2):
# Adjacent vertices must be within the given distance.
if v in G[u]:
assert_true(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25)
-
def test_p(self):
"""Tests for providing an alternate distance metric to the
generator.
"""
# Use the L1 metric.
- dist = lambda x, y: sum(abs(a - b) for a, b in zip(x, y))
+ def dist(x, y): return sum(abs(a - b) for a, b in zip(x, y))
G = nx.soft_random_geometric_graph(50, 0.25, p=1)
for u, v in combinations(G, 2):
# Adjacent vertices must be within the given distance.
@@ -125,7 +126,7 @@ class TestSoftRandomGeometricGraph(object):
G = nx.soft_random_geometric_graph(nodes, 0.25)
assert_equal(len(G), len(nodes))
- dist = lambda x, y: sqrt(sum((a - b) ** 2 for a, b in zip(x, y)))
+ def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y)))
for u, v in combinations(G, 2):
# Adjacent vertices must be within the given distance.
if v in G[u]:
@@ -139,8 +140,8 @@ class TestSoftRandomGeometricGraph(object):
nodes = 50
dim = 2
pos = {v: [random.random() for i in range(dim)] for v in range(nodes)}
- RGG = nx.random_geometric_graph(50, 0.25,pos=pos)
- SRGG = nx.soft_random_geometric_graph(50, 0.25,pos=pos)
+ RGG = nx.random_geometric_graph(50, 0.25, pos=pos)
+ SRGG = nx.soft_random_geometric_graph(50, 0.25, pos=pos)
assert_true(len(SRGG.edges()) <= len(RGG.edges()))
def test_p_dist_zero(self):
@@ -150,7 +151,7 @@ class TestSoftRandomGeometricGraph(object):
def p_dist(dist):
return 0
- G = nx.soft_random_geometric_graph(50, 0.25, p_dist = p_dist)
+ G = nx.soft_random_geometric_graph(50, 0.25, p_dist=p_dist)
assert_true(len(G.edges) == 0)
@@ -169,7 +170,7 @@ def join(G, u, v, theta, alpha, metric):
du, dv = G.nodes[u], G.nodes[v]
u_pos, v_pos = du['pos'], dv['pos']
u_weight, v_weight = du['weight'], dv['weight']
- return (u_weight + v_weight)*metric(u_pos, v_pos) ** alpha >= theta
+ return (u_weight + v_weight) * metric(u_pos, v_pos) ** alpha >= theta
class TestGeographicalThresholdGraph(object):
@@ -224,7 +225,7 @@ class TestGeographicalThresholdGraph(object):
def p_dist(dist):
return 0
- G = nx.geographical_threshold_graph(50, 1, p_dist=p_dist)
+ G = nx.geographical_threshold_graph(50, 1, p_dist=p_dist)
assert_true(len(G.edges) == 0)
@@ -269,6 +270,7 @@ class TestNavigableSmallWorldGraph(object):
gg = nx.grid_graph([5]).to_directed()
assert_true(nx.is_isomorphic(G, gg))
+
class TestThresholdedRandomGeometricGraph(object):
"""Unit tests for the :func:`~networkx.thresholded_random_geometric_graph`
function.
@@ -288,21 +290,20 @@ class TestThresholdedRandomGeometricGraph(object):
"""
# Use the Euclidean metric, the default according to the
# documentation.
- dist = lambda x, y: sqrt(sum((a - b) ** 2 for a, b in zip(x, y)))
+ def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y)))
G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1)
for u, v in combinations(G, 2):
# Adjacent vertices must be within the given distance.
if v in G[u]:
assert_true(dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25)
-
def test_p(self):
"""Tests for providing an alternate distance metric to the
generator.
"""
# Use the L1 metric.
- dist = lambda x, y: sum(abs(a - b) for a, b in zip(x, y))
+ def dist(x, y): return sum(abs(a - b) for a, b in zip(x, y))
G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1, p=1)
for u, v in combinations(G, 2):
# Adjacent vertices must be within the given distance.
@@ -318,7 +319,7 @@ class TestThresholdedRandomGeometricGraph(object):
G = nx.thresholded_random_geometric_graph(nodes, 0.25, 0.1)
assert_equal(len(G), len(nodes))
- dist = lambda x, y: sqrt(sum((a - b) ** 2 for a, b in zip(x, y)))
+ def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y)))
for u, v in combinations(G, 2):
# Adjacent vertices must be within the given distance.
if v in G[u]:
diff --git a/networkx/generators/tests/test_lattice.py b/networkx/generators/tests/test_lattice.py
index 4291956a..ab8b826b 100644
--- a/networkx/generators/tests/test_lattice.py
+++ b/networkx/generators/tests/test_lattice.py
@@ -10,6 +10,7 @@ from networkx.testing import assert_edges_equal
class TestGrid2DGraph:
"""Unit tests for :func:`networkx.generators.lattice.grid_2d_graph`"""
+
def test_number_of_vertices(self):
m, n = 5, 6
G = nx.grid_2d_graph(m, n)
@@ -71,6 +72,7 @@ class TestGrid2DGraph:
class TestGridGraph:
"""Unit tests for :func:`networkx.generators.lattice.grid_graph`"""
+
def test_grid_graph(self):
"""grid_graph([n,m]) is a connected simple graph with the
following properties:
@@ -80,14 +82,14 @@ class TestGridGraph:
for n, m in [(3, 5), (5, 3), (4, 5), (5, 4)]:
dim = [n, m]
g = nx.grid_graph(dim)
- assert_equal(len(g), n*m)
+ assert_equal(len(g), n * m)
assert_equal(nx.degree_histogram(g), [0, 0, 4, 2 * (n + m) - 8,
(n - 2) * (m - 2)])
for n, m in [(1, 5), (5, 1)]:
dim = [n, m]
g = nx.grid_graph(dim)
- assert_equal(len(g), n*m)
+ assert_equal(len(g), n * m)
assert_true(nx.is_isomorphic(g, nx.path_graph(5)))
# mg = nx.grid_graph([n,m], create_using=MultiGraph())
@@ -101,6 +103,7 @@ class TestGridGraph:
class TestHypercubeGraph:
"""Unit tests for :func:`networkx.generators.lattice.hypercube_graph`"""
+
def test_special_cases(self):
for n, H in [(0, nx.null_graph()), (1, nx.path_graph(2)),
(2, nx.cycle_graph(4)), (3, nx.cubical_graph())]:
@@ -116,6 +119,7 @@ class TestHypercubeGraph:
class TestTriangularLatticeGraph:
"Tests for :func:`networkx.generators.lattice.triangular_lattice_graph`"
+
def test_lattice_points(self):
"""Tests that the graph is really a triangular lattice."""
for m, n in [(2, 3), (2, 2), (2, 1), (3, 3), (3, 2), (3, 4)]:
@@ -162,6 +166,7 @@ class TestTriangularLatticeGraph:
class TestHexagonalLatticeGraph:
"Tests for :func:`networkx.generators.lattice.hexagonal_lattice_graph`"
+
def test_lattice_points(self):
"""Tests that the graph is really a hexagonal lattice."""
for m, n in [(4, 5), (4, 4), (4, 3), (3, 2), (3, 3), (3, 5)]:
@@ -174,7 +179,7 @@ class TestHexagonalLatticeGraph:
[(1, 1), (1, 2), (1, 3), (2, 1), (2, 2), (2, 3)],
[(2, 0), (2, 1), (2, 2), (3, 0), (3, 1), (3, 2)],
[(2, 2), (2, 3), (2, 4), (3, 2), (3, 3), (3, 4)],
- ]
+ ]
for hexagon in hexagons:
assert_true(nx.is_isomorphic(G.subgraph(hexagon), C_6))
diff --git a/networkx/generators/tests/test_mycielski.py b/networkx/generators/tests/test_mycielski.py
index 9b6ce084..e3786ca7 100644
--- a/networkx/generators/tests/test_mycielski.py
+++ b/networkx/generators/tests/test_mycielski.py
@@ -13,6 +13,7 @@ from nose.tools import assert_true, assert_equal, raises
import networkx as nx
from networkx import *
+
class TestMycielski(object):
def test_construction(self):
@@ -34,4 +35,4 @@ class TestMycielski(object):
G = mycielski_graph(3)
assert_true(is_isomorphic(G, cycle_graph(5)))
G = mycielski_graph(4)
- assert_true(is_isomorphic(G, mycielskian(cycle_graph(5)))) \ No newline at end of file
+ assert_true(is_isomorphic(G, mycielskian(cycle_graph(5))))
diff --git a/networkx/generators/tests/test_trees.py b/networkx/generators/tests/test_trees.py
index 1d7d1b52..fbb74bce 100644
--- a/networkx/generators/tests/test_trees.py
+++ b/networkx/generators/tests/test_trees.py
@@ -4,6 +4,7 @@ import networkx as nx
from networkx.generators.trees import NIL
from networkx.utils import arbitrary_element
+
class TestPrefixTree(object):
"""Unit tests for the prefix tree generator function."""
@@ -12,7 +13,8 @@ class TestPrefixTree(object):
# <https://en.wikipedia.org/wiki/Trie>.
strings = ['a', 'to', 'tea', 'ted', 'ten', 'i', 'in', 'inn']
T, root = nx.prefix_tree(strings)
- source_label = lambda v: T.node[v]['source']
+
+ def source_label(v): return T.node[v]['source']
# First, we check that the tree has the expected
# structure. Recall that each node that corresponds to one of
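The lambda-to-def change above follows PEP 8's E731 rule (do not assign a lambda to a name). A hedged sketch of the same helper outside the test, assuming the T.node[v]['source'] layout the test already relies on:

    import networkx as nx

    strings = ['a', 'to', 'tea', 'ted', 'ten', 'i', 'in', 'inn']
    T, root = nx.prefix_tree(strings)

    def source_label(v):               # named def instead of a lambda (E731)
        return T.node[v]['source']

    # Children of the root carry the first letters of the inserted strings.
    print(sorted(source_label(v) for v in T[root]))   # ['a', 'i', 't']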
diff --git a/networkx/linalg/algebraicconnectivity.py b/networkx/linalg/algebraicconnectivity.py
index 9ce4a38e..7915b252 100644
--- a/networkx/linalg/algebraicconnectivity.py
+++ b/networkx/linalg/algebraicconnectivity.py
@@ -52,6 +52,7 @@ class _PCGSolver(object):
Warning: There is no limit on number of iterations.
"""
+
def __init__(self, A, M):
self._A = A
self._M = M or (lambda x: x.copy())
@@ -97,6 +98,7 @@ class _CholeskySolver(object):
optional argument `tol` on solve method is ignored but included
to match _PCGsolver API.
"""
+
def __init__(self, A):
if not self._cholesky:
raise nx.NetworkXError('Cholesky solver unavailable.')
@@ -122,6 +124,7 @@ class _LUSolver(object):
optional argument `tol` on solve method is ignored but included
to match _PCGsolver API.
"""
+
def __init__(self, A):
if not self._splu:
raise nx.NetworkXError('LU solver unavailable.')
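For orientation, the _PCGSolver/_CholeskySolver/_LUSolver classes touched above back the tracemin-based eigensolvers used by the algebraic connectivity routines. A rough usage sketch (requires SciPy; the method string is my recollection of the accepted spelling, not something this patch changes):

    import networkx as nx

    G = nx.path_graph(10)
    ac = nx.algebraic_connectivity(G, method='tracemin_pcg')   # backed by _PCGSolver
    v = nx.fiedler_vector(G, method='tracemin_pcg')
    print(round(ac, 6), len(v))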
diff --git a/networkx/readwrite/edgelist.py b/networkx/readwrite/edgelist.py
index 04687fa8..aa04ce1c 100644
--- a/networkx/readwrite/edgelist.py
+++ b/networkx/readwrite/edgelist.py
@@ -44,6 +44,7 @@ __all__ = ['generate_edgelist',
from networkx.utils import open_file, make_str
import networkx as nx
+
def generate_edgelist(G, delimiter=' ', data=True):
"""Generate a single line of the graph G in edge list format.
@@ -110,25 +111,26 @@ def generate_edgelist(G, delimiter=' ', data=True):
write_adjlist, read_adjlist
"""
if data is True:
- for u,v,d in G.edges(data=True):
- e = u,v,dict(d)
- yield delimiter.join(map(make_str,e))
+ for u, v, d in G.edges(data=True):
+ e = u, v, dict(d)
+ yield delimiter.join(map(make_str, e))
elif data is False:
- for u,v in G.edges(data=False):
- e = u,v
- yield delimiter.join(map(make_str,e))
+ for u, v in G.edges(data=False):
+ e = u, v
+ yield delimiter.join(map(make_str, e))
else:
- for u,v,d in G.edges(data=True):
- e=[u,v]
+ for u, v, d in G.edges(data=True):
+ e = [u, v]
try:
e.extend(d[k] for k in data)
except KeyError:
- pass # missing data for this edge, should warn?
- yield delimiter.join(map(make_str,e))
+ pass # missing data for this edge, should warn?
+ yield delimiter.join(map(make_str, e))
+
-@open_file(1,mode='wb')
+@open_file(1, mode='wb')
def write_edgelist(G, path, comments="#", delimiter=' ', data=True,
- encoding = 'utf-8'):
+ encoding='utf-8'):
"""Write graph as a list of edges.
Parameters
@@ -173,9 +175,10 @@ def write_edgelist(G, path, comments="#", delimiter=' ', data=True,
"""
for line in generate_edgelist(G, delimiter, data):
- line+='\n'
+ line += '\n'
path.write(line.encode(encoding))
+
def parse_edgelist(lines, comments='#', delimiter=None,
create_using=None, nodetype=None, data=True):
"""Parse lines of an edge list representation of a graph.
@@ -244,64 +247,65 @@ def parse_edgelist(lines, comments='#', delimiter=None,
"""
from ast import literal_eval
if create_using is None:
- G=nx.Graph()
+ G = nx.Graph()
else:
try:
- G=create_using
+ G = create_using
G.clear()
except:
raise TypeError("create_using input is not a NetworkX graph type")
for line in lines:
- p=line.find(comments)
- if p>=0:
+ p = line.find(comments)
+ if p >= 0:
line = line[:p]
if not len(line):
continue
# split line, should have 2 or more
- s=line.strip().split(delimiter)
- if len(s)<2:
+ s = line.strip().split(delimiter)
+ if len(s) < 2:
continue
- u=s.pop(0)
- v=s.pop(0)
- d=s
+ u = s.pop(0)
+ v = s.pop(0)
+ d = s
if nodetype is not None:
try:
- u=nodetype(u)
- v=nodetype(v)
+ u = nodetype(u)
+ v = nodetype(v)
except:
raise TypeError("Failed to convert nodes %s,%s to type %s."
- %(u,v,nodetype))
+ % (u, v, nodetype))
- if len(d)==0 or data is False:
+ if len(d) == 0 or data is False:
# no data or data type specified
- edgedata={}
+ edgedata = {}
elif data is True:
# no edge types specified
- try: # try to evaluate as dictionary
- edgedata=dict(literal_eval(' '.join(d)))
+ try: # try to evaluate as dictionary
+ edgedata = dict(literal_eval(' '.join(d)))
except:
raise TypeError(
- "Failed to convert edge data (%s) to dictionary."%(d))
+ "Failed to convert edge data (%s) to dictionary." % (d))
else:
# convert edge data to dictionary with specified keys and type
- if len(d)!=len(data):
+ if len(d) != len(data):
raise IndexError(
- "Edge data %s and data_keys %s are not the same length"%
+ "Edge data %s and data_keys %s are not the same length" %
(d, data))
- edgedata={}
- for (edge_key,edge_type),edge_value in zip(data,d):
+ edgedata = {}
+ for (edge_key, edge_type), edge_value in zip(data, d):
try:
- edge_value=edge_type(edge_value)
+ edge_value = edge_type(edge_value)
except:
raise TypeError(
"Failed to convert %s data %s to type %s."
- %(edge_key, edge_value, edge_type))
- edgedata.update({edge_key:edge_value})
+ % (edge_key, edge_value, edge_type))
+ edgedata.update({edge_key: edge_value})
G.add_edge(u, v, **edgedata)
return G
-@open_file(0,mode='rb')
+
+@open_file(0, mode='rb')
def read_edgelist(path, comments="#", delimiter=None, create_using=None,
nodetype=None, data=True, edgetype=None, encoding='utf-8'):
"""Read a graph from a list of edges.
@@ -369,7 +373,7 @@ def read_edgelist(path, comments="#", delimiter=None, create_using=None,
types (e.g. int, float, str, frozenset - or tuples of those, etc.)
"""
lines = (line.decode(encoding) for line in path)
- return parse_edgelist(lines,comments=comments, delimiter=delimiter,
+ return parse_edgelist(lines, comments=comments, delimiter=delimiter,
create_using=create_using, nodetype=nodetype,
data=data)
@@ -406,12 +410,12 @@ def write_weighted_edgelist(G, path, comments="#",
write_weighted_edgelist()
"""
- write_edgelist(G,path, comments=comments, delimiter=delimiter,
- data=('weight',), encoding = encoding)
+ write_edgelist(G, path, comments=comments, delimiter=delimiter,
+ data=('weight',), encoding=encoding)
+
def read_weighted_edgelist(path, comments="#", delimiter=None,
create_using=None, nodetype=None, encoding='utf-8'):
-
"""Read a graph as list of edges with numeric weights.
Parameters
@@ -458,8 +462,8 @@ def read_weighted_edgelist(path, comments="#", delimiter=None,
delimiter=delimiter,
create_using=create_using,
nodetype=nodetype,
- data=(('weight',float),),
- encoding = encoding
+ data=(('weight', float),),
+ encoding=encoding
)
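As a reminder of the API being reformatted above (a sketch, not part of the diff): parse_edgelist accepts typed data keys, and the weighted read/write helpers wrap it:

    import io
    import networkx as nx

    lines = ["1 2 7 green", "2 3 8 blue"]
    G = nx.parse_edgelist(lines, nodetype=int,
                          data=(('weight', int), ('color', str)))
    print(sorted(G.edges(data=True)))

    H = nx.Graph()
    H.add_weighted_edges_from([(1, 2, 2.0), (2, 3, 3.0)])
    fh = io.BytesIO()
    nx.write_weighted_edgelist(H, fh)
    fh.seek(0)
    print(sorted(nx.read_weighted_edgelist(fh, nodetype=int).edges(data=True)))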
diff --git a/networkx/readwrite/gml.py b/networkx/readwrite/gml.py
index 127be651..3336a5a8 100644
--- a/networkx/readwrite/gml.py
+++ b/networkx/readwrite/gml.py
@@ -78,7 +78,7 @@ try:
literal_eval(r"u'\u4444'")
except SyntaxError:
# Remove 'u' prefixes in unicode literals in Python 3
- rtp_fix_unicode = lambda s: s[1:]
+ def rtp_fix_unicode(s): return s[1:]
else:
rtp_fix_unicode = None
@@ -308,7 +308,7 @@ def parse_gml_lines(lines, label, destringizer):
r'\[', # dict start
r'\]', # dict end
r'#.*$|\s+' # comments and whitespaces
- ]
+ ]
tokens = re.compile(
'|'.join('(' + pattern + ')' for pattern in patterns))
lineno = 0
@@ -688,7 +688,7 @@ def generate_gml(G, stringizer=None):
yield line
yield indent + ']'
elif isinstance(value, (list, tuple)) and key != 'label' \
- and value and not in_list:
+ and value and not in_list:
next_indent = indent + ' '
for val in value:
for line in stringize(key, val, (), next_indent, True):
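A small round-trip through the GML code touched above (illustrative only; note that parse_gml relabels nodes by their GML label, so integer nodes come back as strings):

    import networkx as nx

    G = nx.path_graph(3)
    gml = '\n'.join(nx.generate_gml(G))
    H = nx.parse_gml(gml)
    print(sorted(H.nodes()))           # ['0', '1', '2']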
diff --git a/networkx/readwrite/graph6.py b/networkx/readwrite/graph6.py
index aaf8ae98..fb189255 100644
--- a/networkx/readwrite/graph6.py
+++ b/networkx/readwrite/graph6.py
@@ -112,8 +112,8 @@ def from_graph6_bytes(string):
"""Return sequence of individual bits from 6-bit-per-value
list of data values."""
for d in data:
- for i in [5,4,3,2,1,0]:
- yield (d>>i)&1
+ for i in [5, 4, 3, 2, 1, 0]:
+ yield (d >> i) & 1
if string.startswith(b'>>graph6<<'):
string = string[10:]
@@ -126,16 +126,16 @@ def from_graph6_bytes(string):
raise ValueError('each input character must be in range(63, 127)')
n, data = data_to_n(data)
- nd = (n*(n-1)//2 + 5) // 6
+ nd = (n * (n - 1) // 2 + 5) // 6
if len(data) != nd:
- raise NetworkXError(\
- 'Expected %d bits but got %d in graph6' % (n*(n-1)//2, len(data)*6))
+ raise NetworkXError(
+ 'Expected %d bits but got %d in graph6' % (n * (n - 1) // 2, len(data) * 6))
- G=nx.Graph()
+ G = nx.Graph()
G.add_nodes_from(range(n))
- for (i,j),b in zip([(i,j) for j in range(1,n) for i in range(j)], bits()):
+ for (i, j), b in zip([(i, j) for j in range(1, n) for i in range(j)], bits()):
if b:
- G.add_edge(i,j)
+ G.add_edge(i, j)
return G
@@ -389,9 +389,9 @@ def data_to_n(data):
if data[0] <= 62:
return data[0], data[1:]
if data[1] <= 62:
- return (data[1]<<12) + (data[2]<<6) + data[3], data[4:]
- return ((data[2]<<30) + (data[3]<<24) + (data[4]<<18) +
- (data[5]<<12) + (data[6]<<6) + data[7], data[8:])
+ return (data[1] << 12) + (data[2] << 6) + data[3], data[4:]
+ return ((data[2] << 30) + (data[3] << 24) + (data[4] << 18) +
+ (data[5] << 12) + (data[6] << 6) + data[7], data[8:])
def n_to_data(n):
@@ -403,8 +403,8 @@ def n_to_data(n):
if n <= 62:
return [n]
elif n <= 258047:
- return [63, (n>>12) & 0x3f, (n>>6) & 0x3f, n & 0x3f]
+ return [63, (n >> 12) & 0x3f, (n >> 6) & 0x3f, n & 0x3f]
else: # if n <= 68719476735:
return [63, 63,
- (n>>30) & 0x3f, (n>>24) & 0x3f, (n>>18) & 0x3f,
- (n>>12) & 0x3f, (n>>6) & 0x3f, n & 0x3f]
+ (n >> 30) & 0x3f, (n >> 24) & 0x3f, (n >> 18) & 0x3f,
+ (n >> 12) & 0x3f, (n >> 6) & 0x3f, n & 0x3f]
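For context, the bit-unpacking reformatted above is what decodes fixtures like the b'DF{' string used in test_graph6.py further down:

    import networkx as nx

    G = nx.from_graph6_bytes(b'DF{')
    print(sorted(G.nodes()))   # [0, 1, 2, 3, 4]
    print(sorted(G.edges()))   # [(0, 3), (0, 4), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]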
diff --git a/networkx/readwrite/graphml.py b/networkx/readwrite/graphml.py
index 086b53b8..734cf995 100644
--- a/networkx/readwrite/graphml.py
+++ b/networkx/readwrite/graphml.py
@@ -539,7 +539,7 @@ class GraphMLWriter(GraphML):
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
- self.indent(elem, level+1)
+ self.indent(elem, level + 1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
@@ -684,6 +684,7 @@ class GraphMLWriterLxml(GraphMLWriter):
self._graphml.__exit__(None, None, None)
self._xml_base.__exit__(None, None, None)
+
# Choose a writer function for default
if lxmletree is None:
write_graphml = write_graphml_xml
@@ -693,6 +694,7 @@ else:
class GraphMLReader(GraphML):
"""Read a GraphML document. Produces NetworkX graph objects."""
+
def __init__(self, node_type=str, edge_key_type=int):
try:
import xml.etree.ElementTree
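A quick round-trip through the writer/reader classes touched above (sketch only; read_graphml returns node ids as strings by default):

    import io
    import networkx as nx

    G = nx.path_graph(3)
    fh = io.BytesIO()
    nx.write_graphml(G, fh)
    fh.seek(0)
    H = nx.read_graphml(fh)
    print(sorted(H.nodes()))   # ['0', '1', '2']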
diff --git a/networkx/readwrite/json_graph/cytoscape.py b/networkx/readwrite/json_graph/cytoscape.py
index b5da2452..6030d16e 100644
--- a/networkx/readwrite/json_graph/cytoscape.py
+++ b/networkx/readwrite/json_graph/cytoscape.py
@@ -6,6 +6,7 @@ __all__ = ['cytoscape_data', 'cytoscape_graph']
_attrs = dict(name='name', ident='id')
+
def cytoscape_data(G, attrs=None):
"""Return data in Cytoscape JSON format (cyjs).
@@ -30,34 +31,34 @@ def cytoscape_data(G, attrs=None):
name = attrs["name"]
ident = attrs["ident"]
-
+
if len(set([name, ident])) < 2:
raise nx.NetworkXError('Attribute names are not unique.')
-
- jsondata = {"data" : list(G.graph.items())}
+
+ jsondata = {"data": list(G.graph.items())}
jsondata['directed'] = G.is_directed()
jsondata['multigraph'] = G.is_multigraph()
- jsondata["elements"] = {"nodes" : [], "edges" : []}
+ jsondata["elements"] = {"nodes": [], "edges": []}
nodes = jsondata["elements"]["nodes"]
edges = jsondata["elements"]["edges"]
for i, j in G.nodes.items():
- n = {"data" : j.copy()}
+ n = {"data": j.copy()}
n["data"]["id"] = j.get(ident) or str(i)
n["data"]["value"] = i
n["data"]["name"] = j.get(name) or str(i)
nodes.append(n)
-
+
if G.is_multigraph():
for e in G.edges(keys=True):
- n = {"data" : G.adj[e[0]][e[1]][e[2]].copy()}
+ n = {"data": G.adj[e[0]][e[1]][e[2]].copy()}
n["data"]["source"] = e[0]
n["data"]["target"] = e[1]
n["data"]["key"] = e[2]
edges.append(n)
else:
for e in G.edges():
- n = {"data" : G.adj[e[0]][e[1]].copy()}
+ n = {"data": G.adj[e[0]][e[1]].copy()}
n["data"]["source"] = e[0]
n["data"]["target"] = e[1]
edges.append(n)
@@ -69,13 +70,13 @@ def cytoscape_graph(data, attrs=None):
attrs = _attrs
else:
attrs.update({k: v for (k, v) in _attrs.items() if k not in attrs})
-
+
name = attrs["name"]
ident = attrs["ident"]
-
+
if len(set([ident, name])) < 2:
raise nx.NetworkXError('Attribute names are not unique.')
-
+
multigraph = data.get('multigraph')
directed = data.get('directed')
if multigraph:
@@ -88,7 +89,7 @@ def cytoscape_graph(data, attrs=None):
for d in data["elements"]["nodes"]:
node_data = d["data"].copy()
node = d["data"]["value"]
-
+
if d["data"].get(name):
node_data[name] = d["data"].get(name)
if d["data"].get(ident):
@@ -96,7 +97,7 @@ def cytoscape_graph(data, attrs=None):
graph.add_node(node)
graph.nodes[node].update(node_data)
-
+
for d in data["elements"]["edges"]:
edge_data = d["data"].copy()
sour = d["data"].pop("source")
@@ -109,4 +110,3 @@ def cytoscape_graph(data, attrs=None):
graph.add_edge(sour, targ)
graph.edges[sour, targ].update(edge_data)
return graph
-
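The whitespace-only hunks above sit inside the cyjs converter pair; for reference, a minimal round-trip (not part of the patch):

    import json
    import networkx as nx
    from networkx.readwrite.json_graph import cytoscape_data, cytoscape_graph

    G = nx.path_graph(3)
    data = cytoscape_data(G)        # dict with 'data', 'directed', 'multigraph', 'elements'
    H = cytoscape_graph(json.loads(json.dumps(data)))
    print(nx.is_isomorphic(G, H))   # True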
diff --git a/networkx/readwrite/json_graph/jit.py b/networkx/readwrite/json_graph/jit.py
index ee90d0d9..3dae5840 100644
--- a/networkx/readwrite/json_graph/jit.py
+++ b/networkx/readwrite/json_graph/jit.py
@@ -39,6 +39,7 @@ from networkx.utils.decorators import not_implemented_for
__all__ = ['jit_graph', 'jit_data']
+
def jit_graph(data, create_using=None):
"""Read a graph from JIT JSON.
diff --git a/networkx/readwrite/json_graph/tests/test_adjacency.py b/networkx/readwrite/json_graph/tests/test_adjacency.py
index f60afcc1..5dbb84ce 100644
--- a/networkx/readwrite/json_graph/tests/test_adjacency.py
+++ b/networkx/readwrite/json_graph/tests/test_adjacency.py
@@ -3,38 +3,39 @@ from nose.tools import assert_equal, assert_true, raises
import networkx as nx
from networkx.readwrite.json_graph import *
+
class TestAdjacency:
def test_graph(self):
G = nx.path_graph(4)
H = adjacency_graph(adjacency_data(G))
- nx.is_isomorphic(G,H)
+ nx.is_isomorphic(G, H)
def test_graph_attributes(self):
G = nx.path_graph(4)
- G.add_node(1,color='red')
- G.add_edge(1,2,width=7)
- G.graph['foo']='bar'
- G.graph[1]='one'
+ G.add_node(1, color='red')
+ G.add_edge(1, 2, width=7)
+ G.graph['foo'] = 'bar'
+ G.graph[1] = 'one'
H = adjacency_graph(adjacency_data(G))
- assert_equal(H.graph['foo'],'bar')
- assert_equal(H.nodes[1]['color'],'red')
- assert_equal(H[1][2]['width'],7)
+ assert_equal(H.graph['foo'], 'bar')
+ assert_equal(H.nodes[1]['color'], 'red')
+ assert_equal(H[1][2]['width'], 7)
d = json.dumps(adjacency_data(G))
H = adjacency_graph(json.loads(d))
- assert_equal(H.graph['foo'],'bar')
- assert_equal(H.graph[1],'one')
- assert_equal(H.nodes[1]['color'],'red')
- assert_equal(H[1][2]['width'],7)
+ assert_equal(H.graph['foo'], 'bar')
+ assert_equal(H.graph[1], 'one')
+ assert_equal(H.nodes[1]['color'], 'red')
+ assert_equal(H[1][2]['width'], 7)
def test_digraph(self):
G = nx.DiGraph()
nx.add_path(G, [1, 2, 3])
H = adjacency_graph(adjacency_data(G))
assert_true(H.is_directed())
- nx.is_isomorphic(G,H)
+ nx.is_isomorphic(G, H)
def test_multidigraph(self):
G = nx.MultiDiGraph()
@@ -45,11 +46,11 @@ class TestAdjacency:
def test_multigraph(self):
G = nx.MultiGraph()
- G.add_edge(1,2,key='first')
- G.add_edge(1,2,key='second',color='blue')
+ G.add_edge(1, 2, key='first')
+ G.add_edge(1, 2, key='second', color='blue')
H = adjacency_graph(adjacency_data(G))
- nx.is_isomorphic(G,H)
- assert_equal(H[1][2]['second']['color'],'blue')
+ nx.is_isomorphic(G, H)
+ assert_equal(H[1][2]['second']['color'], 'blue')
@raises(nx.NetworkXError)
def test_exception(self):
diff --git a/networkx/readwrite/json_graph/tests/test_cytoscape.py b/networkx/readwrite/json_graph/tests/test_cytoscape.py
index 3c2155fa..502d3af1 100644
--- a/networkx/readwrite/json_graph/tests/test_cytoscape.py
+++ b/networkx/readwrite/json_graph/tests/test_cytoscape.py
@@ -3,47 +3,44 @@ from nose.tools import assert_equal, assert_true, raises
import networkx as nx
from networkx.readwrite.json_graph import *
+
class TestCytoscape:
-
+
def test_graph(self):
G = nx.path_graph(4)
H = cytoscape_graph(cytoscape_data(G))
- nx.is_isomorphic(G,H)
+ nx.is_isomorphic(G, H)
def test_graph_attributes(self):
G = nx.path_graph(4)
- G.add_node(1,color='red')
- G.add_edge(1,2,width=7)
- G.graph['foo']='bar'
- G.graph[1]='one'
+ G.add_node(1, color='red')
+ G.add_edge(1, 2, width=7)
+ G.graph['foo'] = 'bar'
+ G.graph[1] = 'one'
G.add_node(3, name="node", id="123")
-
+
H = cytoscape_graph(cytoscape_data(G))
- assert_equal(H.graph['foo'],'bar')
- assert_equal(H.nodes[1]['color'],'red')
- assert_equal(H[1][2]['width'],7)
- assert_equal(H.nodes[3]['name'],'node')
- assert_equal(H.nodes[3]['id'],'123')
-
+ assert_equal(H.graph['foo'], 'bar')
+ assert_equal(H.nodes[1]['color'], 'red')
+ assert_equal(H[1][2]['width'], 7)
+ assert_equal(H.nodes[3]['name'], 'node')
+ assert_equal(H.nodes[3]['id'], '123')
d = json.dumps(cytoscape_data(G))
H = cytoscape_graph(json.loads(d))
- assert_equal(H.graph['foo'],'bar')
- assert_equal(H.graph[1],'one')
- assert_equal(H.nodes[1]['color'],'red')
- assert_equal(H[1][2]['width'],7)
- assert_equal(H.nodes[3]['name'],'node')
- assert_equal(H.nodes[3]['id'],'123')
-
-
-
-
+ assert_equal(H.graph['foo'], 'bar')
+ assert_equal(H.graph[1], 'one')
+ assert_equal(H.nodes[1]['color'], 'red')
+ assert_equal(H[1][2]['width'], 7)
+ assert_equal(H.nodes[3]['name'], 'node')
+ assert_equal(H.nodes[3]['id'], '123')
+
def test_digraph(self):
G = nx.DiGraph()
nx.add_path(G, [1, 2, 3])
H = cytoscape_graph(cytoscape_data(G))
assert_true(H.is_directed())
- nx.is_isomorphic(G,H)
+ nx.is_isomorphic(G, H)
def test_multidigraph(self):
G = nx.MultiDiGraph()
@@ -54,11 +51,11 @@ class TestCytoscape:
def test_multigraph(self):
G = nx.MultiGraph()
- G.add_edge(1,2,key='first')
- G.add_edge(1,2,key='second',color='blue')
+ G.add_edge(1, 2, key='first')
+ G.add_edge(1, 2, key='second', color='blue')
H = cytoscape_graph(cytoscape_data(G))
- assert_true(nx.is_isomorphic(G,H))
- assert_equal(H[1][2]['second']['color'],'blue')
+ assert_true(nx.is_isomorphic(G, H))
+ assert_equal(H[1][2]['second']['color'], 'blue')
@raises(nx.NetworkXError)
def test_exception(self):
diff --git a/networkx/readwrite/json_graph/tests/test_node_link.py b/networkx/readwrite/json_graph/tests/test_node_link.py
index d222b420..92a565b3 100644
--- a/networkx/readwrite/json_graph/tests/test_node_link.py
+++ b/networkx/readwrite/json_graph/tests/test_node_link.py
@@ -60,7 +60,7 @@ class TestNodeLink:
except NameError:
q = "qualité"
G = nx.Graph()
- G.add_node(1, **{q:q})
+ G.add_node(1, **{q: q})
s = node_link_data(G)
output = json.dumps(s, ensure_ascii=False)
data = json.loads(output)
@@ -103,4 +103,3 @@ class TestNodeLink:
assert_equal(H.graph['foo'], 'bar')
assert_equal(H.nodes[1]['color'], 'red')
assert_equal(H[1][2]['width'], 7)
-
diff --git a/networkx/readwrite/json_graph/tests/test_tree.py b/networkx/readwrite/json_graph/tests/test_tree.py
index 2ea95f49..cb59532e 100644
--- a/networkx/readwrite/json_graph/tests/test_tree.py
+++ b/networkx/readwrite/json_graph/tests/test_tree.py
@@ -3,29 +3,30 @@ from nose.tools import assert_equal, assert_true, raises
import networkx as nx
from networkx.readwrite.json_graph import *
+
class TestTree:
def test_graph(self):
- G=nx.DiGraph()
- G.add_nodes_from([1,2,3],color='red')
- G.add_edge(1,2,foo=7)
- G.add_edge(1,3,foo=10)
- G.add_edge(3,4,foo=10)
- H = tree_graph(tree_data(G,1))
- nx.is_isomorphic(G,H)
+ G = nx.DiGraph()
+ G.add_nodes_from([1, 2, 3], color='red')
+ G.add_edge(1, 2, foo=7)
+ G.add_edge(1, 3, foo=10)
+ G.add_edge(3, 4, foo=10)
+ H = tree_graph(tree_data(G, 1))
+ nx.is_isomorphic(G, H)
def test_graph_attributes(self):
- G=nx.DiGraph()
- G.add_nodes_from([1,2,3],color='red')
- G.add_edge(1,2,foo=7)
- G.add_edge(1,3,foo=10)
- G.add_edge(3,4,foo=10)
- H = tree_graph(tree_data(G,1))
- assert_equal(H.nodes[1]['color'],'red')
+ G = nx.DiGraph()
+ G.add_nodes_from([1, 2, 3], color='red')
+ G.add_edge(1, 2, foo=7)
+ G.add_edge(1, 3, foo=10)
+ G.add_edge(3, 4, foo=10)
+ H = tree_graph(tree_data(G, 1))
+ assert_equal(H.nodes[1]['color'], 'red')
- d = json.dumps(tree_data(G,1))
+ d = json.dumps(tree_data(G, 1))
H = tree_graph(json.loads(d))
- assert_equal(H.nodes[1]['color'],'red')
+ assert_equal(H.nodes[1]['color'], 'red')
@raises(nx.NetworkXError)
def test_exception(self):
diff --git a/networkx/readwrite/nx_yaml.py b/networkx/readwrite/nx_yaml.py
index 47cbbbc9..338849b9 100644
--- a/networkx/readwrite/nx_yaml.py
+++ b/networkx/readwrite/nx_yaml.py
@@ -26,7 +26,8 @@ __all__ = ['read_yaml', 'write_yaml']
import networkx as nx
from networkx.utils import open_file
-@open_file(1,mode='w')
+
+@open_file(1, mode='w')
def write_yaml(G_to_be_yaml, path_for_yaml_output, **kwds):
"""Write graph G in YAML format to path.
@@ -60,8 +61,9 @@ def write_yaml(G_to_be_yaml, path_for_yaml_output, **kwds):
except ImportError:
raise ImportError("write_yaml() requires PyYAML: http://pyyaml.org/")
yaml.dump(G_to_be_yaml, path_for_yaml_output, **kwds)
-
-@open_file(0,mode='r')
+
+
+@open_file(0, mode='r')
def read_yaml(path):
"""Read graph in YAML format from path.
@@ -83,7 +85,7 @@ def read_yaml(path):
>>> G=nx.path_graph(4)
>>> nx.write_yaml(G,'test.yaml')
>>> G=nx.read_yaml('test.yaml')
-
+
References
----------
.. [1] http://www.yaml.org
@@ -94,7 +96,7 @@ def read_yaml(path):
except ImportError:
raise ImportError("read_yaml() requires PyYAML: http://pyyaml.org/")
- G=yaml.load(path)
+ G = yaml.load(path)
return G
@@ -107,6 +109,8 @@ def setup_module(module):
raise SkipTest("PyYAML not available")
# fixture for nose tests
+
+
def teardown_module(module):
import os
os.unlink('test.yaml')
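The docstring above already carries the canonical example; spelled out as a runnable sketch (requires PyYAML):

    import os
    import networkx as nx

    G = nx.path_graph(4)
    nx.write_yaml(G, 'test.yaml')
    H = nx.read_yaml('test.yaml')
    print(sorted(H.edges()))
    os.unlink('test.yaml')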
diff --git a/networkx/readwrite/sparse6.py b/networkx/readwrite/sparse6.py
index 84bcc8ff..00ccae60 100644
--- a/networkx/readwrite/sparse6.py
+++ b/networkx/readwrite/sparse6.py
@@ -66,25 +66,25 @@ def _generate_sparse6_bytes(G, nodes, header):
yield str.encode(chr(d + 63))
k = 1
- while 1<<k < n:
+ while 1 << k < n:
k += 1
def enc(x):
"""Big endian k-bit encoding of x"""
- return [1 if (x & 1 << (k-1-i)) else 0 for i in range(k)]
+ return [1 if (x & 1 << (k - 1 - i)) else 0 for i in range(k)]
edges = sorted((max(u, v), min(u, v)) for u, v in G.edges())
bits = []
curv = 0
for (v, u) in edges:
- if v == curv: # current vertex edge
+ if v == curv: # current vertex edge
bits.append(0)
bits.extend(enc(u))
- elif v == curv + 1: # next vertex edge
+ elif v == curv + 1: # next vertex edge
curv += 1
bits.append(1)
bits.extend(enc(u))
- else: # skip to vertex v and then add edge to u
+ else: # skip to vertex v and then add edge to u
curv = v
bits.append(1)
bits.extend(enc(v))
@@ -100,8 +100,8 @@ def _generate_sparse6_bytes(G, nodes, header):
else:
bits.extend([1] * ((-len(bits)) % 6))
- data = [(bits[i+0]<<5) + (bits[i+1]<<4) + (bits[i+2]<<3) + (bits[i+3]<<2) +
- (bits[i+4]<<1) + (bits[i+5]<<0) for i in range(0, len(bits), 6)]
+ data = [(bits[i + 0] << 5) + (bits[i + 1] << 4) + (bits[i + 2] << 3) + (bits[i + 3] << 2) +
+ (bits[i + 4] << 1) + (bits[i + 5] << 0) for i in range(0, len(bits), 6)]
for d in data:
yield str.encode(chr(d + 63))
@@ -152,32 +152,32 @@ def from_sparse6_bytes(string):
chars = [c - 63 for c in string[1:]]
n, data = data_to_n(chars)
k = 1
- while 1<<k < n:
+ while 1 << k < n:
k += 1
def parseData():
"""Return stream of pairs b[i], x[i] for sparse6 format."""
chunks = iter(data)
- d = None # partial data word
- dLen = 0 # how many unparsed bits are left in d
+ d = None # partial data word
+ dLen = 0 # how many unparsed bits are left in d
while 1:
if dLen < 1:
d = next(chunks)
dLen = 6
dLen -= 1
- b = (d>>dLen) & 1 # grab top remaining bit
+ b = (d >> dLen) & 1 # grab top remaining bit
- x = d & ((1<<dLen)-1) # partially built up value of x
+ x = d & ((1 << dLen) - 1) # partially built up value of x
xLen = dLen # how many bits included so far in x
- while xLen < k: # now grab full chunks until we have enough
+ while xLen < k: # now grab full chunks until we have enough
d = next(chunks)
dLen = 6
- x = (x<<6) + d
+ x = (x << 6) + d
xLen += 6
- x = (x >> (xLen - k)) # shift back the extra bits
+ x = (x >> (xLen - k)) # shift back the extra bits
dLen = xLen - k
- yield b,x
+ yield b, x
v = 0
@@ -185,7 +185,7 @@ def from_sparse6_bytes(string):
G.add_nodes_from(range(n))
multigraph = False
- for b,x in parseData():
+ for b, x in parseData():
if b == 1:
v += 1
# padding with ones can cause overlarge number here
@@ -194,9 +194,9 @@ def from_sparse6_bytes(string):
elif x > v:
v = x
else:
- if G.has_edge(x,v):
+ if G.has_edge(x, v):
multigraph = True
- G.add_edge(x,v)
+ G.add_edge(x, v)
if not multigraph:
G = nx.Graph(G)
return G
@@ -252,7 +252,7 @@ def to_sparse6_bytes(G, nodes=None, header=True):
return b''.join(_generate_sparse6_bytes(G, nodes, header))
-@open_file(0,mode='rb')
+@open_file(0, mode='rb')
def read_sparse6(path):
"""Read an undirected graph in sparse6 format from path.
diff --git a/networkx/readwrite/tests/test_adjlist.py b/networkx/readwrite/tests/test_adjlist.py
index 9d34f33f..08145c22 100644
--- a/networkx/readwrite/tests/test_adjlist.py
+++ b/networkx/readwrite/tests/test_adjlist.py
@@ -7,21 +7,21 @@ from nose.tools import assert_equal, assert_raises, assert_not_equal
import os
import tempfile
import networkx as nx
-from networkx.testing import (assert_nodes_equal, assert_edges_equal,
- assert_graphs_equal)
+from networkx.testing import (assert_nodes_equal, assert_edges_equal,
+ assert_graphs_equal)
class TestAdjlist():
def setUp(self):
- self.G=nx.Graph(name="test")
- e=[('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')]
+ self.G = nx.Graph(name="test")
+ e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')]
self.G.add_edges_from(e)
self.G.add_node('g')
- self.DG=nx.DiGraph(self.G)
- self.XG=nx.MultiGraph()
- self.XG.add_weighted_edges_from([(1,2,5),(1,2,5),(1,2,1),(3,3,42)])
- self. XDG=nx.MultiDiGraph(self.XG)
+ self.DG = nx.DiGraph(self.G)
+ self.XG = nx.MultiGraph()
+ self.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)])
+ self. XDG = nx.MultiDiGraph(self.XG)
def test_read_multiline_adjlist_1(self):
# Unit test for https://networkx.lanl.gov/trac/ticket/252
@@ -38,10 +38,10 @@ class TestAdjlist():
def test_unicode(self):
G = nx.Graph()
- try: # Python 3.x
+ try: # Python 3.x
name1 = chr(2344) + chr(123) + chr(6543)
name2 = chr(5543) + chr(1543) + chr(324)
- except ValueError: # Python 2.6+
+ except ValueError: # Python 2.6+
name1 = unichr(2344) + unichr(123) + unichr(6543)
name2 = unichr(5543) + unichr(1543) + unichr(324)
G.add_edge(name1, 'Radiohead', **{name2: 3})
@@ -54,114 +54,114 @@ class TestAdjlist():
def test_latin1_err(self):
G = nx.Graph()
- try: # Python 3.x
+ try: # Python 3.x
name1 = chr(2344) + chr(123) + chr(6543)
name2 = chr(5543) + chr(1543) + chr(324)
- except ValueError: # Python 2.6+
+ except ValueError: # Python 2.6+
name1 = unichr(2344) + unichr(123) + unichr(6543)
name2 = unichr(5543) + unichr(1543) + unichr(324)
G.add_edge(name1, 'Radiohead', **{name2: 3})
fd, fname = tempfile.mkstemp()
assert_raises(UnicodeEncodeError,
nx.write_multiline_adjlist,
- G, fname, encoding = 'latin-1')
+ G, fname, encoding='latin-1')
os.close(fd)
os.unlink(fname)
def test_latin1(self):
G = nx.Graph()
- try: # Python 3.x
- blurb = chr(1245) # just to trigger the exception
+ try: # Python 3.x
+ blurb = chr(1245) # just to trigger the exception
name1 = 'Bj' + chr(246) + 'rk'
name2 = chr(220) + 'ber'
- except ValueError: # Python 2.6+
+ except ValueError: # Python 2.6+
name1 = 'Bj' + unichr(246) + 'rk'
name2 = unichr(220) + 'ber'
G.add_edge(name1, 'Radiohead', **{name2: 3})
fd, fname = tempfile.mkstemp()
- nx.write_multiline_adjlist(G, fname, encoding = 'latin-1')
- H = nx.read_multiline_adjlist(fname, encoding = 'latin-1')
+ nx.write_multiline_adjlist(G, fname, encoding='latin-1')
+ H = nx.read_multiline_adjlist(fname, encoding='latin-1')
assert_graphs_equal(G, H)
os.close(fd)
os.unlink(fname)
def test_adjlist_graph(self):
- G=self.G
- (fd,fname)=tempfile.mkstemp()
- nx.write_adjlist(G,fname)
- H=nx.read_adjlist(fname)
- H2=nx.read_adjlist(fname)
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.G
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_adjlist(G, fname)
+ H = nx.read_adjlist(fname)
+ H2 = nx.read_adjlist(fname)
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_adjlist_digraph(self):
- G=self.DG
- (fd,fname)=tempfile.mkstemp()
- nx.write_adjlist(G,fname)
- H=nx.read_adjlist(fname,create_using=nx.DiGraph())
- H2=nx.read_adjlist(fname,create_using=nx.DiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.DG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_adjlist(G, fname)
+ H = nx.read_adjlist(fname, create_using=nx.DiGraph())
+ H2 = nx.read_adjlist(fname, create_using=nx.DiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_adjlist_integers(self):
- (fd,fname)=tempfile.mkstemp()
- G=nx.convert_node_labels_to_integers(self.G)
- nx.write_adjlist(G,fname)
- H=nx.read_adjlist(fname,nodetype=int)
- H2=nx.read_adjlist(fname,nodetype=int)
+ (fd, fname) = tempfile.mkstemp()
+ G = nx.convert_node_labels_to_integers(self.G)
+ nx.write_adjlist(G, fname)
+ H = nx.read_adjlist(fname, nodetype=int)
+ H2 = nx.read_adjlist(fname, nodetype=int)
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_adjlist_digraph(self):
- G=self.DG
- (fd,fname)=tempfile.mkstemp()
- nx.write_adjlist(G,fname)
- H=nx.read_adjlist(fname,create_using=nx.DiGraph())
- H2=nx.read_adjlist(fname,create_using=nx.DiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.DG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_adjlist(G, fname)
+ H = nx.read_adjlist(fname, create_using=nx.DiGraph())
+ H2 = nx.read_adjlist(fname, create_using=nx.DiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_adjlist_multigraph(self):
- G=self.XG
- (fd,fname)=tempfile.mkstemp()
- nx.write_adjlist(G,fname)
- H=nx.read_adjlist(fname,nodetype=int,
- create_using=nx.MultiGraph())
- H2=nx.read_adjlist(fname,nodetype=int,
- create_using=nx.MultiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.XG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_adjlist(G, fname)
+ H = nx.read_adjlist(fname, nodetype=int,
+ create_using=nx.MultiGraph())
+ H2 = nx.read_adjlist(fname, nodetype=int,
+ create_using=nx.MultiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_adjlist_multidigraph(self):
- G=self.XDG
- (fd,fname)=tempfile.mkstemp()
- nx.write_adjlist(G,fname)
- H=nx.read_adjlist(fname,nodetype=int,
- create_using=nx.MultiDiGraph())
- H2=nx.read_adjlist(fname,nodetype=int,
- create_using=nx.MultiDiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.XDG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_adjlist(G, fname)
+ H = nx.read_adjlist(fname, nodetype=int,
+ create_using=nx.MultiDiGraph())
+ H2 = nx.read_adjlist(fname, nodetype=int,
+ create_using=nx.MultiDiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_adjlist_delimiter(self):
- fh=io.BytesIO()
+ fh = io.BytesIO()
G = nx.path_graph(3)
nx.write_adjlist(G, fh, delimiter=':')
fh.seek(0)
@@ -173,94 +173,94 @@ class TestAdjlist():
class TestMultilineAdjlist():
def setUp(self):
- self.G=nx.Graph(name="test")
- e=[('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')]
+ self.G = nx.Graph(name="test")
+ e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')]
self.G.add_edges_from(e)
self.G.add_node('g')
- self.DG=nx.DiGraph(self.G)
- self.DG.remove_edge('b','a')
- self.DG.remove_edge('b','c')
- self.XG=nx.MultiGraph()
- self.XG.add_weighted_edges_from([(1,2,5),(1,2,5),(1,2,1),(3,3,42)])
- self. XDG=nx.MultiDiGraph(self.XG)
+ self.DG = nx.DiGraph(self.G)
+ self.DG.remove_edge('b', 'a')
+ self.DG.remove_edge('b', 'c')
+ self.XG = nx.MultiGraph()
+ self.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)])
+ self. XDG = nx.MultiDiGraph(self.XG)
def test_multiline_adjlist_graph(self):
- G=self.G
- (fd,fname)=tempfile.mkstemp()
- nx.write_multiline_adjlist(G,fname)
- H=nx.read_multiline_adjlist(fname)
- H2=nx.read_multiline_adjlist(fname)
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.G
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_multiline_adjlist(G, fname)
+ H = nx.read_multiline_adjlist(fname)
+ H2 = nx.read_multiline_adjlist(fname)
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_multiline_adjlist_digraph(self):
- G=self.DG
- (fd,fname)=tempfile.mkstemp()
- nx.write_multiline_adjlist(G,fname)
- H=nx.read_multiline_adjlist(fname,create_using=nx.DiGraph())
- H2=nx.read_multiline_adjlist(fname,create_using=nx.DiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.DG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_multiline_adjlist(G, fname)
+ H = nx.read_multiline_adjlist(fname, create_using=nx.DiGraph())
+ H2 = nx.read_multiline_adjlist(fname, create_using=nx.DiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_multiline_adjlist_integers(self):
- (fd,fname)=tempfile.mkstemp()
- G=nx.convert_node_labels_to_integers(self.G)
- nx.write_multiline_adjlist(G,fname)
- H=nx.read_multiline_adjlist(fname,nodetype=int)
- H2=nx.read_multiline_adjlist(fname,nodetype=int)
+ (fd, fname) = tempfile.mkstemp()
+ G = nx.convert_node_labels_to_integers(self.G)
+ nx.write_multiline_adjlist(G, fname)
+ H = nx.read_multiline_adjlist(fname, nodetype=int)
+ H2 = nx.read_multiline_adjlist(fname, nodetype=int)
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_multiline_adjlist_digraph(self):
- G=self.DG
- (fd,fname)=tempfile.mkstemp()
- nx.write_multiline_adjlist(G,fname)
- H=nx.read_multiline_adjlist(fname,create_using=nx.DiGraph())
- H2=nx.read_multiline_adjlist(fname,create_using=nx.DiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.DG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_multiline_adjlist(G, fname)
+ H = nx.read_multiline_adjlist(fname, create_using=nx.DiGraph())
+ H2 = nx.read_multiline_adjlist(fname, create_using=nx.DiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_multiline_adjlist_multigraph(self):
- G=self.XG
- (fd,fname)=tempfile.mkstemp()
- nx.write_multiline_adjlist(G,fname)
- H=nx.read_multiline_adjlist(fname,nodetype=int,
- create_using=nx.MultiGraph())
- H2=nx.read_multiline_adjlist(fname,nodetype=int,
- create_using=nx.MultiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.XG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_multiline_adjlist(G, fname)
+ H = nx.read_multiline_adjlist(fname, nodetype=int,
+ create_using=nx.MultiGraph())
+ H2 = nx.read_multiline_adjlist(fname, nodetype=int,
+ create_using=nx.MultiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_multiline_adjlist_multidigraph(self):
- G=self.XDG
- (fd,fname)=tempfile.mkstemp()
- nx.write_multiline_adjlist(G,fname)
- H=nx.read_multiline_adjlist(fname,nodetype=int,
- create_using=nx.MultiDiGraph())
- H2=nx.read_multiline_adjlist(fname,nodetype=int,
- create_using=nx.MultiDiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.XDG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_multiline_adjlist(G, fname)
+ H = nx.read_multiline_adjlist(fname, nodetype=int,
+ create_using=nx.MultiDiGraph())
+ H2 = nx.read_multiline_adjlist(fname, nodetype=int,
+ create_using=nx.MultiDiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_multiline_adjlist_delimiter(self):
- fh=io.BytesIO()
+ fh = io.BytesIO()
G = nx.path_graph(3)
nx.write_multiline_adjlist(G, fh, delimiter=':')
fh.seek(0)
diff --git a/networkx/readwrite/tests/test_edgelist.py b/networkx/readwrite/tests/test_edgelist.py
index 29c21eb6..10b3d51d 100644
--- a/networkx/readwrite/tests/test_edgelist.py
+++ b/networkx/readwrite/tests/test_edgelist.py
@@ -7,21 +7,21 @@ import tempfile
import os
import networkx as nx
-from networkx.testing import (assert_edges_equal, assert_nodes_equal,
- assert_graphs_equal)
+from networkx.testing import (assert_edges_equal, assert_nodes_equal,
+ assert_graphs_equal)
class TestEdgelist:
def setUp(self):
- self.G=nx.Graph(name="test")
- e=[('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')]
+ self.G = nx.Graph(name="test")
+ e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')]
self.G.add_edges_from(e)
self.G.add_node('g')
- self.DG=nx.DiGraph(self.G)
- self.XG=nx.MultiGraph()
- self.XG.add_weighted_edges_from([(1,2,5),(1,2,5),(1,2,1),(3,3,42)])
- self. XDG=nx.MultiDiGraph(self.XG)
+ self.DG = nx.DiGraph(self.G)
+ self.XG = nx.MultiGraph()
+ self.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)])
+ self. XDG = nx.MultiDiGraph(self.XG)
def test_read_edgelist_1(self):
s = b"""\
@@ -31,8 +31,8 @@ class TestEdgelist:
2 3
"""
bytesIO = io.BytesIO(s)
- G = nx.read_edgelist(bytesIO,nodetype=int)
- assert_edges_equal(G.edges(),[(1,2),(2,3)])
+ G = nx.read_edgelist(bytesIO, nodetype=int)
+ assert_edges_equal(G.edges(), [(1, 2), (2, 3)])
def test_read_edgelist_2(self):
s = b"""\
@@ -42,13 +42,13 @@ class TestEdgelist:
2 3 3.0
"""
bytesIO = io.BytesIO(s)
- G = nx.read_edgelist(bytesIO,nodetype=int,data=False)
- assert_edges_equal(G.edges(),[(1,2),(2,3)])
+ G = nx.read_edgelist(bytesIO, nodetype=int, data=False)
+ assert_edges_equal(G.edges(), [(1, 2), (2, 3)])
bytesIO = io.BytesIO(s)
- G = nx.read_weighted_edgelist(bytesIO,nodetype=int)
+ G = nx.read_weighted_edgelist(bytesIO, nodetype=int)
assert_edges_equal(G.edges(data=True),
- [(1,2,{'weight':2.0}),(2,3,{'weight':3.0})])
+ [(1, 2, {'weight': 2.0}), (2, 3, {'weight': 3.0})])
def test_read_edgelist_3(self):
s = b"""\
@@ -58,54 +58,54 @@ class TestEdgelist:
2 3 {'weight':3.0}
"""
bytesIO = io.BytesIO(s)
- G = nx.read_edgelist(bytesIO,nodetype=int,data=False)
- assert_edges_equal(G.edges(),[(1,2),(2,3)])
+ G = nx.read_edgelist(bytesIO, nodetype=int, data=False)
+ assert_edges_equal(G.edges(), [(1, 2), (2, 3)])
bytesIO = io.BytesIO(s)
- G = nx.read_edgelist(bytesIO,nodetype=int,data=True)
+ G = nx.read_edgelist(bytesIO, nodetype=int, data=True)
assert_edges_equal(G.edges(data=True),
- [(1,2,{'weight':2.0}),(2,3,{'weight':3.0})])
+ [(1, 2, {'weight': 2.0}), (2, 3, {'weight': 3.0})])
def test_write_edgelist_1(self):
- fh=io.BytesIO()
- G=nx.OrderedGraph()
- G.add_edges_from([(1,2),(2,3)])
- nx.write_edgelist(G,fh,data=False)
+ fh = io.BytesIO()
+ G = nx.OrderedGraph()
+ G.add_edges_from([(1, 2), (2, 3)])
+ nx.write_edgelist(G, fh, data=False)
fh.seek(0)
- assert_equal(fh.read(),b"1 2\n2 3\n")
+ assert_equal(fh.read(), b"1 2\n2 3\n")
def test_write_edgelist_2(self):
- fh=io.BytesIO()
- G=nx.OrderedGraph()
- G.add_edges_from([(1,2),(2,3)])
- nx.write_edgelist(G,fh,data=True)
+ fh = io.BytesIO()
+ G = nx.OrderedGraph()
+ G.add_edges_from([(1, 2), (2, 3)])
+ nx.write_edgelist(G, fh, data=True)
fh.seek(0)
- assert_equal(fh.read(),b"1 2 {}\n2 3 {}\n")
+ assert_equal(fh.read(), b"1 2 {}\n2 3 {}\n")
def test_write_edgelist_3(self):
- fh=io.BytesIO()
- G=nx.OrderedGraph()
- G.add_edge(1,2,weight=2.0)
- G.add_edge(2,3,weight=3.0)
- nx.write_edgelist(G,fh,data=True)
+ fh = io.BytesIO()
+ G = nx.OrderedGraph()
+ G.add_edge(1, 2, weight=2.0)
+ G.add_edge(2, 3, weight=3.0)
+ nx.write_edgelist(G, fh, data=True)
fh.seek(0)
- assert_equal(fh.read(),b"1 2 {'weight': 2.0}\n2 3 {'weight': 3.0}\n")
+ assert_equal(fh.read(), b"1 2 {'weight': 2.0}\n2 3 {'weight': 3.0}\n")
def test_write_edgelist_4(self):
- fh=io.BytesIO()
- G=nx.OrderedGraph()
- G.add_edge(1,2,weight=2.0)
- G.add_edge(2,3,weight=3.0)
- nx.write_edgelist(G,fh,data=[('weight')])
+ fh = io.BytesIO()
+ G = nx.OrderedGraph()
+ G.add_edge(1, 2, weight=2.0)
+ G.add_edge(2, 3, weight=3.0)
+ nx.write_edgelist(G, fh, data=[('weight')])
fh.seek(0)
- assert_equal(fh.read(),b"1 2 2.0\n2 3 3.0\n")
+ assert_equal(fh.read(), b"1 2 2.0\n2 3 3.0\n")
def test_unicode(self):
G = nx.Graph()
- try: # Python 3.x
+ try: # Python 3.x
name1 = chr(2344) + chr(123) + chr(6543)
name2 = chr(5543) + chr(1543) + chr(324)
- except ValueError: # Python 2.6+
+ except ValueError: # Python 2.6+
name1 = unichr(2344) + unichr(123) + unichr(6543)
name2 = unichr(5543) + unichr(1543) + unichr(324)
G.add_edge(name1, 'Radiohead', **{name2: 3})
@@ -118,68 +118,68 @@ class TestEdgelist:
def test_latin1_issue(self):
G = nx.Graph()
- try: # Python 3.x
+ try: # Python 3.x
name1 = chr(2344) + chr(123) + chr(6543)
name2 = chr(5543) + chr(1543) + chr(324)
- except ValueError: # Python 2.6+
+ except ValueError: # Python 2.6+
name1 = unichr(2344) + unichr(123) + unichr(6543)
name2 = unichr(5543) + unichr(1543) + unichr(324)
G.add_edge(name1, 'Radiohead', **{name2: 3})
fd, fname = tempfile.mkstemp()
assert_raises(UnicodeEncodeError,
nx.write_edgelist,
- G, fname, encoding = 'latin-1')
+ G, fname, encoding='latin-1')
os.close(fd)
os.unlink(fname)
def test_latin1(self):
G = nx.Graph()
- try: # Python 3.x
- blurb = chr(1245) # just to trigger the exception
+ try: # Python 3.x
+ blurb = chr(1245) # just to trigger the exception
name1 = 'Bj' + chr(246) + 'rk'
name2 = chr(220) + 'ber'
- except ValueError: # Python 2.6+
+ except ValueError: # Python 2.6+
name1 = 'Bj' + unichr(246) + 'rk'
name2 = unichr(220) + 'ber'
G.add_edge(name1, 'Radiohead', **{name2: 3})
fd, fname = tempfile.mkstemp()
- nx.write_edgelist(G, fname, encoding = 'latin-1')
- H = nx.read_edgelist(fname, encoding = 'latin-1')
+ nx.write_edgelist(G, fname, encoding='latin-1')
+ H = nx.read_edgelist(fname, encoding='latin-1')
assert_graphs_equal(G, H)
os.close(fd)
os.unlink(fname)
def test_edgelist_graph(self):
- G=self.G
- (fd,fname)=tempfile.mkstemp()
- nx.write_edgelist(G,fname)
- H=nx.read_edgelist(fname)
- H2=nx.read_edgelist(fname)
- assert_not_equal(H,H2) # they should be different graphs
- G.remove_node('g') # isolated nodes are not written in edgelist
+ G = self.G
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_edgelist(G, fname)
+ H = nx.read_edgelist(fname)
+ H2 = nx.read_edgelist(fname)
+ assert_not_equal(H, H2) # they should be different graphs
+ G.remove_node('g') # isolated nodes are not written in edgelist
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_edgelist_digraph(self):
- G=self.DG
- (fd,fname)=tempfile.mkstemp()
- nx.write_edgelist(G,fname)
- H=nx.read_edgelist(fname,create_using=nx.DiGraph())
- H2=nx.read_edgelist(fname,create_using=nx.DiGraph())
- assert_not_equal(H,H2) # they should be different graphs
- G.remove_node('g') # isolated nodes are not written in edgelist
+ G = self.DG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_edgelist(G, fname)
+ H = nx.read_edgelist(fname, create_using=nx.DiGraph())
+ H2 = nx.read_edgelist(fname, create_using=nx.DiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
+ G.remove_node('g') # isolated nodes are not written in edgelist
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_edgelist_integers(self):
- G=nx.convert_node_labels_to_integers(self.G)
- (fd,fname)=tempfile.mkstemp()
- nx.write_edgelist(G,fname)
- H=nx.read_edgelist(fname,nodetype=int)
+ G = nx.convert_node_labels_to_integers(self.G)
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_edgelist(G, fname)
+ H = nx.read_edgelist(fname, nodetype=int)
# isolated nodes are not written in edgelist
G.remove_nodes_from(list(nx.isolates(G)))
assert_nodes_equal(list(H), list(G))
@@ -188,37 +188,37 @@ class TestEdgelist:
os.unlink(fname)
def test_edgelist_digraph(self):
- G=self.DG
- (fd,fname)=tempfile.mkstemp()
- nx.write_edgelist(G,fname)
- H=nx.read_edgelist(fname,create_using=nx.DiGraph())
- G.remove_node('g') # isolated nodes are not written in edgelist
- H2=nx.read_edgelist(fname,create_using=nx.DiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.DG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_edgelist(G, fname)
+ H = nx.read_edgelist(fname, create_using=nx.DiGraph())
+ G.remove_node('g') # isolated nodes are not written in edgelist
+ H2 = nx.read_edgelist(fname, create_using=nx.DiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_edgelist_multigraph(self):
- G=self.XG
- (fd,fname)=tempfile.mkstemp()
- nx.write_edgelist(G,fname)
- H=nx.read_edgelist(fname,nodetype=int,create_using=nx.MultiGraph())
- H2=nx.read_edgelist(fname,nodetype=int,create_using=nx.MultiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.XG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_edgelist(G, fname)
+ H = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
+ H2 = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
os.unlink(fname)
def test_edgelist_multidigraph(self):
- G=self.XDG
- (fd,fname)=tempfile.mkstemp()
- nx.write_edgelist(G,fname)
- H=nx.read_edgelist(fname,nodetype=int,create_using=nx.MultiDiGraph())
- H2=nx.read_edgelist(fname,nodetype=int,create_using=nx.MultiDiGraph())
- assert_not_equal(H,H2) # they should be different graphs
+ G = self.XDG
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_edgelist(G, fname)
+ H = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiDiGraph())
+ H2 = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiDiGraph())
+ assert_not_equal(H, H2) # they should be different graphs
assert_nodes_equal(list(H), list(G))
assert_edges_equal(list(H.edges()), list(G.edges()))
os.close(fd)
diff --git a/networkx/readwrite/tests/test_gpickle.py b/networkx/readwrite/tests/test_gpickle.py
index 9e2a5be8..c03b2ec5 100644
--- a/networkx/readwrite/tests/test_gpickle.py
+++ b/networkx/readwrite/tests/test_gpickle.py
@@ -9,15 +9,15 @@ from networkx.testing.utils import *
class TestGpickle(object):
def setUp(self):
- G=nx.Graph(name="test")
- e=[('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')]
- G.add_edges_from(e,width=10)
- G.add_node('g',color='green')
- G.graph['number']=1
- DG=nx.DiGraph(G)
- MG=nx.MultiGraph(G)
+ G = nx.Graph(name="test")
+ e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')]
+ G.add_edges_from(e, width=10)
+ G.add_node('g', color='green')
+ G.graph['number'] = 1
+ DG = nx.DiGraph(G)
+ MG = nx.MultiGraph(G)
MG.add_edge('a', 'a')
- MDG=nx.MultiDiGraph(G)
+ MDG = nx.MultiDiGraph(G)
MDG.add_edge('a', 'a')
fG = G.copy()
fDG = DG.copy()
@@ -27,21 +27,21 @@ class TestGpickle(object):
nx.freeze(fDG)
nx.freeze(fMG)
nx.freeze(fMDG)
- self.G=G
- self.DG=DG
- self.MG=MG
- self.MDG=MDG
- self.fG=fG
- self.fDG=fDG
- self.fMG=fMG
- self.fMDG=fMDG
+ self.G = G
+ self.DG = DG
+ self.MG = MG
+ self.MDG = MDG
+ self.fG = fG
+ self.fDG = fDG
+ self.fMG = fMG
+ self.fMDG = fMDG
def test_gpickle(self):
for G in [self.G, self.DG, self.MG, self.MDG,
self.fG, self.fDG, self.fMG, self.fMDG]:
- (fd,fname)=tempfile.mkstemp()
- nx.write_gpickle(G,fname)
- Gin=nx.read_gpickle(fname)
+ (fd, fname) = tempfile.mkstemp()
+ nx.write_gpickle(G, fname)
+ Gin = nx.read_gpickle(fname)
assert_nodes_equal(list(G.nodes(data=True)),
list(Gin.nodes(data=True)))
assert_edges_equal(list(G.edges(data=True)),
diff --git a/networkx/readwrite/tests/test_graph6.py b/networkx/readwrite/tests/test_graph6.py
index f089bc49..d7643d25 100644
--- a/networkx/readwrite/tests/test_graph6.py
+++ b/networkx/readwrite/tests/test_graph6.py
@@ -27,10 +27,10 @@ class TestFromGraph6Bytes(TestCase):
def test_from_graph6_bytes(self):
data = b'DF{'
- G=nx.from_graph6_bytes(data)
- assert_nodes_equal(G.nodes(),[0, 1, 2, 3, 4])
+ G = nx.from_graph6_bytes(data)
+ assert_nodes_equal(G.nodes(), [0, 1, 2, 3, 4])
assert_edges_equal(G.edges(),
- [(0, 3), (0, 4), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)])
+ [(0, 3), (0, 4), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)])
def test_read_equals_from_bytes(self):
data = b'DF{'
@@ -100,7 +100,7 @@ class TestWriteGraph6(TestCase):
# Strip the trailing newline.
gstr = gstr.getvalue().rstrip()
assert_equal(len(gstr),
- ((i-1) * i // 2 + 5) // 6 + (1 if i < 63 else 4))
+ ((i - 1) * i // 2 + 5) // 6 + (1 if i < 63 else 4))
def test_roundtrip(self):
for i in list(range(13)) + [31, 47, 62, 63, 64, 72]:
@@ -118,11 +118,10 @@ class TestWriteGraph6(TestCase):
f.seek(0)
self.assertEqual(f.read(), b'>>graph6<<?\n')
-
def test_relabeling(self):
- G = nx.Graph([(0,1)])
+ G = nx.Graph([(0, 1)])
assert_equal(g6.to_graph6_bytes(G), b'>>graph6<<A_\n')
- G = nx.Graph([(1,2)])
+ G = nx.Graph([(1, 2)])
assert_equal(g6.to_graph6_bytes(G), b'>>graph6<<A_\n')
- G = nx.Graph([(1,42)])
+ G = nx.Graph([(1, 42)])
assert_equal(g6.to_graph6_bytes(G), b'>>graph6<<A_\n')
diff --git a/networkx/readwrite/tests/test_leda.py b/networkx/readwrite/tests/test_leda.py
index 60afc74d..e161b064 100644
--- a/networkx/readwrite/tests/test_leda.py
+++ b/networkx/readwrite/tests/test_leda.py
@@ -1,32 +1,34 @@
#!/usr/bin/env python
from nose.tools import *
import networkx as nx
-import io,os,tempfile
+import io
+import os
+import tempfile
+
class TestLEDA(object):
def test_parse_leda(self):
- data="""#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|"""
- G=nx.parse_leda(data)
- G=nx.parse_leda(data.split('\n'))
+ data = """#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|"""
+ G = nx.parse_leda(data)
+ G = nx.parse_leda(data.split('\n'))
assert_equal(sorted(G.nodes()),
['v1', 'v2', 'v3', 'v4', 'v5'])
assert_equal(sorted(G.edges(data=True)),
- [('v1', 'v2', {'label': '4'}),
- ('v1', 'v3', {'label': '3'}),
- ('v2', 'v3', {'label': '2'}),
- ('v3', 'v4', {'label': '3'}),
- ('v3', 'v5', {'label': '7'}),
- ('v4', 'v5', {'label': '6'}),
+ [('v1', 'v2', {'label': '4'}),
+ ('v1', 'v3', {'label': '3'}),
+ ('v2', 'v3', {'label': '2'}),
+ ('v3', 'v4', {'label': '3'}),
+ ('v3', 'v5', {'label': '7'}),
+ ('v4', 'v5', {'label': '6'}),
('v5', 'v1', {'label': 'foo'})])
-
def test_read_LEDA(self):
fh = io.BytesIO()
- data="""#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|"""
- G=nx.parse_leda(data)
+ data = """#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|"""
+ G = nx.parse_leda(data)
fh.write(data.encode('UTF-8'))
fh.seek(0)
Gin = nx.read_leda(fh)
- assert_equal(sorted(G.nodes()),sorted(Gin.nodes()))
- assert_equal(sorted(G.edges()),sorted(Gin.edges()))
+ assert_equal(sorted(G.nodes()), sorted(Gin.nodes()))
+ assert_equal(sorted(G.edges()), sorted(Gin.edges()))
diff --git a/networkx/readwrite/tests/test_p2g.py b/networkx/readwrite/tests/test_p2g.py
index a82c666d..5960e6e0 100644
--- a/networkx/readwrite/tests/test_p2g.py
+++ b/networkx/readwrite/tests/test_p2g.py
@@ -10,11 +10,11 @@ from networkx.testing import *
class TestP2G:
def setUp(self):
- self.G=nx.Graph(name="test")
- e=[('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')]
+ self.G = nx.Graph(name="test")
+ e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')]
self.G.add_edges_from(e)
self.G.add_node('g')
- self.DG=nx.DiGraph(self.G)
+ self.DG = nx.DiGraph(self.G)
def test_read_p2g(self):
s = b"""\
@@ -29,13 +29,13 @@ c
"""
bytesIO = io.BytesIO(s)
G = read_p2g(bytesIO)
- assert_equal(G.name,'name')
- assert_equal(sorted(G),['a','b','c'])
- edges = [(str(u),str(v)) for u,v in G.edges()]
- assert_edges_equal(G.edges(),[('a','c'),('a','b'),('c','a'),('c','c')])
+ assert_equal(G.name, 'name')
+ assert_equal(sorted(G), ['a', 'b', 'c'])
+ edges = [(str(u), str(v)) for u, v in G.edges()]
+ assert_edges_equal(G.edges(), [('a', 'c'), ('a', 'b'), ('c', 'a'), ('c', 'c')])
def test_write_p2g(self):
- s=b"""foo
+ s = b"""foo
3 2
1
1
@@ -44,21 +44,21 @@ c
3
"""
- fh=io.BytesIO()
- G=nx.OrderedDiGraph()
- G.name='foo'
- G.add_edges_from([(1,2),(2,3)])
- write_p2g(G,fh)
+ fh = io.BytesIO()
+ G = nx.OrderedDiGraph()
+ G.name = 'foo'
+ G.add_edges_from([(1, 2), (2, 3)])
+ write_p2g(G, fh)
fh.seek(0)
- r=fh.read()
- assert_equal(r,s)
+ r = fh.read()
+ assert_equal(r, s)
def test_write_read_p2g(self):
- fh=io.BytesIO()
- G=nx.DiGraph()
- G.name='foo'
- G.add_edges_from([('a','b'),('b','c')])
- write_p2g(G,fh)
+ fh = io.BytesIO()
+ G = nx.DiGraph()
+ G.name = 'foo'
+ G.add_edges_from([('a', 'b'), ('b', 'c')])
+ write_p2g(G, fh)
fh.seek(0)
- H=read_p2g(fh)
- assert_edges_equal(G.edges(),H.edges())
+ H = read_p2g(fh)
+ assert_edges_equal(G.edges(), H.edges())
diff --git a/networkx/readwrite/tests/test_pajek.py b/networkx/readwrite/tests/test_pajek.py
index 07c6667b..ca168a86 100644
--- a/networkx/readwrite/tests/test_pajek.py
+++ b/networkx/readwrite/tests/test_pajek.py
@@ -4,21 +4,23 @@ Pajek tests
"""
from nose.tools import assert_equal
from networkx import *
-import os,tempfile
+import os
+import tempfile
from io import open
from networkx.testing import *
+
class TestPajek(object):
def setUp(self):
- self.data="""*network Tralala\n*vertices 4\n 1 "A1" 0.0938 0.0896 ellipse x_fact 1 y_fact 1\n 2 "Bb" 0.8188 0.2458 ellipse x_fact 1 y_fact 1\n 3 "C" 0.3688 0.7792 ellipse x_fact 1\n 4 "D2" 0.9583 0.8563 ellipse x_fact 1\n*arcs\n1 1 1 h2 0 w 3 c Blue s 3 a1 -130 k1 0.6 a2 -130 k2 0.6 ap 0.5 l "Bezier loop" lc BlueViolet fos 20 lr 58 lp 0.3 la 360\n2 1 1 h2 0 a1 120 k1 1.3 a2 -120 k2 0.3 ap 25 l "Bezier arc" lphi 270 la 180 lr 19 lp 0.5\n1 2 1 h2 0 a1 40 k1 2.8 a2 30 k2 0.8 ap 25 l "Bezier arc" lphi 90 la 0 lp 0.65\n4 2 -1 h2 0 w 1 k1 -2 k2 250 ap 25 l "Circular arc" c Red lc OrangeRed\n3 4 1 p Dashed h2 0 w 2 c OliveGreen ap 25 l "Straight arc" lc PineGreen\n1 3 1 p Dashed h2 0 w 5 k1 -1 k2 -20 ap 25 l "Oval arc" c Brown lc Black\n3 3 -1 h1 6 w 1 h2 12 k1 -2 k2 -15 ap 0.5 l "Circular loop" c Red lc OrangeRed lphi 270 la 180"""
- self.G=nx.MultiDiGraph()
+ self.data = """*network Tralala\n*vertices 4\n 1 "A1" 0.0938 0.0896 ellipse x_fact 1 y_fact 1\n 2 "Bb" 0.8188 0.2458 ellipse x_fact 1 y_fact 1\n 3 "C" 0.3688 0.7792 ellipse x_fact 1\n 4 "D2" 0.9583 0.8563 ellipse x_fact 1\n*arcs\n1 1 1 h2 0 w 3 c Blue s 3 a1 -130 k1 0.6 a2 -130 k2 0.6 ap 0.5 l "Bezier loop" lc BlueViolet fos 20 lr 58 lp 0.3 la 360\n2 1 1 h2 0 a1 120 k1 1.3 a2 -120 k2 0.3 ap 25 l "Bezier arc" lphi 270 la 180 lr 19 lp 0.5\n1 2 1 h2 0 a1 40 k1 2.8 a2 30 k2 0.8 ap 25 l "Bezier arc" lphi 90 la 0 lp 0.65\n4 2 -1 h2 0 w 1 k1 -2 k2 250 ap 25 l "Circular arc" c Red lc OrangeRed\n3 4 1 p Dashed h2 0 w 2 c OliveGreen ap 25 l "Straight arc" lc PineGreen\n1 3 1 p Dashed h2 0 w 5 k1 -1 k2 -20 ap 25 l "Oval arc" c Brown lc Black\n3 3 -1 h1 6 w 1 h2 12 k1 -2 k2 -15 ap 0.5 l "Circular loop" c Red lc OrangeRed lphi 270 la 180"""
+ self.G = nx.MultiDiGraph()
self.G.add_nodes_from(['A1', 'Bb', 'C', 'D2'])
self.G.add_edges_from([('A1', 'A1'), ('A1', 'Bb'), ('A1', 'C'),
- ('Bb', 'A1'),('C', 'C'), ('C', 'D2'),
+ ('Bb', 'A1'), ('C', 'C'), ('C', 'D2'),
('D2', 'Bb')])
- self.G.graph['name']='Tralala'
- (fd,self.fname)=tempfile.mkstemp()
+ self.G.graph['name'] = 'Tralala'
+ (fd, self.fname) = tempfile.mkstemp()
with os.fdopen(fd, 'wb') as fh:
fh.write(self.data.encode('UTF-8'))
@@ -27,13 +29,13 @@ class TestPajek(object):
def test_parse_pajek_simple(self):
# Example without node positions or shape
- data="""*Vertices 2\n1 "1"\n2 "2"\n*Edges\n1 2\n2 1"""
- G=parse_pajek(data)
+ data = """*Vertices 2\n1 "1"\n2 "2"\n*Edges\n1 2\n2 1"""
+ G = parse_pajek(data)
assert_equal(sorted(G.nodes()), ['1', '2'])
assert_edges_equal(G.edges(), [('1', '2'), ('1', '2')])
def test_parse_pajek(self):
- G=parse_pajek(self.data)
+ G = parse_pajek(self.data)
assert_equal(sorted(G.nodes()), ['A1', 'Bb', 'C', 'D2'])
assert_edges_equal(G.edges(), [('A1', 'A1'), ('A1', 'Bb'),
('A1', 'C'), ('Bb', 'A1'),
@@ -41,19 +43,19 @@ class TestPajek(object):
def test_parse_pajet_mat(self):
data = """*Vertices 3\n1 "one"\n2 "two"\n3 "three"\n*Matrix\n1 1 0\n0 1 0\n0 1 0\n"""
- G=parse_pajek(data)
+ G = parse_pajek(data)
assert_equal(set(G.nodes()), {'one', 'two', 'three'})
assert_equal(G.nodes['two'], {'id': '2'})
assert_edges_equal(set(G.edges()), {('one', 'one'), ('two', 'one'), ('two', 'two'), ('two', 'three')})
def test_read_pajek(self):
- G=parse_pajek(self.data)
- Gin=read_pajek(self.fname)
+ G = parse_pajek(self.data)
+ Gin = read_pajek(self.fname)
assert_equal(sorted(G.nodes()), sorted(Gin.nodes()))
assert_edges_equal(G.edges(), Gin.edges())
- assert_equal(self.G.graph,Gin.graph)
+ assert_equal(self.G.graph, Gin.graph)
for n in G:
- assert_equal(G.nodes[n],Gin.nodes[n])
+ assert_equal(G.nodes[n], Gin.nodes[n])
def test_noname(self):
# Make sure we can parse a line such as: *network
@@ -66,17 +68,17 @@ class TestPajek(object):
def test_unicode(self):
import io
G = nx.Graph()
- try: # Python 3.x
+ try: # Python 3.x
name1 = chr(2344) + chr(123) + chr(6543)
name2 = chr(5543) + chr(1543) + chr(324)
- except ValueError: # Python 2.6+
+ except ValueError: # Python 2.6+
name1 = unichr(2344) + unichr(123) + unichr(6543)
name2 = unichr(5543) + unichr(1543) + unichr(324)
G.add_edge(name1, 'Radiohead', foo=name2)
fh = io.BytesIO()
- nx.write_pajek(G,fh)
+ nx.write_pajek(G, fh)
fh.seek(0)
- H=nx.read_pajek(fh)
+ H = nx.read_pajek(fh)
assert_nodes_equal(list(G), list(H))
assert_edges_equal(list(G.edges()), list(H.edges()))
assert_equal(G.graph, H.graph)
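A quick sketch of the Pajek entry points used above, based only on the calls the tests make: parse_pajek reads a string, write_pajek/read_pajek work with file-like objects opened in binary mode, and the readers return graphs with string node labels.

    import io
    import networkx as nx

    data = '*Vertices 2\n1 "1"\n2 "2"\n*Edges\n1 2\n2 1'
    G = nx.parse_pajek(data)                 # parse directly from a string
    assert sorted(G.nodes()) == ['1', '2']

    fh = io.BytesIO()
    nx.write_pajek(G, fh)                    # write to a binary buffer
    fh.seek(0)
    H = nx.read_pajek(fh)                    # read it back
    assert sorted(H.nodes()) == sorted(G.nodes())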
diff --git a/networkx/readwrite/tests/test_shp.py b/networkx/readwrite/tests/test_shp.py
index f681d340..73be6069 100644
--- a/networkx/readwrite/tests/test_shp.py
+++ b/networkx/readwrite/tests/test_shp.py
@@ -9,6 +9,7 @@ from nose.tools import raises
import networkx as nx
+
class TestShp(object):
@classmethod
def setupClass(cls):
@@ -55,7 +56,7 @@ class TestShp(object):
shp = drv.CreateDataSource(shppath)
lyr = createlayer(shp)
-
+
for path, name in zip(self.paths, self.names):
feat = ogr.Feature(lyr.GetLayerDefn())
g = ogr.Geometry(ogr.wkbLineString)
@@ -68,10 +69,10 @@ class TestShp(object):
# create single record multiline shapefile for testing
multi_shp = drv.CreateDataSource(multi_shppath)
multi_lyr = createlayer(multi_shp, ogr.wkbMultiLineString)
-
+
multi_g = ogr.Geometry(ogr.wkbMultiLineString)
for path in self.paths:
-
+
g = ogr.Geometry(ogr.wkbLineString)
for p in path:
g.AddPoint_2D(*p)
@@ -80,7 +81,7 @@ class TestShp(object):
multi_feat = ogr.Feature(multi_lyr.GetLayerDefn())
multi_feat.SetGeometry(multi_g)
- multi_feat.SetField("Name", 'a')
+ multi_feat.SetField("Name", 'a')
multi_lyr.CreateFeature(multi_feat)
self.shppath = shppath
@@ -98,12 +99,12 @@ class TestShp(object):
assert_equal(sorted(expected.edges()), sorted(g.edges()))
g_names = [g.get_edge_data(s, e)['Name'] for s, e in g.edges()]
assert_equal(names, sorted(g_names))
-
+
# simplified
G = nx.read_shp(self.shppath)
- compare_graph_paths_names(G, self.simplified_paths, \
- self.simplified_names)
-
+ compare_graph_paths_names(G, self.simplified_paths,
+ self.simplified_names)
+
# unsimplified
G = nx.read_shp(self.shppath, simplify=False)
compare_graph_paths_names(G, self.paths, self.names)
@@ -148,7 +149,6 @@ class TestShp(object):
"LINESTRING (4.0 0.9,4 2)"
)
-
tpath = os.path.join(tempfile.gettempdir(), 'shpdir')
G = nx.read_shp(self.shppath)
nx.write_shp(G, tpath)
@@ -156,8 +156,8 @@ class TestShp(object):
self.checkgeom(shpdir.GetLayerByName("nodes"), expectedpoints_simple)
self.checkgeom(shpdir.GetLayerByName("edges"), expectedlines_simple)
- # Test unsimplified
- # Nodes should have additional point,
+ # Test unsimplified
+ # Nodes should have additional point,
# edges should be 'flattened'
G = nx.read_shp(self.shppath, simplify=False)
nx.write_shp(G, tpath)
@@ -165,7 +165,6 @@ class TestShp(object):
self.checkgeom(shpdir.GetLayerByName("nodes"), expectedpoints)
self.checkgeom(shpdir.GetLayerByName("edges"), expectedlines)
-
def test_attributeexport(self):
def testattributes(lyr, graph):
feature = lyr.GetNextFeature()
@@ -295,7 +294,7 @@ class TestMissingAttrWrite(object):
H = nx.read_shp(self.path)
for u, v, d in H.edges(data=True):
- if u == A and v == B:
- assert_equal(d['foo'], 100)
- if u == A and v == C:
- assert_equal(d['foo'], None)
+ if u == A and v == B:
+ assert_equal(d['foo'], 100)
+ if u == A and v == C:
+ assert_equal(d['foo'], None)
diff --git a/networkx/readwrite/tests/test_sparse6.py b/networkx/readwrite/tests/test_sparse6.py
index dc0a083b..526a9ddd 100644
--- a/networkx/readwrite/tests/test_sparse6.py
+++ b/networkx/readwrite/tests/test_sparse6.py
@@ -14,16 +14,16 @@ class TestSparseGraph6(object):
def test_from_sparse6_bytes(self):
data = b':Q___eDcdFcDeFcE`GaJ`IaHbKNbLM'
- G=nx.from_sparse6_bytes(data)
+ G = nx.from_sparse6_bytes(data)
assert_nodes_equal(sorted(G.nodes()),
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
- 10, 11, 12, 13, 14, 15, 16, 17])
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17])
assert_edges_equal(G.edges(),
- [(0, 1), (0, 2), (0, 3), (1, 12), (1, 14), (2, 13),
- (2, 15), (3, 16), (3, 17), (4, 7), (4, 9), (4, 11),
- (5, 6), (5, 8), (5, 9), (6, 10), (6, 11), (7, 8),
- (7, 10), (8, 12), (9, 15), (10, 14), (11, 13),
- (12, 16), (13, 17), (14, 17), (15, 16)])
+ [(0, 1), (0, 2), (0, 3), (1, 12), (1, 14), (2, 13),
+ (2, 15), (3, 16), (3, 17), (4, 7), (4, 9), (4, 11),
+ (5, 6), (5, 8), (5, 9), (6, 10), (6, 11), (7, 8),
+ (7, 10), (8, 12), (9, 15), (10, 14), (11, 13),
+ (12, 16), (13, 17), (14, 17), (15, 16)])
def test_from_bytes_multigraph_graph(self):
graph_data = b':An'
@@ -35,23 +35,23 @@ class TestSparseGraph6(object):
def test_read_sparse6(self):
data = b':Q___eDcdFcDeFcE`GaJ`IaHbKNbLM'
- G=nx.from_sparse6_bytes(data)
+ G = nx.from_sparse6_bytes(data)
fh = BytesIO(data)
- Gin=nx.read_sparse6(fh)
- assert_nodes_equal(G.nodes(),Gin.nodes())
- assert_edges_equal(G.edges(),Gin.edges())
+ Gin = nx.read_sparse6(fh)
+ assert_nodes_equal(G.nodes(), Gin.nodes())
+ assert_edges_equal(G.edges(), Gin.edges())
def test_read_many_graph6(self):
# Read many graphs into list
data = (b':Q___eDcdFcDeFcE`GaJ`IaHbKNbLM\n'
b':Q___dCfDEdcEgcbEGbFIaJ`JaHN`IM')
fh = BytesIO(data)
- glist=nx.read_sparse6(fh)
- assert_equal(len(glist),2)
+ glist = nx.read_sparse6(fh)
+ assert_equal(len(glist), 2)
for G in glist:
assert_nodes_equal(G.nodes(),
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
- 10, 11, 12, 13, 14, 15, 16, 17])
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17])
class TestWriteSparse6(TestCase):
@@ -142,7 +142,7 @@ class TestWriteSparse6(TestCase):
fullfilename = f.name
# file should be closed now, so write_sparse6 can open it
nx.write_sparse6(nx.null_graph(), fullfilename)
- fh=open(fullfilename, mode='rb')
+ fh = open(fullfilename, mode='rb')
self.assertEqual(fh.read(), b'>>sparse6<<:?\n')
fh.close()
import os
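As a reference for the sparse6 calls tested above, a minimal sketch: from_sparse6_bytes decodes one graph from a byte string, while read_sparse6 returns a single graph for a one-line stream and a list when the stream holds several graphs (which is what test_read_many_graph6 checks).

    from io import BytesIO
    import networkx as nx

    data = b':Q___eDcdFcDeFcE`GaJ`IaHbKNbLM'
    G = nx.from_sparse6_bytes(data)          # decode one sparse6 byte string
    Gin = nx.read_sparse6(BytesIO(data))     # same graph, read from a stream
    assert sorted(G.edges()) == sorted(Gin.edges())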
diff --git a/networkx/readwrite/tests/test_yaml.py b/networkx/readwrite/tests/test_yaml.py
index e26785a8..5298922a 100644
--- a/networkx/readwrite/tests/test_yaml.py
+++ b/networkx/readwrite/tests/test_yaml.py
@@ -2,13 +2,15 @@
Unit tests for yaml.
"""
-import os,tempfile
+import os
+import tempfile
from nose import SkipTest
from nose.tools import assert_equal
import networkx as nx
from networkx.testing import assert_edges_equal, assert_nodes_equal
+
class TestYaml(object):
@classmethod
def setupClass(cls):
@@ -23,14 +25,14 @@ class TestYaml(object):
def build_graphs(self):
self.G = nx.Graph(name="test")
- e = [('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')]
+ e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')]
self.G.add_edges_from(e)
self.G.add_node('g')
self.DG = nx.DiGraph(self.G)
self.MG = nx.MultiGraph()
- self.MG.add_weighted_edges_from([(1,2,5),(1,2,5),(1,2,1),(3,3,42)])
+ self.MG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)])
def assert_equal(self, G, data=False):
(fd, fname) = tempfile.mkstemp()
@@ -42,7 +44,7 @@ class TestYaml(object):
os.close(fd)
os.unlink(fname)
-
+
def testUndirected(self):
self.assert_equal(self.G, data=False)
@@ -51,4 +53,3 @@ class TestYaml(object):
def testMultiGraph(self):
self.assert_equal(self.MG, data=True)
-
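For orientation, a minimal sketch of the YAML round trip these tests exercise. It assumes PyYAML is installed (the class is skipped otherwise) and uses the write_yaml/read_yaml wrappers of this NetworkX version.

    import os
    import tempfile
    import networkx as nx

    G = nx.Graph(name='test')
    G.add_edges_from([('a', 'b'), ('b', 'c')])

    fd, fname = tempfile.mkstemp()
    nx.write_yaml(G, fname)                  # dump the graph with PyYAML
    H = nx.read_yaml(fname)                  # load it back
    assert sorted(G.edges()) == sorted(H.edges())
    os.close(fd)
    os.unlink(fname)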
diff --git a/networkx/relabel.py b/networkx/relabel.py
index 730d4abf..85f5cf60 100644
--- a/networkx/relabel.py
+++ b/networkx/relabel.py
@@ -89,7 +89,7 @@ def relabel_nodes(G, mapping, copy=True):
"""
# you can pass a function f(old_label)->new_label
# but we'll just make a dictionary here regardless
- if not hasattr(mapping,"__getitem__"):
+ if not hasattr(mapping, "__getitem__"):
m = {n: mapping(n) for n in G}
else:
m = mapping
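The hunk above only normalizes spacing, but the branch it touches is the one that lets relabel_nodes accept either a dict-like mapping or a callable. A small illustration:

    import networkx as nx

    G = nx.path_graph(3)                                   # nodes 0, 1, 2

    H1 = nx.relabel_nodes(G, {0: 'a', 1: 'b', 2: 'c'})     # mapping as a dict
    H2 = nx.relabel_nodes(G, lambda n: n + 10)             # mapping as a function

    assert sorted(H1) == ['a', 'b', 'c']
    assert sorted(H2) == [10, 11, 12]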
diff --git a/networkx/tests/test_convert_pandas.py b/networkx/tests/test_convert_pandas.py
index 89629b03..f20ea918 100644
--- a/networkx/tests/test_convert_pandas.py
+++ b/networkx/tests/test_convert_pandas.py
@@ -73,11 +73,11 @@ class TestConvertPandas(object):
('Z1', 'Z3', {'Co': 'zE', 'Mi': 9, 'St': 'Z2'}),
('Z1', 'Z3', {'Co': 'zE', 'Mi': 4, 'St': 'Z3'})])
df = pd.DataFrame.from_items([
- ('O', ['X1', 'X1', 'X1', 'X1', 'Y1', 'Y1', 'Y1', 'Y1', 'Z1', 'Z1', 'Z1', 'Z1']),
- ('D', ['X4', 'X4', 'X4', 'X4', 'Y3', 'Y3', 'Y3', 'Y3', 'Z3', 'Z3', 'Z3', 'Z3']),
- ('St', ['X1', 'X2', 'X3', 'X4', 'Y1', 'Y2', 'X2', 'Y3', 'Z1', 'X3', 'Z2', 'Z3']),
- ('Co', ['zA', 'zB', 'zB', 'zB', 'zC', 'zC', 'zC', 'zC', 'zD', 'zD', 'zE', 'zE']),
- ('Mi', [0, 54, 49, 44, 0, 34, 29, 24, 0, 14, 9, 4])])
+ ('O', ['X1', 'X1', 'X1', 'X1', 'Y1', 'Y1', 'Y1', 'Y1', 'Z1', 'Z1', 'Z1', 'Z1']),
+ ('D', ['X4', 'X4', 'X4', 'X4', 'Y3', 'Y3', 'Y3', 'Y3', 'Z3', 'Z3', 'Z3', 'Z3']),
+ ('St', ['X1', 'X2', 'X3', 'X4', 'Y1', 'Y2', 'X2', 'Y3', 'Z1', 'X3', 'Z2', 'Z3']),
+ ('Co', ['zA', 'zB', 'zB', 'zB', 'zC', 'zC', 'zC', 'zC', 'zD', 'zD', 'zE', 'zE']),
+ ('Mi', [0, 54, 49, 44, 0, 34, 29, 24, 0, 14, 9, 4])])
G1 = nx.from_pandas_edgelist(df, source='O', target='D',
edge_attr=True,
create_using=nx.MultiDiGraph())
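The call being reindented above can be reproduced on a much smaller frame. A sketch, using a plain dict instead of the deprecated DataFrame.from_items:

    import pandas as pd
    import networkx as nx

    df = pd.DataFrame({
        'O':  ['X1', 'X1', 'Y1'],      # source column
        'D':  ['X4', 'X4', 'Y3'],      # target column
        'Mi': [0, 54, 0],              # remaining columns become edge attributes
    })
    G = nx.from_pandas_edgelist(df, source='O', target='D',
                                edge_attr=True,
                                create_using=nx.MultiDiGraph())
    assert G.number_of_edges() == 3    # one (parallel) edge per data-frame row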
diff --git a/networkx/utils/heaps.py b/networkx/utils/heaps.py
index 4180b501..023e6cea 100644
--- a/networkx/utils/heaps.py
+++ b/networkx/utils/heaps.py
@@ -304,6 +304,7 @@ class PairingHeap(MinHeap):
class BinaryHeap(MinHeap):
"""A binary heap.
"""
+
def __init__(self):
"""Initialize a binary heap.
"""
diff --git a/networkx/utils/tests/test_decorators.py b/networkx/utils/tests/test_decorators.py
index caf25f9b..f09ea304 100644
--- a/networkx/utils/tests/test_decorators.py
+++ b/networkx/utils/tests/test_decorators.py
@@ -7,7 +7,7 @@ from nose import SkipTest
import networkx as nx
from networkx.utils.decorators import open_file, not_implemented_for
from networkx.utils.decorators import nodes_or_number, preserve_random_state, \
- random_state
+ random_state
def test_not_implemented_decorator():
@@ -176,6 +176,7 @@ class TestRandomState(object):
assert_true(np.all((np.random.RandomState(seed).rand(10),
random_state.rand(10))))
+
@raises(nx.NetworkXError)
def test_string_arg_index():
@random_state('a')
@@ -183,6 +184,7 @@ def test_string_arg_index():
pass
rstate = make_random_state(1)
+
@raises(nx.NetworkXError)
def test_invalid_arg_index():
@random_state(2)
diff --git a/networkx/utils/tests/test_heaps.py b/networkx/utils/tests/test_heaps.py
index 11cce634..4139694f 100644
--- a/networkx/utils/tests/test_heaps.py
+++ b/networkx/utils/tests/test_heaps.py
@@ -30,61 +30,61 @@ class X(object):
x = X()
-data = [# min should not invent an element.
- ('min', nx.NetworkXError),
- # Popping an empty heap should fail.
- ('pop', nx.NetworkXError),
- # Getting nonexisting elements should return None.
- ('get', 0, None),
- ('get', x, None),
- ('get', None, None),
- # Inserting a new key should succeed.
- ('insert', x, 1, True),
- ('get', x, 1),
- ('min', (x, 1)),
- # min should not pop the top element.
- ('min', (x, 1)),
- # Inserting a new key of different type should succeed.
- ('insert', 1, -2.0, True),
- # int and float values should interop.
- ('min', (1, -2.0)),
- # pop removes minimum-valued element.
- ('insert', 3, -10 ** 100, True),
- ('insert', 4, 5, True),
- ('pop', (3, -10 ** 100)),
- ('pop', (1, -2.0)),
- # Decrease-insert should succeed.
- ('insert', 4, -50, True),
- ('insert', 4, -60, False, True),
- # Decrease-insert should not create duplicate keys.
- ('pop', (4, -60)),
- ('pop', (x, 1)),
- # Popping all elements should empty the heap.
- ('min', nx.NetworkXError),
- ('pop', nx.NetworkXError),
- # Non-value-changing insert should fail.
- ('insert', x, 0, True),
- ('insert', x, 0, False, False),
- ('min', (x, 0)),
- ('insert', x, 0, True, False),
- ('min', (x, 0)),
- # Failed insert should not create duplicate keys.
- ('pop', (x, 0)),
- ('pop', nx.NetworkXError),
- # Increase-insert should succeed when allowed.
- ('insert', None, 0, True),
- ('insert', 2, -1, True),
- ('min', (2, -1)),
- ('insert', 2, 1, True, False),
- ('min', (None, 0)),
- # Increase-insert should fail when disallowed.
- ('insert', None, 2, False, False),
- ('min', (None, 0)),
- # Failed increase-insert should not create duplicate keys.
- ('pop', (None, 0)),
- ('pop', (2, 1)),
- ('min', nx.NetworkXError),
- ('pop', nx.NetworkXError)]
+data = [ # min should not invent an element.
+ ('min', nx.NetworkXError),
+ # Popping an empty heap should fail.
+ ('pop', nx.NetworkXError),
+ # Getting nonexisting elements should return None.
+ ('get', 0, None),
+ ('get', x, None),
+ ('get', None, None),
+ # Inserting a new key should succeed.
+ ('insert', x, 1, True),
+ ('get', x, 1),
+ ('min', (x, 1)),
+ # min should not pop the top element.
+ ('min', (x, 1)),
+ # Inserting a new key of different type should succeed.
+ ('insert', 1, -2.0, True),
+ # int and float values should interop.
+ ('min', (1, -2.0)),
+ # pop removes minimum-valued element.
+ ('insert', 3, -10 ** 100, True),
+ ('insert', 4, 5, True),
+ ('pop', (3, -10 ** 100)),
+ ('pop', (1, -2.0)),
+ # Decrease-insert should succeed.
+ ('insert', 4, -50, True),
+ ('insert', 4, -60, False, True),
+ # Decrease-insert should not create duplicate keys.
+ ('pop', (4, -60)),
+ ('pop', (x, 1)),
+ # Popping all elements should empty the heap.
+ ('min', nx.NetworkXError),
+ ('pop', nx.NetworkXError),
+ # Non-value-changing insert should fail.
+ ('insert', x, 0, True),
+ ('insert', x, 0, False, False),
+ ('min', (x, 0)),
+ ('insert', x, 0, True, False),
+ ('min', (x, 0)),
+ # Failed insert should not create duplicate keys.
+ ('pop', (x, 0)),
+ ('pop', nx.NetworkXError),
+ # Increase-insert should succeed when allowed.
+ ('insert', None, 0, True),
+ ('insert', 2, -1, True),
+ ('min', (2, -1)),
+ ('insert', 2, 1, True, False),
+ ('min', (None, 0)),
+ # Increase-insert should fail when disallowed.
+ ('insert', None, 2, False, False),
+ ('min', (None, 0)),
+ # Failed increase-insert should not create duplicate keys.
+ ('pop', (None, 0)),
+ ('pop', (2, 1)),
+ ('min', nx.NetworkXError),
+ ('pop', nx.NetworkXError)]
def _test_heap_class(cls, *args, **kwargs):
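The data table above encodes the MinHeap protocol shared by the heap classes under test; each tuple reads as (method, arguments..., expected result). A rough sketch of that protocol, under the assumption that insert(key, value, allow_increase=False) reports whether it added an entry or decreased its value, and that min()/pop() return (key, value) pairs:

    from networkx.utils.heaps import BinaryHeap

    heap = BinaryHeap()
    assert heap.insert('x', 1)        # new key -> reported as a change
    assert not heap.insert('x', 5)    # larger value, increase not allowed -> ignored
    assert heap.min() == ('x', 1)     # min() peeks without removing
    assert heap.pop() == ('x', 1)     # pop() removes the smallest entry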
diff --git a/networkx/utils/tests/test_unionfind.py b/networkx/utils/tests/test_unionfind.py
index fa12c599..9f3116a0 100644
--- a/networkx/utils/tests/test_unionfind.py
+++ b/networkx/utils/tests/test_unionfind.py
@@ -2,6 +2,7 @@ from nose.tools import *
import networkx as nx
+
def test_unionfind():
# Fixed by: 2cddd5958689bdecdcd89b91ac9aaf6ce0e4f6b8
# Previously (in 2.x), the UnionFind class could handle mixed types.