author     Jarrod Millman <jarrod.millman@gmail.com>    2020-07-09 23:12:10 -0700
committer  Jarrod Millman <jarrod.millman@gmail.com>    2020-07-10 09:44:54 -0700
commit     b22d6b36ce0545995c99d233546e8a1fe7e27fc5 (patch)
tree       9078401c2f4a7b463a82378a734508e16ef34867
parent     f30e9392bef0dccbcfd1b73ccb934064f6200fa3 (diff)
download   networkx-b22d6b36ce0545995c99d233546e8a1fe7e27fc5.tar.gz
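The hashes above identify this revision exactly. From a clone of the repository it can be inspected directly (a sketch; the remote name "origin" is an assumption, and fetching a bare commit hash works only if the server permits it — otherwise use the tarball above):

    $ git fetch origin b22d6b36ce0545995c99d233546e8a1fe7e27fc5   # assumes the remote allows fetch-by-hash
    $ git show --stat b22d6b36ce0545995c99d233546e8a1fe7e27fc5    # summary of the 450 reformatted files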
Format w/ black
450 files changed, 17505 insertions, 12793 deletions
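The diff below is mechanical: black normalizes string quoting to double quotes, rewraps long calls and literals one-element-per-line with trailing commas, and adjusts blank lines, without changing runtime behavior. A change like this can be reproduced with black itself; the exact version used is not recorded on this page, so the pin below is an assumption (the `x ** 2` spacing seen in the diff matches black releases before 20.8b0, and 19.10b0 was current in July 2020):

    $ pip install "black==19.10b0"   # assumed version, contemporary with this commit
    $ black .                        # reformat all .py files in place, from the repository root
    $ black --check .                # verify only: exits non-zero if any file would be reformatted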
diff --git a/doc/conf.py b/doc/conf.py index 4c8724ba..869e4b74 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -55,7 +55,7 @@ sphinx_gallery_conf = { autosummary_generate = True # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] suppress_warnings = ["ref.citation", "ref.footnote"] diff --git a/doc/release/report_functions_without_rst_generated.py b/doc/release/report_functions_without_rst_generated.py index beadb681..f73da43b 100644 --- a/doc/release/report_functions_without_rst_generated.py +++ b/doc/release/report_functions_without_rst_generated.py @@ -5,27 +5,28 @@ import networkx as nx print("Run this script from the doc/ directory of the repository") funcs = inspect.getmembers(nx, inspect.isfunction) -for n,f in funcs: - #print(n + ": "+str(f)) - cmd = r"find . -name *\."+n+".rst -print" - #print(cmd) - result=os.popen(cmd).read() - #print(result) +for n, f in funcs: + # print(n + ": "+str(f)) + cmd = r"find . -name *\." + n + ".rst -print" + # print(cmd) + result = os.popen(cmd).read() + # print(result) - old_names = ('find_cores', - 'test', - 'edge_betweenness', - 'betweenness_centrality_source', - 'write_graphml_lxml', - 'write_graphml_xml', - 'adj_matrix', - 'project', - 'fruchterman_reingold_layout', - 'node_degree_xy', - 'node_attribute_xy', - 'find_cliques_recursive', - 'recursive_simple_cycles', - ) + old_names = ( + "find_cores", + "test", + "edge_betweenness", + "betweenness_centrality_source", + "write_graphml_lxml", + "write_graphml_xml", + "adj_matrix", + "project", + "fruchterman_reingold_layout", + "node_degree_xy", + "node_attribute_xy", + "find_cliques_recursive", + "recursive_simple_cycles", + ) if len(result) == 0 and n not in old_names: print("Missing file from docs: ", n) diff --git a/examples/drawing/plot_giant_component.py b/examples/drawing/plot_giant_component.py index ce7101e4..c289bb34 100644 --- a/examples/drawing/plot_giant_component.py +++ b/examples/drawing/plot_giant_component.py @@ -46,10 +46,6 @@ for p in pvals: for Gi in Gcc[1:]: if len(Gi) > 1: nx.draw_networkx_edges( - G.subgraph(Gi), - pos, - edge_color="r", - alpha=0.3, - width=5.0, + G.subgraph(Gi), pos, edge_color="r", alpha=0.3, width=5.0, ) plt.show() diff --git a/examples/drawing/plot_multipartite_graph.py b/examples/drawing/plot_multipartite_graph.py index 87da9616..4b3c4cee 100644 --- a/examples/drawing/plot_multipartite_graph.py +++ b/examples/drawing/plot_multipartite_graph.py @@ -10,15 +10,18 @@ import networkx as nx from networkx.utils import pairwise -subset_sizes = [5,5,4,3,2,4,4,3] -subset_color = ['gold', - 'violet', - 'violet', - 'violet', - 'violet', - 'limegreen', - 'limegreen', - 'darkorange'] +subset_sizes = [5, 5, 4, 3, 2, 4, 4, 3] +subset_color = [ + "gold", + "violet", + "violet", + "violet", + "violet", + "limegreen", + "limegreen", + "darkorange", +] + def multilayered_graph(*subset_sizes): extents = pairwise(itertools.accumulate((0,) + subset_sizes)) @@ -30,9 +33,10 @@ def multilayered_graph(*subset_sizes): G.add_edges_from(itertools.product(layer1, layer2)) return G + G = multilayered_graph(*subset_sizes) -color = [subset_color[data['layer']] for v,data in G.nodes(data=True)] -pos = nx.multipartite_layout(G,subset_key='layer') +color = [subset_color[data["layer"]] for v, data in G.nodes(data=True)] +pos = nx.multipartite_layout(G, subset_key="layer") plt.figure(figsize=(8, 8)) nx.draw(G, pos, node_color=color, with_labels=False) plt.axis("equal") diff --git 
a/examples/graph/plot_roget.py b/examples/graph/plot_roget.py index 35c040a6..41e4347c 100644 --- a/examples/graph/plot_roget.py +++ b/examples/graph/plot_roget.py @@ -72,7 +72,7 @@ print(nx.number_connected_components(UG), "connected components") options = { "node_color": "black", "node_size": 1, - "edge_color": 'gray', + "edge_color": "gray", "linewidths": 0, "width": 0.1, } diff --git a/networkx/__init__.py b/networkx/__init__.py index cd7ab694..235a8825 100644 --- a/networkx/__init__.py +++ b/networkx/__init__.py @@ -9,6 +9,7 @@ See https://networkx.github.io for complete documentation. """ import sys + if sys.version_info[:2] < (3, 6): m = "Python 3.6 or later is required for NetworkX (%d.%d detected)." raise ImportError(m % sys.version_info[:2]) diff --git a/networkx/algorithms/approximation/clique.py b/networkx/algorithms/approximation/clique.py index 0d5a8fef..9283baf5 100644 --- a/networkx/algorithms/approximation/clique.py +++ b/networkx/algorithms/approximation/clique.py @@ -93,8 +93,8 @@ def clique_removal(G): return maxiset, cliques -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def large_clique_size(G): """Find the size of a large clique in a graph. diff --git a/networkx/algorithms/approximation/clustering_coefficient.py b/networkx/algorithms/approximation/clustering_coefficient.py index 0d16be48..56a8f83b 100644 --- a/networkx/algorithms/approximation/clustering_coefficient.py +++ b/networkx/algorithms/approximation/clustering_coefficient.py @@ -1,11 +1,11 @@ from networkx.utils import not_implemented_for from networkx.utils import py_random_state -__all__ = ['average_clustering'] +__all__ = ["average_clustering"] @py_random_state(2) -@not_implemented_for('directed') +@not_implemented_for("directed") def average_clustering(G, trials=1000, seed=None): r"""Estimates the average clustering coefficient of G. 
diff --git a/networkx/algorithms/approximation/connectivity.py b/networkx/algorithms/approximation/connectivity.py index 15c09e04..96f613b5 100644 --- a/networkx/algorithms/approximation/connectivity.py +++ b/networkx/algorithms/approximation/connectivity.py @@ -5,11 +5,13 @@ from operator import itemgetter import networkx as nx -__all__ = ['local_node_connectivity', - 'node_connectivity', - 'all_pairs_node_connectivity'] +__all__ = [ + "local_node_connectivity", + "node_connectivity", + "all_pairs_node_connectivity", +] -INF = float('inf') +INF = float("inf") def local_node_connectivity(G, source, target, cutoff=None): @@ -171,7 +173,7 @@ def node_connectivity(G, s=None, t=None): """ if (s is not None and t is None) or (s is None and t is not None): - raise nx.NetworkXError('Both source and target must be specified.') + raise nx.NetworkXError("Both source and target must be specified.") # Local node connectivity if s is not None and t is not None: @@ -188,6 +190,7 @@ def node_connectivity(G, s=None, t=None): def neighbors(v): return itertools.chain(G.predecessors(v), G.successors(v)) + else: connected_func = nx.is_connected iter_func = itertools.combinations @@ -344,7 +347,8 @@ def _bidirectional_pred_succ(G, source, target, exclude): # excludes nodes in the container "exclude" from the search if source is None or target is None: raise nx.NetworkXException( - "Bidirectional shortest path called without source or target") + "Bidirectional shortest path called without source or target" + ) if target == source: return ({target: None}, {source: None}, source) diff --git a/networkx/algorithms/approximation/dominating_set.py b/networkx/algorithms/approximation/dominating_set.py index 3ba9dcdd..548e21d5 100644 --- a/networkx/algorithms/approximation/dominating_set.py +++ b/networkx/algorithms/approximation/dominating_set.py @@ -14,12 +14,11 @@ incident to an endpoint of at least one edge in *F*. from ..matching import maximal_matching from ...utils import not_implemented_for -__all__ = ["min_weighted_dominating_set", - "min_edge_dominating_set"] +__all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"] # TODO Why doesn't this algorithm work for directed graphs? -@not_implemented_for('directed') +@not_implemented_for("directed") def min_weighted_dominating_set(G, weight=None): r"""Returns a dominating set that approximates the minimum weight node dominating set. 
diff --git a/networkx/algorithms/approximation/independent_set.py b/networkx/algorithms/approximation/independent_set.py index 4cbc02d5..35ad8f7d 100644 --- a/networkx/algorithms/approximation/independent_set.py +++ b/networkx/algorithms/approximation/independent_set.py @@ -26,6 +26,7 @@ doi:10.1007/BF01994876 """ from networkx.algorithms.approximation import clique_removal + __all__ = ["maximum_independent_set"] diff --git a/networkx/algorithms/approximation/kcomponents.py b/networkx/algorithms/approximation/kcomponents.py index 6f21607a..183b3f19 100644 --- a/networkx/algorithms/approximation/kcomponents.py +++ b/networkx/algorithms/approximation/kcomponents.py @@ -11,10 +11,10 @@ from networkx.utils import not_implemented_for from networkx.algorithms.approximation import local_node_connectivity -__all__ = ['k_components'] +__all__ = ["k_components"] -not_implemented_for('directed') +not_implemented_for("directed") def k_components(G, min_density=0.95): @@ -163,9 +163,9 @@ def _cliques_heuristic(G, H, k, min_density): if i == 0: overlap = False else: - overlap = set.intersection(*[ - {x for x in H[n] if x not in cands} - for n in cands]) + overlap = set.intersection( + *[{x for x in H[n] if x not in cands} for n in cands] + ) if overlap and len(overlap) < k: SH = H.subgraph(cands | overlap) else: @@ -207,10 +207,11 @@ class _AntiGraph(nx.Graph): case we only use k-core, connected_components, and biconnected_components. """ - all_edge_dict = {'weight': 1} + all_edge_dict = {"weight": 1} def single_edge_dict(self): return self.all_edge_dict + edge_attr_dict_factory = single_edge_dict def __getitem__(self, n): @@ -228,8 +229,9 @@ class _AntiGraph(nx.Graph): """ all_edge_dict = self.all_edge_dict - return {node: all_edge_dict for node in - set(self._adj) - set(self._adj[n]) - {n}} + return { + node: all_edge_dict for node in set(self._adj) - set(self._adj[n]) - {n} + } def neighbors(self, n): """Returns an iterator over all neighbors of node n in the diff --git a/networkx/algorithms/approximation/matching.py b/networkx/algorithms/approximation/matching.py index 8be396c4..17a52eda 100644 --- a/networkx/algorithms/approximation/matching.py +++ b/networkx/algorithms/approximation/matching.py @@ -9,6 +9,7 @@ edges; that is, no two edges share a common vertex. `Wikipedia: Matching <https://en.wikipedia.org/wiki/Matching_(graph_theory)>`_ """ import networkx as nx + __all__ = ["min_maximal_matching"] diff --git a/networkx/algorithms/approximation/steinertree.py b/networkx/algorithms/approximation/steinertree.py index 970bd1f7..7f91b63d 100644 --- a/networkx/algorithms/approximation/steinertree.py +++ b/networkx/algorithms/approximation/steinertree.py @@ -3,11 +3,11 @@ from itertools import chain from networkx.utils import pairwise, not_implemented_for import networkx as nx -__all__ = ['metric_closure', 'steiner_tree'] +__all__ = ["metric_closure", "steiner_tree"] -@not_implemented_for('directed') -def metric_closure(G, weight='weight'): +@not_implemented_for("directed") +def metric_closure(G, weight="weight"): """ Return the metric closure of a graph. 
The metric closure of a graph *G* is the complete graph in which each edge @@ -46,9 +46,9 @@ def metric_closure(G, weight='weight'): return M -@not_implemented_for('multigraph') -@not_implemented_for('directed') -def steiner_tree(G, terminal_nodes, weight='weight'): +@not_implemented_for("multigraph") +@not_implemented_for("directed") +def steiner_tree(G, terminal_nodes, weight="weight"): """ Return an approximation to the minimum Steiner tree of a graph. Parameters @@ -83,8 +83,8 @@ def steiner_tree(G, terminal_nodes, weight='weight'): # Use the 'distance' attribute of each edge provided by the metric closure # graph. H = M.subgraph(terminal_nodes) - mst_edges = nx.minimum_spanning_edges(H, weight='distance', data=True) + mst_edges = nx.minimum_spanning_edges(H, weight="distance", data=True) # Create an iterator over each edge in each shortest path; repeats are okay - edges = chain.from_iterable(pairwise(d['path']) for u, v, d in mst_edges) + edges = chain.from_iterable(pairwise(d["path"]) for u, v, d in mst_edges) T = G.edge_subgraph(edges) return T diff --git a/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py b/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py index c1642c18..1bb17798 100644 --- a/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py +++ b/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py @@ -8,29 +8,27 @@ from networkx.algorithms.approximation import average_clustering def test_petersen(): # Actual coefficient is 0 G = nx.petersen_graph() - assert (average_clustering(G, trials=int(len(G) / 2)) == - nx.average_clustering(G)) + assert average_clustering(G, trials=int(len(G) / 2)) == nx.average_clustering(G) def test_petersen_seed(): # Actual coefficient is 0 G = nx.petersen_graph() - assert (average_clustering(G, trials=int(len(G) / 2), seed=1) == - nx.average_clustering(G)) + assert average_clustering( + G, trials=int(len(G) / 2), seed=1 + ) == nx.average_clustering(G) def test_tetrahedral(): # Actual coefficient is 1 G = nx.tetrahedral_graph() - assert (average_clustering(G, trials=int(len(G) / 2)) == - nx.average_clustering(G)) + assert average_clustering(G, trials=int(len(G) / 2)) == nx.average_clustering(G) def test_dodecahedral(): # Actual coefficient is 0 G = nx.dodecahedral_graph() - assert (average_clustering(G, trials=int(len(G) / 2)) == - nx.average_clustering(G)) + assert average_clustering(G, trials=int(len(G) / 2)) == nx.average_clustering(G) def test_empty(): diff --git a/networkx/algorithms/approximation/tests/test_connectivity.py b/networkx/algorithms/approximation/tests/test_connectivity.py index d9053541..887db20b 100644 --- a/networkx/algorithms/approximation/tests/test_connectivity.py +++ b/networkx/algorithms/approximation/tests/test_connectivity.py @@ -7,9 +7,29 @@ from networkx.algorithms import approximation as approx def test_global_node_connectivity(): # Figure 1 chapter on Connectivity G = nx.Graph() - G.add_edges_from([(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4), - (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8), - (7, 10), (8, 11), (9, 10), (9, 11), (10, 11)]) + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 6), + (3, 4), + (3, 6), + (4, 6), + (4, 7), + (5, 7), + (6, 8), + (6, 9), + (7, 8), + (7, 10), + (8, 11), + (9, 10), + (9, 11), + (10, 11), + ] + ) assert 2 == approx.local_node_connectivity(G, 1, 11) assert 2 == approx.node_connectivity(G) assert 2 == approx.node_connectivity(G, 1, 11) @@ -49,6 +69,7 @@ def test_petersen(): 
assert 3 == approx.node_connectivity(G) assert 3 == approx.node_connectivity(G, 0, 5) + # Approximation fails with tutte graph # def test_tutte(): # G = nx.tutte_graph() @@ -66,6 +87,7 @@ def test_octahedral(): assert 4 == approx.node_connectivity(G) assert 4 == approx.node_connectivity(G, 0, 5) + # Approximation can fail with icosahedral graph depending # on iteration order. # def test_icosahedral(): @@ -109,7 +131,6 @@ def test_directed_node_connectivity(): class TestAllPairsNodeConnectivityApprox: - @classmethod def setup_class(cls): cls.path = nx.path_graph(7) @@ -121,9 +142,17 @@ class TestAllPairsNodeConnectivityApprox: cls.K20 = nx.complete_graph(20) cls.K10 = nx.complete_graph(10) cls.K5 = nx.complete_graph(5) - cls.G_list = [cls.path, cls.directed_path, cls.cycle, - cls.directed_cycle, cls.gnp, cls.directed_gnp, cls.K10, - cls.K5, cls.K20] + cls.G_list = [ + cls.path, + cls.directed_path, + cls.cycle, + cls.directed_cycle, + cls.gnp, + cls.directed_gnp, + cls.K10, + cls.K5, + cls.K20, + ] def test_cycles(self): K_undir = approx.all_pairs_node_connectivity(self.cycle) diff --git a/networkx/algorithms/approximation/tests/test_dominating_set.py b/networkx/algorithms/approximation/tests/test_dominating_set.py index a2b5d4cd..da1abdc5 100644 --- a/networkx/algorithms/approximation/tests/test_dominating_set.py +++ b/networkx/algorithms/approximation/tests/test_dominating_set.py @@ -4,7 +4,6 @@ from networkx.algorithms.approximation import min_edge_dominating_set class TestMinWeightDominatingSet: - def test_min_weighted_dominating_set(self): graph = nx.Graph() graph.add_edge(1, 2) diff --git a/networkx/algorithms/approximation/tests/test_kcomponents.py b/networkx/algorithms/approximation/tests/test_kcomponents.py index c1c65416..60a90e84 100644 --- a/networkx/algorithms/approximation/tests/test_kcomponents.py +++ b/networkx/algorithms/approximation/tests/test_kcomponents.py @@ -13,21 +13,25 @@ def build_k_number_dict(k_components): k_num[node] = k return k_num + ## # Some nice synthetic graphs ## def graph_example_1(): - G = nx.convert_node_labels_to_integers(nx.grid_graph([5, 5]), - label_attribute='labels') - rlabels = nx.get_node_attributes(G, 'labels') + G = nx.convert_node_labels_to_integers( + nx.grid_graph([5, 5]), label_attribute="labels" + ) + rlabels = nx.get_node_attributes(G, "labels") labels = {v: k for k, v in rlabels.items()} - for nodes in [(labels[(0, 0)], labels[(1, 0)]), - (labels[(0, 4)], labels[(1, 4)]), - (labels[(3, 0)], labels[(4, 0)]), - (labels[(3, 4)], labels[(4, 4)])]: + for nodes in [ + (labels[(0, 0)], labels[(1, 0)]), + (labels[(0, 4)], labels[(1, 4)]), + (labels[(3, 0)], labels[(4, 0)]), + (labels[(3, 4)], labels[(4, 4)]), + ]: new_node = G.order() + 1 # Petersen graph is triconnected P = nx.petersen_graph() @@ -53,13 +57,13 @@ def graph_example_1(): def torrents_and_ferraro_graph(): - G = nx.convert_node_labels_to_integers(nx.grid_graph([5, 5]), - label_attribute='labels') - rlabels = nx.get_node_attributes(G, 'labels') + G = nx.convert_node_labels_to_integers( + nx.grid_graph([5, 5]), label_attribute="labels" + ) + rlabels = nx.get_node_attributes(G, "labels") labels = {v: k for k, v in rlabels.items()} - for nodes in [(labels[(0, 4)], labels[(1, 4)]), - (labels[(3, 4)], labels[(4, 4)])]: + for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]: new_node = G.order() + 1 # Petersen graph is triconnected P = nx.petersen_graph() @@ -84,8 +88,7 @@ def torrents_and_ferraro_graph(): # This stupid mistake make one reviewer very angry 
:P G.add_edge(new_node + 16, new_node + 8) - for nodes in [(labels[(0, 0)], labels[(1, 0)]), - (labels[(3, 0)], labels[(4, 0)])]: + for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]: new_node = G.order() + 1 # Petersen graph is triconnected P = nx.petersen_graph() @@ -112,6 +115,7 @@ def torrents_and_ferraro_graph(): G.add_edge(new_node + 18, nbr) return G + # Helper function @@ -142,10 +146,42 @@ def test_karate_0(): def test_karate_1(): - karate_k_num = {0: 4, 1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 3, 7: 4, 8: 4, 9: 2, - 10: 3, 11: 1, 12: 2, 13: 4, 14: 2, 15: 2, 16: 2, 17: 2, 18: 2, - 19: 3, 20: 2, 21: 2, 22: 2, 23: 3, 24: 3, 25: 3, 26: 2, 27: 3, - 28: 3, 29: 3, 30: 4, 31: 3, 32: 4, 33: 4} + karate_k_num = { + 0: 4, + 1: 4, + 2: 4, + 3: 4, + 4: 3, + 5: 3, + 6: 3, + 7: 4, + 8: 4, + 9: 2, + 10: 3, + 11: 1, + 12: 2, + 13: 4, + 14: 2, + 15: 2, + 16: 2, + 17: 2, + 18: 2, + 19: 3, + 20: 2, + 21: 2, + 22: 2, + 23: 3, + 24: 3, + 25: 3, + 26: 2, + 27: 3, + 28: 3, + 29: 3, + 30: 4, + 31: 3, + 32: 4, + 33: 4, + } approx_karate_k_num = karate_k_num.copy() approx_karate_k_num[24] = 2 approx_karate_k_num[25] = 2 @@ -183,9 +219,9 @@ def test_directed(): def test_same(): - equal = {'A': 2, 'B': 2, 'C': 2} - slightly_different = {'A': 2, 'B': 1, 'C': 2} - different = {'A': 2, 'B': 8, 'C': 18} + equal = {"A": 2, "B": 2, "C": 2} + slightly_different = {"A": 2, "B": 1, "C": 2} + different = {"A": 2, "B": 8, "C": 18} assert _same(equal) assert not _same(slightly_different) assert _same(slightly_different, tol=1) @@ -202,9 +238,7 @@ class TestAntiGraph: cls.Ad = _AntiGraph(nx.complement(cls.Gd)) cls.Gk = nx.karate_club_graph() cls.Ak = _AntiGraph(nx.complement(cls.Gk)) - cls.GA = [(cls.Gnp, cls.Anp), - (cls.Gd, cls.Ad), - (cls.Gk, cls.Ak)] + cls.GA = [(cls.Gnp, cls.Anp), (cls.Gd, cls.Ad), (cls.Gk, cls.Ak)] def test_size(self): for G, A in self.GA: @@ -247,7 +281,7 @@ class TestAntiGraph: def test_node_not_in_graph(self): for G, A in self.GA: - node = 'non_existent_node' + node = "non_existent_node" pytest.raises(nx.NetworkXError, A.neighbors, node) pytest.raises(nx.NetworkXError, G.neighbors, node) @@ -258,7 +292,9 @@ class TestAntiGraph: assert G.degree(node) == A.degree(node) assert sum(d for n, d in G.degree()) == sum(d for n, d in A.degree()) # AntiGraph is a ThinGraph, so all the weights are 1 - assert (sum(d for n, d in A.degree()) == - sum(d for n, d in A.degree(weight='weight'))) - assert (sum(d for n, d in G.degree(nodes)) == - sum(d for n, d in A.degree(nodes))) + assert sum(d for n, d in A.degree()) == sum( + d for n, d in A.degree(weight="weight") + ) + assert sum(d for n, d in G.degree(nodes)) == sum( + d for n, d in A.degree(nodes) + ) diff --git a/networkx/algorithms/approximation/tests/test_steinertree.py b/networkx/algorithms/approximation/tests/test_steinertree.py index 9a50112b..93b0af48 100644 --- a/networkx/algorithms/approximation/tests/test_steinertree.py +++ b/networkx/algorithms/approximation/tests/test_steinertree.py @@ -26,53 +26,59 @@ class TestSteinerTree: def test_metric_closure(self): M = metric_closure(self.G) - mc = [(1, 2, {'distance': 10, 'path': [1, 2]}), - (1, 3, {'distance': 20, 'path': [1, 2, 3]}), - (1, 4, {'distance': 22, 'path': [1, 2, 7, 5, 4]}), - (1, 5, {'distance': 12, 'path': [1, 2, 7, 5]}), - (1, 6, {'distance': 22, 'path': [1, 2, 7, 5, 6]}), - (1, 7, {'distance': 11, 'path': [1, 2, 7]}), - (2, 3, {'distance': 10, 'path': [2, 3]}), - (2, 4, {'distance': 12, 'path': [2, 7, 5, 4]}), - (2, 5, {'distance': 2, 'path': [2, 7, 5]}), - (2, 6, 
{'distance': 12, 'path': [2, 7, 5, 6]}), - (2, 7, {'distance': 1, 'path': [2, 7]}), - (3, 4, {'distance': 10, 'path': [3, 4]}), - (3, 5, {'distance': 12, 'path': [3, 2, 7, 5]}), - (3, 6, {'distance': 22, 'path': [3, 2, 7, 5, 6]}), - (3, 7, {'distance': 11, 'path': [3, 2, 7]}), - (4, 5, {'distance': 10, 'path': [4, 5]}), - (4, 6, {'distance': 20, 'path': [4, 5, 6]}), - (4, 7, {'distance': 11, 'path': [4, 5, 7]}), - (5, 6, {'distance': 10, 'path': [5, 6]}), - (5, 7, {'distance': 1, 'path': [5, 7]}), - (6, 7, {'distance': 11, 'path': [6, 5, 7]})] + mc = [ + (1, 2, {"distance": 10, "path": [1, 2]}), + (1, 3, {"distance": 20, "path": [1, 2, 3]}), + (1, 4, {"distance": 22, "path": [1, 2, 7, 5, 4]}), + (1, 5, {"distance": 12, "path": [1, 2, 7, 5]}), + (1, 6, {"distance": 22, "path": [1, 2, 7, 5, 6]}), + (1, 7, {"distance": 11, "path": [1, 2, 7]}), + (2, 3, {"distance": 10, "path": [2, 3]}), + (2, 4, {"distance": 12, "path": [2, 7, 5, 4]}), + (2, 5, {"distance": 2, "path": [2, 7, 5]}), + (2, 6, {"distance": 12, "path": [2, 7, 5, 6]}), + (2, 7, {"distance": 1, "path": [2, 7]}), + (3, 4, {"distance": 10, "path": [3, 4]}), + (3, 5, {"distance": 12, "path": [3, 2, 7, 5]}), + (3, 6, {"distance": 22, "path": [3, 2, 7, 5, 6]}), + (3, 7, {"distance": 11, "path": [3, 2, 7]}), + (4, 5, {"distance": 10, "path": [4, 5]}), + (4, 6, {"distance": 20, "path": [4, 5, 6]}), + (4, 7, {"distance": 11, "path": [4, 5, 7]}), + (5, 6, {"distance": 10, "path": [5, 6]}), + (5, 7, {"distance": 1, "path": [5, 7]}), + (6, 7, {"distance": 11, "path": [6, 5, 7]}), + ] assert_edges_equal(list(M.edges(data=True)), mc) def test_steiner_tree(self): S = steiner_tree(self.G, self.term_nodes) - expected_steiner_tree = [(1, 2, {'weight': 10}), - (2, 3, {'weight': 10}), - (2, 7, {'weight': 1}), - (3, 4, {'weight': 10}), - (5, 7, {'weight': 1})] + expected_steiner_tree = [ + (1, 2, {"weight": 10}), + (2, 3, {"weight": 10}), + (2, 7, {"weight": 1}), + (3, 4, {"weight": 10}), + (5, 7, {"weight": 1}), + ] assert_edges_equal(list(S.edges(data=True)), expected_steiner_tree) def test_multigraph_steiner_tree(self): with pytest.raises(nx.NetworkXNotImplemented): G = nx.MultiGraph() - G.add_edges_from([ - (1, 2, 0, {'weight': 1}), - (2, 3, 0, {'weight': 999}), - (2, 3, 1, {'weight': 1}), - (3, 4, 0, {'weight': 1}), - (3, 5, 0, {'weight': 1}) - ]) + G.add_edges_from( + [ + (1, 2, 0, {"weight": 1}), + (2, 3, 0, {"weight": 999}), + (2, 3, 1, {"weight": 1}), + (3, 4, 0, {"weight": 1}), + (3, 5, 0, {"weight": 1}), + ] + ) terminal_nodes = [2, 4, 5] expected_edges = [ - (2, 3, 1, {'weight': 1}), # edge with key 1 has lower weight - (3, 4, 0, {'weight': 1}), - (3, 5, 0, {'weight': 1}) + (2, 3, 1, {"weight": 1}), # edge with key 1 has lower weight + (3, 4, 0, {"weight": 1}), + (3, 5, 0, {"weight": 1}), ] # not implemented T = steiner_tree(G, terminal_nodes) diff --git a/networkx/algorithms/approximation/tests/test_treewidth.py b/networkx/algorithms/approximation/tests/test_treewidth.py index 0bef0903..5389b949 100644 --- a/networkx/algorithms/approximation/tests/test_treewidth.py +++ b/networkx/algorithms/approximation/tests/test_treewidth.py @@ -37,6 +37,7 @@ def is_tree_decomp(graph, decomp): class TestTreewidthMinDegree: """Unit tests for the min_degree function""" + @classmethod def setup_class(cls): """Setup for different kinds of trees""" @@ -135,8 +136,9 @@ class TestTreewidthMinDegree: def test_heuristic_first_steps(self): """Test first steps of min_degree heuristic""" - graph = {n: set(self.deterministic_graph[n]) - {n} - for n in 
self.deterministic_graph} + graph = { + n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph + } deg_heuristic = MinDegreeHeuristic(graph) elim_node = deg_heuristic.best_node(graph) print(f"Graph {graph}:") @@ -165,6 +167,7 @@ class TestTreewidthMinDegree: class TestTreewidthMinFillIn: """Unit tests for the treewidth_min_fill_in function.""" + @classmethod def setup_class(cls): """Setup for different kinds of trees""" @@ -238,8 +241,9 @@ class TestTreewidthMinFillIn: def test_heuristic_first_steps(self): """Test first steps of min_fill_in heuristic""" - graph = {n: set(self.deterministic_graph[n]) - {n} - for n in self.deterministic_graph} + graph = { + n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph + } print(f"Graph {graph}:") elim_node = min_fill_in_heuristic(graph) steps = [] diff --git a/networkx/algorithms/approximation/treewidth.py b/networkx/algorithms/approximation/treewidth.py index 2536d989..11716c78 100644 --- a/networkx/algorithms/approximation/treewidth.py +++ b/networkx/algorithms/approximation/treewidth.py @@ -39,8 +39,8 @@ import itertools __all__ = ["treewidth_min_degree", "treewidth_min_fill_in"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def treewidth_min_degree(G): """ Returns a treewidth decomposition using the Minimum Degree heuristic. @@ -62,8 +62,8 @@ def treewidth_min_degree(G): return treewidth_decomp(G, lambda graph: deg_heuristic.best_node(graph)) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def treewidth_min_fill_in(G): """ Returns a treewidth decomposition using the Minimum Fill-in heuristic. @@ -80,7 +80,7 @@ def treewidth_min_fill_in(G): Treewidth decomposition : (int, Graph) tuple 2-tuple with treewidth and the corresponding decomposed tree. """ - return treewidth_decomp(G, min_fill_in_heuristic) + return treewidth_decomp(G, min_fill_in_heuristic) class MinDegreeHeuristic: @@ -91,6 +91,7 @@ class MinDegreeHeuristic: chosen, then the graph is updated and the corresponding node is removed. Next, a new node with the lowest degree is chosen, and so on. """ + def __init__(self, graph): self._graph = graph diff --git a/networkx/algorithms/assortativity/connectivity.py b/networkx/algorithms/assortativity/connectivity.py index 0450fff8..15ac6728 100644 --- a/networkx/algorithms/assortativity/connectivity.py +++ b/networkx/algorithms/assortativity/connectivity.py @@ -1,11 +1,11 @@ from collections import defaultdict -__all__ = ['average_degree_connectivity', - 'k_nearest_neighbors'] +__all__ = ["average_degree_connectivity", "k_nearest_neighbors"] -def average_degree_connectivity(G, source="in+out", target="in+out", - nodes=None, weight=None): +def average_degree_connectivity( + G, source="in+out", target="in+out", nodes=None, weight=None +): r"""Compute the average degree connectivity of graph. The average degree connectivity is the average nearest neighbor degree of @@ -76,22 +76,22 @@ def average_degree_connectivity(G, source="in+out", target="in+out", """ # First, determine the type of neighbors and the type of degree to use. 
if G.is_directed(): - if source not in ('in', 'out', 'in+out'): + if source not in ("in", "out", "in+out"): raise ValueError('source must be one of "in", "out", or "in+out"') - if target not in ('in', 'out', 'in+out'): + if target not in ("in", "out", "in+out"): raise ValueError('target must be one of "in", "out", or "in+out"') - direction = {'out': G.out_degree, - 'in': G.in_degree, - 'in+out': G.degree} - neighbor_funcs = {'out': G.successors, - 'in': G.predecessors, - 'in+out': G.neighbors} + direction = {"out": G.out_degree, "in": G.in_degree, "in+out": G.degree} + neighbor_funcs = { + "out": G.successors, + "in": G.predecessors, + "in+out": G.neighbors, + } source_degree = direction[source] target_degree = direction[target] neighbors = neighbor_funcs[source] # `reverse` indicates whether to look at the in-edge when # computing the weight of an edge. - reverse = (source == 'in') + reverse = source == "in" else: source_degree = G.degree target_degree = G.degree diff --git a/networkx/algorithms/assortativity/correlation.py b/networkx/algorithms/assortativity/correlation.py index 28b18b7c..d8c9b772 100644 --- a/networkx/algorithms/assortativity/correlation.py +++ b/networkx/algorithms/assortativity/correlation.py @@ -1,17 +1,21 @@ """Node assortativity coefficients and correlation measures. """ -from networkx.algorithms.assortativity.mixing import degree_mixing_matrix, \ - attribute_mixing_matrix, numeric_mixing_matrix +from networkx.algorithms.assortativity.mixing import ( + degree_mixing_matrix, + attribute_mixing_matrix, + numeric_mixing_matrix, +) from networkx.algorithms.assortativity.pairs import node_degree_xy -__all__ = ['degree_pearson_correlation_coefficient', - 'degree_assortativity_coefficient', - 'attribute_assortativity_coefficient', - 'numeric_assortativity_coefficient'] +__all__ = [ + "degree_pearson_correlation_coefficient", + "degree_assortativity_coefficient", + "attribute_assortativity_coefficient", + "numeric_assortativity_coefficient", +] -def degree_assortativity_coefficient(G, x='out', y='in', weight=None, - nodes=None): +def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None): """Compute degree assortativity of graph. Assortativity measures the similarity of connections @@ -74,8 +78,7 @@ def degree_assortativity_coefficient(G, x='out', y='in', weight=None, return numeric_ac(M) -def degree_pearson_correlation_coefficient(G, x='out', y='in', - weight=None, nodes=None): +def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, nodes=None): """Compute degree assortativity of graph. 
Assortativity measures the similarity of connections @@ -129,8 +132,7 @@ def degree_pearson_correlation_coefficient(G, x='out', y='in', try: import scipy.stats as stats except ImportError as e: - raise ImportError("Assortativity requires SciPy:" - "http://scipy.org/ ") from e + raise ImportError("Assortativity requires SciPy:" "http://scipy.org/ ") from e xy = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight) x, y = zip(*xy) return stats.pearsonr(x, y)[0] @@ -251,8 +253,9 @@ def attribute_ac(M): try: import numpy except ImportError as e: - raise ImportError('attribute_assortativity requires ' - 'NumPy: http://scipy.org/') from e + raise ImportError( + "attribute_assortativity requires " "NumPy: http://scipy.org/" + ) from e if M.sum() != 1.0: M = M / M.sum() s = (M @ M).sum() @@ -267,8 +270,9 @@ def numeric_ac(M): try: import numpy except ImportError as e: - raise ImportError('numeric_assortativity requires ' - 'NumPy: http://scipy.org/') from e + raise ImportError( + "numeric_assortativity requires " "NumPy: http://scipy.org/" + ) from e if M.sum() != 1.0: M = M / float(M.sum()) nx, ny = M.shape # nx=ny @@ -276,8 +280,8 @@ def numeric_ac(M): y = numpy.arange(ny) a = M.sum(axis=0) b = M.sum(axis=1) - vara = (a * x**2).sum() - ((a * x).sum())**2 - varb = (b * x**2).sum() - ((b * x).sum())**2 + vara = (a * x ** 2).sum() - ((a * x).sum()) ** 2 + varb = (b * x ** 2).sum() - ((b * x).sum()) ** 2 xy = numpy.outer(x, y) ab = numpy.outer(a, b) return (xy * (M - ab)).sum() / numpy.sqrt(vara * varb) diff --git a/networkx/algorithms/assortativity/mixing.py b/networkx/algorithms/assortativity/mixing.py index bf7f9e1d..5edccf84 100644 --- a/networkx/algorithms/assortativity/mixing.py +++ b/networkx/algorithms/assortativity/mixing.py @@ -2,15 +2,16 @@ Mixing matrices for node attributes and degree. """ from networkx.utils import dict_to_numpy_array -from networkx.algorithms.assortativity.pairs import node_degree_xy, \ - node_attribute_xy +from networkx.algorithms.assortativity.pairs import node_degree_xy, node_attribute_xy -__all__ = ['attribute_mixing_matrix', - 'attribute_mixing_dict', - 'degree_mixing_matrix', - 'degree_mixing_dict', - 'numeric_mixing_matrix', - 'mixing_dict'] +__all__ = [ + "attribute_mixing_matrix", + "attribute_mixing_dict", + "degree_mixing_matrix", + "degree_mixing_dict", + "numeric_mixing_matrix", + "mixing_dict", +] def attribute_mixing_dict(G, attribute, nodes=None, normalized=False): @@ -51,8 +52,7 @@ def attribute_mixing_dict(G, attribute, nodes=None, normalized=False): return mixing_dict(xy_iter, normalized=normalized) -def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, - normalized=True): +def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=True): """Returns mixing matrix for attribute. Parameters @@ -86,8 +86,7 @@ def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, return a -def degree_mixing_dict(G, x='out', y='in', weight=None, - nodes=None, normalized=False): +def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=False): """Returns dictionary representation of mixing matrix for degree. Parameters @@ -118,8 +117,7 @@ def degree_mixing_dict(G, x='out', y='in', weight=None, return mixing_dict(xy_iter, normalized=normalized) -def degree_mixing_matrix(G, x='out', y='in', weight=None, - nodes=None, normalized=True): +def degree_mixing_matrix(G, x="out", y="in", weight=None, nodes=None, normalized=True): """Returns mixing matrix for attribute. 
Parameters diff --git a/networkx/algorithms/assortativity/neighbor_degree.py b/networkx/algorithms/assortativity/neighbor_degree.py index 0d821e6e..a4bc5371 100644 --- a/networkx/algorithms/assortativity/neighbor_degree.py +++ b/networkx/algorithms/assortativity/neighbor_degree.py @@ -12,13 +12,13 @@ def _average_nbr_deg(G, source_degree, target_degree, nodes=None, weight=None): if weight is None: avg[n] = sum(d for n, d in nbrdeg) / float(deg) else: - avg[n] = sum((G[n][nbr].get(weight, 1) * d - for nbr, d in nbrdeg)) / float(deg) + avg[n] = sum((G[n][nbr].get(weight, 1) * d for nbr, d in nbrdeg)) / float( + deg + ) return avg -def average_neighbor_degree(G, source='out', target='out', - nodes=None, weight=None): +def average_neighbor_degree(G, source="out", target="out", nodes=None, weight=None): r"""Returns the average degree of the neighborhood of each node. The average neighborhood degree of a node `i` is @@ -102,12 +102,11 @@ def average_neighbor_degree(G, source='out', target='out', source_degree = G.degree target_degree = G.degree if G.is_directed(): - direction = {'out': G.out_degree, - 'in': G.in_degree} + direction = {"out": G.out_degree, "in": G.in_degree} source_degree = direction[source] target_degree = direction[target] - return _average_nbr_deg(G, source_degree, target_degree, - nodes=nodes, weight=weight) + return _average_nbr_deg(G, source_degree, target_degree, nodes=nodes, weight=weight) + # obsolete # def average_neighbor_in_degree(G, nodes=None, weight=None): diff --git a/networkx/algorithms/assortativity/pairs.py b/networkx/algorithms/assortativity/pairs.py index ea4d4d9b..f3244d10 100644 --- a/networkx/algorithms/assortativity/pairs.py +++ b/networkx/algorithms/assortativity/pairs.py @@ -1,6 +1,5 @@ """Generators of x-y pairs of node data.""" -__all__ = ['node_attribute_xy', - 'node_degree_xy'] +__all__ = ["node_attribute_xy", "node_degree_xy"] def node_attribute_xy(G, attribute, nodes=None): @@ -57,7 +56,7 @@ def node_attribute_xy(G, attribute, nodes=None): yield (uattr, vattr) -def node_degree_xy(G, x='out', y='in', weight=None, nodes=None): +def node_degree_xy(G, x="out", y="in", weight=None, nodes=None): """Generate node degree-degree pairs for edges in G. 
Parameters @@ -107,8 +106,7 @@ def node_degree_xy(G, x='out', y='in', weight=None, nodes=None): xdeg = G.degree ydeg = G.degree if G.is_directed(): - direction = {'out': G.out_degree, - 'in': G.in_degree} + direction = {"out": G.out_degree, "in": G.in_degree} xdeg = direction[x] ydeg = direction[y] diff --git a/networkx/algorithms/assortativity/tests/base_test.py b/networkx/algorithms/assortativity/tests/base_test.py index 0598bd43..9a8a1e72 100644 --- a/networkx/algorithms/assortativity/tests/base_test.py +++ b/networkx/algorithms/assortativity/tests/base_test.py @@ -2,45 +2,43 @@ import networkx as nx class BaseTestAttributeMixing: - @classmethod def setup_class(cls): G = nx.Graph() - G.add_nodes_from([0, 1], fish='one') - G.add_nodes_from([2, 3], fish='two') - G.add_nodes_from([4], fish='red') - G.add_nodes_from([5], fish='blue') + G.add_nodes_from([0, 1], fish="one") + G.add_nodes_from([2, 3], fish="two") + G.add_nodes_from([4], fish="red") + G.add_nodes_from([5], fish="blue") G.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)]) cls.G = G D = nx.DiGraph() - D.add_nodes_from([0, 1], fish='one') - D.add_nodes_from([2, 3], fish='two') - D.add_nodes_from([4], fish='red') - D.add_nodes_from([5], fish='blue') + D.add_nodes_from([0, 1], fish="one") + D.add_nodes_from([2, 3], fish="two") + D.add_nodes_from([4], fish="red") + D.add_nodes_from([5], fish="blue") D.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)]) cls.D = D M = nx.MultiGraph() - M.add_nodes_from([0, 1], fish='one') - M.add_nodes_from([2, 3], fish='two') - M.add_nodes_from([4], fish='red') - M.add_nodes_from([5], fish='blue') + M.add_nodes_from([0, 1], fish="one") + M.add_nodes_from([2, 3], fish="two") + M.add_nodes_from([4], fish="red") + M.add_nodes_from([5], fish="blue") M.add_edges_from([(0, 1), (0, 1), (2, 3)]) cls.M = M S = nx.Graph() - S.add_nodes_from([0, 1], fish='one') - S.add_nodes_from([2, 3], fish='two') - S.add_nodes_from([4], fish='red') - S.add_nodes_from([5], fish='blue') + S.add_nodes_from([0, 1], fish="one") + S.add_nodes_from([2, 3], fish="two") + S.add_nodes_from([4], fish="red") + S.add_nodes_from([5], fish="blue") S.add_edge(0, 0) S.add_edge(2, 2) cls.S = S class BaseTestDegreeMixing: - @classmethod def setup_class(cls): cls.P4 = nx.path_graph(4) diff --git a/networkx/algorithms/assortativity/tests/test_connectivity.py b/networkx/algorithms/assortativity/tests/test_connectivity.py index 23244821..b1b0ac81 100644 --- a/networkx/algorithms/assortativity/tests/test_connectivity.py +++ b/networkx/algorithms/assortativity/tests/test_connectivity.py @@ -7,7 +7,6 @@ from networkx.testing import almost_equal class TestNeighborConnectivity: - def test_degree_p4(self): G = nx.path_graph(4) answer = {1: 2.0, 2: 1.5} @@ -21,18 +20,18 @@ class TestNeighborConnectivity: answer = {1: 2.0, 2: 1.5} D = G.to_directed() - nd = nx.average_degree_connectivity(D, source='in', target='in') + nd = nx.average_degree_connectivity(D, source="in", target="in") assert nd == answer D = G.to_directed() - nd = nx.average_degree_connectivity(D, source='in', target='in') + nd = nx.average_degree_connectivity(D, source="in", target="in") assert nd == answer def test_degree_p4_weighted(self): G = nx.path_graph(4) - G[1][2]['weight'] = 4 + G[1][2]["weight"] = 4 answer = {1: 2.0, 2: 1.8} - nd = nx.average_degree_connectivity(G, weight='weight') + nd = nx.average_degree_connectivity(G, weight="weight") assert nd == answer answer = {1: 2.0, 2: 1.5} nd = nx.average_degree_connectivity(G) @@ -40,25 +39,27 @@ class TestNeighborConnectivity: D = 
G.to_directed() answer = {2: 2.0, 4: 1.8} - nd = nx.average_degree_connectivity(D, weight='weight') + nd = nx.average_degree_connectivity(D, weight="weight") assert nd == answer answer = {1: 2.0, 2: 1.8} D = G.to_directed() - nd = nx.average_degree_connectivity(D, weight='weight', source='in', - target='in') + nd = nx.average_degree_connectivity( + D, weight="weight", source="in", target="in" + ) assert nd == answer D = G.to_directed() - nd = nx.average_degree_connectivity(D, source='in', target='out', - weight='weight') + nd = nx.average_degree_connectivity( + D, source="in", target="out", weight="weight" + ) assert nd == answer def test_weight_keyword(self): G = nx.path_graph(4) - G[1][2]['other'] = 4 + G[1][2]["other"] = 4 answer = {1: 2.0, 2: 1.8} - nd = nx.average_degree_connectivity(G, weight='other') + nd = nx.average_degree_connectivity(G, weight="other") assert nd == answer answer = {1: 2.0, 2: 1.5} nd = nx.average_degree_connectivity(G, weight=None) @@ -66,29 +67,27 @@ class TestNeighborConnectivity: D = G.to_directed() answer = {2: 2.0, 4: 1.8} - nd = nx.average_degree_connectivity(D, weight='other') + nd = nx.average_degree_connectivity(D, weight="other") assert nd == answer answer = {1: 2.0, 2: 1.8} D = G.to_directed() - nd = nx.average_degree_connectivity(D, weight='other', source='in', - target='in') + nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in") assert nd == answer D = G.to_directed() - nd = nx.average_degree_connectivity(D, weight='other', source='in', - target='in') + nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in") assert nd == answer def test_degree_barrat(self): G = nx.star_graph(5) G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)]) - G[0][5]['weight'] = 5 + G[0][5]["weight"] = 5 nd = nx.average_degree_connectivity(G)[5] assert nd == 1.8 - nd = nx.average_degree_connectivity(G, weight='weight')[5] + nd = nx.average_degree_connectivity(G, weight="weight")[5] assert almost_equal(nd, 3.222222, places=5) - nd = nx.k_nearest_neighbors(G, weight='weight')[5] + nd = nx.k_nearest_neighbors(G, weight="weight")[5] assert almost_equal(nd, 3.222222, places=5) def test_zero_deg(self): @@ -98,17 +97,17 @@ class TestNeighborConnectivity: G.add_edge(1, 4) c = nx.average_degree_connectivity(G) assert c == {1: 0, 3: 1} - c = nx.average_degree_connectivity(G, source='in', target='in') + c = nx.average_degree_connectivity(G, source="in", target="in") assert c == {0: 0, 1: 0} - c = nx.average_degree_connectivity(G, source='in', target='out') + c = nx.average_degree_connectivity(G, source="in", target="out") assert c == {0: 0, 1: 3} - c = nx.average_degree_connectivity(G, source='in', target='in+out') + c = nx.average_degree_connectivity(G, source="in", target="in+out") assert c == {0: 0, 1: 3} - c = nx.average_degree_connectivity(G, source='out', target='out') + c = nx.average_degree_connectivity(G, source="out", target="out") assert c == {0: 0, 3: 0} - c = nx.average_degree_connectivity(G, source='out', target='in') + c = nx.average_degree_connectivity(G, source="out", target="in") assert c == {0: 0, 3: 1} - c = nx.average_degree_connectivity(G, source='out', target='in+out') + c = nx.average_degree_connectivity(G, source="out", target="in+out") assert c == {0: 0, 3: 1} def test_in_out_weight(self): @@ -116,21 +115,20 @@ class TestNeighborConnectivity: G.add_edge(1, 2, weight=1) G.add_edge(1, 3, weight=1) G.add_edge(3, 1, weight=1) - for s, t in permutations(['in', 'out', 'in+out'], 2): + for s, t in permutations(["in", 
"out", "in+out"], 2): c = nx.average_degree_connectivity(G, source=s, target=t) - cw = nx.average_degree_connectivity(G, source=s, target=t, - weight='weight') + cw = nx.average_degree_connectivity(G, source=s, target=t, weight="weight") assert c == cw def test_invalid_source(self): with pytest.raises(ValueError): G = nx.DiGraph() - nx.average_degree_connectivity(G, source='bogus') + nx.average_degree_connectivity(G, source="bogus") def test_invalid_target(self): with pytest.raises(ValueError): G = nx.DiGraph() - nx.average_degree_connectivity(G, target='bogus') + nx.average_degree_connectivity(G, target="bogus") def test_single_node(self): # TODO Is this really the intended behavior for providing a diff --git a/networkx/algorithms/assortativity/tests/test_correlation.py b/networkx/algorithms/assortativity/tests/test_correlation.py index 3adad5e7..42aa1d00 100644 --- a/networkx/algorithms/assortativity/tests/test_correlation.py +++ b/networkx/algorithms/assortativity/tests/test_correlation.py @@ -1,7 +1,8 @@ import pytest -np = pytest.importorskip('numpy') -npt = pytest.importorskip('numpy.testing') -scipy = pytest.importorskip('scipy') + +np = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") +scipy = pytest.importorskip("scipy") import networkx as nx @@ -10,7 +11,6 @@ from networkx.algorithms.assortativity.correlation import attribute_ac class TestDegreeMixingCorrelation(BaseTestDegreeMixing): - def test_degree_assortativity_undirected(self): r = nx.degree_assortativity_coefficient(self.P4) npt.assert_almost_equal(r, -1.0 / 2, decimal=4) @@ -37,17 +37,16 @@ class TestDegreeMixingCorrelation(BaseTestDegreeMixing): class TestAttributeMixingCorrelation(BaseTestAttributeMixing): - def test_attribute_assortativity_undirected(self): - r = nx.attribute_assortativity_coefficient(self.G, 'fish') + r = nx.attribute_assortativity_coefficient(self.G, "fish") assert r == 6.0 / 22.0 def test_attribute_assortativity_directed(self): - r = nx.attribute_assortativity_coefficient(self.D, 'fish') + r = nx.attribute_assortativity_coefficient(self.D, "fish") assert r == 1.0 / 3.0 def test_attribute_assortativity_multigraph(self): - r = nx.attribute_assortativity_coefficient(self.M, 'fish') + r = nx.attribute_assortativity_coefficient(self.M, "fish") assert r == 1.0 def test_attribute_assortativity_coefficient(self): diff --git a/networkx/algorithms/assortativity/tests/test_mixing.py b/networkx/algorithms/assortativity/tests/test_mixing.py index 372ff7ac..063085d6 100644 --- a/networkx/algorithms/assortativity/tests/test_mixing.py +++ b/networkx/algorithms/assortativity/tests/test_mixing.py @@ -1,6 +1,7 @@ import pytest -np = pytest.importorskip('numpy') -npt = pytest.importorskip('numpy.testing') + +np = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") import networkx as nx @@ -10,38 +11,33 @@ from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing class TestDegreeMixingDict(BaseTestDegreeMixing): def test_degree_mixing_dict_undirected(self): d = nx.degree_mixing_dict(self.P4) - d_result = {1: {2: 2}, - 2: {1: 2, 2: 2}, - } + d_result = { + 1: {2: 2}, + 2: {1: 2, 2: 2}, + } assert d == d_result def test_degree_mixing_dict_undirected_normalized(self): d = nx.degree_mixing_dict(self.P4, normalized=True) - d_result = {1: {2: 1.0 / 3}, - 2: {1: 1.0 / 3, 2: 1.0 / 3}, - } + d_result = { + 1: {2: 1.0 / 3}, + 2: {1: 1.0 / 3, 2: 1.0 / 3}, + } assert d == d_result def test_degree_mixing_dict_directed(self): d = nx.degree_mixing_dict(self.D) print(d) - d_result 
= {1: {3: 2}, - 2: {1: 1, 3: 1}, - 3: {} - } + d_result = {1: {3: 2}, 2: {1: 1, 3: 1}, 3: {}} assert d == d_result def test_degree_mixing_dict_multigraph(self): d = nx.degree_mixing_dict(self.M) - d_result = {1: {2: 1}, - 2: {1: 1, 3: 3}, - 3: {2: 3} - } + d_result = {1: {2: 1}, 2: {1: 1, 3: 3}, 3: {2: 3}} assert d == d_result class TestDegreeMixingMatrix(BaseTestDegreeMixing): - def test_degree_mixing_matrix_undirected(self): # fmt: off a_result = np.array([[0, 0, 0], @@ -94,76 +90,62 @@ class TestDegreeMixingMatrix(BaseTestDegreeMixing): class TestAttributeMixingDict(BaseTestAttributeMixing): - def test_attribute_mixing_dict_undirected(self): - d = nx.attribute_mixing_dict(self.G, 'fish') - d_result = {'one': {'one': 2, 'red': 1}, - 'two': {'two': 2, 'blue': 1}, - 'red': {'one': 1}, - 'blue': {'two': 1} - } + d = nx.attribute_mixing_dict(self.G, "fish") + d_result = { + "one": {"one": 2, "red": 1}, + "two": {"two": 2, "blue": 1}, + "red": {"one": 1}, + "blue": {"two": 1}, + } assert d == d_result def test_attribute_mixing_dict_directed(self): - d = nx.attribute_mixing_dict(self.D, 'fish') - d_result = {'one': {'one': 1, 'red': 1}, - 'two': {'two': 1, 'blue': 1}, - 'red': {}, - 'blue': {} - } + d = nx.attribute_mixing_dict(self.D, "fish") + d_result = { + "one": {"one": 1, "red": 1}, + "two": {"two": 1, "blue": 1}, + "red": {}, + "blue": {}, + } assert d == d_result def test_attribute_mixing_dict_multigraph(self): - d = nx.attribute_mixing_dict(self.M, 'fish') - d_result = {'one': {'one': 4}, - 'two': {'two': 2}, - } + d = nx.attribute_mixing_dict(self.M, "fish") + d_result = { + "one": {"one": 4}, + "two": {"two": 2}, + } assert d == d_result class TestAttributeMixingMatrix(BaseTestAttributeMixing): - def test_attribute_mixing_matrix_undirected(self): - mapping = {'one': 0, 'two': 1, 'red': 2, 'blue': 3} - a_result = np.array([[2, 0, 1, 0], - [0, 2, 0, 1], - [1, 0, 0, 0], - [0, 1, 0, 0]] - ) - a = nx.attribute_mixing_matrix(self.G, 'fish', - mapping=mapping, - normalized=False) + mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} + a_result = np.array([[2, 0, 1, 0], [0, 2, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0]]) + a = nx.attribute_mixing_matrix( + self.G, "fish", mapping=mapping, normalized=False + ) npt.assert_equal(a, a_result) - a = nx.attribute_mixing_matrix(self.G, 'fish', - mapping=mapping) + a = nx.attribute_mixing_matrix(self.G, "fish", mapping=mapping) npt.assert_equal(a, a_result / float(a_result.sum())) def test_attribute_mixing_matrix_directed(self): - mapping = {'one': 0, 'two': 1, 'red': 2, 'blue': 3} - a_result = np.array([[1, 0, 1, 0], - [0, 1, 0, 1], - [0, 0, 0, 0], - [0, 0, 0, 0]] - ) - a = nx.attribute_mixing_matrix(self.D, 'fish', - mapping=mapping, - normalized=False) + mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} + a_result = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]]) + a = nx.attribute_mixing_matrix( + self.D, "fish", mapping=mapping, normalized=False + ) npt.assert_equal(a, a_result) - a = nx.attribute_mixing_matrix(self.D, 'fish', - mapping=mapping) + a = nx.attribute_mixing_matrix(self.D, "fish", mapping=mapping) npt.assert_equal(a, a_result / float(a_result.sum())) def test_attribute_mixing_matrix_multigraph(self): - mapping = {'one': 0, 'two': 1, 'red': 2, 'blue': 3} - a_result = np.array([[4, 0, 0, 0], - [0, 2, 0, 0], - [0, 0, 0, 0], - [0, 0, 0, 0]] - ) - a = nx.attribute_mixing_matrix(self.M, 'fish', - mapping=mapping, - normalized=False) + mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} + a_result = np.array([[4, 0, 0, 0], 
[0, 2, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]) + a = nx.attribute_mixing_matrix( + self.M, "fish", mapping=mapping, normalized=False + ) npt.assert_equal(a, a_result) - a = nx.attribute_mixing_matrix(self.M, 'fish', - mapping=mapping) + a = nx.attribute_mixing_matrix(self.M, "fish", mapping=mapping) npt.assert_equal(a, a_result / float(a_result.sum())) diff --git a/networkx/algorithms/assortativity/tests/test_neighbor_degree.py b/networkx/algorithms/assortativity/tests/test_neighbor_degree.py index 106064e2..51b4aa86 100644 --- a/networkx/algorithms/assortativity/tests/test_neighbor_degree.py +++ b/networkx/algorithms/assortativity/tests/test_neighbor_degree.py @@ -3,7 +3,6 @@ from networkx.testing import almost_equal class TestAverageNeighbor: - def test_degree_p4(self): G = nx.path_graph(4) answer = {0: 2, 1: 1.5, 2: 1.5, 3: 2} @@ -19,30 +18,28 @@ class TestAverageNeighbor: assert nd == answer D = G.to_directed() - nd = nx.average_neighbor_degree(D, source='in', target='in') + nd = nx.average_neighbor_degree(D, source="in", target="in") assert nd == answer def test_degree_p4_weighted(self): G = nx.path_graph(4) - G[1][2]['weight'] = 4 + G[1][2]["weight"] = 4 answer = {0: 2, 1: 1.8, 2: 1.8, 3: 2} - nd = nx.average_neighbor_degree(G, weight='weight') + nd = nx.average_neighbor_degree(G, weight="weight") assert nd == answer D = G.to_directed() - nd = nx.average_neighbor_degree(D, weight='weight') + nd = nx.average_neighbor_degree(D, weight="weight") assert nd == answer D = G.to_directed() - nd = nx.average_neighbor_degree(D, weight='weight') + nd = nx.average_neighbor_degree(D, weight="weight") assert nd == answer - nd = nx.average_neighbor_degree(D, source='out', target='out', - weight='weight') + nd = nx.average_neighbor_degree(D, source="out", target="out", weight="weight") assert nd == answer D = G.to_directed() - nd = nx.average_neighbor_degree(D, source='in', target='in', - weight='weight') + nd = nx.average_neighbor_degree(D, source="in", target="in", weight="weight") assert nd == answer def test_degree_k4(self): @@ -60,7 +57,7 @@ class TestAverageNeighbor: assert nd == answer D = G.to_directed() - nd = nx.average_neighbor_degree(D, source='in', target='in') + nd = nx.average_neighbor_degree(D, source="in", target="in") assert nd == answer def test_degree_k4_nodes(self): @@ -72,8 +69,8 @@ class TestAverageNeighbor: def test_degree_barrat(self): G = nx.star_graph(5) G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)]) - G[0][5]['weight'] = 5 + G[0][5]["weight"] = 5 nd = nx.average_neighbor_degree(G)[5] assert nd == 1.8 - nd = nx.average_neighbor_degree(G, weight='weight')[5] + nd = nx.average_neighbor_degree(G, weight="weight")[5] assert almost_equal(nd, 3.222222, places=5) diff --git a/networkx/algorithms/assortativity/tests/test_pairs.py b/networkx/algorithms/assortativity/tests/test_pairs.py index 0db1c499..ae19f1b7 100644 --- a/networkx/algorithms/assortativity/tests/test_pairs.py +++ b/networkx/algorithms/assortativity/tests/test_pairs.py @@ -3,106 +3,84 @@ from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing class TestAttributeMixingXY(BaseTestAttributeMixing): - def test_node_attribute_xy_undirected(self): - attrxy = sorted(nx.node_attribute_xy(self.G, 'fish')) - attrxy_result = sorted([('one', 'one'), - ('one', 'one'), - ('two', 'two'), - ('two', 'two'), - ('one', 'red'), - ('red', 'one'), - ('blue', 'two'), - ('two', 'blue') - ]) + attrxy = sorted(nx.node_attribute_xy(self.G, "fish")) + attrxy_result = sorted( + [ + ("one", "one"), + ("one", "one"), + ("two", 
"two"), + ("two", "two"), + ("one", "red"), + ("red", "one"), + ("blue", "two"), + ("two", "blue"), + ] + ) assert attrxy == attrxy_result def test_node_attribute_xy_undirected_nodes(self): - attrxy = sorted(nx.node_attribute_xy(self.G, 'fish', - nodes=['one', 'yellow'])) - attrxy_result = sorted([ - ]) + attrxy = sorted(nx.node_attribute_xy(self.G, "fish", nodes=["one", "yellow"])) + attrxy_result = sorted([]) assert attrxy == attrxy_result def test_node_attribute_xy_directed(self): - attrxy = sorted(nx.node_attribute_xy(self.D, 'fish')) - attrxy_result = sorted([('one', 'one'), - ('two', 'two'), - ('one', 'red'), - ('two', 'blue') - ]) + attrxy = sorted(nx.node_attribute_xy(self.D, "fish")) + attrxy_result = sorted( + [("one", "one"), ("two", "two"), ("one", "red"), ("two", "blue")] + ) assert attrxy == attrxy_result def test_node_attribute_xy_multigraph(self): - attrxy = sorted(nx.node_attribute_xy(self.M, 'fish')) - attrxy_result = [('one', 'one'), - ('one', 'one'), - ('one', 'one'), - ('one', 'one'), - ('two', 'two'), - ('two', 'two') - ] + attrxy = sorted(nx.node_attribute_xy(self.M, "fish")) + attrxy_result = [ + ("one", "one"), + ("one", "one"), + ("one", "one"), + ("one", "one"), + ("two", "two"), + ("two", "two"), + ] assert attrxy == attrxy_result def test_node_attribute_xy_selfloop(self): - attrxy = sorted(nx.node_attribute_xy(self.S, 'fish')) - attrxy_result = [('one', 'one'), - ('two', 'two') - ] + attrxy = sorted(nx.node_attribute_xy(self.S, "fish")) + attrxy_result = [("one", "one"), ("two", "two")] assert attrxy == attrxy_result class TestDegreeMixingXY(BaseTestDegreeMixing): - def test_node_degree_xy_undirected(self): xy = sorted(nx.node_degree_xy(self.P4)) - xy_result = sorted([(1, 2), - (2, 1), - (2, 2), - (2, 2), - (1, 2), - (2, 1)]) + xy_result = sorted([(1, 2), (2, 1), (2, 2), (2, 2), (1, 2), (2, 1)]) assert xy == xy_result def test_node_degree_xy_undirected_nodes(self): xy = sorted(nx.node_degree_xy(self.P4, nodes=[0, 1, -1])) - xy_result = sorted([(1, 2), - (2, 1), ]) + xy_result = sorted([(1, 2), (2, 1),]) assert xy == xy_result def test_node_degree_xy_directed(self): xy = sorted(nx.node_degree_xy(self.D)) - xy_result = sorted([(2, 1), - (2, 3), - (1, 3), - (1, 3)]) + xy_result = sorted([(2, 1), (2, 3), (1, 3), (1, 3)]) assert xy == xy_result def test_node_degree_xy_multigraph(self): xy = sorted(nx.node_degree_xy(self.M)) - xy_result = sorted([(2, 3), - (2, 3), - (3, 2), - (3, 2), - (2, 3), - (3, 2), - (1, 2), - (2, 1)]) + xy_result = sorted( + [(2, 3), (2, 3), (3, 2), (3, 2), (2, 3), (3, 2), (1, 2), (2, 1)] + ) assert xy == xy_result def test_node_degree_xy_selfloop(self): xy = sorted(nx.node_degree_xy(self.S)) - xy_result = sorted([(2, 2), - (2, 2)]) + xy_result = sorted([(2, 2), (2, 2)]) assert xy == xy_result def test_node_degree_xy_weighted(self): G = nx.Graph() G.add_edge(1, 2, weight=7) G.add_edge(2, 3, weight=10) - xy = sorted(nx.node_degree_xy(G, weight='weight')) - xy_result = sorted([(7, 17), - (17, 10), - (17, 7), - (10, 17)]) + xy = sorted(nx.node_degree_xy(G, weight="weight")) + xy_result = sorted([(7, 17), (17, 10), (17, 7), (10, 17)]) assert xy == xy_result diff --git a/networkx/algorithms/asteroidal.py b/networkx/algorithms/asteroidal.py index 510b396f..c1bc7181 100644 --- a/networkx/algorithms/asteroidal.py +++ b/networkx/algorithms/asteroidal.py @@ -79,9 +79,11 @@ def find_asteroidal_triple(G): """Check for each pair of vertices whether they belong to the same connected component when the closed neighborhood of the third is removed.""" - if 
(component_structure[u][v] == component_structure[u][w] and - component_structure[v][u] == component_structure[v][w] and - component_structure[w][u] == component_structure[w][v]): + if ( + component_structure[u][v] == component_structure[u][w] + and component_structure[v][u] == component_structure[v][w] + and component_structure[w][u] == component_structure[w][v] + ): return [u, v, w] return None diff --git a/networkx/algorithms/bipartite/basic.py b/networkx/algorithms/bipartite/basic.py index 83d053a4..b812a4aa 100644 --- a/networkx/algorithms/bipartite/basic.py +++ b/networkx/algorithms/bipartite/basic.py @@ -6,12 +6,14 @@ Bipartite Graph Algorithms import networkx as nx from networkx.algorithms.components import connected_components -__all__ = ['is_bipartite', - 'is_bipartite_node_set', - 'color', - 'sets', - 'density', - 'degrees'] +__all__ = [ + "is_bipartite", + "is_bipartite_node_set", + "color", + "sets", + "density", + "degrees", +] def color(G): @@ -53,8 +55,8 @@ def color(G): import itertools def neighbors(v): - return itertools.chain.from_iterable([G.predecessors(v), - G.successors(v)]) + return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)]) + else: neighbors = G.neighbors @@ -130,8 +132,9 @@ def is_bipartite_node_set(G, nodes): S = set(nodes) for CC in (G.subgraph(c).copy() for c in connected_components(G)): X, Y = sets(CC) - if not ((X.issubset(S) and Y.isdisjoint(S)) or - (Y.issubset(S) and X.isdisjoint(S))): + if not ( + (X.issubset(S) and Y.isdisjoint(S)) or (Y.issubset(S) and X.isdisjoint(S)) + ): return False return True @@ -194,7 +197,7 @@ def sets(G, top_nodes=None): Y = set(G) - X else: if not is_connected(G): - msg = 'Disconnected graph: Ambiguous solution for bipartite sets.' + msg = "Disconnected graph: Ambiguous solution for bipartite sets." 
raise nx.AmbiguousSolution(msg) c = color(G) X = {n for n, is_top in c.items() if is_top} diff --git a/networkx/algorithms/bipartite/centrality.py b/networkx/algorithms/bipartite/centrality.py index 9cd502e4..ef76a1f4 100644 --- a/networkx/algorithms/bipartite/centrality.py +++ b/networkx/algorithms/bipartite/centrality.py @@ -1,8 +1,6 @@ import networkx as nx -__all__ = ['degree_centrality', - 'betweenness_centrality', - 'closeness_centrality'] +__all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"] def degree_centrality(G, nodes): @@ -150,16 +148,19 @@ def betweenness_centrality(G, nodes): m = float(len(bottom)) s = (n - 1) // m t = (n - 1) % m - bet_max_top = (((m**2) * ((s + 1)**2)) + - (m * (s + 1) * (2 * t - s - 1)) - - (t * ((2 * s) - t + 3))) / 2.0 + bet_max_top = ( + ((m ** 2) * ((s + 1) ** 2)) + + (m * (s + 1) * (2 * t - s - 1)) + - (t * ((2 * s) - t + 3)) + ) / 2.0 p = (m - 1) // n r = (m - 1) % n - bet_max_bot = (((n**2) * ((p + 1)**2)) + - (n * (p + 1) * (2 * r - p - 1)) - - (r * ((2 * p) - r + 3))) / 2.0 - betweenness = nx.betweenness_centrality(G, normalized=False, - weight=None) + bet_max_bot = ( + ((n ** 2) * ((p + 1) ** 2)) + + (n * (p + 1) * (2 * r - p - 1)) + - (r * ((2 * p) - r + 3)) + ) / 2.0 + betweenness = nx.betweenness_centrality(G, normalized=False, weight=None) for node in top: betweenness[node] /= bet_max_top for node in bottom: diff --git a/networkx/algorithms/bipartite/cluster.py b/networkx/algorithms/bipartite/cluster.py index 7f51bfac..490d5f0c 100644 --- a/networkx/algorithms/bipartite/cluster.py +++ b/networkx/algorithms/bipartite/cluster.py @@ -5,11 +5,12 @@ import itertools import networkx as nx -__all__ = ['clustering', - 'average_clustering', - 'latapy_clustering', - 'robins_alexander_clustering'] - +__all__ = [ + "clustering", + "average_clustering", + "latapy_clustering", + "robins_alexander_clustering", +] def cc_dot(nu, nv): @@ -24,12 +25,10 @@ def cc_min(nu, nv): return float(len(nu & nv)) / min(len(nu), len(nv)) -modes = {'dot': cc_dot, - 'min': cc_min, - 'max': cc_max} +modes = {"dot": cc_dot, "min": cc_min, "max": cc_max} -def latapy_clustering(G, nodes=None, mode='dot'): +def latapy_clustering(G, nodes=None, mode="dot"): r"""Compute a bipartite clustering coefficient for nodes. The bipartite clustering coefficient is a measure of local density @@ -113,7 +112,8 @@ def latapy_clustering(G, nodes=None, mode='dot'): cc_func = modes[mode] except KeyError as e: raise nx.NetworkXError( - "Mode for bipartite clustering must be: dot, min or max") from e + "Mode for bipartite clustering must be: dot, min or max" + ) from e if nodes is None: nodes = G @@ -132,7 +132,7 @@ def latapy_clustering(G, nodes=None, mode='dot'): clustering = latapy_clustering -def average_clustering(G, nodes=None, mode='dot'): +def average_clustering(G, nodes=None, mode="dot"): r"""Compute the average bipartite clustering coefficient. A clustering coefficient for the whole graph is the average, @@ -254,7 +254,7 @@ def robins_alexander_clustering(G): if L_3 == 0: return 0 C_4 = _four_cycles(G) - return (4. 
* C_4) / L_3 + return (4.0 * C_4) / L_3 def _four_cycles(G): diff --git a/networkx/algorithms/bipartite/covering.py b/networkx/algorithms/bipartite/covering.py index f986d00b..c8460d73 100644 --- a/networkx/algorithms/bipartite/covering.py +++ b/networkx/algorithms/bipartite/covering.py @@ -4,11 +4,11 @@ from networkx.utils import not_implemented_for from networkx.algorithms.bipartite.matching import hopcroft_karp_matching from networkx.algorithms.covering import min_edge_cover as _min_edge_cover -__all__ = ['min_edge_cover'] +__all__ = ["min_edge_cover"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def min_edge_cover(G, matching_algorithm=None): """Returns a set of edges which constitutes the minimum edge cover of the graph. diff --git a/networkx/algorithms/bipartite/edgelist.py b/networkx/algorithms/bipartite/edgelist.py index 1de9ae52..e1da79a1 100644 --- a/networkx/algorithms/bipartite/edgelist.py +++ b/networkx/algorithms/bipartite/edgelist.py @@ -22,18 +22,14 @@ Arbitrary data:: For each edge (u, v) the node u is assigned to part 0 and the node v to part 1. """ -__all__ = ['generate_edgelist', - 'write_edgelist', - 'parse_edgelist', - 'read_edgelist'] +__all__ = ["generate_edgelist", "write_edgelist", "parse_edgelist", "read_edgelist"] import networkx as nx from networkx.utils import open_file, not_implemented_for -@open_file(1, mode='wb') -def write_edgelist(G, path, comments="#", delimiter=' ', data=True, - encoding='utf-8'): +@open_file(1, mode="wb") +def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"): """Write a bipartite graph as a list of edges. Parameters @@ -78,12 +74,12 @@ def write_edgelist(G, path, comments="#", delimiter=' ', data=True, generate_edgelist() """ for line in generate_edgelist(G, delimiter, data): - line += '\n' + line += "\n" path.write(line.encode(encoding)) -@not_implemented_for('directed') -def generate_edgelist(G, delimiter=' ', data=True): +@not_implemented_for("directed") +def generate_edgelist(G, delimiter=" ", data=True): """Generate a single line of the bipartite graph G in edge list format. Parameters @@ -132,7 +128,7 @@ def generate_edgelist(G, delimiter=' ', data=True): 2 3 """ try: - part0 = [n for n, d in G.nodes.items() if d['bipartite'] == 0] + part0 = [n for n, d in G.nodes.items() if d["bipartite"] == 0] except BaseException as e: raise AttributeError("Missing node attribute `bipartite`") from e if data is True or data is False: @@ -150,8 +146,9 @@ def generate_edgelist(G, delimiter=' ', data=True): yield delimiter.join(map(str, e)) -def parse_edgelist(lines, comments='#', delimiter=None, - create_using=None, nodetype=None, data=True): +def parse_edgelist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True +): """Parse lines of an edge list representation of a bipartite graph. Parameters @@ -218,6 +215,7 @@ def parse_edgelist(lines, comments='#', delimiter=None, -------- """ from ast import literal_eval + G = nx.empty_graph(0, create_using) for line in lines: p = line.find(comments) @@ -237,8 +235,9 @@ def parse_edgelist(lines, comments='#', delimiter=None, u = nodetype(u) v = nodetype(v) except BaseException as e: - raise TypeError(f"Failed to convert nodes {u},{v} " - f"to type {nodetype}.") from e + raise TypeError( + f"Failed to convert nodes {u},{v} " f"to type {nodetype}." 
+ ) from e if len(d) == 0 or data is False: # no data or data type specified @@ -246,21 +245,26 @@ def parse_edgelist(lines, comments='#', delimiter=None, elif data is True: # no edge types specified try: # try to evaluate as dictionary - edgedata = dict(literal_eval(' '.join(d))) + edgedata = dict(literal_eval(" ".join(d))) except BaseException as e: - raise TypeError(f"Failed to convert edge data ({d})" - f"to dictionary.") from e + raise TypeError( + f"Failed to convert edge data ({d})" f"to dictionary." + ) from e else: # convert edge data to dictionary with specified keys and type if len(d) != len(data): - raise IndexError(f"Edge data {d} and data_keys {data} are not the same length") + raise IndexError( + f"Edge data {d} and data_keys {data} are not the same length" + ) edgedata = {} for (edge_key, edge_type), edge_value in zip(data, d): try: edge_value = edge_type(edge_value) except BaseException as e: - raise TypeError(f"Failed to convert {edge_key} data " - f"{edge_value} to type {edge_type}.") from e + raise TypeError( + f"Failed to convert {edge_key} data " + f"{edge_value} to type {edge_type}." + ) from e edgedata.update({edge_key: edge_value}) G.add_node(u, bipartite=0) G.add_node(v, bipartite=1) @@ -268,11 +272,17 @@ def parse_edgelist(lines, comments='#', delimiter=None, return G -@open_file(0, mode='rb') -def read_edgelist(path, comments="#", - delimiter=None, create_using=None, - nodetype=None, data=True, edgetype=None, - encoding='utf-8'): +@open_file(0, mode="rb") +def read_edgelist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + data=True, + edgetype=None, + encoding="utf-8", +): """Read a bipartite graph from a list of edges. Parameters @@ -341,8 +351,11 @@ def read_edgelist(path, comments="#", types (e.g. int, float, str, frozenset - or tuples of those, etc.) 
""" lines = (line.decode(encoding) for line in path) - return parse_edgelist(lines, comments=comments, - delimiter=delimiter, - create_using=create_using, - nodetype=nodetype, - data=data) + return parse_edgelist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + data=data, + ) diff --git a/networkx/algorithms/bipartite/generators.py b/networkx/algorithms/bipartite/generators.py index 3069467f..faf84c68 100644 --- a/networkx/algorithms/bipartite/generators.py +++ b/networkx/algorithms/bipartite/generators.py @@ -7,15 +7,16 @@ from functools import reduce import networkx as nx from networkx.utils import nodes_or_number, py_random_state -__all__ = ['configuration_model', - 'havel_hakimi_graph', - 'reverse_havel_hakimi_graph', - 'alternating_havel_hakimi_graph', - 'preferential_attachment_graph', - 'random_graph', - 'gnmk_random_graph', - 'complete_bipartite_graph', - ] +__all__ = [ + "configuration_model", + "havel_hakimi_graph", + "reverse_havel_hakimi_graph", + "alternating_havel_hakimi_graph", + "preferential_attachment_graph", + "random_graph", + "gnmk_random_graph", + "complete_bipartite_graph", +] @nodes_or_number([0, 1]) @@ -56,7 +57,7 @@ def complete_bipartite_graph(n1, n2, create_using=None): G.add_nodes_from(top, bipartite=0) G.add_nodes_from(bottom, bipartite=1) G.add_edges_from((u, v) for u in top for v in bottom) - G.graph['name'] = f"complete_bipartite_graph({n1},{n2})" + G.graph["name"] = f"complete_bipartite_graph({n1},{n2})" return G @@ -105,7 +106,9 @@ def configuration_model(aseq, bseq, create_using=None, seed=None): sumb = sum(bseq) if not suma == sumb: - raise nx.NetworkXError(f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}") + raise nx.NetworkXError( + f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}" + ) G = _add_nodes_with_bipartite_label(G, lena, lenb) @@ -177,7 +180,9 @@ def havel_hakimi_graph(aseq, bseq, create_using=None): sumb = sum(bseq) if not suma == sumb: - raise nx.NetworkXError(f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}") + raise nx.NetworkXError( + f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}" + ) G = _add_nodes_with_bipartite_label(G, naseq, nbseq) @@ -248,7 +253,9 @@ def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): sumb = sum(bseq) if not suma == sumb: - raise nx.NetworkXError(f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}") + raise nx.NetworkXError( + f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}" + ) G = _add_nodes_with_bipartite_label(G, lena, lenb) @@ -320,7 +327,9 @@ def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): sumb = sum(bseq) if not suma == sumb: - raise nx.NetworkXError(f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}") + raise nx.NetworkXError( + f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}" + ) G = _add_nodes_with_bipartite_label(G, naseq, nbseq) @@ -335,8 +344,8 @@ def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): if degree == 0: break # done, all are zero bstubs.sort() - small = bstubs[0:degree // 2] # add these low degree targets - large = bstubs[(-degree + degree // 2):] # now high degree targets + small = bstubs[0 : degree // 2] # add these low degree targets + large = bstubs[(-degree + degree // 2) :] # now high degree targets stubs = [x for z in zip(large, small) for x in z] # combine, sorry if len(stubs) < len(small) + len(large): # check for zip truncation stubs.append(large.pop()) @@ -563,7 +572,7 
@@ def gnmk_random_graph(n, m, k, seed=None, directed=False): if k >= max_edges: # Maybe we should raise an exception here return nx.complete_bipartite_graph(n, m, create_using=G) - top = [n for n, d in G.nodes(data=True) if d['bipartite'] == 0] + top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0] bottom = list(set(G) - set(top)) edge_count = 0 while edge_count < k: @@ -582,5 +591,5 @@ def _add_nodes_with_bipartite_label(G, lena, lenb): G.add_nodes_from(range(0, lena + lenb)) b = dict(zip(range(0, lena), [0] * lena)) b.update(dict(zip(range(lena, lena + lenb), [1] * lenb))) - nx.set_node_attributes(G, b, 'bipartite') + nx.set_node_attributes(G, b, "bipartite") return G diff --git a/networkx/algorithms/bipartite/matching.py b/networkx/algorithms/bipartite/matching.py index 90fedfe2..255a95f1 100644 --- a/networkx/algorithms/bipartite/matching.py +++ b/networkx/algorithms/bipartite/matching.py @@ -44,10 +44,15 @@ from networkx.algorithms.bipartite.matrix import biadjacency_matrix from networkx.algorithms.bipartite import sets as bipartite_sets import networkx as nx -__all__ = ['maximum_matching', 'hopcroft_karp_matching', 'eppstein_matching', - 'to_vertex_cover', 'minimum_weight_full_matching'] +__all__ = [ + "maximum_matching", + "hopcroft_karp_matching", + "eppstein_matching", + "to_vertex_cover", + "minimum_weight_full_matching", +] -INFINITY = float('inf') +INFINITY = float("inf") def hopcroft_karp_matching(G, top_nodes=None): @@ -299,8 +304,7 @@ def eppstein_matching(G, top_nodes=None): recurse(v) -def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, - targets): +def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, targets): """Returns True if and only if the vertex `v` is connected to one of the target vertices by an alternating path in `G`. @@ -322,6 +326,7 @@ def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, `targets` is a set of vertices. """ + def _alternating_dfs(u, along_matched=True): """Returns True if and only if `u` is connected to one of the targets by an alternating path. @@ -344,8 +349,7 @@ def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, try: child = next(children) if child not in visited: - if ((parent, child) in valid_edges - or (child, parent) in valid_edges): + if (parent, child) in valid_edges or (child, parent) in valid_edges: if child in targets: return True visited.add(child) @@ -357,8 +361,9 @@ def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, # Check for alternating paths starting with edges in the matching, then # check for alternating paths starting with edges not in the # matching. - return (_alternating_dfs(v, along_matched=True) or - _alternating_dfs(v, along_matched=False)) + return _alternating_dfs(v, along_matched=True) or _alternating_dfs( + v, along_matched=False + ) def _connected_by_alternating_paths(G, matching, targets): @@ -385,12 +390,18 @@ def _connected_by_alternating_paths(G, matching, targets): # require nodes to be orderable. 
edge_sets = {frozenset((u, v)) for u, v in matching.items()} matched_edges = {tuple(edge) for edge in edge_sets} - unmatched_edges = {(u, v) for (u, v) in G.edges() - if frozenset((u, v)) not in edge_sets} + unmatched_edges = { + (u, v) for (u, v) in G.edges() if frozenset((u, v)) not in edge_sets + } - return {v for v in G if v in targets or - _is_connected_by_alternating_path(G, v, matched_edges, - unmatched_edges, targets)} + return { + v + for v in G + if v in targets + or _is_connected_by_alternating_path( + G, v, matched_edges, unmatched_edges, targets + ) + } def to_vertex_cover(G, matching, top_nodes=None): @@ -474,7 +485,7 @@ def to_vertex_cover(G, matching, top_nodes=None): maximum_matching = hopcroft_karp_matching -def minimum_weight_full_matching(G, top_nodes=None, weight='weight'): +def minimum_weight_full_matching(G, top_nodes=None, weight="weight"): r"""Returns the minimum weight full matching of the bipartite graph `G`. Let :math:`G = ((U, V), E)` be a complete weighted bipartite graph with @@ -542,17 +553,18 @@ def minimum_weight_full_matching(G, top_nodes=None, weight='weight'): import numpy as np import scipy.optimize except ImportError as e: - raise ImportError('minimum_weight_full_matching requires SciPy: ' + - 'https://scipy.org/') from e + raise ImportError( + "minimum_weight_full_matching requires SciPy: " + "https://scipy.org/" + ) from e left, right = nx.bipartite.sets(G, top_nodes) U = list(left) V = list(right) # We explicitly create the biadjacency matrix having infinities # where edges are missing (as opposed to zeros, which is what one would # get by using toarray on the sparse matrix). - weights_sparse = biadjacency_matrix(G, row_order=U, - column_order=V, weight=weight, - format='coo') + weights_sparse = biadjacency_matrix( + G, row_order=U, column_order=V, weight=weight, format="coo" + ) weights = np.full(weights_sparse.shape, np.inf) weights[weights_sparse.row, weights_sparse.col] = weights_sparse.data left_matches = scipy.optimize.linear_sum_assignment(weights) diff --git a/networkx/algorithms/bipartite/matrix.py b/networkx/algorithms/bipartite/matrix.py index 05b39f31..b99410e3 100644 --- a/networkx/algorithms/bipartite/matrix.py +++ b/networkx/algorithms/bipartite/matrix.py @@ -7,11 +7,12 @@ import itertools from networkx.convert_matrix import _generate_weighted_edges import networkx as nx -__all__ = ['biadjacency_matrix', 'from_biadjacency_matrix'] +__all__ = ["biadjacency_matrix", "from_biadjacency_matrix"] -def biadjacency_matrix(G, row_order, column_order=None, - dtype=None, weight='weight', format='csr'): +def biadjacency_matrix( + G, row_order, column_order=None, dtype=None, weight="weight", format="csr" +): r"""Returns the biadjacency matrix of the bipartite graph G. 
Let `G = (U, V, E)` be a bipartite graph with node sets @@ -73,6 +74,7 @@ def biadjacency_matrix(G, row_order, column_order=None, https://docs.scipy.org/doc/scipy/reference/sparse.html """ from scipy import sparse + nlen = len(row_order) if nlen == 0: raise nx.NetworkXError("row_order is empty list") @@ -92,11 +94,14 @@ def biadjacency_matrix(G, row_order, column_order=None, if G.number_of_edges() == 0: row, col, data = [], [], [] else: - row, col, data = zip(*((row_index[u], col_index[v], d.get(weight, 1)) - for u, v, d in G.edges(row_order, data=True) - if u in row_index and v in col_index)) - M = sparse.coo_matrix((data, (row, col)), - shape=(nlen, mlen), dtype=dtype) + row, col, data = zip( + *( + (row_index[u], col_index[v], d.get(weight, 1)) + for u, v, d in G.edges(row_order, data=True) + if u in row_index and v in col_index + ) + ) + M = sparse.coo_matrix((data, (row, col)), shape=(nlen, mlen), dtype=dtype) try: return M.asformat(format) # From Scipy 1.1.0, asformat will throw a ValueError instead of an @@ -105,7 +110,7 @@ def biadjacency_matrix(G, row_order, column_order=None, raise nx.NetworkXError(f"Unknown sparse matrix format: {format}") from e -def from_biadjacency_matrix(A, create_using=None, edge_attribute='weight'): +def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"): r"""Creates a new bipartite graph from a biadjacency matrix given as a SciPy sparse matrix. @@ -154,7 +159,7 @@ def from_biadjacency_matrix(A, create_using=None, edge_attribute='weight'): # entry in the adjacency matrix. Otherwise, create one edge for each # positive entry in the adjacency matrix and set the weight of that edge to # be the entry in the matrix. - if A.dtype.kind in ('i', 'u') and G.is_multigraph(): + if A.dtype.kind in ("i", "u") and G.is_multigraph(): chain = itertools.chain.from_iterable triples = chain(((u, v, 1) for d in range(w)) for (u, v, w) in triples) G.add_weighted_edges_from(triples, weight=edge_attribute) diff --git a/networkx/algorithms/bipartite/projection.py b/networkx/algorithms/bipartite/projection.py index 29e24254..a5ec8bb4 100644 --- a/networkx/algorithms/bipartite/projection.py +++ b/networkx/algorithms/bipartite/projection.py @@ -2,12 +2,14 @@ import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['project', - 'projected_graph', - 'weighted_projected_graph', - 'collaboration_weighted_projected_graph', - 'overlap_weighted_projected_graph', - 'generic_weighted_projected_graph'] +__all__ = [ + "project", + "projected_graph", + "weighted_projected_graph", + "collaboration_weighted_projected_graph", + "overlap_weighted_projected_graph", + "generic_weighted_projected_graph", +] def projected_graph(B, nodes, multigraph=False): @@ -113,7 +115,7 @@ def projected_graph(B, nodes, multigraph=False): return G -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def weighted_projected_graph(B, nodes, ratio=False): r"""Returns a weighted projection of B onto one of its node sets. @@ -202,7 +204,7 @@ def weighted_projected_graph(B, nodes, ratio=False): return G -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def collaboration_weighted_projected_graph(B, nodes): r"""Newman's weighted projection of B onto one of its node sets. 
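
Aside, not part of the patch: the projection.py hunks here change only quoting and decorator strings, so the projection helpers behave exactly as before. A minimal sketch of what they compute, assuming nothing beyond networkx itself:

import networkx as nx
from networkx.algorithms import bipartite

# In the path graph 0-1-2-3, nodes 1 and 3 share the single neighbour 2.
G = nx.path_graph(4)
P = bipartite.projected_graph(G, [1, 3])  # unweighted projection onto {1, 3}
assert P.has_edge(1, 3)
W = bipartite.weighted_projected_graph(G, [1, 3])  # weight = shared neighbours
assert W[1][3]["weight"] == 1
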
@@ -295,7 +297,7 @@ def collaboration_weighted_projected_graph(B, nodes): return G -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def overlap_weighted_projected_graph(B, nodes, jaccard=True): r"""Overlap weighted projection of B onto one of its node sets. @@ -394,7 +396,7 @@ def overlap_weighted_projected_graph(B, nodes, jaccard=True): return G -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def generic_weighted_projected_graph(B, nodes, weight_function=None): r"""Weighted projection of B with a user-specified weight function. @@ -490,9 +492,11 @@ def generic_weighted_projected_graph(B, nodes, weight_function=None): pred = B.adj G = nx.Graph() if weight_function is None: + def weight_function(G, u, v): # Notice that we use set(pred[v]) for handling the directed case. return len(set(G[u]) & set(pred[v])) + G.graph.update(B.graph) G.add_nodes_from((n, B.nodes[n]) for n in nodes) for u in nodes: diff --git a/networkx/algorithms/bipartite/redundancy.py b/networkx/algorithms/bipartite/redundancy.py index e0a519ad..5ea7db43 100644 --- a/networkx/algorithms/bipartite/redundancy.py +++ b/networkx/algorithms/bipartite/redundancy.py @@ -3,7 +3,7 @@ from itertools import combinations from networkx import NetworkXError -__all__ = ['node_redundancy'] +__all__ = ["node_redundancy"] def node_redundancy(G, nodes=None): @@ -86,8 +86,10 @@ def node_redundancy(G, nodes=None): if nodes is None: nodes = G if any(len(G[v]) < 2 for v in nodes): - raise NetworkXError('Cannot compute redundancy coefficient for a node' - ' that has fewer than two neighbors.') + raise NetworkXError( + "Cannot compute redundancy coefficient for a node" + " that has fewer than two neighbors." + ) # TODO This can be trivially parallelized. return {v: _node_redundancy(G, v) for v in nodes} @@ -106,6 +108,7 @@ def _node_redundancy(G, v): n = len(G[v]) # TODO On Python 3, we could just use `G[u].keys() & G[w].keys()` instead # of instantiating the entire sets. - overlap = sum(1 for (u, w) in combinations(G[v], 2) - if (set(G[u]) & set(G[w])) - {v}) + overlap = sum( + 1 for (u, w) in combinations(G[v], 2) if (set(G[u]) & set(G[w])) - {v} + ) return (2 * overlap) / (n * (n - 1)) diff --git a/networkx/algorithms/bipartite/spectral.py b/networkx/algorithms/bipartite/spectral.py index 40494e52..f9c66982 100644 --- a/networkx/algorithms/bipartite/spectral.py +++ b/networkx/algorithms/bipartite/spectral.py @@ -3,10 +3,10 @@ Spectral bipartivity measure. """ import networkx as nx -__all__ = ['spectral_bipartivity'] +__all__ = ["spectral_bipartivity"] -def spectral_bipartivity(G, nodes=None, weight='weight'): +def spectral_bipartivity(G, nodes=None, weight="weight"): """Returns the spectral bipartivity. 
Parameters @@ -50,8 +50,9 @@ def spectral_bipartivity(G, nodes=None, weight='weight'): try: import scipy.linalg except ImportError as e: - raise ImportError('spectral_bipartivity() requires SciPy: ', - 'http://scipy.org/') from e + raise ImportError( + "spectral_bipartivity() requires SciPy: ", "http://scipy.org/" + ) from e nodelist = list(G) # ordering of nodes in matrix A = nx.to_numpy_matrix(G, nodelist, weight=weight) expA = scipy.linalg.expm(A) diff --git a/networkx/algorithms/bipartite/tests/test_basic.py b/networkx/algorithms/bipartite/tests/test_basic.py index a82e77a0..1df1f070 100644 --- a/networkx/algorithms/bipartite/tests/test_basic.py +++ b/networkx/algorithms/bipartite/tests/test_basic.py @@ -5,7 +5,6 @@ from networkx.algorithms import bipartite class TestBipartiteBasic: - def test_is_bipartite(self): assert bipartite.is_bipartite(nx.path_graph(4)) assert bipartite.is_bipartite(nx.DiGraph([(1, 0)])) @@ -83,40 +82,40 @@ class TestBipartiteBasic: G.add_edge(0, 1, weight=0.1, other=0.2) X = {1, 3} Y = {0, 2, 4} - u, d = bipartite.degrees(G, Y, weight='weight') + u, d = bipartite.degrees(G, Y, weight="weight") assert dict(u) == {1: 1.1, 3: 2} assert dict(d) == {0: 0.1, 2: 2, 4: 1} - u, d = bipartite.degrees(G, Y, weight='other') + u, d = bipartite.degrees(G, Y, weight="other") assert dict(u) == {1: 1.2, 3: 2} assert dict(d) == {0: 0.2, 2: 2, 4: 1} def test_biadjacency_matrix_weight(self): - scipy = pytest.importorskip('scipy') + scipy = pytest.importorskip("scipy") G = nx.path_graph(5) G.add_edge(0, 1, weight=2, other=4) X = [1, 3] Y = [0, 2, 4] - M = bipartite.biadjacency_matrix(G, X, weight='weight') + M = bipartite.biadjacency_matrix(G, X, weight="weight") assert M[0, 0] == 2 - M = bipartite.biadjacency_matrix(G, X, weight='other') + M = bipartite.biadjacency_matrix(G, X, weight="other") assert M[0, 0] == 4 def test_biadjacency_matrix(self): - scipy = pytest.importorskip('scipy') + scipy = pytest.importorskip("scipy") tops = [2, 5, 10] bots = [5, 10, 15] for i in range(len(tops)): G = bipartite.random_graph(tops[i], bots[i], 0.2) - top = [n for n, d in G.nodes(data=True) if d['bipartite'] == 0] + top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0] M = bipartite.biadjacency_matrix(G, top) assert M.shape[0] == tops[i] assert M.shape[1] == bots[i] def test_biadjacency_matrix_order(self): - scipy = pytest.importorskip('scipy') + scipy = pytest.importorskip("scipy") G = nx.path_graph(5) G.add_edge(0, 1, weight=2) X = [3, 1] Y = [4, 2, 0] - M = bipartite.biadjacency_matrix(G, X, Y, weight='weight') + M = bipartite.biadjacency_matrix(G, X, Y, weight="weight") assert M[1, 2] == 2 diff --git a/networkx/algorithms/bipartite/tests/test_centrality.py b/networkx/algorithms/bipartite/tests/test_centrality.py index 4dc92871..48a5abde 100644 --- a/networkx/algorithms/bipartite/tests/test_centrality.py +++ b/networkx/algorithms/bipartite/tests/test_centrality.py @@ -4,19 +4,19 @@ from networkx.testing import almost_equal class TestBipartiteCentrality: - @classmethod def setup_class(cls): cls.P4 = nx.path_graph(4) cls.K3 = nx.complete_bipartite_graph(3, 3) cls.C4 = nx.cycle_graph(4) cls.davis = nx.davis_southern_women_graph() - cls.top_nodes = [n for n, d in cls.davis.nodes(data=True) - if d['bipartite'] == 0] + cls.top_nodes = [ + n for n, d in cls.davis.nodes(data=True) if d["bipartite"] == 0 + ] def test_degree_centrality(self): d = bipartite.degree_centrality(self.P4, [1, 3]) - answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5} + answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5} assert d == 
answer d = bipartite.degree_centrality(self.K3, [0, 1, 2]) answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0} @@ -57,113 +57,119 @@ class TestBipartiteCentrality: def test_davis_degree_centrality(self): G = self.davis deg = bipartite.degree_centrality(G, self.top_nodes) - answer = {'E8': 0.78, - 'E9': 0.67, - 'E7': 0.56, - 'Nora Fayette': 0.57, - 'Evelyn Jefferson': 0.57, - 'Theresa Anderson': 0.57, - 'E6': 0.44, - 'Sylvia Avondale': 0.50, - 'Laura Mandeville': 0.50, - 'Brenda Rogers': 0.50, - 'Katherina Rogers': 0.43, - 'E5': 0.44, - 'Helen Lloyd': 0.36, - 'E3': 0.33, - 'Ruth DeSand': 0.29, - 'Verne Sanderson': 0.29, - 'E12': 0.33, - 'Myra Liddel': 0.29, - 'E11': 0.22, - 'Eleanor Nye': 0.29, - 'Frances Anderson': 0.29, - 'Pearl Oglethorpe': 0.21, - 'E4': 0.22, - 'Charlotte McDowd': 0.29, - 'E10': 0.28, - 'Olivia Carleton': 0.14, - 'Flora Price': 0.14, - 'E2': 0.17, - 'E1': 0.17, - 'Dorothy Murchison': 0.14, - 'E13': 0.17, - 'E14': 0.17} + answer = { + "E8": 0.78, + "E9": 0.67, + "E7": 0.56, + "Nora Fayette": 0.57, + "Evelyn Jefferson": 0.57, + "Theresa Anderson": 0.57, + "E6": 0.44, + "Sylvia Avondale": 0.50, + "Laura Mandeville": 0.50, + "Brenda Rogers": 0.50, + "Katherina Rogers": 0.43, + "E5": 0.44, + "Helen Lloyd": 0.36, + "E3": 0.33, + "Ruth DeSand": 0.29, + "Verne Sanderson": 0.29, + "E12": 0.33, + "Myra Liddel": 0.29, + "E11": 0.22, + "Eleanor Nye": 0.29, + "Frances Anderson": 0.29, + "Pearl Oglethorpe": 0.21, + "E4": 0.22, + "Charlotte McDowd": 0.29, + "E10": 0.28, + "Olivia Carleton": 0.14, + "Flora Price": 0.14, + "E2": 0.17, + "E1": 0.17, + "Dorothy Murchison": 0.14, + "E13": 0.17, + "E14": 0.17, + } for node, value in answer.items(): assert almost_equal(value, deg[node], places=2) def test_davis_betweenness_centrality(self): G = self.davis bet = bipartite.betweenness_centrality(G, self.top_nodes) - answer = {'E8': 0.24, - 'E9': 0.23, - 'E7': 0.13, - 'Nora Fayette': 0.11, - 'Evelyn Jefferson': 0.10, - 'Theresa Anderson': 0.09, - 'E6': 0.07, - 'Sylvia Avondale': 0.07, - 'Laura Mandeville': 0.05, - 'Brenda Rogers': 0.05, - 'Katherina Rogers': 0.05, - 'E5': 0.04, - 'Helen Lloyd': 0.04, - 'E3': 0.02, - 'Ruth DeSand': 0.02, - 'Verne Sanderson': 0.02, - 'E12': 0.02, - 'Myra Liddel': 0.02, - 'E11': 0.02, - 'Eleanor Nye': 0.01, - 'Frances Anderson': 0.01, - 'Pearl Oglethorpe': 0.01, - 'E4': 0.01, - 'Charlotte McDowd': 0.01, - 'E10': 0.01, - 'Olivia Carleton': 0.01, - 'Flora Price': 0.01, - 'E2': 0.00, - 'E1': 0.00, - 'Dorothy Murchison': 0.00, - 'E13': 0.00, - 'E14': 0.00} + answer = { + "E8": 0.24, + "E9": 0.23, + "E7": 0.13, + "Nora Fayette": 0.11, + "Evelyn Jefferson": 0.10, + "Theresa Anderson": 0.09, + "E6": 0.07, + "Sylvia Avondale": 0.07, + "Laura Mandeville": 0.05, + "Brenda Rogers": 0.05, + "Katherina Rogers": 0.05, + "E5": 0.04, + "Helen Lloyd": 0.04, + "E3": 0.02, + "Ruth DeSand": 0.02, + "Verne Sanderson": 0.02, + "E12": 0.02, + "Myra Liddel": 0.02, + "E11": 0.02, + "Eleanor Nye": 0.01, + "Frances Anderson": 0.01, + "Pearl Oglethorpe": 0.01, + "E4": 0.01, + "Charlotte McDowd": 0.01, + "E10": 0.01, + "Olivia Carleton": 0.01, + "Flora Price": 0.01, + "E2": 0.00, + "E1": 0.00, + "Dorothy Murchison": 0.00, + "E13": 0.00, + "E14": 0.00, + } for node, value in answer.items(): assert almost_equal(value, bet[node], places=2) def test_davis_closeness_centrality(self): G = self.davis clos = bipartite.closeness_centrality(G, self.top_nodes) - answer = {'E8': 0.85, - 'E9': 0.79, - 'E7': 0.73, - 'Nora Fayette': 0.80, - 'Evelyn Jefferson': 0.80, - 'Theresa Anderson': 0.80, - 'E6': 
0.69, - 'Sylvia Avondale': 0.77, - 'Laura Mandeville': 0.73, - 'Brenda Rogers': 0.73, - 'Katherina Rogers': 0.73, - 'E5': 0.59, - 'Helen Lloyd': 0.73, - 'E3': 0.56, - 'Ruth DeSand': 0.71, - 'Verne Sanderson': 0.71, - 'E12': 0.56, - 'Myra Liddel': 0.69, - 'E11': 0.54, - 'Eleanor Nye': 0.67, - 'Frances Anderson': 0.67, - 'Pearl Oglethorpe': 0.67, - 'E4': 0.54, - 'Charlotte McDowd': 0.60, - 'E10': 0.55, - 'Olivia Carleton': 0.59, - 'Flora Price': 0.59, - 'E2': 0.52, - 'E1': 0.52, - 'Dorothy Murchison': 0.65, - 'E13': 0.52, - 'E14': 0.52} + answer = { + "E8": 0.85, + "E9": 0.79, + "E7": 0.73, + "Nora Fayette": 0.80, + "Evelyn Jefferson": 0.80, + "Theresa Anderson": 0.80, + "E6": 0.69, + "Sylvia Avondale": 0.77, + "Laura Mandeville": 0.73, + "Brenda Rogers": 0.73, + "Katherina Rogers": 0.73, + "E5": 0.59, + "Helen Lloyd": 0.73, + "E3": 0.56, + "Ruth DeSand": 0.71, + "Verne Sanderson": 0.71, + "E12": 0.56, + "Myra Liddel": 0.69, + "E11": 0.54, + "Eleanor Nye": 0.67, + "Frances Anderson": 0.67, + "Pearl Oglethorpe": 0.67, + "E4": 0.54, + "Charlotte McDowd": 0.60, + "E10": 0.55, + "Olivia Carleton": 0.59, + "Flora Price": 0.59, + "E2": 0.52, + "E1": 0.52, + "Dorothy Murchison": 0.65, + "E13": 0.52, + "E14": 0.52, + } for node, value in answer.items(): assert almost_equal(value, clos[node], places=2) diff --git a/networkx/algorithms/bipartite/tests/test_cluster.py b/networkx/algorithms/bipartite/tests/test_cluster.py index 7f4cb01a..84403bc3 100644 --- a/networkx/algorithms/bipartite/tests/test_cluster.py +++ b/networkx/algorithms/bipartite/tests/test_cluster.py @@ -10,24 +10,28 @@ def test_pairwise_bipartite_cc_functions(): # Latapy et al (2008) G1 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7)]) G2 = nx.Graph([(0, 2), (0, 3), (0, 4), (1, 3), (1, 4), (1, 5)]) - G3 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)]) - result = {0: [1 / 3.0, 2 / 3.0, 2 / 5.0], - 1: [1 / 2.0, 2 / 3.0, 2 / 3.0], - 2: [2 / 8.0, 2 / 5.0, 2 / 5.0]} + G3 = nx.Graph( + [(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)] + ) + result = { + 0: [1 / 3.0, 2 / 3.0, 2 / 5.0], + 1: [1 / 2.0, 2 / 3.0, 2 / 3.0], + 2: [2 / 8.0, 2 / 5.0, 2 / 5.0], + } for i, G in enumerate([G1, G2, G3]): - assert(bipartite.is_bipartite(G)) - assert(cc_dot(set(G[0]), set(G[1])) == result[i][0]) - assert(cc_min(set(G[0]), set(G[1])) == result[i][1]) - assert(cc_max(set(G[0]), set(G[1])) == result[i][2]) + assert bipartite.is_bipartite(G) + assert cc_dot(set(G[0]), set(G[1])) == result[i][0] + assert cc_min(set(G[0]), set(G[1])) == result[i][1] + assert cc_max(set(G[0]), set(G[1])) == result[i][2] def test_star_graph(): G = nx.star_graph(3) # all modes are the same answer = {0: 0, 1: 1, 2: 1, 3: 1} - assert bipartite.clustering(G, mode='dot') == answer - assert bipartite.clustering(G, mode='min') == answer - assert bipartite.clustering(G, mode='max') == answer + assert bipartite.clustering(G, mode="dot") == answer + assert bipartite.clustering(G, mode="min") == answer + assert bipartite.clustering(G, mode="max") == answer def test_not_bipartite(): @@ -37,23 +41,23 @@ def test_not_bipartite(): def test_bad_mode(): with pytest.raises(nx.NetworkXError): - bipartite.clustering(nx.path_graph(4), mode='foo') + bipartite.clustering(nx.path_graph(4), mode="foo") def test_path_graph(): G = nx.path_graph(4) answer = {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5} - assert bipartite.clustering(G, mode='dot') == answer - assert bipartite.clustering(G, mode='max') == answer + assert 
bipartite.clustering(G, mode="dot") == answer + assert bipartite.clustering(G, mode="max") == answer answer = {0: 1, 1: 1, 2: 1, 3: 1} - assert bipartite.clustering(G, mode='min') == answer + assert bipartite.clustering(G, mode="min") == answer def test_average_path_graph(): G = nx.path_graph(4) - assert bipartite.average_clustering(G, mode='dot') == 0.5 - assert bipartite.average_clustering(G, mode='max') == 0.5 - assert bipartite.average_clustering(G, mode='min') == 1 + assert bipartite.average_clustering(G, mode="dot") == 0.5 + assert bipartite.average_clustering(G, mode="max") == 0.5 + assert bipartite.average_clustering(G, mode="min") == 1 def test_ra_clustering_davis(): diff --git a/networkx/algorithms/bipartite/tests/test_covering.py b/networkx/algorithms/bipartite/tests/test_covering.py index 1f80cfdb..2f1b02e3 100644 --- a/networkx/algorithms/bipartite/tests/test_covering.py +++ b/networkx/algorithms/bipartite/tests/test_covering.py @@ -12,15 +12,13 @@ class TestMinEdgeCover: def test_graph_single_edge(self): G = nx.Graph() G.add_edge(0, 1) - assert (bipartite.min_edge_cover(G) == - {(0, 1), (1, 0)}) + assert bipartite.min_edge_cover(G) == {(0, 1), (1, 0)} def test_bipartite_default(self): G = nx.Graph() G.add_nodes_from([1, 2, 3, 4], bipartite=0) - G.add_nodes_from(['a', 'b', 'c'], bipartite=1) - G.add_edges_from([(1, 'a'), (1, 'b'), (2, 'b'), - (2, 'c'), (3, 'c'), (4, 'a')]) + G.add_nodes_from(["a", "b", "c"], bipartite=1) + G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")]) min_cover = bipartite.min_edge_cover(G) assert nx.is_edge_cover(G, min_cover) assert len(min_cover) == 8 @@ -28,10 +26,8 @@ class TestMinEdgeCover: def test_bipartite_explicit(self): G = nx.Graph() G.add_nodes_from([1, 2, 3, 4], bipartite=0) - G.add_nodes_from(['a', 'b', 'c'], bipartite=1) - G.add_edges_from([(1, 'a'), (1, 'b'), (2, 'b'), - (2, 'c'), (3, 'c'), (4, 'a')]) - min_cover = bipartite.min_edge_cover(G, - bipartite.eppstein_matching) + G.add_nodes_from(["a", "b", "c"], bipartite=1) + G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")]) + min_cover = bipartite.min_edge_cover(G, bipartite.eppstein_matching) assert nx.is_edge_cover(G, min_cover) assert len(min_cover) == 8 diff --git a/networkx/algorithms/bipartite/tests/test_edgelist.py b/networkx/algorithms/bipartite/tests/test_edgelist.py index 86bfb4af..4df378a3 100644 --- a/networkx/algorithms/bipartite/tests/test_edgelist.py +++ b/networkx/algorithms/bipartite/tests/test_edgelist.py @@ -7,21 +7,19 @@ import tempfile import os import networkx as nx -from networkx.testing import (assert_edges_equal, assert_nodes_equal, - assert_graphs_equal) +from networkx.testing import assert_edges_equal, assert_nodes_equal, assert_graphs_equal from networkx.algorithms import bipartite class TestEdgelist: - @classmethod def setup_class(cls): cls.G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] cls.G.add_edges_from(e) - cls.G.add_nodes_from(['a', 'c', 'e'], bipartite=0) - cls.G.add_nodes_from(['b', 'd', 'f'], bipartite=1) - cls.G.add_node('g', bipartite=0) + cls.G.add_nodes_from(["a", "c", "e"], bipartite=0) + cls.G.add_nodes_from(["b", "d", "f"], bipartite=1) + cls.G.add_node("g", bipartite=0) cls.DG = nx.DiGraph(cls.G) cls.MG = nx.MultiGraph() cls.MG.add_edges_from([(1, 2), (1, 2), (1, 2)]) @@ -52,8 +50,9 @@ class TestEdgelist: bytesIO = io.BytesIO(s) G = 
bipartite.read_edgelist(bytesIO, nodetype=int, data=True) - assert_edges_equal(G.edges(data=True), - [(1, 2, {'weight': 2.0}), (2, 3, {'weight': 3.0})]) + assert_edges_equal( + G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] + ) def test_write_edgelist_1(self): fh = io.BytesIO() @@ -97,7 +96,7 @@ class TestEdgelist: G.add_node(1, bipartite=0) G.add_node(2, bipartite=1) G.add_node(3, bipartite=0) - bipartite.write_edgelist(G, fh, data=[('weight')]) + bipartite.write_edgelist(G, fh, data=[("weight")]) fh.seek(0) assert fh.read() == b"1 2 2.0\n3 2 3.0\n" @@ -105,9 +104,9 @@ class TestEdgelist: G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) - G.add_edge(name1, 'Radiohead', **{name2: 3}) + G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) - G.add_node('Radiohead', bipartite=1) + G.add_node("Radiohead", bipartite=1) fd, fname = tempfile.mkstemp() bipartite.write_edgelist(G, fname) H = bipartite.read_edgelist(fname) @@ -119,26 +118,26 @@ class TestEdgelist: G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) - G.add_edge(name1, 'Radiohead', **{name2: 3}) + G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) - G.add_node('Radiohead', bipartite=1) + G.add_node("Radiohead", bipartite=1) fd, fname = tempfile.mkstemp() - pytest.raises(UnicodeEncodeError, - bipartite.write_edgelist, - G, fname, encoding='latin-1') + pytest.raises( + UnicodeEncodeError, bipartite.write_edgelist, G, fname, encoding="latin-1" + ) os.close(fd) os.unlink(fname) def test_latin1(self): G = nx.Graph() - name1 = 'Bj' + chr(246) + 'rk' - name2 = chr(220) + 'ber' - G.add_edge(name1, 'Radiohead', **{name2: 3}) + name1 = "Bj" + chr(246) + "rk" + name2 = chr(220) + "ber" + G.add_edge(name1, "Radiohead", **{name2: 3}) G.add_node(name1, bipartite=0) - G.add_node('Radiohead', bipartite=1) + G.add_node("Radiohead", bipartite=1) fd, fname = tempfile.mkstemp() - bipartite.write_edgelist(G, fname, encoding='latin-1') - H = bipartite.read_edgelist(fname, encoding='latin-1') + bipartite.write_edgelist(G, fname, encoding="latin-1") + H = bipartite.read_edgelist(fname, encoding="latin-1") assert_graphs_equal(G, H) os.close(fd) os.unlink(fname) @@ -150,7 +149,7 @@ class TestEdgelist: H = bipartite.read_edgelist(fname) H2 = bipartite.read_edgelist(fname) assert H != H2 # they should be different graphs - G.remove_node('g') # isolated nodes are not written in edgelist + G.remove_node("g") # isolated nodes are not written in edgelist assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) diff --git a/networkx/algorithms/bipartite/tests/test_generators.py b/networkx/algorithms/bipartite/tests/test_generators.py index 6d50749c..f6ffcf46 100644 --- a/networkx/algorithms/bipartite/tests/test_generators.py +++ b/networkx/algorithms/bipartite/tests/test_generators.py @@ -8,7 +8,7 @@ from ..generators import ( havel_hakimi_graph, preferential_attachment_graph, random_graph, - reverse_havel_hakimi_graph + reverse_havel_hakimi_graph, ) """ @@ -17,7 +17,7 @@ Generators - Bipartite """ -class TestGeneratorsBipartite(): +class TestGeneratorsBipartite: def test_complete_bipartite_graph(self): G = complete_bipartite_graph(0, 0) assert nx.is_isomorphic(G, nx.null_graph()) @@ -44,12 +44,19 @@ class TestGeneratorsBipartite(): assert nx.number_of_nodes(G) == m1 + m2 assert nx.number_of_edges(G) == m1 * m2 - pytest.raises(nx.NetworkXError, 
complete_bipartite_graph, - 7, 3, create_using=nx.DiGraph) - pytest.raises(nx.NetworkXError, complete_bipartite_graph, - 7, 3, create_using=nx.DiGraph) - pytest.raises(nx.NetworkXError, complete_bipartite_graph, - 7, 3, create_using=nx.MultiDiGraph) + pytest.raises( + nx.NetworkXError, complete_bipartite_graph, 7, 3, create_using=nx.DiGraph + ) + pytest.raises( + nx.NetworkXError, complete_bipartite_graph, 7, 3, create_using=nx.DiGraph + ) + pytest.raises( + nx.NetworkXError, + complete_bipartite_graph, + 7, + 3, + create_using=nx.MultiDiGraph, + ) mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph) assert mG.is_multigraph() @@ -65,9 +72,13 @@ class TestGeneratorsBipartite(): assert not mG.is_directed() # specify nodes rather than number of nodes - G = complete_bipartite_graph([1, 2], ['a', 'b']) - has_edges = G.has_edge(1, 'a') & G.has_edge(1, 'b') &\ - G.has_edge(2, 'a') & G.has_edge(2, 'b') + G = complete_bipartite_graph([1, 2], ["a", "b"]) + has_edges = ( + G.has_edge(1, "a") + & G.has_edge(1, "b") + & G.has_edge(2, "a") + & G.has_edge(2, "b") + ) assert has_edges assert G.size() == 4 @@ -85,28 +96,24 @@ class TestGeneratorsBipartite(): aseq = [3, 3, 3, 3] bseq = [2, 2, 2, 2, 2] - pytest.raises(nx.NetworkXError, - configuration_model, aseq, bseq) + pytest.raises(nx.NetworkXError, configuration_model, aseq, bseq) aseq = [3, 3, 3, 3] bseq = [2, 2, 2, 2, 2, 2] G = configuration_model(aseq, bseq) - assert (sorted(d for n, d in G.degree()) == - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 2, 2, 2] bseq = [3, 3, 3, 3] G = configuration_model(aseq, bseq) - assert (sorted(d for n, d in G.degree()) == - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 1, 1, 1] bseq = [3, 3, 3] G = configuration_model(aseq, bseq) assert G.is_multigraph() assert not G.is_directed() - assert (sorted(d for n, d in G.degree()) == - [1, 1, 1, 2, 2, 2, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3] GU = nx.project(nx.Graph(G), range(len(aseq))) assert GU.number_of_nodes() == 6 @@ -118,15 +125,19 @@ class TestGeneratorsBipartite(): assert not G.is_multigraph() assert not G.is_directed() - pytest.raises(nx.NetworkXError, - configuration_model, aseq, bseq, - create_using=nx.DiGraph()) - pytest.raises(nx.NetworkXError, - configuration_model, aseq, bseq, - create_using=nx.DiGraph) - pytest.raises(nx.NetworkXError, - configuration_model, aseq, bseq, - create_using=nx.MultiDiGraph) + pytest.raises( + nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph() + ) + pytest.raises( + nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph + ) + pytest.raises( + nx.NetworkXError, + configuration_model, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) def test_havel_hakimi_graph(self): aseq = [] @@ -142,21 +153,18 @@ class TestGeneratorsBipartite(): aseq = [3, 3, 3, 3] bseq = [2, 2, 2, 2, 2] - pytest.raises(nx.NetworkXError, - havel_hakimi_graph, aseq, bseq) + pytest.raises(nx.NetworkXError, havel_hakimi_graph, aseq, bseq) bseq = [2, 2, 2, 2, 2, 2] G = havel_hakimi_graph(aseq, bseq) - assert (sorted(d for n, d in G.degree()) == - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 2, 2, 2] bseq = [3, 3, 3, 3] G = havel_hakimi_graph(aseq, bseq) assert G.is_multigraph() assert not G.is_directed() - assert (sorted(d for 
n, d in G.degree()) == - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] GU = nx.project(nx.Graph(G), range(len(aseq))) assert GU.number_of_nodes() == 6 @@ -168,15 +176,19 @@ class TestGeneratorsBipartite(): assert not G.is_multigraph() assert not G.is_directed() - pytest.raises(nx.NetworkXError, - havel_hakimi_graph, aseq, bseq, - create_using=nx.DiGraph) - pytest.raises(nx.NetworkXError, - havel_hakimi_graph, aseq, bseq, - create_using=nx.DiGraph) - pytest.raises(nx.NetworkXError, - havel_hakimi_graph, aseq, bseq, - create_using=nx.MultiDiGraph) + pytest.raises( + nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph + ) + pytest.raises( + nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph + ) + pytest.raises( + nx.NetworkXError, + havel_hakimi_graph, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) def test_reverse_havel_hakimi_graph(self): aseq = [] @@ -192,27 +204,23 @@ class TestGeneratorsBipartite(): aseq = [3, 3, 3, 3] bseq = [2, 2, 2, 2, 2] - pytest.raises(nx.NetworkXError, - reverse_havel_hakimi_graph, aseq, bseq) + pytest.raises(nx.NetworkXError, reverse_havel_hakimi_graph, aseq, bseq) bseq = [2, 2, 2, 2, 2, 2] G = reverse_havel_hakimi_graph(aseq, bseq) - assert (sorted(d for n, d in G.degree()) == - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 2, 2, 2] bseq = [3, 3, 3, 3] G = reverse_havel_hakimi_graph(aseq, bseq) - assert (sorted(d for n, d in G.degree()) == - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 1, 1, 1] bseq = [3, 3, 3] G = reverse_havel_hakimi_graph(aseq, bseq) assert G.is_multigraph() assert not G.is_directed() - assert (sorted(d for n, d in G.degree()) == - [1, 1, 1, 2, 2, 2, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3] GU = nx.project(nx.Graph(G), range(len(aseq))) assert GU.number_of_nodes() == 6 @@ -224,15 +232,27 @@ class TestGeneratorsBipartite(): assert not G.is_multigraph() assert not G.is_directed() - pytest.raises(nx.NetworkXError, - reverse_havel_hakimi_graph, aseq, bseq, - create_using=nx.DiGraph) - pytest.raises(nx.NetworkXError, - reverse_havel_hakimi_graph, aseq, bseq, - create_using=nx.DiGraph) - pytest.raises(nx.NetworkXError, - reverse_havel_hakimi_graph, aseq, bseq, - create_using=nx.MultiDiGraph) + pytest.raises( + nx.NetworkXError, + reverse_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + reverse_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + reverse_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) def test_alternating_havel_hakimi_graph(self): aseq = [] @@ -248,27 +268,23 @@ class TestGeneratorsBipartite(): aseq = [3, 3, 3, 3] bseq = [2, 2, 2, 2, 2] - pytest.raises(nx.NetworkXError, - alternating_havel_hakimi_graph, aseq, bseq) + pytest.raises(nx.NetworkXError, alternating_havel_hakimi_graph, aseq, bseq) bseq = [2, 2, 2, 2, 2, 2] G = alternating_havel_hakimi_graph(aseq, bseq) - assert (sorted(d for n, d in G.degree()) == - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 2, 2, 2] bseq = [3, 3, 3, 3] G = alternating_havel_hakimi_graph(aseq, bseq) - assert (sorted(d for n, d in G.degree()) == - [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) + assert 
sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] aseq = [2, 2, 2, 1, 1, 1] bseq = [3, 3, 3] G = alternating_havel_hakimi_graph(aseq, bseq) assert G.is_multigraph() assert not G.is_directed() - assert (sorted(d for n, d in G.degree()) == - [1, 1, 1, 2, 2, 2, 3, 3, 3]) + assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3] GU = nx.project(nx.Graph(G), range(len(aseq))) assert GU.number_of_nodes() == 6 @@ -280,15 +296,27 @@ class TestGeneratorsBipartite(): assert not G.is_multigraph() assert not G.is_directed() - pytest.raises(nx.NetworkXError, - alternating_havel_hakimi_graph, aseq, bseq, - create_using=nx.DiGraph) - pytest.raises(nx.NetworkXError, - alternating_havel_hakimi_graph, aseq, bseq, - create_using=nx.DiGraph) - pytest.raises(nx.NetworkXError, - alternating_havel_hakimi_graph, aseq, bseq, - create_using=nx.MultiDiGraph) + pytest.raises( + nx.NetworkXError, + alternating_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + alternating_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + alternating_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) def test_preferential_attachment(self): aseq = [3, 2, 1, 1] @@ -300,15 +328,27 @@ class TestGeneratorsBipartite(): assert not G.is_multigraph() assert not G.is_directed() - pytest.raises(nx.NetworkXError, - preferential_attachment_graph, aseq, 0.5, - create_using=nx.DiGraph()) - pytest.raises(nx.NetworkXError, - preferential_attachment_graph, aseq, 0.5, - create_using=nx.DiGraph()) - pytest.raises(nx.NetworkXError, - preferential_attachment_graph, aseq, 0.5, - create_using=nx.DiGraph()) + pytest.raises( + nx.NetworkXError, + preferential_attachment_graph, + aseq, + 0.5, + create_using=nx.DiGraph(), + ) + pytest.raises( + nx.NetworkXError, + preferential_attachment_graph, + aseq, + 0.5, + create_using=nx.DiGraph(), + ) + pytest.raises( + nx.NetworkXError, + preferential_attachment_graph, + aseq, + 0.5, + create_using=nx.DiGraph(), + ) def test_random_graph(self): n = 10 diff --git a/networkx/algorithms/bipartite/tests/test_matching.py b/networkx/algorithms/bipartite/tests/test_matching.py index 92f24254..e0651fb5 100644 --- a/networkx/algorithms/bipartite/tests/test_matching.py +++ b/networkx/algorithms/bipartite/tests/test_matching.py @@ -12,7 +12,7 @@ from networkx.algorithms.bipartite.matching import minimum_weight_full_matching from networkx.algorithms.bipartite.matching import to_vertex_cover -class TestMatching(): +class TestMatching: """Tests for bipartite matching algorithms.""" def setup(self): @@ -26,8 +26,7 @@ class TestMatching(): self.simple_graph = nx.complete_bipartite_graph(2, 3) self.simple_solution = {0: 2, 1: 3, 2: 0, 3: 1} - edges = [(0, 7), (0, 8), (2, 6), (2, 9), (3, 8), (4, 8), (4, 9), - (5, 11)] + edges = [(0, 7), (0, 8), (2, 6), (2, 9), (3, 8), (4, 8), (4, 9), (5, 11)] self.top_nodes = set(range(6)) self.graph = nx.Graph() self.graph.add_nodes_from(range(12)) @@ -35,33 +34,48 @@ class TestMatching(): # Example bipartite graph from issue 2127 G = nx.Graph() - G.add_nodes_from([ - (1, 'C'), (1, 'B'), (0, 'G'), (1, 'F'), - (1, 'E'), (0, 'C'), (1, 'D'), (1, 'I'), - (0, 'A'), (0, 'D'), (0, 'F'), (0, 'E'), - (0, 'H'), (1, 'G'), (1, 'A'), (0, 'I'), - (0, 'B'), (1, 'H'), - ]) - G.add_edge((1, 'C'), (0, 'A')) - G.add_edge((1, 'B'), (0, 'A')) - G.add_edge((0, 'G'), (1, 'I')) - G.add_edge((0, 'G'), (1, 'H')) - G.add_edge((1, 'F'), (0, 'A')) - G.add_edge((1, 'F'), 
(0, 'C')) - G.add_edge((1, 'F'), (0, 'E')) - G.add_edge((1, 'E'), (0, 'A')) - G.add_edge((1, 'E'), (0, 'C')) - G.add_edge((0, 'C'), (1, 'D')) - G.add_edge((0, 'C'), (1, 'I')) - G.add_edge((0, 'C'), (1, 'G')) - G.add_edge((0, 'C'), (1, 'H')) - G.add_edge((1, 'D'), (0, 'A')) - G.add_edge((1, 'I'), (0, 'A')) - G.add_edge((1, 'I'), (0, 'E')) - G.add_edge((0, 'A'), (1, 'G')) - G.add_edge((0, 'A'), (1, 'H')) - G.add_edge((0, 'E'), (1, 'G')) - G.add_edge((0, 'E'), (1, 'H')) + G.add_nodes_from( + [ + (1, "C"), + (1, "B"), + (0, "G"), + (1, "F"), + (1, "E"), + (0, "C"), + (1, "D"), + (1, "I"), + (0, "A"), + (0, "D"), + (0, "F"), + (0, "E"), + (0, "H"), + (1, "G"), + (1, "A"), + (0, "I"), + (0, "B"), + (1, "H"), + ] + ) + G.add_edge((1, "C"), (0, "A")) + G.add_edge((1, "B"), (0, "A")) + G.add_edge((0, "G"), (1, "I")) + G.add_edge((0, "G"), (1, "H")) + G.add_edge((1, "F"), (0, "A")) + G.add_edge((1, "F"), (0, "C")) + G.add_edge((1, "F"), (0, "E")) + G.add_edge((1, "E"), (0, "A")) + G.add_edge((1, "E"), (0, "C")) + G.add_edge((0, "C"), (1, "D")) + G.add_edge((0, "C"), (1, "I")) + G.add_edge((0, "C"), (1, "G")) + G.add_edge((0, "C"), (1, "H")) + G.add_edge((1, "D"), (0, "A")) + G.add_edge((1, "I"), (0, "A")) + G.add_edge((1, "I"), (0, "E")) + G.add_edge((0, "A"), (1, "G")) + G.add_edge((0, "A"), (1, "H")) + G.add_edge((0, "E"), (1, "G")) + G.add_edge((0, "E"), (1, "H")) self.disconnected_graph = G def check_match(self, matching): @@ -157,7 +171,7 @@ class TestMatching(): matching = hopcroft_karp_matching(btc, top_nodes) vertex_cover = to_vertex_cover(btc, matching, top_nodes) independent_set = set(G) - {v for _, v in vertex_cover} - assert {'B', 'D', 'F', 'I', 'H'} == independent_set + assert {"B", "D", "F", "I", "H"} == independent_set def test_vertex_cover_issue_2384(self): G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)]) @@ -182,21 +196,19 @@ class TestMatching(): def test_eppstein_matching(): """Test in accordance to issue #1927""" G = nx.Graph() - G.add_nodes_from(['a', 2, 3, 4], bipartite=0) - G.add_nodes_from([1, 'b', 'c'], bipartite=1) - G.add_edges_from([('a', 1), ('a', 'b'), (2, 'b'), - (2, 'c'), (3, 'c'), (4, 1)]) + G.add_nodes_from(["a", 2, 3, 4], bipartite=0) + G.add_nodes_from([1, "b", "c"], bipartite=1) + G.add_edges_from([("a", 1), ("a", "b"), (2, "b"), (2, "c"), (3, "c"), (4, 1)]) matching = eppstein_matching(G) assert len(matching) == len(maximum_matching(G)) assert all(x in set(matching.keys()) for x in set(matching.values())) class TestMinimumWeightFullMatching: - @classmethod def setup_class(cls): global scipy - scipy = pytest.importorskip('scipy') + scipy = pytest.importorskip("scipy") def test_minimum_weight_full_matching_incomplete_graph(self): B = nx.Graph() @@ -288,5 +300,5 @@ class TestMinimumWeightFullMatching: G.add_edge(0, 3, mass=0.2) G.add_edge(1, 2, mass=1) G.add_edge(1, 3, mass=2) - matching = minimum_weight_full_matching(G, weight='mass') + matching = minimum_weight_full_matching(G, weight="mass") assert matching == {0: 3, 1: 2, 2: 1, 3: 0} diff --git a/networkx/algorithms/bipartite/tests/test_matrix.py b/networkx/algorithms/bipartite/tests/test_matrix.py index 63db1b84..176741ae 100644 --- a/networkx/algorithms/bipartite/tests/test_matrix.py +++ b/networkx/algorithms/bipartite/tests/test_matrix.py @@ -1,7 +1,8 @@ import pytest -np = pytest.importorskip('numpy') -sp = pytest.importorskip('scipy') -sparse = pytest.importorskip('scipy.sparse') + +np = pytest.importorskip("numpy") +sp = pytest.importorskip("scipy") +sparse = pytest.importorskip("scipy.sparse") import 
networkx as nx @@ -10,15 +11,14 @@ from networkx.testing.utils import assert_edges_equal class TestBiadjacencyMatrix: - def test_biadjacency_matrix_weight(self): G = nx.path_graph(5) G.add_edge(0, 1, weight=2, other=4) X = [1, 3] Y = [0, 2, 4] - M = bipartite.biadjacency_matrix(G, X, weight='weight') + M = bipartite.biadjacency_matrix(G, X, weight="weight") assert M[0, 0] == 2 - M = bipartite.biadjacency_matrix(G, X, weight='other') + M = bipartite.biadjacency_matrix(G, X, weight="other") assert M[0, 0] == 4 def test_biadjacency_matrix(self): @@ -26,7 +26,7 @@ class TestBiadjacencyMatrix: bots = [5, 10, 15] for i in range(len(tops)): G = bipartite.random_graph(tops[i], bots[i], 0.2) - top = [n for n, d in G.nodes(data=True) if d['bipartite'] == 0] + top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0] M = bipartite.biadjacency_matrix(G, top) assert M.shape[0] == tops[i] assert M.shape[1] == bots[i] @@ -36,7 +36,7 @@ class TestBiadjacencyMatrix: G.add_edge(0, 1, weight=2) X = [3, 1] Y = [4, 2, 0] - M = bipartite.biadjacency_matrix(G, X, Y, weight='weight') + M = bipartite.biadjacency_matrix(G, X, Y, weight="weight") assert M[1, 2] == 2 def test_null_graph(self): @@ -57,7 +57,7 @@ class TestBiadjacencyMatrix: def test_format_keyword(self): with pytest.raises(nx.NetworkXError): - bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], format='foo') + bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], format="foo") def test_from_biadjacency_roundtrip(self): B1 = nx.path_graph(5) @@ -69,8 +69,8 @@ class TestBiadjacencyMatrix: M = sparse.csc_matrix([[1, 2], [0, 3]]) B = bipartite.from_biadjacency_matrix(M) assert_edges_equal(B.edges(), [(0, 2), (0, 3), (1, 3)]) - B = bipartite.from_biadjacency_matrix(M, edge_attribute='weight') - e = [(0, 2, {'weight': 1}), (0, 3, {'weight': 2}), (1, 3, {'weight': 3})] + B = bipartite.from_biadjacency_matrix(M, edge_attribute="weight") + e = [(0, 2, {"weight": 1}), (0, 3, {"weight": 2}), (1, 3, {"weight": 3})] assert_edges_equal(B.edges(data=True), e) def test_from_biadjacency_multigraph(self): diff --git a/networkx/algorithms/bipartite/tests/test_project.py b/networkx/algorithms/bipartite/tests/test_project.py index 13877ec0..dbe75894 100644 --- a/networkx/algorithms/bipartite/tests/test_project.py +++ b/networkx/algorithms/bipartite/tests/test_project.py @@ -4,7 +4,6 @@ from networkx.testing import assert_edges_equal, assert_nodes_equal class TestBipartiteProject: - def test_path_projected_graph(self): G = nx.path_graph(4) P = bipartite.projected_graph(G, [1, 3]) @@ -16,27 +15,27 @@ class TestBipartiteProject: def test_path_projected_properties_graph(self): G = nx.path_graph(4) - G.add_node(1, name='one') - G.add_node(2, name='two') + G.add_node(1, name="one") + G.add_node(2, name="two") P = bipartite.projected_graph(G, [1, 3]) assert_nodes_equal(list(P), [1, 3]) assert_edges_equal(list(P.edges()), [(1, 3)]) - assert P.nodes[1]['name'] == G.nodes[1]['name'] + assert P.nodes[1]["name"] == G.nodes[1]["name"] P = bipartite.projected_graph(G, [0, 2]) assert_nodes_equal(list(P), [0, 2]) assert_edges_equal(list(P.edges()), [(0, 2)]) - assert P.nodes[2]['name'] == G.nodes[2]['name'] + assert P.nodes[2]["name"] == G.nodes[2]["name"] def test_path_collaboration_projected_graph(self): G = nx.path_graph(4) P = bipartite.collaboration_weighted_projected_graph(G, [1, 3]) assert_nodes_equal(list(P), [1, 3]) assert_edges_equal(list(P.edges()), [(1, 3)]) - P[1][3]['weight'] = 1 + P[1][3]["weight"] = 1 P = bipartite.collaboration_weighted_projected_graph(G, [0, 2]) 
assert_nodes_equal(list(P), [0, 2]) assert_edges_equal(list(P.edges()), [(0, 2)]) - P[0][2]['weight'] = 1 + P[0][2]["weight"] = 1 def test_directed_path_collaboration_projected_graph(self): G = nx.DiGraph() @@ -44,22 +43,22 @@ class TestBipartiteProject: P = bipartite.collaboration_weighted_projected_graph(G, [1, 3]) assert_nodes_equal(list(P), [1, 3]) assert_edges_equal(list(P.edges()), [(1, 3)]) - P[1][3]['weight'] = 1 + P[1][3]["weight"] = 1 P = bipartite.collaboration_weighted_projected_graph(G, [0, 2]) assert_nodes_equal(list(P), [0, 2]) assert_edges_equal(list(P.edges()), [(0, 2)]) - P[0][2]['weight'] = 1 + P[0][2]["weight"] = 1 def test_path_weighted_projected_graph(self): G = nx.path_graph(4) P = bipartite.weighted_projected_graph(G, [1, 3]) assert_nodes_equal(list(P), [1, 3]) assert_edges_equal(list(P.edges()), [(1, 3)]) - P[1][3]['weight'] = 1 + P[1][3]["weight"] = 1 P = bipartite.weighted_projected_graph(G, [0, 2]) assert_nodes_equal(list(P), [0, 2]) assert_edges_equal(list(P.edges()), [(0, 2)]) - P[0][2]['weight'] = 1 + P[0][2]["weight"] = 1 def test_path_weighted_projected_directed_graph(self): G = nx.DiGraph() @@ -67,11 +66,11 @@ class TestBipartiteProject: P = bipartite.weighted_projected_graph(G, [1, 3]) assert_nodes_equal(list(P), [1, 3]) assert_edges_equal(list(P.edges()), [(1, 3)]) - P[1][3]['weight'] = 1 + P[1][3]["weight"] = 1 P = bipartite.weighted_projected_graph(G, [0, 2]) assert_nodes_equal(list(P), [0, 2]) assert_edges_equal(list(P.edges()), [(0, 2)]) - P[0][2]['weight'] = 1 + P[0][2]["weight"] = 1 def test_star_projected_graph(self): G = nx.star_graph(3) @@ -88,256 +87,283 @@ class TestBipartiteProject: def test_project_multigraph(self): G = nx.Graph() - G.add_edge('a', 1) - G.add_edge('b', 1) - G.add_edge('a', 2) - G.add_edge('b', 2) - P = bipartite.projected_graph(G, 'ab') - assert_edges_equal(list(P.edges()), [('a', 'b')]) - P = bipartite.weighted_projected_graph(G, 'ab') - assert_edges_equal(list(P.edges()), [('a', 'b')]) - P = bipartite.projected_graph(G, 'ab', multigraph=True) - assert_edges_equal(list(P.edges()), [('a', 'b'), ('a', 'b')]) + G.add_edge("a", 1) + G.add_edge("b", 1) + G.add_edge("a", 2) + G.add_edge("b", 2) + P = bipartite.projected_graph(G, "ab") + assert_edges_equal(list(P.edges()), [("a", "b")]) + P = bipartite.weighted_projected_graph(G, "ab") + assert_edges_equal(list(P.edges()), [("a", "b")]) + P = bipartite.projected_graph(G, "ab", multigraph=True) + assert_edges_equal(list(P.edges()), [("a", "b"), ("a", "b")]) def test_project_collaboration(self): G = nx.Graph() - G.add_edge('a', 1) - G.add_edge('b', 1) - G.add_edge('b', 2) - G.add_edge('c', 2) - G.add_edge('c', 3) - G.add_edge('c', 4) - G.add_edge('b', 4) - P = bipartite.collaboration_weighted_projected_graph(G, 'abc') - assert P['a']['b']['weight'] == 1 - assert P['b']['c']['weight'] == 2 + G.add_edge("a", 1) + G.add_edge("b", 1) + G.add_edge("b", 2) + G.add_edge("c", 2) + G.add_edge("c", 3) + G.add_edge("c", 4) + G.add_edge("b", 4) + P = bipartite.collaboration_weighted_projected_graph(G, "abc") + assert P["a"]["b"]["weight"] == 1 + assert P["b"]["c"]["weight"] == 2 def test_directed_projection(self): G = nx.DiGraph() - G.add_edge('A', 1) - G.add_edge(1, 'B') - G.add_edge('A', 2) - G.add_edge('B', 2) - P = bipartite.projected_graph(G, 'AB') - assert_edges_equal(list(P.edges()), [('A', 'B')]) - P = bipartite.weighted_projected_graph(G, 'AB') - assert_edges_equal(list(P.edges()), [('A', 'B')]) - assert P['A']['B']['weight'] == 1 - - P = bipartite.projected_graph(G, 'AB', 
multigraph=True) - assert_edges_equal(list(P.edges()), [('A', 'B')]) + G.add_edge("A", 1) + G.add_edge(1, "B") + G.add_edge("A", 2) + G.add_edge("B", 2) + P = bipartite.projected_graph(G, "AB") + assert_edges_equal(list(P.edges()), [("A", "B")]) + P = bipartite.weighted_projected_graph(G, "AB") + assert_edges_equal(list(P.edges()), [("A", "B")]) + assert P["A"]["B"]["weight"] == 1 + + P = bipartite.projected_graph(G, "AB", multigraph=True) + assert_edges_equal(list(P.edges()), [("A", "B")]) G = nx.DiGraph() - G.add_edge('A', 1) - G.add_edge(1, 'B') - G.add_edge('A', 2) - G.add_edge(2, 'B') - P = bipartite.projected_graph(G, 'AB') - assert_edges_equal(list(P.edges()), [('A', 'B')]) - P = bipartite.weighted_projected_graph(G, 'AB') - assert_edges_equal(list(P.edges()), [('A', 'B')]) - assert P['A']['B']['weight'] == 2 + G.add_edge("A", 1) + G.add_edge(1, "B") + G.add_edge("A", 2) + G.add_edge(2, "B") + P = bipartite.projected_graph(G, "AB") + assert_edges_equal(list(P.edges()), [("A", "B")]) + P = bipartite.weighted_projected_graph(G, "AB") + assert_edges_equal(list(P.edges()), [("A", "B")]) + assert P["A"]["B"]["weight"] == 2 - P = bipartite.projected_graph(G, 'AB', multigraph=True) - assert_edges_equal(list(P.edges()), [('A', 'B'), ('A', 'B')]) + P = bipartite.projected_graph(G, "AB", multigraph=True) + assert_edges_equal(list(P.edges()), [("A", "B"), ("A", "B")]) class TestBipartiteWeightedProjection: - @classmethod def setup_class(cls): # Tore Opsahl's example # http://toreopsahl.com/2009/05/01/projecting-two-mode-networks-onto-weighted-one-mode-networks/ cls.G = nx.Graph() - cls.G.add_edge('A', 1) - cls.G.add_edge('A', 2) - cls.G.add_edge('B', 1) - cls.G.add_edge('B', 2) - cls.G.add_edge('B', 3) - cls.G.add_edge('B', 4) - cls.G.add_edge('B', 5) - cls.G.add_edge('C', 1) - cls.G.add_edge('D', 3) - cls.G.add_edge('E', 4) - cls.G.add_edge('E', 5) - cls.G.add_edge('E', 6) - cls.G.add_edge('F', 6) + cls.G.add_edge("A", 1) + cls.G.add_edge("A", 2) + cls.G.add_edge("B", 1) + cls.G.add_edge("B", 2) + cls.G.add_edge("B", 3) + cls.G.add_edge("B", 4) + cls.G.add_edge("B", 5) + cls.G.add_edge("C", 1) + cls.G.add_edge("D", 3) + cls.G.add_edge("E", 4) + cls.G.add_edge("E", 5) + cls.G.add_edge("E", 6) + cls.G.add_edge("F", 6) # Graph based on figure 6 from Newman (2001) cls.N = nx.Graph() - cls.N.add_edge('A', 1) - cls.N.add_edge('A', 2) - cls.N.add_edge('A', 3) - cls.N.add_edge('B', 1) - cls.N.add_edge('B', 2) - cls.N.add_edge('B', 3) - cls.N.add_edge('C', 1) - cls.N.add_edge('D', 1) - cls.N.add_edge('E', 3) + cls.N.add_edge("A", 1) + cls.N.add_edge("A", 2) + cls.N.add_edge("A", 3) + cls.N.add_edge("B", 1) + cls.N.add_edge("B", 2) + cls.N.add_edge("B", 3) + cls.N.add_edge("C", 1) + cls.N.add_edge("D", 1) + cls.N.add_edge("E", 3) def test_project_weighted_shared(self): - edges = [('A', 'B', 2), - ('A', 'C', 1), - ('B', 'C', 1), - ('B', 'D', 1), - ('B', 'E', 2), - ('E', 'F', 1)] + edges = [ + ("A", "B", 2), + ("A", "C", 1), + ("B", "C", 1), + ("B", "D", 1), + ("B", "E", 2), + ("E", "F", 1), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.weighted_projected_graph(self.G, 'ABCDEF') + P = bipartite.weighted_projected_graph(self.G, "ABCDEF") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert P[u][v]['weight'] == Panswer[u][v]['weight'] - - edges = [('A', 'B', 3), - ('A', 'E', 1), - ('A', 'C', 1), - ('A', 'D', 1), - ('B', 'E', 1), - ('B', 'C', 1), - ('B', 'D', 1), - ('C', 'D', 1)] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + 
edges = [ + ("A", "B", 3), + ("A", "E", 1), + ("A", "C", 1), + ("A", "D", 1), + ("B", "E", 1), + ("B", "C", 1), + ("B", "D", 1), + ("C", "D", 1), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.weighted_projected_graph(self.N, 'ABCDE') + P = bipartite.weighted_projected_graph(self.N, "ABCDE") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert P[u][v]['weight'] == Panswer[u][v]['weight'] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] def test_project_weighted_newman(self): - edges = [('A', 'B', 1.5), - ('A', 'C', 0.5), - ('B', 'C', 0.5), - ('B', 'D', 1), - ('B', 'E', 2), - ('E', 'F', 1)] + edges = [ + ("A", "B", 1.5), + ("A", "C", 0.5), + ("B", "C", 0.5), + ("B", "D", 1), + ("B", "E", 2), + ("E", "F", 1), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.collaboration_weighted_projected_graph(self.G, 'ABCDEF') + P = bipartite.collaboration_weighted_projected_graph(self.G, "ABCDEF") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert P[u][v]['weight'] == Panswer[u][v]['weight'] - - edges = [('A', 'B', 11 / 6.0), - ('A', 'E', 1 / 2.0), - ('A', 'C', 1 / 3.0), - ('A', 'D', 1 / 3.0), - ('B', 'E', 1 / 2.0), - ('B', 'C', 1 / 3.0), - ('B', 'D', 1 / 3.0), - ('C', 'D', 1 / 3.0)] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 11 / 6.0), + ("A", "E", 1 / 2.0), + ("A", "C", 1 / 3.0), + ("A", "D", 1 / 3.0), + ("B", "E", 1 / 2.0), + ("B", "C", 1 / 3.0), + ("B", "D", 1 / 3.0), + ("C", "D", 1 / 3.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.collaboration_weighted_projected_graph(self.N, 'ABCDE') + P = bipartite.collaboration_weighted_projected_graph(self.N, "ABCDE") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert P[u][v]['weight'] == Panswer[u][v]['weight'] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] def test_project_weighted_ratio(self): - edges = [('A', 'B', 2 / 6.0), - ('A', 'C', 1 / 6.0), - ('B', 'C', 1 / 6.0), - ('B', 'D', 1 / 6.0), - ('B', 'E', 2 / 6.0), - ('E', 'F', 1 / 6.0)] + edges = [ + ("A", "B", 2 / 6.0), + ("A", "C", 1 / 6.0), + ("B", "C", 1 / 6.0), + ("B", "D", 1 / 6.0), + ("B", "E", 2 / 6.0), + ("E", "F", 1 / 6.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.weighted_projected_graph(self.G, 'ABCDEF', ratio=True) + P = bipartite.weighted_projected_graph(self.G, "ABCDEF", ratio=True) assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert P[u][v]['weight'] == Panswer[u][v]['weight'] - - edges = [('A', 'B', 3 / 3.0), - ('A', 'E', 1 / 3.0), - ('A', 'C', 1 / 3.0), - ('A', 'D', 1 / 3.0), - ('B', 'E', 1 / 3.0), - ('B', 'C', 1 / 3.0), - ('B', 'D', 1 / 3.0), - ('C', 'D', 1 / 3.0)] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 3 / 3.0), + ("A", "E", 1 / 3.0), + ("A", "C", 1 / 3.0), + ("A", "D", 1 / 3.0), + ("B", "E", 1 / 3.0), + ("B", "C", 1 / 3.0), + ("B", "D", 1 / 3.0), + ("C", "D", 1 / 3.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.weighted_projected_graph(self.N, 'ABCDE', ratio=True) + P = bipartite.weighted_projected_graph(self.N, "ABCDE", ratio=True) assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert P[u][v]['weight'] == Panswer[u][v]['weight'] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] def test_project_weighted_overlap(self): - edges 
= [('A', 'B', 2 / 2.0), - ('A', 'C', 1 / 1.0), - ('B', 'C', 1 / 1.0), - ('B', 'D', 1 / 1.0), - ('B', 'E', 2 / 3.0), - ('E', 'F', 1 / 1.0)] + edges = [ + ("A", "B", 2 / 2.0), + ("A", "C", 1 / 1.0), + ("B", "C", 1 / 1.0), + ("B", "D", 1 / 1.0), + ("B", "E", 2 / 3.0), + ("E", "F", 1 / 1.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.overlap_weighted_projected_graph(self.G, 'ABCDEF', jaccard=False) + P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF", jaccard=False) assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert P[u][v]['weight'] == Panswer[u][v]['weight'] - - edges = [('A', 'B', 3 / 3.0), - ('A', 'E', 1 / 1.0), - ('A', 'C', 1 / 1.0), - ('A', 'D', 1 / 1.0), - ('B', 'E', 1 / 1.0), - ('B', 'C', 1 / 1.0), - ('B', 'D', 1 / 1.0), - ('C', 'D', 1 / 1.0)] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 3 / 3.0), + ("A", "E", 1 / 1.0), + ("A", "C", 1 / 1.0), + ("A", "D", 1 / 1.0), + ("B", "E", 1 / 1.0), + ("B", "C", 1 / 1.0), + ("B", "D", 1 / 1.0), + ("C", "D", 1 / 1.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.overlap_weighted_projected_graph(self.N, 'ABCDE', jaccard=False) + P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE", jaccard=False) assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert P[u][v]['weight'] == Panswer[u][v]['weight'] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] def test_project_weighted_jaccard(self): - edges = [('A', 'B', 2 / 5.0), - ('A', 'C', 1 / 2.0), - ('B', 'C', 1 / 5.0), - ('B', 'D', 1 / 5.0), - ('B', 'E', 2 / 6.0), - ('E', 'F', 1 / 3.0)] + edges = [ + ("A", "B", 2 / 5.0), + ("A", "C", 1 / 2.0), + ("B", "C", 1 / 5.0), + ("B", "D", 1 / 5.0), + ("B", "E", 2 / 6.0), + ("E", "F", 1 / 3.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.overlap_weighted_projected_graph(self.G, 'ABCDEF') + P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in list(P.edges()): - assert P[u][v]['weight'] == Panswer[u][v]['weight'] - - edges = [('A', 'B', 3 / 3.0), - ('A', 'E', 1 / 3.0), - ('A', 'C', 1 / 3.0), - ('A', 'D', 1 / 3.0), - ('B', 'E', 1 / 3.0), - ('B', 'C', 1 / 3.0), - ('B', 'D', 1 / 3.0), - ('C', 'D', 1 / 1.0)] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 3 / 3.0), + ("A", "E", 1 / 3.0), + ("A", "C", 1 / 3.0), + ("A", "D", 1 / 3.0), + ("B", "E", 1 / 3.0), + ("B", "C", 1 / 3.0), + ("B", "D", 1 / 3.0), + ("C", "D", 1 / 1.0), + ] Panswer = nx.Graph() Panswer.add_weighted_edges_from(edges) - P = bipartite.overlap_weighted_projected_graph(self.N, 'ABCDE') + P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE") assert_edges_equal(list(P.edges()), Panswer.edges()) for u, v in P.edges(): - assert P[u][v]['weight'] == Panswer[u][v]['weight'] + assert P[u][v]["weight"] == Panswer[u][v]["weight"] def test_generic_weighted_projected_graph_simple(self): def shared(G, u, v): return len(set(G[u]) & set(G[v])) + B = nx.path_graph(5) - G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4], weight_function=shared) + G = bipartite.generic_weighted_projected_graph( + B, [0, 2, 4], weight_function=shared + ) assert_nodes_equal(list(G), [0, 2, 4]) - assert_edges_equal(list(list(G.edges(data=True))), - [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})]) + assert_edges_equal( + list(list(G.edges(data=True))), + [(0, 2, {"weight": 1}), 
(2, 4, {"weight": 1})], + ) G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4]) assert_nodes_equal(list(G), [0, 2, 4]) - assert_edges_equal(list(list(G.edges(data=True))), - [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})]) + assert_edges_equal( + list(list(G.edges(data=True))), + [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})], + ) B = nx.DiGraph() nx.add_path(B, range(5)) G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4]) assert_nodes_equal(list(G), [0, 2, 4]) - assert_edges_equal(list(G.edges(data=True)), - [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})]) + assert_edges_equal( + list(G.edges(data=True)), [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})] + ) def test_generic_weighted_projected_graph_custom(self): def jaccard(G, u, v): @@ -345,19 +371,22 @@ class TestBipartiteWeightedProjection: vnbrs = set(G[v]) return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs) - def my_weight(G, u, v, weight='weight'): + def my_weight(G, u, v, weight="weight"): w = 0 for nbr in set(G[u]) & set(G[v]): w += G.edges[u, nbr].get(weight, 1) + G.edges[v, nbr].get(weight, 1) return w + B = nx.bipartite.complete_bipartite_graph(2, 2) for i, (u, v) in enumerate(B.edges()): - B.edges[u, v]['weight'] = i + 1 - G = bipartite.generic_weighted_projected_graph(B, [0, 1], - weight_function=jaccard) - assert_edges_equal(list(G.edges(data=True)), [(0, 1, {'weight': 1.0})]) - G = bipartite.generic_weighted_projected_graph(B, [0, 1], - weight_function=my_weight) - assert_edges_equal(list(G.edges(data=True)), [(0, 1, {'weight': 10})]) + B.edges[u, v]["weight"] = i + 1 + G = bipartite.generic_weighted_projected_graph( + B, [0, 1], weight_function=jaccard + ) + assert_edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 1.0})]) + G = bipartite.generic_weighted_projected_graph( + B, [0, 1], weight_function=my_weight + ) + assert_edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 10})]) G = bipartite.generic_weighted_projected_graph(B, [0, 1]) - assert_edges_equal(list(G.edges(data=True)), [(0, 1, {'weight': 2})]) + assert_edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 2})]) diff --git a/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py b/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py index 3563540d..3c85e187 100644 --- a/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py +++ b/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py @@ -13,7 +13,7 @@ class TestSpectralBipartivity: @classmethod def setup_class(cls): global scipy - scipy = pytest.importorskip('scipy') + scipy = pytest.importorskip("scipy") def test_star_like(self): # star-like diff --git a/networkx/algorithms/boundary.py b/networkx/algorithms/boundary.py index 7ce7bfa4..bff8804f 100644 --- a/networkx/algorithms/boundary.py +++ b/networkx/algorithms/boundary.py @@ -10,11 +10,10 @@ nodes in *S* that are outside *S*. """ from itertools import chain -__all__ = ['edge_boundary', 'node_boundary'] +__all__ = ["edge_boundary", "node_boundary"] -def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, - default=None): +def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None): """Returns the edge boundary of `nbunch1`. 
The *edge boundary* of a set *S* with respect to a set *T* is the @@ -83,9 +82,11 @@ def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, if nbunch2 is None: return (e for e in edges if (e[0] in nset1) ^ (e[1] in nset1)) nset2 = set(nbunch2) - return (e for e in edges - if (e[0] in nset1 and e[1] in nset2) - or (e[1] in nset1 and e[0] in nset2)) + return ( + e + for e in edges + if (e[0] in nset1 and e[1] in nset2) or (e[1] in nset1 and e[0] in nset2) + ) def node_boundary(G, nbunch1, nbunch2=None): diff --git a/networkx/algorithms/bridges.py b/networkx/algorithms/bridges.py index 2340c9ce..5788e8fe 100644 --- a/networkx/algorithms/bridges.py +++ b/networkx/algorithms/bridges.py @@ -4,11 +4,11 @@ from itertools import chain import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['bridges', 'has_bridges', 'local_bridges'] +__all__ = ["bridges", "has_bridges", "local_bridges"] -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def bridges(G, root=None): """Generate all bridges in a graph. @@ -65,8 +65,8 @@ def bridges(G, root=None): yield u, v -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def has_bridges(G, root=None): """Decide whether a graph has any bridges. @@ -122,8 +122,8 @@ def has_bridges(G, root=None): return True -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def local_bridges(G, with_span=True, weight=None): """Iterate over local bridges of `G` optionally computing the span @@ -178,4 +178,4 @@ def local_bridges(G, with_span=True, weight=None): span = nx.shortest_path_length(G, u, v, weight=hide_edge) yield u, v, span except nx.NetworkXNoPath: - yield u, v, float('inf') + yield u, v, float("inf") diff --git a/networkx/algorithms/centrality/betweenness.py b/networkx/algorithms/centrality/betweenness.py index 76b3ad2a..e5968ba0 100644 --- a/networkx/algorithms/centrality/betweenness.py +++ b/networkx/algorithms/centrality/betweenness.py @@ -5,14 +5,14 @@ from itertools import count from networkx.utils import py_random_state from networkx.utils.decorators import not_implemented_for -__all__ = ['betweenness_centrality', 'edge_betweenness_centrality', - 'edge_betweenness'] +__all__ = ["betweenness_centrality", "edge_betweenness_centrality", "edge_betweenness"] @py_random_state(5) -@not_implemented_for('multigraph') -def betweenness_centrality(G, k=None, normalized=True, weight=None, - endpoints=False, seed=None): +@not_implemented_for("multigraph") +def betweenness_centrality( + G, k=None, normalized=True, weight=None, endpoints=False, seed=None +): r"""Compute the shortest-path betweenness centrality for nodes. 
Betweenness centrality of a node $v$ is the sum of the @@ -132,14 +132,19 @@ def betweenness_centrality(G, k=None, normalized=True, weight=None, else: betweenness = _accumulate_basic(betweenness, S, P, sigma, s) # rescaling - betweenness = _rescale(betweenness, len(G), normalized=normalized, - directed=G.is_directed(), k=k, endpoints=endpoints) + betweenness = _rescale( + betweenness, + len(G), + normalized=normalized, + directed=G.is_directed(), + k=k, + endpoints=endpoints, + ) return betweenness @py_random_state(4) -def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, - seed=None): +def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=None): r"""Compute betweenness centrality for edges. Betweenness centrality of an edge $e$ is the sum of the @@ -223,10 +228,12 @@ def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, # rescaling for n in G: # remove nodes to only return edges del betweenness[n] - betweenness = _rescale_e(betweenness, len(G), normalized=normalized, - directed=G.is_directed()) + betweenness = _rescale_e( + betweenness, len(G), normalized=normalized, directed=G.is_directed() + ) return betweenness + # obsolete name @@ -236,17 +243,18 @@ def edge_betweenness(G, k=None, normalized=True, weight=None, seed=None): # helpers for betweenness centrality + def _single_source_shortest_path_basic(G, s): S = [] P = {} for v in G: P[v] = [] - sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G + sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G D = {} sigma[s] = 1.0 D[s] = 0 Q = [s] - while Q: # use BFS to find shortest paths + while Q: # use BFS to find shortest paths v = Q.pop(0) S.append(v) Dv = D[v] @@ -255,7 +263,7 @@ def _single_source_shortest_path_basic(G, s): if w not in D: Q.append(w) D[w] = Dv + 1 - if D[w] == Dv + 1: # this is a shortest path, count paths + if D[w] == Dv + 1: # this is a shortest path, count paths sigma[w] += sigmav P[w].append(v) # predecessors return S, P, sigma @@ -267,14 +275,14 @@ def _single_source_dijkstra_path_basic(G, s, weight): P = {} for v in G: P[v] = [] - sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G + sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G D = {} sigma[s] = 1.0 push = heappush pop = heappop seen = {s: 0} c = count() - Q = [] # use Q as heap with (distance,node id) tuples + Q = [] # use Q as heap with (distance,node id) tuples push(Q, (0, next(c), s, s)) while Q: (dist, _, pred, v) = pop(Q) @@ -338,8 +346,7 @@ def _accumulate_edges(betweenness, S, P, sigma, s): return betweenness -def _rescale(betweenness, n, normalized, - directed=False, k=None, endpoints=False): +def _rescale(betweenness, n, normalized, directed=False, k=None, endpoints=False): if normalized: if endpoints: if n < 2: diff --git a/networkx/algorithms/centrality/betweenness_subset.py b/networkx/algorithms/centrality/betweenness_subset.py index eecb24ac..bb28c7c7 100644 --- a/networkx/algorithms/centrality/betweenness_subset.py +++ b/networkx/algorithms/centrality/betweenness_subset.py @@ -1,16 +1,20 @@ """Betweenness centrality measures for subsets of nodes.""" -from networkx.algorithms.centrality.betweenness import\ - _single_source_dijkstra_path_basic as dijkstra -from networkx.algorithms.centrality.betweenness import\ - _single_source_shortest_path_basic as shortest_path +from networkx.algorithms.centrality.betweenness import ( + _single_source_dijkstra_path_basic as dijkstra, +) +from networkx.algorithms.centrality.betweenness import ( + _single_source_shortest_path_basic as 
shortest_path, +) -__all__ = ['betweenness_centrality_subset', 'betweenness_centrality_source', - 'edge_betweenness_centrality_subset'] +__all__ = [ + "betweenness_centrality_subset", + "betweenness_centrality_source", + "edge_betweenness_centrality_subset", +] -def betweenness_centrality_subset(G, sources, targets, normalized=False, - weight=None): +def betweenness_centrality_subset(G, sources, targets, normalized=False, weight=None): r"""Compute betweenness centrality for a subset of nodes. .. math:: @@ -105,8 +109,9 @@ def betweenness_centrality_subset(G, sources, targets, normalized=False, return b -def edge_betweenness_centrality_subset(G, sources, targets, normalized=False, - weight=None): +def edge_betweenness_centrality_subset( + G, sources, targets, normalized=False, weight=None +): r"""Compute betweenness centrality for edges for a subset of nodes. .. math:: @@ -187,13 +192,11 @@ def edge_betweenness_centrality_subset(G, sources, targets, normalized=False, # obsolete name -def betweenness_centrality_source(G, normalized=True, weight=None, - sources=None): +def betweenness_centrality_source(G, normalized=True, weight=None, sources=None): if sources is None: sources = G.nodes() targets = list(G) - return betweenness_centrality_subset(G, sources, targets, normalized, - weight) + return betweenness_centrality_subset(G, sources, targets, normalized, weight) def _accumulate_subset(betweenness, S, P, sigma, s, targets): diff --git a/networkx/algorithms/centrality/closeness.py b/networkx/algorithms/centrality/closeness.py index 878dcb2e..dd842685 100644 --- a/networkx/algorithms/centrality/closeness.py +++ b/networkx/algorithms/centrality/closeness.py @@ -6,7 +6,7 @@ import networkx as nx from networkx.exception import NetworkXError from networkx.utils.decorators import not_implemented_for -__all__ = ['closeness_centrality', 'incremental_closeness_centrality'] +__all__ = ["closeness_centrality", "incremental_closeness_centrality"] def closeness_centrality(G, u=None, distance=None, wf_improved=True): @@ -99,7 +99,8 @@ def closeness_centrality(G, u=None, distance=None, wf_improved=True): if distance is not None: # use Dijkstra's algorithm with specified attribute as edge weight path_length = functools.partial( - nx.single_source_dijkstra_path_length, weight=distance) + nx.single_source_dijkstra_path_length, weight=distance + ) else: path_length = nx.single_source_shortest_path_length @@ -126,12 +127,10 @@ def closeness_centrality(G, u=None, distance=None, wf_improved=True): return closeness_centrality -@not_implemented_for('directed') -def incremental_closeness_centrality(G, - edge, - prev_cc=None, - insertion=True, - wf_improved=True): +@not_implemented_for("directed") +def incremental_closeness_centrality( + G, edge, prev_cc=None, insertion=True, wf_improved=True +): r"""Incremental closeness centrality for nodes. 
Compute closeness centrality for nodes using level-based work filtering @@ -223,7 +222,7 @@ def incremental_closeness_centrality(G, http://sariyuce.com/papers/bigdata13.pdf """ if prev_cc is not None and set(prev_cc.keys()) != set(G.nodes()): - raise NetworkXError('prev_cc and G do not have the same nodes') + raise NetworkXError("prev_cc and G do not have the same nodes") # Unpack edge (u, v) = edge @@ -248,7 +247,7 @@ def incremental_closeness_centrality(G, nodes = G.nodes() closeness_centrality = {} for n in nodes: - if (n in du and n in dv and abs(du[n] - dv[n]) <= 1): + if n in du and n in dv and abs(du[n] - dv[n]) <= 1: closeness_centrality[n] = prev_cc[n] else: sp = path_length(G, n) diff --git a/networkx/algorithms/centrality/current_flow_betweenness.py b/networkx/algorithms/centrality/current_flow_betweenness.py index 2d8e66f2..106ceb4a 100644 --- a/networkx/algorithms/centrality/current_flow_betweenness.py +++ b/networkx/algorithms/centrality/current_flow_betweenness.py @@ -7,22 +7,31 @@ from networkx.algorithms.centrality.flow_matrix import ( laplacian_sparse_matrix, SuperLUInverseLaplacian, ) -from networkx.utils import (not_implemented_for, - reverse_cuthill_mckee_ordering, - py_random_state) +from networkx.utils import ( + not_implemented_for, + reverse_cuthill_mckee_ordering, + py_random_state, +) -__all__ = ['current_flow_betweenness_centrality', - 'approximate_current_flow_betweenness_centrality', - 'edge_current_flow_betweenness_centrality'] +__all__ = [ + "current_flow_betweenness_centrality", + "approximate_current_flow_betweenness_centrality", + "edge_current_flow_betweenness_centrality", +] @py_random_state(7) -@not_implemented_for('directed') -def approximate_current_flow_betweenness_centrality(G, normalized=True, - weight=None, - dtype=float, solver='full', - epsilon=0.5, kmax=10000, - seed=None): +@not_implemented_for("directed") +def approximate_current_flow_betweenness_centrality( + G, + normalized=True, + weight=None, + dtype=float, + solver="full", + epsilon=0.5, + kmax=10000, + seed=None, +): r"""Compute the approximate current-flow betweenness centrality for nodes. 
Approximates the current-flow betweenness centrality within absolute @@ -89,29 +98,33 @@ def approximate_current_flow_betweenness_centrality(G, normalized=True, try: import numpy as np except ImportError as e: - raise ImportError('current_flow_betweenness_centrality requires NumPy ' - 'http://numpy.org/') from e + raise ImportError( + "current_flow_betweenness_centrality requires NumPy " "http://numpy.org/" + ) from e if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - solvername = {"full": FullInverseLaplacian, - "lu": SuperLUInverseLaplacian, - "cg": CGInverseLaplacian} + solvername = { + "full": FullInverseLaplacian, + "lu": SuperLUInverseLaplacian, + "cg": CGInverseLaplacian, + } n = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) - L = laplacian_sparse_matrix(H, nodelist=range(n), weight=weight, - dtype=dtype, format='csc') + L = laplacian_sparse_matrix( + H, nodelist=range(n), weight=weight, dtype=dtype, format="csc" + ) C = solvername[solver](L, dtype=dtype) # initialize solver betweenness = dict.fromkeys(H, 0.0) nb = (n - 1.0) * (n - 2.0) # normalization factor cstar = n * (n - 1) / nb l = 1 # parameter in approximation, adjustable - k = l * int(np.ceil((cstar / epsilon)**2 * np.log(n))) + k = l * int(np.ceil((cstar / epsilon) ** 2 * np.log(n))) if k > kmax: msg = f"Number random pairs k>kmax ({k}>{kmax}) " - raise nx.NetworkXError(msg, 'Increase kmax or epsilon') + raise nx.NetworkXError(msg, "Increase kmax or epsilon") cstar2k = cstar / (2 * k) for i in range(k): s, t = seed.sample(range(n), 2) @@ -133,9 +146,10 @@ def approximate_current_flow_betweenness_centrality(G, normalized=True, return {ordering[k]: float(v * factor) for k, v in betweenness.items()} -@not_implemented_for('directed') -def current_flow_betweenness_centrality(G, normalized=True, weight=None, - dtype=float, solver='full'): +@not_implemented_for("directed") +def current_flow_betweenness_centrality( + G, normalized=True, weight=None, dtype=float, solver="full" +): r"""Compute current-flow betweenness centrality for nodes. Current-flow betweenness centrality uses an electrical current @@ -212,8 +226,7 @@ def current_flow_betweenness_centrality(G, normalized=True, weight=None, # this could be done without a copy if we really wanted to H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H - for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, - solver=solver): + for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): pos = dict(zip(row.argsort()[::-1], range(n))) for i in range(n): betweenness[s] += (i - pos[i]) * row[i] @@ -227,10 +240,10 @@ def current_flow_betweenness_centrality(G, normalized=True, weight=None, return {ordering[k]: v for k, v in betweenness.items()} -@not_implemented_for('directed') -def edge_current_flow_betweenness_centrality(G, normalized=True, - weight=None, - dtype=float, solver='full'): +@not_implemented_for("directed") +def edge_current_flow_betweenness_centrality( + G, normalized=True, weight=None, dtype=float, solver="full" +): r"""Compute current-flow betweenness centrality for edges. Current-flow betweenness centrality uses an electrical current @@ -306,6 +319,7 @@ def edge_current_flow_betweenness_centrality(G, normalized=True, M. E. J. 
Newman, Social Networks 27, 39-54 (2005). """ from networkx.utils import reverse_cuthill_mckee_ordering + if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") n = G.number_of_nodes() @@ -319,12 +333,10 @@ def edge_current_flow_betweenness_centrality(G, normalized=True, nb = (n - 1.0) * (n - 2.0) # normalization factor else: nb = 2.0 - for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, - solver=solver): + for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): pos = dict(zip(row.argsort()[::-1], range(1, n + 1))) for i in range(n): betweenness[e] += (i + 1 - pos[i]) * row[i] betweenness[e] += (n - i - pos[i]) * row[i] betweenness[e] /= nb - return {(ordering[s], ordering[t]): float(v) - for (s, t), v in betweenness.items()} + return {(ordering[s], ordering[t]): float(v) for (s, t), v in betweenness.items()} diff --git a/networkx/algorithms/centrality/current_flow_betweenness_subset.py b/networkx/algorithms/centrality/current_flow_betweenness_subset.py index 7dd84a55..a9286077 100644 --- a/networkx/algorithms/centrality/current_flow_betweenness_subset.py +++ b/networkx/algorithms/centrality/current_flow_betweenness_subset.py @@ -3,15 +3,16 @@ import networkx as nx from networkx.algorithms.centrality.flow_matrix import flow_matrix_row from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering -__all__ = ['current_flow_betweenness_centrality_subset', - 'edge_current_flow_betweenness_centrality_subset'] +__all__ = [ + "current_flow_betweenness_centrality_subset", + "edge_current_flow_betweenness_centrality_subset", +] -@not_implemented_for('directed') -def current_flow_betweenness_centrality_subset(G, sources, targets, - normalized=True, - weight=None, - dtype=float, solver='lu'): +@not_implemented_for("directed") +def current_flow_betweenness_centrality_subset( + G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu" +): r"""Compute current-flow betweenness centrality for subsets of nodes. Current-flow betweenness centrality uses an electrical current @@ -87,11 +88,13 @@ def current_flow_betweenness_centrality_subset(G, sources, targets, M. E. J. Newman, Social Networks 27, 39-54 (2005). 
""" from networkx.utils import reverse_cuthill_mckee_ordering + try: import numpy as np except ImportError as e: - raise ImportError('current_flow_betweenness_centrality requires NumPy ', - 'http://numpy.org/') from e + raise ImportError( + "current_flow_betweenness_centrality requires NumPy ", "http://numpy.org/" + ) from e if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") n = G.number_of_nodes() @@ -101,8 +104,7 @@ def current_flow_betweenness_centrality_subset(G, sources, targets, mapping = dict(zip(ordering, range(n))) H = nx.relabel_nodes(G, mapping) betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H - for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, - solver=solver): + for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): for ss in sources: i = mapping[ss] for tt in targets: @@ -118,11 +120,10 @@ def current_flow_betweenness_centrality_subset(G, sources, targets, return {ordering[k]: v for k, v in betweenness.items()} -@not_implemented_for('directed') -def edge_current_flow_betweenness_centrality_subset(G, sources, targets, - normalized=True, - weight=None, - dtype=float, solver='lu'): +@not_implemented_for("directed") +def edge_current_flow_betweenness_centrality_subset( + G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu" +): r"""Compute current-flow betweenness centrality for edges using subsets of nodes. @@ -200,8 +201,9 @@ def edge_current_flow_betweenness_centrality_subset(G, sources, targets, try: import numpy as np except ImportError as e: - raise ImportError('current_flow_betweenness_centrality requires NumPy ' - 'http://numpy.org/') from e + raise ImportError( + "current_flow_betweenness_centrality requires NumPy " "http://numpy.org/" + ) from e if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") n = G.number_of_nodes() @@ -216,13 +218,11 @@ def edge_current_flow_betweenness_centrality_subset(G, sources, targets, nb = (n - 1.0) * (n - 2.0) # normalization factor else: nb = 2.0 - for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, - solver=solver): + for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): for ss in sources: i = mapping[ss] for tt in targets: j = mapping[tt] betweenness[e] += 0.5 * np.abs(row[i] - row[j]) betweenness[e] /= nb - return {(ordering[s], ordering[t]): v - for (s, t), v in betweenness.items()} + return {(ordering[s], ordering[t]): v for (s, t), v in betweenness.items()} diff --git a/networkx/algorithms/centrality/current_flow_closeness.py b/networkx/algorithms/centrality/current_flow_closeness.py index c962493c..518b9f59 100644 --- a/networkx/algorithms/centrality/current_flow_closeness.py +++ b/networkx/algorithms/centrality/current_flow_closeness.py @@ -9,12 +9,11 @@ from networkx.algorithms.centrality.flow_matrix import ( SuperLUInverseLaplacian, ) -__all__ = ['current_flow_closeness_centrality', 'information_centrality'] +__all__ = ["current_flow_closeness_centrality", "information_centrality"] -@not_implemented_for('directed') -def current_flow_closeness_centrality(G, weight=None, - dtype=float, solver='lu'): +@not_implemented_for("directed") +def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"): """Compute current-flow closeness centrality for nodes. 
Current-flow closeness centrality is a variant of closeness @@ -69,9 +68,11 @@ def current_flow_closeness_centrality(G, weight=None, """ if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") - solvername = {"full": FullInverseLaplacian, - "lu": SuperLUInverseLaplacian, - "cg": CGInverseLaplacian} + solvername = { + "full": FullInverseLaplacian, + "lu": SuperLUInverseLaplacian, + "cg": CGInverseLaplacian, + } n = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering @@ -79,8 +80,9 @@ def current_flow_closeness_centrality(G, weight=None, H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H n = H.number_of_nodes() - L = laplacian_sparse_matrix(H, nodelist=range(n), weight=weight, - dtype=dtype, format='csc') + L = laplacian_sparse_matrix( + H, nodelist=range(n), weight=weight, dtype=dtype, format="csc" + ) C2 = solvername[solver](L, width=1, dtype=dtype) # initialize solver for v in H: col = C2.get_row(v) diff --git a/networkx/algorithms/centrality/degree_alg.py b/networkx/algorithms/centrality/degree_alg.py index 60337dae..a7e7b925 100644 --- a/networkx/algorithms/centrality/degree_alg.py +++ b/networkx/algorithms/centrality/degree_alg.py @@ -1,9 +1,7 @@ """Degree centrality measures.""" from networkx.utils.decorators import not_implemented_for -__all__ = ['degree_centrality', - 'in_degree_centrality', - 'out_degree_centrality'] +__all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"] def degree_centrality(G): @@ -43,7 +41,7 @@ def degree_centrality(G): return centrality -@not_implemented_for('undirected') +@not_implemented_for("undirected") def in_degree_centrality(G): """Compute the in-degree centrality for nodes. @@ -86,7 +84,7 @@ def in_degree_centrality(G): return centrality -@not_implemented_for('undirected') +@not_implemented_for("undirected") def out_degree_centrality(G): """Compute the out-degree centrality for nodes.
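A minimal usage sketch of the degree-centrality helpers reformatted above; illustrative only, not part of the commit. Scores are normalized by n - 1, as the docstrings describe:

import networkx as nx

G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])  # n = 3, so each score is degree / 2
print(nx.degree_centrality(G))      # {0: 1.0, 1: 1.0, 2: 1.0} (in-degree + out-degree)
print(nx.in_degree_centrality(G))   # {0: 0.0, 1: 0.5, 2: 1.0}
print(nx.out_degree_centrality(G))  # {0: 1.0, 1: 0.5, 2: 0.0}
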
diff --git a/networkx/algorithms/centrality/dispersion.py b/networkx/algorithms/centrality/dispersion.py index 8bf06ab1..03fcd3ac 100644 --- a/networkx/algorithms/centrality/dispersion.py +++ b/networkx/algorithms/centrality/dispersion.py @@ -1,6 +1,6 @@ from itertools import combinations -__all__ = ['dispersion'] +__all__ = ["dispersion"] def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0): @@ -66,9 +66,9 @@ def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0): if normalized: if embededness + c != 0: - norm_disp = ((total + b)**alpha) / (embededness + c) + norm_disp = ((total + b) ** alpha) / (embededness + c) else: - norm_disp = (total + b)**alpha + norm_disp = (total + b) ** alpha dispersion = norm_disp else: diff --git a/networkx/algorithms/centrality/flow_matrix.py b/networkx/algorithms/centrality/flow_matrix.py index fb3ebd6b..dbf5e285 100644 --- a/networkx/algorithms/centrality/flow_matrix.py +++ b/networkx/algorithms/centrality/flow_matrix.py @@ -3,15 +3,19 @@ import networkx as nx -def flow_matrix_row(G, weight=None, dtype=float, solver='lu'): +def flow_matrix_row(G, weight=None, dtype=float, solver="lu"): # Generate a row of the current-flow matrix import numpy as np - solvername = {"full": FullInverseLaplacian, - "lu": SuperLUInverseLaplacian, - "cg": CGInverseLaplacian} + + solvername = { + "full": FullInverseLaplacian, + "lu": SuperLUInverseLaplacian, + "cg": CGInverseLaplacian, + } n = G.number_of_nodes() - L = laplacian_sparse_matrix(G, nodelist=range(n), weight=weight, - dtype=dtype, format='csc') + L = laplacian_sparse_matrix( + G, nodelist=range(n), weight=weight, dtype=dtype, format="csc" + ) C = solvername[solver](L, dtype=dtype) # initialize solver w = C.w # w is the Laplacian matrix width # row-by-row flow matrix @@ -33,6 +37,7 @@ class InverseLaplacian: def __init__(self, L, width=None, dtype=None): global np import numpy as np + (n, n) = L.shape self.dtype = dtype self.n = n @@ -91,6 +96,7 @@ class FullInverseLaplacian(InverseLaplacian): class SuperLUInverseLaplacian(InverseLaplacian): def init_solver(self, L): from scipy.sparse import linalg + self.lusolve = linalg.factorized(self.L1.tocsc()) def solve_inverse(self, r): @@ -108,6 +114,7 @@ class CGInverseLaplacian(InverseLaplacian): def init_solver(self, L): global linalg from scipy.sparse import linalg + ilu = linalg.spilu(self.L1.tocsc()) n = self.n - 1 self.M = linalg.LinearOperator(shape=(n, n), matvec=ilu.solve) @@ -124,12 +131,13 @@ class CGInverseLaplacian(InverseLaplacian): # graph laplacian, sparse version, will move to linalg/laplacianmatrix.py -def laplacian_sparse_matrix(G, nodelist=None, weight=None, dtype=None, - format='csr'): +def laplacian_sparse_matrix(G, nodelist=None, weight=None, dtype=None, format="csr"): import numpy as np import scipy.sparse - A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, - dtype=dtype, format=format) + + A = nx.to_scipy_sparse_matrix( + G, nodelist=nodelist, weight=weight, dtype=dtype, format=format + ) (n, n) = A.shape data = np.asarray(A.sum(axis=1).T) D = scipy.sparse.spdiags(data, 0, n, n, format=format) diff --git a/networkx/algorithms/centrality/group.py b/networkx/algorithms/centrality/group.py index 69d6f4d2..bd1d5f9e 100644 --- a/networkx/algorithms/centrality/group.py +++ b/networkx/algorithms/centrality/group.py @@ -6,11 +6,13 @@ import networkx as nx from networkx.utils.decorators import not_implemented_for -__all__ = ['group_betweenness_centrality', - 'group_closeness_centrality', - 
'group_degree_centrality', - 'group_in_degree_centrality', - 'group_out_degree_centrality'] +__all__ = [ + "group_betweenness_centrality", + "group_closeness_centrality", + "group_degree_centrality", + "group_in_degree_centrality", + "group_out_degree_centrality", +] def group_betweenness_centrality(G, C, normalized=True, weight=None): @@ -96,16 +98,18 @@ def group_betweenness_centrality(G, C, normalized=True, weight=None): V = set(G) # set of nodes in G C = set(C) # set of nodes in C (group) if len(C - V) != 0: # element(s) of C not in V - raise nx.NodeNotFound('The node(s) ' + str(list(C - V)) + ' are not ' - 'in the graph.') + raise nx.NodeNotFound( + "The node(s) " + str(list(C - V)) + " are not " "in the graph." + ) V_C = V - C # set of nodes in V but not in C # accumulation for pair in combinations(V_C, 2): # (s, t) pairs of V_C try: paths = 0 paths_through_C = 0 - for path in nx.all_shortest_paths(G, source=pair[0], - target=pair[1], weight=weight): + for path in nx.all_shortest_paths( + G, source=pair[0], target=pair[1], weight=weight + ): if set(path) & C: paths_through_C += 1 paths += 1 @@ -206,8 +210,7 @@ def group_closeness_centrality(G, S, weight=None): V = set(G) # set of nodes in G S = set(S) # set of nodes in group S V_S = V - S # set of nodes in V but not S - shortest_path_lengths = nx.multi_source_dijkstra_path_length(G, S, - weight=weight) + shortest_path_lengths = nx.multi_source_dijkstra_path_length(G, S, weight=weight) # accumulation for v in V_S: try: @@ -266,13 +269,12 @@ def group_degree_centrality(G, S): Journal of Mathematical Sociology. 23(3): 181-201. 1999. http://www.analytictech.com/borgatti/group_centrality.htm """ - centrality = len(set().union(*list(set(G.neighbors(i)) - for i in S)) - set(S)) - centrality /= (len(G.nodes()) - len(S)) + centrality = len(set().union(*list(set(G.neighbors(i)) for i in S)) - set(S)) + centrality /= len(G.nodes()) - len(S) return centrality -@not_implemented_for('undirected') +@not_implemented_for("undirected") def group_in_degree_centrality(G, S): """Compute the group in-degree centrality for a group of nodes. @@ -318,7 +320,7 @@ def group_in_degree_centrality(G, S): return group_degree_centrality(G.reverse(), S) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def group_out_degree_centrality(G, S): """Compute the group out-degree centrality for a group of nodes. 
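A short usage sketch of the group-centrality API whose formatting changes above; illustrative only, and it assumes NetworkX >= 2.2, where these helpers are exposed at the package top level:

import networkx as nx

G = nx.path_graph(5)  # 0-1-2-3-4
S = [1, 2]
# Non-group nodes adjacent to S are {0, 3}, out of V - S = {0, 3, 4}.
print(nx.group_degree_centrality(G, S))     # 2/3
# |V - S| divided by the summed distances from S to {0, 3, 4}: 3 / (1 + 1 + 2).
print(nx.group_closeness_centrality(G, S))  # 0.75
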
diff --git a/networkx/algorithms/centrality/harmonic.py b/networkx/algorithms/centrality/harmonic.py index dac81868..5b23210c 100644 --- a/networkx/algorithms/centrality/harmonic.py +++ b/networkx/algorithms/centrality/harmonic.py @@ -3,7 +3,7 @@ from functools import partial import networkx as nx -__all__ = ['harmonic_centrality'] +__all__ = ["harmonic_centrality"] def harmonic_centrality(G, nbunch=None, distance=None): @@ -57,5 +57,7 @@ def harmonic_centrality(G, nbunch=None, distance=None): if G.is_directed(): G = G.reverse() spl = partial(nx.shortest_path_length, G, weight=distance) - return {u: sum(1 / d if d > 0 else 0 for v, d in spl(source=u).items()) - for u in G.nbunch_iter(nbunch)} + return { + u: sum(1 / d if d > 0 else 0 for v, d in spl(source=u).items()) + for u in G.nbunch_iter(nbunch) + } diff --git a/networkx/algorithms/centrality/katz.py b/networkx/algorithms/centrality/katz.py index b88d8273..304c10c9 100644 --- a/networkx/algorithms/centrality/katz.py +++ b/networkx/algorithms/centrality/katz.py @@ -4,12 +4,20 @@ from math import sqrt import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['katz_centrality', 'katz_centrality_numpy'] - - -@not_implemented_for('multigraph') -def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6, - nstart=None, normalized=True, weight=None): +__all__ = ["katz_centrality", "katz_centrality_numpy"] + + +@not_implemented_for("multigraph") +def katz_centrality( + G, + alpha=0.1, + beta=1.0, + max_iter=1000, + tol=1.0e-6, + nstart=None, + normalized=True, + weight=None, +): r"""Compute the Katz centrality for the nodes of the graph G. Katz centrality computes the centrality for a node based on the centrality @@ -151,8 +159,9 @@ def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6, except (TypeError, ValueError, AttributeError) as e: b = beta if set(beta) != set(G): - raise nx.NetworkXError('beta dictionary ' - 'must have a value for every node') from e + raise nx.NetworkXError( + "beta dictionary " "must have a value for every node" + ) from e # make up to max_iter iterations for i in range(max_iter): @@ -171,7 +180,7 @@ def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6, if normalized: # normalize vector try: - s = 1.0 / sqrt(sum(v**2 for v in x.values())) + s = 1.0 / sqrt(sum(v ** 2 for v in x.values())) # this should never be zero? except ZeroDivisionError: s = 1.0 @@ -183,9 +192,8 @@ def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6, raise nx.PowerIterationFailedConvergence(max_iter) -@not_implemented_for('multigraph') -def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, - weight=None): +@not_implemented_for("multigraph") +def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None): r"""Compute the Katz centrality for the graph G. 
Katz centrality computes the centrality for a node based on the centrality @@ -297,21 +305,22 @@ def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, try: import numpy as np except ImportError as e: - raise ImportError('Requires NumPy: http://numpy.org/') from e + raise ImportError("Requires NumPy: http://numpy.org/") from e if len(G) == 0: return {} try: nodelist = beta.keys() if set(nodelist) != set(G): - raise nx.NetworkXError('beta dictionary ' - 'must have a value for every node') + raise nx.NetworkXError( + "beta dictionary " "must have a value for every node" + ) b = np.array(list(beta.values()), dtype=float) except AttributeError: nodelist = list(G) try: b = np.ones((len(nodelist), 1)) * float(beta) except (TypeError, ValueError, AttributeError) as e: - raise nx.NetworkXError('beta must be a number') from e + raise nx.NetworkXError("beta must be a number") from e A = nx.adj_matrix(G, nodelist=nodelist, weight=weight).todense().T n = A.shape[0] diff --git a/networkx/algorithms/centrality/load.py b/networkx/algorithms/centrality/load.py index 3f07edd2..6c50c68e 100644 --- a/networkx/algorithms/centrality/load.py +++ b/networkx/algorithms/centrality/load.py @@ -3,11 +3,10 @@ from operator import itemgetter import networkx as nx -__all__ = ['load_centrality', 'edge_load_centrality'] +__all__ = ["load_centrality", "edge_load_centrality"] -def newman_betweenness_centrality(G, v=None, cutoff=None, - normalized=True, weight=None): +def newman_betweenness_centrality(G, v=None, cutoff=None, normalized=True, weight=None): """Compute load centrality for nodes. The load centrality of a node is the fraction of all shortest @@ -55,7 +54,7 @@ def newman_betweenness_centrality(G, v=None, cutoff=None, Physical Review Letters 87(27):1–4, 2001. http://phya.snu.ac.kr/~dkim/PRL87278701.pdf """ - if v is not None: # only one node + if v is not None: # only one node betweenness = 0.0 for source in G: ubetween = _node_betweenness(G, source, cutoff, False, weight) @@ -82,8 +81,7 @@ def newman_betweenness_centrality(G, v=None, cutoff=None, return betweenness # all nodes -def _node_betweenness(G, source, cutoff=False, normalized=True, - weight=None): +def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None): """Node betweenness_centrality helper: See betweenness_centrality for what you probably want. @@ -100,11 +98,9 @@ def _node_betweenness(G, source, cutoff=False, normalized=True, """ # get the predecessor and path length data if weight is None: - (pred, length) = nx.predecessor(G, source, cutoff=cutoff, - return_seen=True) + (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True) else: - (pred, length) = nx.dijkstra_predecessor_and_distance(G, source, - cutoff, weight) + (pred, length) = nx.dijkstra_predecessor_and_distance(G, source, cutoff, weight) # order the nodes by path length onodes = [(l, vert) for (vert, l) in length.items()] @@ -118,9 +114,9 @@ def _node_betweenness(G, source, cutoff=False, normalized=True, v = onodes.pop() if v in pred: num_paths = len(pred[v]) # Discount betweenness if more than - for x in pred[v]: # one shortest path. + for x in pred[v]: # one shortest path. 
if x == source: # stop if hit source because all remaining v - break # also have pred[v]==[source] + break # also have pred[v]==[source] between[x] += between[v] / float(num_paths) # remove source for v in between: @@ -186,7 +182,7 @@ def _edge_betweenness(G, source, nodes=None, cutoff=False): between[(u, v)] = 1.0 between[(v, u)] = 1.0 - while onodes: # work through all paths + while onodes: # work through all paths v = onodes.pop() if v in pred: # Discount betweenness if more than one shortest path. diff --git a/networkx/algorithms/centrality/percolation.py b/networkx/algorithms/centrality/percolation.py index 9488b745..5867b54a 100644 --- a/networkx/algorithms/centrality/percolation.py +++ b/networkx/algorithms/centrality/percolation.py @@ -2,16 +2,17 @@ import networkx as nx -from networkx.algorithms.centrality.betweenness import\ - _single_source_dijkstra_path_basic as dijkstra -from networkx.algorithms.centrality.betweenness import\ - _single_source_shortest_path_basic as shortest_path +from networkx.algorithms.centrality.betweenness import ( + _single_source_dijkstra_path_basic as dijkstra, +) +from networkx.algorithms.centrality.betweenness import ( + _single_source_shortest_path_basic as shortest_path, +) -__all__ = ['percolation_centrality'] +__all__ = ["percolation_centrality"] -def percolation_centrality(G, attribute='percolation', - states=None, weight=None): +def percolation_centrality(G, attribute="percolation", states=None, weight=None): r"""Compute the percolation centrality for nodes. Percolation centrality of a node $v$, at a given time, is defined @@ -96,8 +97,9 @@ def percolation_centrality(G, attribute='percolation', else: # use Dijkstra's algorithm S, P, sigma = dijkstra(G, s, weight) # accumulation - percolation = _accumulate_percolation(percolation, G, S, P, sigma, s, - states, p_sigma_x_t) + percolation = _accumulate_percolation( + percolation, G, S, P, sigma, s, states, p_sigma_x_t + ) n = len(G) @@ -107,8 +109,7 @@ def percolation_centrality(G, attribute='percolation', return percolation -def _accumulate_percolation(percolation, G, S, P, sigma, s, - states, p_sigma_x_t): +def _accumulate_percolation(percolation, G, S, P, sigma, s, states, p_sigma_x_t): delta = dict.fromkeys(S, 0) while S: w = S.pop() diff --git a/networkx/algorithms/centrality/reaching.py b/networkx/algorithms/centrality/reaching.py index 488bf480..e5969778 100644 --- a/networkx/algorithms/centrality/reaching.py +++ b/networkx/algorithms/centrality/reaching.py @@ -4,7 +4,7 @@ import networkx as nx from networkx.utils import pairwise -__all__ = ['global_reaching_centrality', 'local_reaching_centrality'] +__all__ = ["global_reaching_centrality", "local_reaching_centrality"] def _average_weight(G, path, weight=None): @@ -86,10 +86,10 @@ def global_reaching_centrality(G, weight=None, normalized=True): https://doi.org/10.1371/journal.pone.0033799 """ if nx.is_negatively_weighted(G, weight=weight): - raise nx.NetworkXError('edge weights must be positive') + raise nx.NetworkXError("edge weights must be positive") total_weight = G.size(weight=weight) if total_weight <= 0: - raise nx.NetworkXError('Size of G must be positive') + raise nx.NetworkXError("Size of G must be positive") # If provided, weights must be interpreted as connection strength # (so higher weights are more likely to be chosen). 
However, the @@ -101,16 +101,20 @@ def global_reaching_centrality(G, weight=None, normalized=True): # If weight is None, we leave it as-is so that the shortest path # algorithm can use a faster, unweighted algorithm. if weight is not None: - def as_distance(u, v, d): return total_weight / d.get(weight, 1) + + def as_distance(u, v, d): + return total_weight / d.get(weight, 1) + shortest_paths = nx.shortest_path(G, weight=as_distance) else: shortest_paths = nx.shortest_path(G) centrality = local_reaching_centrality # TODO This can be trivially parallelized. - lrc = [centrality(G, node, paths=paths, weight=weight, - normalized=normalized) - for node, paths in shortest_paths.items()] + lrc = [ + centrality(G, node, paths=paths, weight=weight, normalized=normalized) + for node, paths in shortest_paths.items() + ] max_lrc = max(lrc) return sum(max_lrc - c for c in lrc) / (len(G) - 1) @@ -177,13 +181,15 @@ def local_reaching_centrality(G, v, paths=None, weight=None, normalized=True): """ if paths is None: if nx.is_negatively_weighted(G, weight=weight): - raise nx.NetworkXError('edge weights must be positive') + raise nx.NetworkXError("edge weights must be positive") total_weight = G.size(weight=weight) if total_weight <= 0: - raise nx.NetworkXError('Size of G must be positive') + raise nx.NetworkXError("Size of G must be positive") if weight is not None: # Interpret weights as lengths. - def as_distance(u, v, d): return total_weight / d.get(weight, 1) + def as_distance(u, v, d): + return total_weight / d.get(weight, 1) + paths = nx.shortest_path(G, source=v, weight=as_distance) else: paths = nx.shortest_path(G, source=v) diff --git a/networkx/algorithms/centrality/second_order.py b/networkx/algorithms/centrality/second_order.py index 7b8b711e..513b38b4 100644 --- a/networkx/algorithms/centrality/second_order.py +++ b/networkx/algorithms/centrality/second_order.py @@ -1,4 +1,4 @@ -'''Copyright (c) 2015 – Thomson Licensing, SAS +"""Copyright (c) 2015 – Thomson Licensing, SAS Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the @@ -28,18 +28,18 @@ BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -''' +""" import networkx as nx from networkx.utils import not_implemented_for # Authors: Erwan Le Merrer (erwan.lemerrer@technicolor.com) -''' Second order centrality measure.''' +""" Second order centrality measure.""" -__all__ = ['second_order_centrality'] +__all__ = ["second_order_centrality"] -@not_implemented_for('directed') +@not_implemented_for("directed") def second_order_centrality(G): """Compute the second order centrality for nodes of G. 
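
The second_order.py hunks here are again pure restyling; second_order_centrality itself is untouched. As a minimal usage sketch of the function being reformatted (the star graph and the hub check are illustrative assumptions, not taken from this commit):

import networkx as nx

# Second order centrality is the standard deviation of the return
# times of a perpetual random walk on the graph; lower values mean
# more central nodes, so the hub of a star should score lowest.
G = nx.star_graph(4)  # node 0 connected to nodes 1..4
soc = nx.second_order_centrality(G)
assert min(soc, key=soc.get) == 0

The sketch assumes NumPy is installed and the graph is undirected and connected, which is exactly what the guards in the hunks below enforce.
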
@@ -99,7 +99,7 @@ def second_order_centrality(G): try: import numpy as np except ImportError as e: - raise ImportError('Requires NumPy: http://numpy.org/') from e + raise ImportError("Requires NumPy: http://numpy.org/") from e n = len(G) @@ -107,16 +107,16 @@ def second_order_centrality(G): raise nx.NetworkXException("Empty graph.") if not nx.is_connected(G): raise nx.NetworkXException("Non connected graph.") - if any(d.get('weight', 0) < 0 for u, v, d in G.edges(data=True)): + if any(d.get("weight", 0) < 0 for u, v, d in G.edges(data=True)): raise nx.NetworkXException("Graph has negative edge weights.") # balancing G for Metropolis-Hastings random walks G = nx.DiGraph(G) - in_deg = dict(G.in_degree(weight='weight')) + in_deg = dict(G.in_degree(weight="weight")) d_max = max(in_deg.values()) for i, deg in in_deg.items(): if deg < d_max: - G.add_edge(i, i, weight=d_max-deg) + G.add_edge(i, i, weight=d_max - deg) P = nx.to_numpy_matrix(G) P = P / P.sum(axis=1) # to transition probability matrix @@ -129,9 +129,10 @@ def second_order_centrality(G): M = np.empty([n, n]) for i in range(n): - M[:, i] = np.linalg.solve(np.identity(n) - _Qj(P, i), - np.ones([n, 1])[:, 0]) # eq 3 + M[:, i] = np.linalg.solve( + np.identity(n) - _Qj(P, i), np.ones([n, 1])[:, 0] + ) # eq 3 - return dict(zip(G.nodes, - [np.sqrt(2*np.sum(M[:, i])-n*(n+1)) for i in range(n)] - )) # eq 6 + return dict( + zip(G.nodes, [np.sqrt(2 * np.sum(M[:, i]) - n * (n + 1)) for i in range(n)]) + ) # eq 6 diff --git a/networkx/algorithms/centrality/tests/test_betweenness_centrality.py b/networkx/algorithms/centrality/tests/test_betweenness_centrality.py index ab0b2a24..f827427b 100644 --- a/networkx/algorithms/centrality/tests/test_betweenness_centrality.py +++ b/networkx/algorithms/centrality/tests/test_betweenness_centrality.py @@ -21,9 +21,7 @@ class TestBetweennessCentrality: def test_K5(self): """Betweenness centrality: K5""" G = nx.complete_graph(5) - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) + b = nx.betweenness_centrality(G, weight=None, normalized=False) b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -31,18 +29,12 @@ class TestBetweennessCentrality: def test_K5_endpoints(self): """Betweenness centrality: K5 endpoints""" G = nx.complete_graph(5) - b = nx.betweenness_centrality(G, - weight=None, - normalized=False, - endpoints=True) + b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) b_answer = {0: 4.0, 1: 4.0, 2: 4.0, 3: 4.0, 4: 4.0} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) # normalized = True case - b = nx.betweenness_centrality(G, - weight=None, - normalized=True, - endpoints=True) + b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True) b_answer = {0: 0.4, 1: 0.4, 2: 0.4, 3: 0.4, 4: 0.4} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -50,9 +42,7 @@ class TestBetweennessCentrality: def test_P3_normalized(self): """Betweenness centrality: P3 normalized""" G = nx.path_graph(3) - b = nx.betweenness_centrality(G, - weight=None, - normalized=True) + b = nx.betweenness_centrality(G, weight=None, normalized=True) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -61,27 +51,17 @@ class TestBetweennessCentrality: """Betweenness centrality: P3""" G = nx.path_graph(3) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) + b = nx.betweenness_centrality(G, 
weight=None, normalized=False) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) def test_sample_from_P3(self): G = nx.path_graph(3) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} - b = nx.betweenness_centrality(G, - k=3, - weight=None, - normalized=False, - seed=1) + b = nx.betweenness_centrality(G, k=3, weight=None, normalized=False, seed=1) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) - b = nx.betweenness_centrality(G, - k=2, - weight=None, - normalized=False, - seed=1) + b = nx.betweenness_centrality(G, k=2, weight=None, normalized=False, seed=1) # python versions give different results with same seed b_approx1 = {0: 0.0, 1: 1.5, 2: 0.0} b_approx2 = {0: 0.0, 1: 0.75, 2: 0.0} @@ -92,171 +72,175 @@ class TestBetweennessCentrality: """Betweenness centrality: P3 endpoints""" G = nx.path_graph(3) b_answer = {0: 2.0, 1: 3.0, 2: 2.0} - b = nx.betweenness_centrality(G, - weight=None, - normalized=False, - endpoints=True) + b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) # normalized = True case - b_answer = {0: 2/3, 1: 1.0, 2: 2/3} - b = nx.betweenness_centrality(G, - weight=None, - normalized=True, - endpoints=True) + b_answer = {0: 2 / 3, 1: 1.0, 2: 2 / 3} + b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) def test_krackhardt_kite_graph(self): """Betweenness centrality: Krackhardt kite graph""" G = nx.krackhardt_kite_graph() - b_answer = {0: 1.667, 1: 1.667, 2: 0.000, 3: 7.333, 4: 0.000, - 5: 16.667, 6: 16.667, 7: 28.000, 8: 16.000, 9: 0.000} + b_answer = { + 0: 1.667, + 1: 1.667, + 2: 0.000, + 3: 7.333, + 4: 0.000, + 5: 16.667, + 6: 16.667, + 7: 28.000, + 8: 16.000, + 9: 0.000, + } for b in b_answer: b_answer[b] /= 2 - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) + b = nx.betweenness_centrality(G, weight=None, normalized=False) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=3) def test_krackhardt_kite_graph_normalized(self): """Betweenness centrality: Krackhardt kite graph normalized""" G = nx.krackhardt_kite_graph() - b_answer = {0: 0.023, 1: 0.023, 2: 0.000, 3: 0.102, 4: 0.000, - 5: 0.231, 6: 0.231, 7: 0.389, 8: 0.222, 9: 0.000} - b = nx.betweenness_centrality(G, - weight=None, - normalized=True) + b_answer = { + 0: 0.023, + 1: 0.023, + 2: 0.000, + 3: 0.102, + 4: 0.000, + 5: 0.231, + 6: 0.231, + 7: 0.389, + 8: 0.222, + 9: 0.000, + } + b = nx.betweenness_centrality(G, weight=None, normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=3) def test_florentine_families_graph(self): """Betweenness centrality: Florentine families graph""" G = nx.florentine_families_graph() - b_answer =\ - {'Acciaiuoli': 0.000, - 'Albizzi': 0.212, - 'Barbadori': 0.093, - 'Bischeri': 0.104, - 'Castellani': 0.055, - 'Ginori': 0.000, - 'Guadagni': 0.255, - 'Lamberteschi': 0.000, - 'Medici': 0.522, - 'Pazzi': 0.000, - 'Peruzzi': 0.022, - 'Ridolfi': 0.114, - 'Salviati': 0.143, - 'Strozzi': 0.103, - 'Tornabuoni': 0.092} - - b = nx.betweenness_centrality(G, - weight=None, - normalized=True) + b_answer = { + "Acciaiuoli": 0.000, + "Albizzi": 0.212, + "Barbadori": 0.093, + "Bischeri": 0.104, + "Castellani": 0.055, + "Ginori": 0.000, + "Guadagni": 0.255, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.114, + "Salviati": 0.143, + "Strozzi": 0.103, + "Tornabuoni": 0.092, + } + + b = nx.betweenness_centrality(G, 
weight=None, normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=3) def test_les_miserables_graph(self): """Betweenness centrality: Les Miserables graph""" G = nx.les_miserables_graph() - b_answer = \ - {'Napoleon': 0.000, - 'Myriel': 0.177, - 'MlleBaptistine': 0.000, - 'MmeMagloire': 0.000, - 'CountessDeLo': 0.000, - 'Geborand': 0.000, - 'Champtercier': 0.000, - 'Cravatte': 0.000, - 'Count': 0.000, - 'OldMan': 0.000, - 'Valjean': 0.570, - 'Labarre': 0.000, - 'Marguerite': 0.000, - 'MmeDeR': 0.000, - 'Isabeau': 0.000, - 'Gervais': 0.000, - 'Listolier': 0.000, - 'Tholomyes': 0.041, - 'Fameuil': 0.000, - 'Blacheville': 0.000, - 'Favourite': 0.000, - 'Dahlia': 0.000, - 'Zephine': 0.000, - 'Fantine': 0.130, - 'MmeThenardier': 0.029, - 'Thenardier': 0.075, - 'Cosette': 0.024, - 'Javert': 0.054, - 'Fauchelevent': 0.026, - 'Bamatabois': 0.008, - 'Perpetue': 0.000, - 'Simplice': 0.009, - 'Scaufflaire': 0.000, - 'Woman1': 0.000, - 'Judge': 0.000, - 'Champmathieu': 0.000, - 'Brevet': 0.000, - 'Chenildieu': 0.000, - 'Cochepaille': 0.000, - 'Pontmercy': 0.007, - 'Boulatruelle': 0.000, - 'Eponine': 0.011, - 'Anzelma': 0.000, - 'Woman2': 0.000, - 'MotherInnocent': 0.000, - 'Gribier': 0.000, - 'MmeBurgon': 0.026, - 'Jondrette': 0.000, - 'Gavroche': 0.165, - 'Gillenormand': 0.020, - 'Magnon': 0.000, - 'MlleGillenormand': 0.048, - 'MmePontmercy': 0.000, - 'MlleVaubois': 0.000, - 'LtGillenormand': 0.000, - 'Marius': 0.132, - 'BaronessT': 0.000, - 'Mabeuf': 0.028, - 'Enjolras': 0.043, - 'Combeferre': 0.001, - 'Prouvaire': 0.000, - 'Feuilly': 0.001, - 'Courfeyrac': 0.005, - 'Bahorel': 0.002, - 'Bossuet': 0.031, - 'Joly': 0.002, - 'Grantaire': 0.000, - 'MotherPlutarch': 0.000, - 'Gueulemer': 0.005, - 'Babet': 0.005, - 'Claquesous': 0.005, - 'Montparnasse': 0.004, - 'Toussaint': 0.000, - 'Child1': 0.000, - 'Child2': 0.000, - 'Brujon': 0.000, - 'MmeHucheloup': 0.000} - - b = nx.betweenness_centrality(G, - weight=None, - normalized=True) + b_answer = { + "Napoleon": 0.000, + "Myriel": 0.177, + "MlleBaptistine": 0.000, + "MmeMagloire": 0.000, + "CountessDeLo": 0.000, + "Geborand": 0.000, + "Champtercier": 0.000, + "Cravatte": 0.000, + "Count": 0.000, + "OldMan": 0.000, + "Valjean": 0.570, + "Labarre": 0.000, + "Marguerite": 0.000, + "MmeDeR": 0.000, + "Isabeau": 0.000, + "Gervais": 0.000, + "Listolier": 0.000, + "Tholomyes": 0.041, + "Fameuil": 0.000, + "Blacheville": 0.000, + "Favourite": 0.000, + "Dahlia": 0.000, + "Zephine": 0.000, + "Fantine": 0.130, + "MmeThenardier": 0.029, + "Thenardier": 0.075, + "Cosette": 0.024, + "Javert": 0.054, + "Fauchelevent": 0.026, + "Bamatabois": 0.008, + "Perpetue": 0.000, + "Simplice": 0.009, + "Scaufflaire": 0.000, + "Woman1": 0.000, + "Judge": 0.000, + "Champmathieu": 0.000, + "Brevet": 0.000, + "Chenildieu": 0.000, + "Cochepaille": 0.000, + "Pontmercy": 0.007, + "Boulatruelle": 0.000, + "Eponine": 0.011, + "Anzelma": 0.000, + "Woman2": 0.000, + "MotherInnocent": 0.000, + "Gribier": 0.000, + "MmeBurgon": 0.026, + "Jondrette": 0.000, + "Gavroche": 0.165, + "Gillenormand": 0.020, + "Magnon": 0.000, + "MlleGillenormand": 0.048, + "MmePontmercy": 0.000, + "MlleVaubois": 0.000, + "LtGillenormand": 0.000, + "Marius": 0.132, + "BaronessT": 0.000, + "Mabeuf": 0.028, + "Enjolras": 0.043, + "Combeferre": 0.001, + "Prouvaire": 0.000, + "Feuilly": 0.001, + "Courfeyrac": 0.005, + "Bahorel": 0.002, + "Bossuet": 0.031, + "Joly": 0.002, + "Grantaire": 0.000, + "MotherPlutarch": 0.000, + "Gueulemer": 0.005, + "Babet": 0.005, + "Claquesous": 0.005, + 
"Montparnasse": 0.004, + "Toussaint": 0.000, + "Child1": 0.000, + "Child2": 0.000, + "Brujon": 0.000, + "MmeHucheloup": 0.000, + } + + b = nx.betweenness_centrality(G, weight=None, normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=3) def test_ladder_graph(self): """Betweenness centrality: Ladder graph""" G = nx.Graph() # ladder_graph(3) - G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), - (2, 4), (4, 5), (3, 5)]) - b_answer = {0: 1.667, 1: 1.667, 2: 6.667, - 3: 6.667, 4: 1.667, 5: 1.667} + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) + b_answer = {0: 1.667, 1: 1.667, 2: 6.667, 3: 6.667, 4: 1.667, 5: 1.667} for b in b_answer: b_answer[b] /= 2 - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) + b = nx.betweenness_centrality(G, weight=None, normalized=False) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=3) @@ -266,9 +250,7 @@ class TestBetweennessCentrality: nx.add_path(G, [0, 1, 2]) nx.add_path(G, [3, 4, 5, 6]) b_answer = {0: 0, 1: 1, 2: 0, 3: 0, 4: 2, 5: 2, 6: 0} - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) + b = nx.betweenness_centrality(G, weight=None, normalized=False) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -278,17 +260,11 @@ class TestBetweennessCentrality: nx.add_path(G, [0, 1, 2]) nx.add_path(G, [3, 4, 5, 6]) b_answer = {0: 2, 1: 3, 2: 2, 3: 3, 4: 5, 5: 5, 6: 3} - b = nx.betweenness_centrality(G, - weight=None, - normalized=False, - endpoints=True) + b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) # normalized = True case - b = nx.betweenness_centrality(G, - weight=None, - normalized=True, - endpoints=True) + b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n] / 21) @@ -296,9 +272,7 @@ class TestBetweennessCentrality: """Betweenness centrality: directed path""" G = nx.DiGraph() nx.add_path(G, [0, 1, 2]) - b = nx.betweenness_centrality(G, - weight=None, - normalized=False) + b = nx.betweenness_centrality(G, weight=None, normalized=False) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -307,9 +281,7 @@ class TestBetweennessCentrality: """Betweenness centrality: directed path normalized""" G = nx.DiGraph() nx.add_path(G, [0, 1, 2]) - b = nx.betweenness_centrality(G, - weight=None, - normalized=True) + b = nx.betweenness_centrality(G, weight=None, normalized=True) b_answer = {0: 0.0, 1: 0.5, 2: 0.0} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -319,9 +291,7 @@ class TestWeightedBetweennessCentrality: def test_K5(self): """Weighted betweenness centrality: K5""" G = nx.complete_graph(5) - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + b = nx.betweenness_centrality(G, weight="weight", normalized=False) b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -329,9 +299,7 @@ class TestWeightedBetweennessCentrality: def test_P3_normalized(self): """Weighted betweenness centrality: P3 normalized""" G = nx.path_graph(3) - b = nx.betweenness_centrality(G, - weight='weight', - normalized=True) + b = nx.betweenness_centrality(G, weight="weight", normalized=True) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -340,23 +308,29 @@ class TestWeightedBetweennessCentrality: 
"""Weighted betweenness centrality: P3""" G = nx.path_graph(3) b_answer = {0: 0.0, 1: 1.0, 2: 0.0} - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + b = nx.betweenness_centrality(G, weight="weight", normalized=False) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) def test_krackhardt_kite_graph(self): """Weighted betweenness centrality: Krackhardt kite graph""" G = nx.krackhardt_kite_graph() - b_answer = {0: 1.667, 1: 1.667, 2: 0.000, 3: 7.333, 4: 0.000, - 5: 16.667, 6: 16.667, 7: 28.000, 8: 16.000, 9: 0.000} + b_answer = { + 0: 1.667, + 1: 1.667, + 2: 0.000, + 3: 7.333, + 4: 0.000, + 5: 16.667, + 6: 16.667, + 7: 28.000, + 8: 16.000, + 9: 0.000, + } for b in b_answer: b_answer[b] /= 2 - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + b = nx.betweenness_centrality(G, weight="weight", normalized=False) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=3) @@ -366,11 +340,19 @@ class TestWeightedBetweennessCentrality: Krackhardt kite graph normalized """ G = nx.krackhardt_kite_graph() - b_answer = {0: 0.023, 1: 0.023, 2: 0.000, 3: 0.102, 4: 0.000, - 5: 0.231, 6: 0.231, 7: 0.389, 8: 0.222, 9: 0.000} - b = nx.betweenness_centrality(G, - weight='weight', - normalized=True) + b_answer = { + 0: 0.023, + 1: 0.023, + 2: 0.000, + 3: 0.102, + 4: 0.000, + 5: 0.231, + 6: 0.231, + 7: 0.389, + 8: 0.222, + 9: 0.000, + } + b = nx.betweenness_centrality(G, weight="weight", normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=3) @@ -379,129 +361,123 @@ class TestWeightedBetweennessCentrality: """Weighted betweenness centrality: Florentine families graph""" G = nx.florentine_families_graph() - b_answer = \ - {'Acciaiuoli': 0.000, - 'Albizzi': 0.212, - 'Barbadori': 0.093, - 'Bischeri': 0.104, - 'Castellani': 0.055, - 'Ginori': 0.000, - 'Guadagni': 0.255, - 'Lamberteschi': 0.000, - 'Medici': 0.522, - 'Pazzi': 0.000, - 'Peruzzi': 0.022, - 'Ridolfi': 0.114, - 'Salviati': 0.143, - 'Strozzi': 0.103, - 'Tornabuoni': 0.092} - - b = nx.betweenness_centrality(G, - weight='weight', - normalized=True) + b_answer = { + "Acciaiuoli": 0.000, + "Albizzi": 0.212, + "Barbadori": 0.093, + "Bischeri": 0.104, + "Castellani": 0.055, + "Ginori": 0.000, + "Guadagni": 0.255, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.114, + "Salviati": 0.143, + "Strozzi": 0.103, + "Tornabuoni": 0.092, + } + + b = nx.betweenness_centrality(G, weight="weight", normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=3) def test_les_miserables_graph(self): """Weighted betweenness centrality: Les Miserables graph""" G = nx.les_miserables_graph() - b_answer = \ - {'Napoleon': 0.000, - 'Myriel': 0.177, - 'MlleBaptistine': 0.000, - 'MmeMagloire': 0.000, - 'CountessDeLo': 0.000, - 'Geborand': 0.000, - 'Champtercier': 0.000, - 'Cravatte': 0.000, - 'Count': 0.000, - 'OldMan': 0.000, - 'Valjean': 0.454, - 'Labarre': 0.000, - 'Marguerite': 0.009, - 'MmeDeR': 0.000, - 'Isabeau': 0.000, - 'Gervais': 0.000, - 'Listolier': 0.000, - 'Tholomyes': 0.066, - 'Fameuil': 0.000, - 'Blacheville': 0.000, - 'Favourite': 0.000, - 'Dahlia': 0.000, - 'Zephine': 0.000, - 'Fantine': 0.114, - 'MmeThenardier': 0.046, - 'Thenardier': 0.129, - 'Cosette': 0.075, - 'Javert': 0.193, - 'Fauchelevent': 0.026, - 'Bamatabois': 0.080, - 'Perpetue': 0.000, - 'Simplice': 0.001, - 'Scaufflaire': 0.000, - 'Woman1': 0.000, - 'Judge': 0.000, - 'Champmathieu': 0.000, - 'Brevet': 0.000, - 'Chenildieu': 0.000, - 
'Cochepaille': 0.000, - 'Pontmercy': 0.023, - 'Boulatruelle': 0.000, - 'Eponine': 0.023, - 'Anzelma': 0.000, - 'Woman2': 0.000, - 'MotherInnocent': 0.000, - 'Gribier': 0.000, - 'MmeBurgon': 0.026, - 'Jondrette': 0.000, - 'Gavroche': 0.285, - 'Gillenormand': 0.024, - 'Magnon': 0.005, - 'MlleGillenormand': 0.036, - 'MmePontmercy': 0.005, - 'MlleVaubois': 0.000, - 'LtGillenormand': 0.015, - 'Marius': 0.072, - 'BaronessT': 0.004, - 'Mabeuf': 0.089, - 'Enjolras': 0.003, - 'Combeferre': 0.000, - 'Prouvaire': 0.000, - 'Feuilly': 0.004, - 'Courfeyrac': 0.001, - 'Bahorel': 0.007, - 'Bossuet': 0.028, - 'Joly': 0.000, - 'Grantaire': 0.036, - 'MotherPlutarch': 0.000, - 'Gueulemer': 0.025, - 'Babet': 0.015, - 'Claquesous': 0.042, - 'Montparnasse': 0.050, - 'Toussaint': 0.011, - 'Child1': 0.000, - 'Child2': 0.000, - 'Brujon': 0.002, - 'MmeHucheloup': 0.034} - - b = nx.betweenness_centrality(G, - weight='weight', - normalized=True) + b_answer = { + "Napoleon": 0.000, + "Myriel": 0.177, + "MlleBaptistine": 0.000, + "MmeMagloire": 0.000, + "CountessDeLo": 0.000, + "Geborand": 0.000, + "Champtercier": 0.000, + "Cravatte": 0.000, + "Count": 0.000, + "OldMan": 0.000, + "Valjean": 0.454, + "Labarre": 0.000, + "Marguerite": 0.009, + "MmeDeR": 0.000, + "Isabeau": 0.000, + "Gervais": 0.000, + "Listolier": 0.000, + "Tholomyes": 0.066, + "Fameuil": 0.000, + "Blacheville": 0.000, + "Favourite": 0.000, + "Dahlia": 0.000, + "Zephine": 0.000, + "Fantine": 0.114, + "MmeThenardier": 0.046, + "Thenardier": 0.129, + "Cosette": 0.075, + "Javert": 0.193, + "Fauchelevent": 0.026, + "Bamatabois": 0.080, + "Perpetue": 0.000, + "Simplice": 0.001, + "Scaufflaire": 0.000, + "Woman1": 0.000, + "Judge": 0.000, + "Champmathieu": 0.000, + "Brevet": 0.000, + "Chenildieu": 0.000, + "Cochepaille": 0.000, + "Pontmercy": 0.023, + "Boulatruelle": 0.000, + "Eponine": 0.023, + "Anzelma": 0.000, + "Woman2": 0.000, + "MotherInnocent": 0.000, + "Gribier": 0.000, + "MmeBurgon": 0.026, + "Jondrette": 0.000, + "Gavroche": 0.285, + "Gillenormand": 0.024, + "Magnon": 0.005, + "MlleGillenormand": 0.036, + "MmePontmercy": 0.005, + "MlleVaubois": 0.000, + "LtGillenormand": 0.015, + "Marius": 0.072, + "BaronessT": 0.004, + "Mabeuf": 0.089, + "Enjolras": 0.003, + "Combeferre": 0.000, + "Prouvaire": 0.000, + "Feuilly": 0.004, + "Courfeyrac": 0.001, + "Bahorel": 0.007, + "Bossuet": 0.028, + "Joly": 0.000, + "Grantaire": 0.036, + "MotherPlutarch": 0.000, + "Gueulemer": 0.025, + "Babet": 0.015, + "Claquesous": 0.042, + "Montparnasse": 0.050, + "Toussaint": 0.011, + "Child1": 0.000, + "Child2": 0.000, + "Brujon": 0.002, + "MmeHucheloup": 0.034, + } + + b = nx.betweenness_centrality(G, weight="weight", normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=3) def test_ladder_graph(self): """Weighted betweenness centrality: Ladder graph""" G = nx.Graph() # ladder_graph(3) - G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), - (2, 4), (4, 5), (3, 5)]) - b_answer = {0: 1.667, 1: 1.667, 2: 6.667, - 3: 6.667, 4: 1.667, 5: 1.667} + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) + b_answer = {0: 1.667, 1: 1.667, 2: 6.667, 3: 6.667, 4: 1.667, 5: 1.667} for b in b_answer: b_answer[b] /= 2 - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + b = nx.betweenness_centrality(G, weight="weight", normalized=False) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=3) @@ -509,26 +485,31 @@ class TestWeightedBetweennessCentrality: """Weighted betweenness centrality: G""" G = weighted_G() 
b_answer = {0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0} - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + b = nx.betweenness_centrality(G, weight="weight", normalized=False) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) def test_G2(self): """Weighted betweenness centrality: G2""" G = nx.DiGraph() - G.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)]) - - b_answer = {'y': 5.0, 'x': 5.0, 's': 4.0, 'u': 2.0, 'v': 2.0} - - b = nx.betweenness_centrality(G, - weight='weight', - normalized=False) + G.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) + + b_answer = {"y": 5.0, "x": 5.0, "s": 4.0, "u": 2.0, "v": 2.0} + + b = nx.betweenness_centrality(G, weight="weight", normalized=False) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -578,8 +559,7 @@ class TestEdgeBetweennessCentrality: """Edge betweenness centrality: balanced tree""" G = nx.balanced_tree(r=2, h=2) b = nx.edge_betweenness_centrality(G, weight=None, normalized=False) - b_answer = {(0, 1): 12, (0, 2): 12, - (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6} + b_answer = {(0, 1): 12, (0, 2): 12, (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6} for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) @@ -588,7 +568,7 @@ class TestWeightedEdgeBetweennessCentrality: def test_K5(self): """Edge betweenness centrality: K5""" G = nx.complete_graph(5) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) b_answer = dict.fromkeys(G.edges(), 1) for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) @@ -596,7 +576,7 @@ class TestWeightedEdgeBetweennessCentrality: def test_C4(self): """Edge betweenness centrality: C4""" G = nx.cycle_graph(4) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) b_answer = {(0, 1): 2, (0, 3): 2, (1, 2): 2, (2, 3): 2} for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) @@ -604,7 +584,7 @@ class TestWeightedEdgeBetweennessCentrality: def test_P4(self): """Edge betweenness centrality: P4""" G = nx.path_graph(4) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) b_answer = {(0, 1): 3, (1, 2): 4, (2, 3): 3} for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) @@ -612,47 +592,66 @@ class TestWeightedEdgeBetweennessCentrality: def test_balanced_tree(self): """Edge betweenness centrality: balanced tree""" G = nx.balanced_tree(r=2, h=2) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False) - b_answer = {(0, 1): 12, (0, 2): 12, - (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6} + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) + b_answer = {(0, 1): 12, (0, 2): 12, (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6} for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) def test_weighted_graph(self): - eList = [(0, 1, 5), (0, 2, 4), (0, 3, 3), - (0, 4, 2), (1, 2, 4), (1, 3, 1), - (1, 4, 3), (2, 4, 5), (3, 4, 4)] + eList = [ + (0, 1, 5), + (0, 2, 4), + (0, 3, 3), + (0, 4, 2), + (1, 2, 4), + (1, 3, 1), + (1, 4, 3), + 
(2, 4, 5), + (3, 4, 4), + ] G = nx.Graph() G.add_weighted_edges_from(eList) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False) - b_answer = {(0, 1): 0.0, - (0, 2): 1.0, - (0, 3): 2.0, - (0, 4): 1.0, - (1, 2): 2.0, - (1, 3): 3.5, - (1, 4): 1.5, - (2, 4): 1.0, - (3, 4): 0.5} + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) + b_answer = { + (0, 1): 0.0, + (0, 2): 1.0, + (0, 3): 2.0, + (0, 4): 1.0, + (1, 2): 2.0, + (1, 3): 3.5, + (1, 4): 1.5, + (2, 4): 1.0, + (3, 4): 0.5, + } for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) def test_normalized_weighted_graph(self): - eList = [(0, 1, 5), (0, 2, 4), (0, 3, 3), - (0, 4, 2), (1, 2, 4), (1, 3, 1), - (1, 4, 3), (2, 4, 5), (3, 4, 4)] + eList = [ + (0, 1, 5), + (0, 2, 4), + (0, 3, 3), + (0, 4, 2), + (1, 2, 4), + (1, 3, 1), + (1, 4, 3), + (2, 4, 5), + (3, 4, 4), + ] G = nx.Graph() G.add_weighted_edges_from(eList) - b = nx.edge_betweenness_centrality(G, weight='weight', normalized=True) - b_answer = {(0, 1): 0.0, - (0, 2): 1.0, - (0, 3): 2.0, - (0, 4): 1.0, - (1, 2): 2.0, - (1, 3): 3.5, - (1, 4): 1.5, - (2, 4): 1.0, - (3, 4): 0.5} + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=True) + b_answer = { + (0, 1): 0.0, + (0, 2): 1.0, + (0, 3): 2.0, + (0, 4): 1.0, + (1, 2): 2.0, + (1, 3): 3.5, + (1, 4): 1.5, + (2, 4): 1.0, + (3, 4): 0.5, + } norm = len(G) * (len(G) - 1) / 2 for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n] / norm) diff --git a/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py b/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py index b91a39e6..9c770fe0 100644 --- a/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py +++ b/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py @@ -3,12 +3,12 @@ from networkx.testing import almost_equal class TestSubsetBetweennessCentrality: - def test_K5(self): """Betweenness Centrality Subset: K5""" G = nx.complete_graph(5) - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[1, 3], - weight=None) + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[1, 3], weight=None + ) b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -18,8 +18,7 @@ class TestSubsetBetweennessCentrality: G = nx.DiGraph() nx.add_path(G, range(5)) b_answer = {0: 0, 1: 1, 2: 1, 3: 0, 4: 0, 5: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -28,8 +27,7 @@ class TestSubsetBetweennessCentrality: G = nx.Graph() nx.add_path(G, range(5)) b_answer = {0: 0, 1: 0.5, 2: 0.5, 3: 0, 4: 0, 5: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -38,8 +36,9 @@ class TestSubsetBetweennessCentrality: G = nx.Graph() nx.add_path(G, range(5)) b_answer = {0: 0, 1: 1, 2: 1, 3: 0.5, 4: 0, 5: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3, 4], - weight=None) + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -48,8 +47,7 @@ class TestSubsetBetweennessCentrality: G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 
3), (2, 3)]) b_answer = {0: 0, 1: 0.25, 2: 0.25, 3: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -58,8 +56,9 @@ class TestSubsetBetweennessCentrality: G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (3, 4), (4, 5)]) b_answer = {0: 0, 1: 0.5, 2: 0.5, 3: 0.5, 4: 0, 5: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3, 4], - weight=None) + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -68,51 +67,49 @@ class TestSubsetBetweennessCentrality: G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (2, 3), (1, 20), (20, 3), (3, 4)]) b_answer = {0: 0, 1: 1.0, 2: 0.5, 20: 0.5, 3: 0.5, 4: 0} - b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3, 4], - weight=None) + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) def test_diamond_multi_path(self): """Betweenness Centrality Subset: Diamond Multi Path""" G = nx.Graph() - G.add_edges_from([ - (1, 2), - (1, 3), - (1, 4), - (1, 5), - (1, 10), - (10, 11), - (11, 12), - (12, 9), - (2, 6), - (3, 6), - (4, 6), - (5, 7), - (7, 8), - (6, 8), - (8, 9) - ]) - b = nx.betweenness_centrality_subset( - G, - sources=[1], - targets=[9], - weight=None + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (1, 10), + (10, 11), + (11, 12), + (12, 9), + (2, 6), + (3, 6), + (4, 6), + (5, 7), + (7, 8), + (6, 8), + (8, 9), + ] ) + b = nx.betweenness_centrality_subset(G, sources=[1], targets=[9], weight=None) expected_b = { 1: 0, - 2: 1./10, - 3: 1./10, - 4: 1./10, - 5: 1./10, - 6: 3./10, - 7: 1./10, - 8: 4./10, + 2: 1.0 / 10, + 3: 1.0 / 10, + 4: 1.0 / 10, + 5: 1.0 / 10, + 6: 3.0 / 10, + 7: 1.0 / 10, + 8: 4.0 / 10, 9: 0, - 10: 1./10, - 11: 1./10, - 12: 1./10, + 10: 1.0 / 10, + 11: 1.0 / 10, + 12: 1.0 / 10, } for n in sorted(G): @@ -120,7 +117,6 @@ class TestSubsetBetweennessCentrality: class TestBetweennessCentralitySources: - def test_K5(self): """Betweenness Centrality Sources: K5""" G = nx.complete_graph(5) @@ -139,12 +135,12 @@ class TestBetweennessCentralitySources: class TestEdgeSubsetBetweennessCentrality: - def test_K5(self): """Edge betweenness subset centrality: K5""" G = nx.complete_graph(5) - b = nx.edge_betweenness_centrality_subset(G, sources=[0], - targets=[1, 3], weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[1, 3], weight=None + ) b_answer = dict.fromkeys(G.edges(), 0) b_answer[(0, 3)] = b_answer[(0, 1)] = 0.5 for n in sorted(G.edges()): @@ -156,8 +152,9 @@ class TestEdgeSubsetBetweennessCentrality: nx.add_path(G, range(5)) b_answer = dict.fromkeys(G.edges(), 0) b_answer[(0, 1)] = b_answer[(1, 2)] = b_answer[(2, 3)] = 1 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3], weight=None + ) for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) @@ -167,8 +164,9 @@ class TestEdgeSubsetBetweennessCentrality: nx.add_path(G, range(5)) b_answer = dict.fromkeys(G.edges(), 0) b_answer[(0, 1)] = b_answer[(1, 2)] = b_answer[(2, 3)] = 0.5 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.edge_betweenness_centrality_subset( + G, 
sources=[0], targets=[3], weight=None + ) for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) @@ -179,8 +177,9 @@ class TestEdgeSubsetBetweennessCentrality: b_answer = dict.fromkeys(G.edges(), 0) b_answer[(0, 1)] = b_answer[(1, 2)] = b_answer[(2, 3)] = 1 b_answer[(3, 4)] = 0.5 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], - targets=[3, 4], weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) @@ -191,8 +190,9 @@ class TestEdgeSubsetBetweennessCentrality: b_answer = dict.fromkeys(G.edges(), 0) b_answer[(0, 1)] = b_answer[(0, 2)] = 0.25 b_answer[(1, 3)] = b_answer[(2, 3)] = 0.25 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], targets=[3], - weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3], weight=None + ) for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) @@ -204,8 +204,9 @@ class TestEdgeSubsetBetweennessCentrality: b_answer[(0, 1)] = b_answer[(0, 2)] = 0.5 b_answer[(1, 3)] = b_answer[(2, 3)] = 0.5 b_answer[(3, 4)] = 0.5 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], - targets=[3, 4], weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) @@ -218,7 +219,8 @@ class TestEdgeSubsetBetweennessCentrality: b_answer[(1, 20)] = b_answer[(3, 20)] = 0.5 b_answer[(1, 2)] = b_answer[(2, 3)] = 0.5 b_answer[(3, 4)] = 0.5 - b = nx.edge_betweenness_centrality_subset(G, sources=[0], - targets=[3, 4], weight=None) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) for n in sorted(G.edges()): assert almost_equal(b[n], b_answer[n]) diff --git a/networkx/algorithms/centrality/tests/test_closeness_centrality.py b/networkx/algorithms/centrality/tests/test_closeness_centrality.py index a1fb8b02..b3f883b8 100644 --- a/networkx/algorithms/centrality/tests/test_closeness_centrality.py +++ b/networkx/algorithms/centrality/tests/test_closeness_centrality.py @@ -17,8 +17,7 @@ class TestClosenessCentrality: cls.C4 = nx.cycle_graph(4) cls.T = nx.balanced_tree(r=2, h=2) cls.Gb = nx.Graph() - cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), - (2, 4), (4, 5), (3, 5)]) + cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) F = nx.florentine_families_graph() cls.F = F @@ -33,10 +32,8 @@ class TestClosenessCentrality: G = nx.union(self.P4, nx.path_graph([4, 5, 6])) c = nx.closeness_centrality(G) cwf = nx.closeness_centrality(G, wf_improved=False) - res = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25, - 4: 0.222, 5: 0.333, 6: 0.222} - wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5, - 4: 0.667, 5: 1.0, 6: 0.667} + res = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25, 4: 0.222, 5: 0.333, 6: 0.222} + wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5, 4: 0.667, 5: 1.0, 6: 0.667} for n in G: assert almost_equal(c[n], res[n], places=3) assert almost_equal(cwf[n], wf_res[n], places=3) @@ -53,151 +50,156 @@ class TestClosenessCentrality: def test_k5_closeness(self): c = nx.closeness_centrality(self.K5) - d = {0: 1.000, - 1: 1.000, - 2: 1.000, - 3: 1.000, - 4: 1.000} + d = {0: 1.000, 1: 1.000, 2: 1.000, 3: 1.000, 4: 1.000} for n in sorted(self.K5): assert almost_equal(c[n], d[n], places=3) def test_p3_closeness(self): c = nx.closeness_centrality(self.P3) - d = {0: 0.667, - 1: 1.000, - 2: 0.667} + d = {0: 0.667, 1: 1.000, 2: 0.667} for n in 
sorted(self.P3): assert almost_equal(c[n], d[n], places=3) def test_krackhardt_closeness(self): c = nx.closeness_centrality(self.K) - d = {0: 0.529, - 1: 0.529, - 2: 0.500, - 3: 0.600, - 4: 0.500, - 5: 0.643, - 6: 0.643, - 7: 0.600, - 8: 0.429, - 9: 0.310} + d = { + 0: 0.529, + 1: 0.529, + 2: 0.500, + 3: 0.600, + 4: 0.500, + 5: 0.643, + 6: 0.643, + 7: 0.600, + 8: 0.429, + 9: 0.310, + } for n in sorted(self.K): assert almost_equal(c[n], d[n], places=3) def test_florentine_families_closeness(self): c = nx.closeness_centrality(self.F) - d = {'Acciaiuoli': 0.368, - 'Albizzi': 0.483, - 'Barbadori': 0.4375, - 'Bischeri': 0.400, - 'Castellani': 0.389, - 'Ginori': 0.333, - 'Guadagni': 0.467, - 'Lamberteschi': 0.326, - 'Medici': 0.560, - 'Pazzi': 0.286, - 'Peruzzi': 0.368, - 'Ridolfi': 0.500, - 'Salviati': 0.389, - 'Strozzi': 0.4375, - 'Tornabuoni': 0.483} + d = { + "Acciaiuoli": 0.368, + "Albizzi": 0.483, + "Barbadori": 0.4375, + "Bischeri": 0.400, + "Castellani": 0.389, + "Ginori": 0.333, + "Guadagni": 0.467, + "Lamberteschi": 0.326, + "Medici": 0.560, + "Pazzi": 0.286, + "Peruzzi": 0.368, + "Ridolfi": 0.500, + "Salviati": 0.389, + "Strozzi": 0.4375, + "Tornabuoni": 0.483, + } for n in sorted(self.F): assert almost_equal(c[n], d[n], places=3) def test_les_miserables_closeness(self): c = nx.closeness_centrality(self.LM) - d = {'Napoleon': 0.302, - 'Myriel': 0.429, - 'MlleBaptistine': 0.413, - 'MmeMagloire': 0.413, - 'CountessDeLo': 0.302, - 'Geborand': 0.302, - 'Champtercier': 0.302, - 'Cravatte': 0.302, - 'Count': 0.302, - 'OldMan': 0.302, - 'Valjean': 0.644, - 'Labarre': 0.394, - 'Marguerite': 0.413, - 'MmeDeR': 0.394, - 'Isabeau': 0.394, - 'Gervais': 0.394, - 'Listolier': 0.341, - 'Tholomyes': 0.392, - 'Fameuil': 0.341, - 'Blacheville': 0.341, - 'Favourite': 0.341, - 'Dahlia': 0.341, - 'Zephine': 0.341, - 'Fantine': 0.461, - 'MmeThenardier': 0.461, - 'Thenardier': 0.517, - 'Cosette': 0.478, - 'Javert': 0.517, - 'Fauchelevent': 0.402, - 'Bamatabois': 0.427, - 'Perpetue': 0.318, - 'Simplice': 0.418, - 'Scaufflaire': 0.394, - 'Woman1': 0.396, - 'Judge': 0.404, - 'Champmathieu': 0.404, - 'Brevet': 0.404, - 'Chenildieu': 0.404, - 'Cochepaille': 0.404, - 'Pontmercy': 0.373, - 'Boulatruelle': 0.342, - 'Eponine': 0.396, - 'Anzelma': 0.352, - 'Woman2': 0.402, - 'MotherInnocent': 0.398, - 'Gribier': 0.288, - 'MmeBurgon': 0.344, - 'Jondrette': 0.257, - 'Gavroche': 0.514, - 'Gillenormand': 0.442, - 'Magnon': 0.335, - 'MlleGillenormand': 0.442, - 'MmePontmercy': 0.315, - 'MlleVaubois': 0.308, - 'LtGillenormand': 0.365, - 'Marius': 0.531, - 'BaronessT': 0.352, - 'Mabeuf': 0.396, - 'Enjolras': 0.481, - 'Combeferre': 0.392, - 'Prouvaire': 0.357, - 'Feuilly': 0.392, - 'Courfeyrac': 0.400, - 'Bahorel': 0.394, - 'Bossuet': 0.475, - 'Joly': 0.394, - 'Grantaire': 0.358, - 'MotherPlutarch': 0.285, - 'Gueulemer': 0.463, - 'Babet': 0.463, - 'Claquesous': 0.452, - 'Montparnasse': 0.458, - 'Toussaint': 0.402, - 'Child1': 0.342, - 'Child2': 0.342, - 'Brujon': 0.380, - 'MmeHucheloup': 0.353} + d = { + "Napoleon": 0.302, + "Myriel": 0.429, + "MlleBaptistine": 0.413, + "MmeMagloire": 0.413, + "CountessDeLo": 0.302, + "Geborand": 0.302, + "Champtercier": 0.302, + "Cravatte": 0.302, + "Count": 0.302, + "OldMan": 0.302, + "Valjean": 0.644, + "Labarre": 0.394, + "Marguerite": 0.413, + "MmeDeR": 0.394, + "Isabeau": 0.394, + "Gervais": 0.394, + "Listolier": 0.341, + "Tholomyes": 0.392, + "Fameuil": 0.341, + "Blacheville": 0.341, + "Favourite": 0.341, + "Dahlia": 0.341, + "Zephine": 0.341, + "Fantine": 0.461, + "MmeThenardier": 
0.461, + "Thenardier": 0.517, + "Cosette": 0.478, + "Javert": 0.517, + "Fauchelevent": 0.402, + "Bamatabois": 0.427, + "Perpetue": 0.318, + "Simplice": 0.418, + "Scaufflaire": 0.394, + "Woman1": 0.396, + "Judge": 0.404, + "Champmathieu": 0.404, + "Brevet": 0.404, + "Chenildieu": 0.404, + "Cochepaille": 0.404, + "Pontmercy": 0.373, + "Boulatruelle": 0.342, + "Eponine": 0.396, + "Anzelma": 0.352, + "Woman2": 0.402, + "MotherInnocent": 0.398, + "Gribier": 0.288, + "MmeBurgon": 0.344, + "Jondrette": 0.257, + "Gavroche": 0.514, + "Gillenormand": 0.442, + "Magnon": 0.335, + "MlleGillenormand": 0.442, + "MmePontmercy": 0.315, + "MlleVaubois": 0.308, + "LtGillenormand": 0.365, + "Marius": 0.531, + "BaronessT": 0.352, + "Mabeuf": 0.396, + "Enjolras": 0.481, + "Combeferre": 0.392, + "Prouvaire": 0.357, + "Feuilly": 0.392, + "Courfeyrac": 0.400, + "Bahorel": 0.394, + "Bossuet": 0.475, + "Joly": 0.394, + "Grantaire": 0.358, + "MotherPlutarch": 0.285, + "Gueulemer": 0.463, + "Babet": 0.463, + "Claquesous": 0.452, + "Montparnasse": 0.458, + "Toussaint": 0.402, + "Child1": 0.342, + "Child2": 0.342, + "Brujon": 0.380, + "MmeHucheloup": 0.353, + } for n in sorted(self.LM): assert almost_equal(c[n], d[n], places=3) def test_weighted_closeness(self): - edges = ([('s', 'u', 10), ('s', 'x', 5), ('u', 'v', 1), - ('u', 'x', 2), ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), ('y', 's', 7), ('y', 'v', 6)]) + edges = [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] XG = nx.Graph() XG.add_weighted_edges_from(edges) - c = nx.closeness_centrality(XG, distance='weight') - d = {'y': 0.200, - 'x': 0.286, - 's': 0.138, - 'u': 0.235, - 'v': 0.200} + c = nx.closeness_centrality(XG, distance="weight") + d = {"y": 0.200, "x": 0.286, "s": 0.138, "u": 0.235, "v": 0.200} for n in sorted(XG): assert almost_equal(c[n], d[n], places=3) @@ -252,8 +254,7 @@ class TestClosenessCentrality: G = nx.path_graph(3) prev_cc = nx.closeness_centrality(G) edge = self.pick_remove_edge(G) - test_cc = nx.incremental_closeness_centrality( - G, edge, prev_cc, insertion=False) + test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insertion=False) G.remove_edges_from([edge]) real_cc = nx.closeness_centrality(G) shared_items = set(test_cc.items()) & set(real_cc.items()) @@ -275,8 +276,7 @@ class TestClosenessCentrality: edge = self.pick_add_edge(G) # start = timeit.default_timer() - test_cc = nx.incremental_closeness_centrality( - G, edge, prev_cc, insert) + test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insert) # inc_elapsed = (timeit.default_timer() - start) # print(f"incremental time: {inc_elapsed}") diff --git a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py index 967d8150..d3dde1ec 100644 --- a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py +++ b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py @@ -2,15 +2,13 @@ import pytest import networkx as nx from networkx.testing import almost_equal -from networkx import edge_current_flow_betweenness_centrality \ - as edge_current_flow -from networkx import approximate_current_flow_betweenness_centrality \ - as approximate_cfbc +from networkx import edge_current_flow_betweenness_centrality as edge_current_flow +from networkx import 
approximate_current_flow_betweenness_centrality as approximate_cfbc -np = pytest.importorskip('numpy') -npt = pytest.importorskip('numpy.testing') -scipy = pytest.importorskip('scipy') +np = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") +scipy = pytest.importorskip("scipy") class TestFlowBetweennessCentrality: @@ -26,20 +24,21 @@ class TestFlowBetweennessCentrality: for n in sorted(G): assert almost_equal(b[n], b_answer[n]) wb_answer = {0: 0.2222222, 1: 0.2222222, 2: 0.30555555, 3: 0.30555555} - b = nx.current_flow_betweenness_centrality(G, normalized=True, weight='weight') + b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="weight") for n in sorted(G): assert almost_equal(b[n], wb_answer[n]) wb_answer = {0: 0.2051282, 1: 0.2051282, 2: 0.33974358, 3: 0.33974358} - b = nx.current_flow_betweenness_centrality(G, normalized=True, weight='other') + b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="other") for n in sorted(G): assert almost_equal(b[n], wb_answer[n]) def test_K4(self): """Betweenness centrality: K4""" G = nx.complete_graph(4) - for solver in ['full', 'lu', 'cg']: - b = nx.current_flow_betweenness_centrality(G, normalized=False, - solver=solver) + for solver in ["full", "lu", "cg"]: + b = nx.current_flow_betweenness_centrality( + G, normalized=False, solver=solver + ) b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -48,7 +47,7 @@ class TestFlowBetweennessCentrality: """Betweenness centrality: P4 normalized""" G = nx.path_graph(4) b = nx.current_flow_betweenness_centrality(G, normalized=True) - b_answer = {0: 0, 1: 2. / 3, 2: 2. / 3, 3: 0} + b_answer = {0: 0, 1: 2.0 / 3, 2: 2.0 / 3, 3: 0} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -63,25 +62,25 @@ class TestFlowBetweennessCentrality: def test_star(self): """Betweenness centrality: star """ G = nx.Graph() - nx.add_star(G, ['a', 'b', 'c', 'd']) + nx.add_star(G, ["a", "b", "c", "d"]) b = nx.current_flow_betweenness_centrality(G, normalized=True) - b_answer = {'a': 1.0, 'b': 0.0, 'c': 0.0, 'd': 0.0} + b_answer = {"a": 1.0, "b": 0.0, "c": 0.0, "d": 0.0} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) def test_solvers2(self): """Betweenness centrality: alternate solvers""" G = nx.complete_graph(4) - for solver in ['full', 'lu', 'cg']: - b = nx.current_flow_betweenness_centrality(G, normalized=False, - solver=solver) + for solver in ["full", "lu", "cg"]: + b = nx.current_flow_betweenness_centrality( + G, normalized=False, solver=solver + ) b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) class TestApproximateFlowBetweennessCentrality: - def test_K4_normalized(self): "Approximate current-flow betweenness centrality: K4 normalized" G = nx.complete_graph(4) @@ -98,12 +97,12 @@ class TestApproximateFlowBetweennessCentrality: epsilon = 0.1 ba = approximate_cfbc(G, normalized=False, epsilon=0.5 * epsilon) for n in sorted(G): - npt.assert_allclose(b[n], ba[n], atol=epsilon * len(G)**2) + npt.assert_allclose(b[n], ba[n], atol=epsilon * len(G) ** 2) def test_star(self): "Approximate current-flow betweenness centrality: star" G = nx.Graph() - nx.add_star(G, ['a', 'b', 'c', 'd']) + nx.add_star(G, ["a", "b", "c", "d"]) b = nx.current_flow_betweenness_centrality(G, normalized=True) epsilon = 0.1 ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon) @@ -130,9 +129,10 @@ class TestApproximateFlowBetweennessCentrality: 
"Approximate current-flow betweenness centrality: solvers" G = nx.complete_graph(4) epsilon = 0.1 - for solver in ['full', 'lu', 'cg']: - b = approximate_cfbc(G, normalized=False, solver=solver, - epsilon=0.5 * epsilon) + for solver in ["full", "lu", "cg"]: + b = approximate_cfbc( + G, normalized=False, solver=solver, epsilon=0.5 * epsilon + ) b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} for n in sorted(G): npt.assert_allclose(b[n], b_answer[n], atol=epsilon) @@ -143,7 +143,6 @@ class TestWeightedFlowBetweennessCentrality: class TestEdgeFlowBetweennessCentrality: - def test_K4(self): """Edge flow betweenness centrality: K4""" G = nx.complete_graph(4) @@ -178,4 +177,4 @@ class TestEdgeFlowBetweennessCentrality: b_answer = {(0, 1): 1.5, (1, 2): 2.0, (2, 3): 1.5} for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) - assert almost_equal(v1, v2) + assert almost_equal(v1, v2) diff --git a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py index 823028a1..1ec1b080 100644 --- a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py +++ b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py @@ -1,26 +1,25 @@ import pytest -np = pytest.importorskip('numpy') -scipy = pytest.importorskip('scipy') + +np = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") import networkx as nx from networkx.testing import almost_equal -from networkx import edge_current_flow_betweenness_centrality \ - as edge_current_flow +from networkx import edge_current_flow_betweenness_centrality as edge_current_flow -from networkx import edge_current_flow_betweenness_centrality_subset \ - as edge_current_flow_subset +from networkx import ( + edge_current_flow_betweenness_centrality_subset as edge_current_flow_subset, +) class TestFlowBetweennessCentrality: - def test_K4_normalized(self): """Betweenness centrality: K4""" G = nx.complete_graph(4) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -28,45 +27,40 @@ class TestFlowBetweennessCentrality: def test_K4(self): """Betweenness centrality: K4""" G = nx.complete_graph(4) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) # test weighted network G.add_edge(0, 1, weight=0.5, other=0.3) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True, - weight=None) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True, weight=None + ) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) - b = 
nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True, - weight='other') - b_answer = nx.current_flow_betweenness_centrality(G, normalized=True, weight='other') + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True, weight="other" + ) + b_answer = nx.current_flow_betweenness_centrality( + G, normalized=True, weight="other" + ) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) def test_P4_normalized(self): """Betweenness centrality: P4 normalized""" G = nx.path_graph(4) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -74,10 +68,9 @@ class TestFlowBetweennessCentrality: def test_P4(self): """Betweenness centrality: P4""" G = nx.path_graph(4) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -85,11 +78,10 @@ class TestFlowBetweennessCentrality: def test_star(self): """Betweenness centrality: star """ G = nx.Graph() - nx.add_star(G, ['a', 'b', 'c', 'd']) - b = nx.current_flow_betweenness_centrality_subset(G, - list(G), - list(G), - normalized=True) + nx.add_star(G, ["a", "b", "c", "d"]) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) for n in sorted(G): assert almost_equal(b[n], b_answer[n]) @@ -100,7 +92,6 @@ class TestFlowBetweennessCentrality: class TestEdgeFlowBetweennessCentrality: - def test_K4_normalized(self): """Betweenness centrality: K4""" G = nx.complete_graph(4) @@ -132,8 +123,10 @@ class TestEdgeFlowBetweennessCentrality: v2 = b.get((s, t), b.get((t, s))) assert almost_equal(v1, v2) - b = edge_current_flow_subset(G, list(G), list(G), normalized=False, weight='other') - b_answer = edge_current_flow(G, normalized=False, weight='other') + b = edge_current_flow_subset( + G, list(G), list(G), normalized=False, weight="other" + ) + b_answer = edge_current_flow(G, normalized=False, weight="other") for (s, t), v1 in b_answer.items(): v2 = b.get((s, t), b.get((t, s))) assert almost_equal(v1, v2) diff --git a/networkx/algorithms/centrality/tests/test_current_flow_closeness.py b/networkx/algorithms/centrality/tests/test_current_flow_closeness.py index fa60e600..e6a38949 100644 --- a/networkx/algorithms/centrality/tests/test_current_flow_closeness.py +++ b/networkx/algorithms/centrality/tests/test_current_flow_closeness.py @@ -1,13 +1,13 @@ import pytest -np = pytest.importorskip('numpy') -scipy = pytest.importorskip('scipy') + +np = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") import networkx as nx from networkx.testing import almost_equal class TestFlowClosenessCentrality: - def test_K4(self): """Closeness centrality: K4""" G = nx.complete_graph(4) @@ -27,9 +27,9 @@ class TestFlowClosenessCentrality: def test_star(self): """Closeness centrality: star """ G = nx.Graph() - nx.add_star(G, ['a', 'b', 'c', 'd']) + nx.add_star(G, ["a", "b", "c", "d"]) b = nx.current_flow_closeness_centrality(G) - b_answer = {'a': 1.0 / 3, 'b': 0.6 / 3, 
'c': 0.6 / 3, 'd': 0.6 / 3} + b_answer = {"a": 1.0 / 3, "b": 0.6 / 3, "c": 0.6 / 3, "d": 0.6 / 3} for n in sorted(G): assert almost_equal(b[n], b_answer[n]) diff --git a/networkx/algorithms/centrality/tests/test_degree_centrality.py b/networkx/algorithms/centrality/tests/test_degree_centrality.py index c8e33dde..2b204cfb 100644 --- a/networkx/algorithms/centrality/tests/test_degree_centrality.py +++ b/networkx/algorithms/centrality/tests/test_degree_centrality.py @@ -15,26 +15,26 @@ class TestDegreeCentrality: self.K5 = nx.complete_graph(5) F = nx.Graph() # Florentine families - F.add_edge('Acciaiuoli', 'Medici') - F.add_edge('Castellani', 'Peruzzi') - F.add_edge('Castellani', 'Strozzi') - F.add_edge('Castellani', 'Barbadori') - F.add_edge('Medici', 'Barbadori') - F.add_edge('Medici', 'Ridolfi') - F.add_edge('Medici', 'Tornabuoni') - F.add_edge('Medici', 'Albizzi') - F.add_edge('Medici', 'Salviati') - F.add_edge('Salviati', 'Pazzi') - F.add_edge('Peruzzi', 'Strozzi') - F.add_edge('Peruzzi', 'Bischeri') - F.add_edge('Strozzi', 'Ridolfi') - F.add_edge('Strozzi', 'Bischeri') - F.add_edge('Ridolfi', 'Tornabuoni') - F.add_edge('Tornabuoni', 'Guadagni') - F.add_edge('Albizzi', 'Ginori') - F.add_edge('Albizzi', 'Guadagni') - F.add_edge('Bischeri', 'Guadagni') - F.add_edge('Guadagni', 'Lamberteschi') + F.add_edge("Acciaiuoli", "Medici") + F.add_edge("Castellani", "Peruzzi") + F.add_edge("Castellani", "Strozzi") + F.add_edge("Castellani", "Barbadori") + F.add_edge("Medici", "Barbadori") + F.add_edge("Medici", "Ridolfi") + F.add_edge("Medici", "Tornabuoni") + F.add_edge("Medici", "Albizzi") + F.add_edge("Medici", "Salviati") + F.add_edge("Salviati", "Pazzi") + F.add_edge("Peruzzi", "Strozzi") + F.add_edge("Peruzzi", "Bischeri") + F.add_edge("Strozzi", "Ridolfi") + F.add_edge("Strozzi", "Bischeri") + F.add_edge("Ridolfi", "Tornabuoni") + F.add_edge("Tornabuoni", "Guadagni") + F.add_edge("Albizzi", "Ginori") + F.add_edge("Albizzi", "Guadagni") + F.add_edge("Bischeri", "Guadagni") + F.add_edge("Guadagni", "Lamberteschi") self.F = F G = nx.DiGraph() @@ -62,31 +62,74 @@ class TestDegreeCentrality: def test_degree_centrality_3(self): d = nx.degree_centrality(self.K) - exact = {0: .444, 1: .444, 2: .333, 3: .667, 4: .333, - 5: .556, 6: .556, 7: .333, 8: .222, 9: .111} + exact = { + 0: 0.444, + 1: 0.444, + 2: 0.333, + 3: 0.667, + 4: 0.333, + 5: 0.556, + 6: 0.556, + 7: 0.333, + 8: 0.222, + 9: 0.111, + } for n, dc in d.items(): assert almost_equal(exact[n], float(f"{dc:.3f}")) def test_degree_centrality_4(self): d = nx.degree_centrality(self.F) names = sorted(self.F.nodes()) - dcs = [0.071, 0.214, 0.143, 0.214, 0.214, 0.071, 0.286, - 0.071, 0.429, 0.071, 0.214, 0.214, 0.143, 0.286, 0.214] + dcs = [ + 0.071, + 0.214, + 0.143, + 0.214, + 0.214, + 0.071, + 0.286, + 0.071, + 0.429, + 0.071, + 0.214, + 0.214, + 0.143, + 0.286, + 0.214, + ] exact = dict(zip(names, dcs)) for n, dc in d.items(): assert almost_equal(exact[n], float(f"{dc:.3f}")) def test_indegree_centrality(self): d = nx.in_degree_centrality(self.G) - exact = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, - 5: 0.625, 6: 0.125, 7: 0.125, 8: 0.125} + exact = { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.625, + 6: 0.125, + 7: 0.125, + 8: 0.125, + } for n, dc in d.items(): assert almost_equal(exact[n], dc) def test_outdegree_centrality(self): d = nx.out_degree_centrality(self.G) - exact = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, - 4: 0.125, 5: 0.375, 6: 0.0, 7: 0.0, 8: 0.0} + exact = { + 0: 0.125, + 1: 0.125, + 2: 0.125, + 3: 0.125, + 4: 0.125, + 
5: 0.375, + 6: 0.0, + 7: 0.0, + 8: 0.0, + } for n, dc in d.items(): assert almost_equal(exact[n], dc) diff --git a/networkx/algorithms/centrality/tests/test_dispersion.py b/networkx/algorithms/centrality/tests/test_dispersion.py index 16b25db3..fb27efd4 100644 --- a/networkx/algorithms/centrality/tests/test_dispersion.py +++ b/networkx/algorithms/centrality/tests/test_dispersion.py @@ -3,11 +3,36 @@ import networkx as nx def small_ego_G(): """The sample network from https://arxiv.org/pdf/1310.6753v1.pdf""" - edges = [('a', 'b'), ('a', 'c'), ('b', 'c'), ('b', 'd'), - ('b', 'e'), ('b', 'f'), ('c', 'd'), ('c', 'f'), ('c', 'h'), ('d', 'f'), ('e', 'f'), - ('f', 'h'), ('h', 'j'), ('h', 'k'), ('i', 'j'), ('i', 'k'), ('j', 'k'), ('u', 'a'), - ('u', 'b'), ('u', 'c'), ('u', 'd'), ('u', 'e'), ('u', 'f'), ('u', 'g'), ('u', 'h'), - ('u', 'i'), ('u', 'j'), ('u', 'k')] + edges = [ + ("a", "b"), + ("a", "c"), + ("b", "c"), + ("b", "d"), + ("b", "e"), + ("b", "f"), + ("c", "d"), + ("c", "f"), + ("c", "h"), + ("d", "f"), + ("e", "f"), + ("f", "h"), + ("h", "j"), + ("h", "k"), + ("i", "j"), + ("i", "k"), + ("j", "k"), + ("u", "a"), + ("u", "b"), + ("u", "c"), + ("u", "d"), + ("u", "e"), + ("u", "f"), + ("u", "g"), + ("u", "h"), + ("u", "i"), + ("u", "j"), + ("u", "k"), + ] G = nx.Graph() G.add_edges_from(edges) @@ -15,12 +40,11 @@ def small_ego_G(): class TestDispersion: - def test_article(self): """our algorithm matches article's""" G = small_ego_G() - disp_uh = nx.dispersion(G, 'u', 'h', normalized=False) - disp_ub = nx.dispersion(G, 'u', 'b', normalized=False) + disp_uh = nx.dispersion(G, "u", "h", normalized=False) + disp_ub = nx.dispersion(G, "u", "b", normalized=False) assert disp_uh == 4 assert disp_ub == 1 @@ -28,8 +52,8 @@ class TestDispersion: """there is a result for every node""" G = small_ego_G() disp = nx.dispersion(G) - disp_Gu = nx.dispersion(G, 'u') - disp_uv = nx.dispersion(G, 'u', 'h') + disp_Gu = nx.dispersion(G, "u") + disp_uv = nx.dispersion(G, "u", "h") assert len(disp) == len(G) assert len(disp_Gu) == len(G) - 1 assert type(disp_uv) is float diff --git a/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py b/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py index 27c846d8..a2f72ecd 100644 --- a/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py +++ b/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py @@ -1,7 +1,8 @@ import math import pytest -np = pytest.importorskip('numpy') -scipy = pytest.importorskip('scipy') + +np = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") import networkx as nx @@ -9,7 +10,6 @@ from networkx.testing import almost_equal class TestEigenvectorCentrality: - def test_K5(self): """Eigenvector centrality: K5""" G = nx.complete_graph(5) @@ -53,30 +53,77 @@ class TestEigenvectorCentrality: class TestEigenvectorCentralityDirected: - @classmethod def setup_class(cls): G = nx.DiGraph() - edges = [(1, 2), (1, 3), (2, 4), (3, 2), (3, 5), (4, 2), (4, 5), (4, 6), - (5, 6), (5, 7), (5, 8), (6, 8), (7, 1), (7, 5), - (7, 8), (8, 6), (8, 7)] + edges = [ + (1, 2), + (1, 3), + (2, 4), + (3, 2), + (3, 5), + (4, 2), + (4, 5), + (4, 6), + (5, 6), + (5, 7), + (5, 8), + (6, 8), + (7, 1), + (7, 5), + (7, 8), + (8, 6), + (8, 7), + ] G.add_edges_from(edges, weight=2.0) cls.G = G.reverse() - cls.G.evc = [0.25368793, 0.19576478, 0.32817092, 0.40430835, - 0.48199885, 0.15724483, 0.51346196, 0.32475403] + cls.G.evc = [ + 0.25368793, + 0.19576478, + 0.32817092, + 0.40430835, + 0.48199885, + 0.15724483, + 
0.51346196, + 0.32475403, + ] H = nx.DiGraph() - edges = [(1, 2), (1, 3), (2, 4), (3, 2), (3, 5), (4, 2), (4, 5), (4, 6), - (5, 6), (5, 7), (5, 8), (6, 8), (7, 1), (7, 5), - (7, 8), (8, 6), (8, 7)] + edges = [ + (1, 2), + (1, 3), + (2, 4), + (3, 2), + (3, 5), + (4, 2), + (4, 5), + (4, 6), + (5, 6), + (5, 7), + (5, 8), + (6, 8), + (7, 1), + (7, 5), + (7, 8), + (8, 6), + (8, 7), + ] G.add_edges_from(edges) cls.H = G.reverse() - cls.H.evc = [0.25368793, 0.19576478, 0.32817092, 0.40430835, - 0.48199885, 0.15724483, 0.51346196, 0.32475403] + cls.H.evc = [ + 0.25368793, + 0.19576478, + 0.32817092, + 0.40430835, + 0.48199885, + 0.15724483, + 0.51346196, + 0.32475403, + ] def test_eigenvector_centrality_weighted(self): G = self.G @@ -104,7 +151,6 @@ class TestEigenvectorCentralityDirected: class TestEigenvectorCentralityExceptions: - def test_multigraph(self): with pytest.raises(nx.NetworkXException): e = nx.eigenvector_centrality(nx.MultiGraph()) diff --git a/networkx/algorithms/centrality/tests/test_group.py b/networkx/algorithms/centrality/tests/test_group.py index b4a15565..d2d333d8 100644 --- a/networkx/algorithms/centrality/tests/test_group.py +++ b/networkx/algorithms/centrality/tests/test_group.py @@ -8,15 +8,13 @@ import networkx as nx class TestGroupBetweennessCentrality: - def test_group_betweenness_single_node(self): """ Group betweenness centrality for single node group """ G = nx.path_graph(5) C = [1] - b = nx.group_betweenness_centrality(G, C, - weight=None, normalized=False) + b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False) b_answer = 3.0 assert b == b_answer @@ -27,8 +25,7 @@ class TestGroupBetweennessCentrality: """ G = nx.path_graph(5) C = [1, 3] - b = nx.group_betweenness_centrality(G, C, - weight=None, normalized=True) + b = nx.group_betweenness_centrality(G, C, weight=None, normalized=True) b_answer = 1.0 assert b == b_answer @@ -62,7 +59,6 @@ class TestGroupBetweennessCentrality: class TestGroupClosenessCentrality: - def test_group_closeness_single_node(self): """ Group closeness centrality for a single node group @@ -101,7 +97,6 @@ class TestGroupClosenessCentrality: class TestGroupDegreeCentrality: - def test_group_degree_centrality_single_node(self): """ Group degree centrality for a single node group @@ -118,8 +113,9 @@ class TestGroupDegreeCentrality: """ G = nx.Graph() G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) - G.add_edges_from([(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), - (2, 3), (2, 4), (2, 5)]) + G.add_edges_from( + [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)] + ) d = nx.group_degree_centrality(G, [1, 2]) d_answer = 1 assert d == d_answer @@ -130,8 +126,9 @@ class TestGroupDegreeCentrality: """ G = nx.DiGraph() G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) - G.add_edges_from([(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), - (2, 3), (2, 4), (2, 5)]) + G.add_edges_from( + [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)] + ) d = nx.group_in_degree_centrality(G, [1, 2]) d_answer = 0 assert d == d_answer @@ -142,8 +139,9 @@ class TestGroupDegreeCentrality: """ G = nx.DiGraph() G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) - G.add_edges_from([(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), - (2, 3), (2, 4), (2, 5)]) + G.add_edges_from( + [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)] + ) d = nx.group_out_degree_centrality(G, [1, 2]) d_answer = 1 assert d == d_answer diff --git a/networkx/algorithms/centrality/tests/test_harmonic_centrality.py b/networkx/algorithms/centrality/tests/test_harmonic_centrality.py index 
6b9ee9de..164d2021 100644 --- a/networkx/algorithms/centrality/tests/test_harmonic_centrality.py +++ b/networkx/algorithms/centrality/tests/test_harmonic_centrality.py @@ -19,102 +19,81 @@ class TestClosenessCentrality: cls.T = nx.balanced_tree(r=2, h=2) cls.Gb = nx.DiGraph() - cls.Gb.add_edges_from([(0, 1), (0, 2), (0, 4), (2, 1), - (2, 3), (4, 3)]) + cls.Gb.add_edges_from([(0, 1), (0, 2), (0, 4), (2, 1), (2, 3), (4, 3)]) def test_p3_harmonic(self): c = harmonic_centrality(self.P3) - d = {0: 1.5, - 1: 2, - 2: 1.5} + d = {0: 1.5, 1: 2, 2: 1.5} for n in sorted(self.P3): assert almost_equal(c[n], d[n], places=3) def test_p4_harmonic(self): c = harmonic_centrality(self.P4) - d = {0: 1.8333333, - 1: 2.5, - 2: 2.5, - 3: 1.8333333} + d = {0: 1.8333333, 1: 2.5, 2: 2.5, 3: 1.8333333} for n in sorted(self.P4): assert almost_equal(c[n], d[n], places=3) def test_clique_complete(self): c = harmonic_centrality(self.K5) - d = {0: 4, - 1: 4, - 2: 4, - 3: 4, - 4: 4} + d = {0: 4, 1: 4, 2: 4, 3: 4, 4: 4} for n in sorted(self.P3): assert almost_equal(c[n], d[n], places=3) def test_cycle_C4(self): c = harmonic_centrality(self.C4) - d = {0: 2.5, - 1: 2.5, - 2: 2.5, - 3: 2.5, } + d = { + 0: 2.5, + 1: 2.5, + 2: 2.5, + 3: 2.5, + } for n in sorted(self.C4): assert almost_equal(c[n], d[n], places=3) def test_cycle_C5(self): c = harmonic_centrality(self.C5) - d = {0: 3, - 1: 3, - 2: 3, - 3: 3, - 4: 3, - 5: 4} + d = {0: 3, 1: 3, 2: 3, 3: 3, 4: 3, 5: 4} for n in sorted(self.C5): assert almost_equal(c[n], d[n], places=3) def test_bal_tree(self): c = harmonic_centrality(self.T) - d = {0: 4.0, - 1: 4.1666, - 2: 4.1666, - 3: 2.8333, - 4: 2.8333, - 5: 2.8333, - 6: 2.8333} + d = {0: 4.0, 1: 4.1666, 2: 4.1666, 3: 2.8333, 4: 2.8333, 5: 2.8333, 6: 2.8333} for n in sorted(self.T): assert almost_equal(c[n], d[n], places=3) def test_exampleGraph(self): c = harmonic_centrality(self.Gb) - d = {0: 0, - 1: 2, - 2: 1, - 3: 2.5, - 4: 1} + d = {0: 0, 1: 2, 2: 1, 3: 2.5, 4: 1} for n in sorted(self.Gb): assert almost_equal(c[n], d[n], places=3) def test_weighted_harmonic(self): XG = nx.DiGraph() - XG.add_weighted_edges_from([('a', 'b', 10), ('d', 'c', 5), ('a', 'c', 1), - ('e', 'f', 2), ('f', 'c', 1), ('a', 'f', 3), - ]) - c = harmonic_centrality(XG, distance='weight') - d = {'a': 0, - 'b': 0.1, - 'c': 2.533, - 'd': 0, - 'e': 0, - 'f': 0.83333} + XG.add_weighted_edges_from( + [ + ("a", "b", 10), + ("d", "c", 5), + ("a", "c", 1), + ("e", "f", 2), + ("f", "c", 1), + ("a", "f", 3), + ] + ) + c = harmonic_centrality(XG, distance="weight") + d = {"a": 0, "b": 0.1, "c": 2.533, "d": 0, "e": 0, "f": 0.83333} for n in sorted(XG): assert almost_equal(c[n], d[n], places=3) def test_empty(self): G = nx.DiGraph() - c = harmonic_centrality(G, distance='weight') + c = harmonic_centrality(G, distance="weight") d = {} assert c == d def test_singleton(self): G = nx.DiGraph() G.add_node(0) - c = harmonic_centrality(G, distance='weight') + c = harmonic_centrality(G, distance="weight") d = {0: 0} assert c == d diff --git a/networkx/algorithms/centrality/tests/test_katz_centrality.py b/networkx/algorithms/centrality/tests/test_katz_centrality.py index d995e4bf..7810f519 100644 --- a/networkx/algorithms/centrality/tests/test_katz_centrality.py +++ b/networkx/algorithms/centrality/tests/test_katz_centrality.py @@ -6,7 +6,6 @@ import pytest class TestKatzCentrality: - def test_K5(self): """Katz centrality: K5""" G = nx.complete_graph(5) @@ -25,8 +24,7 @@ class TestKatzCentrality: """Katz centrality: P3""" alpha = 0.1 G = nx.path_graph(3) - b_answer = {0: 
0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} b = nx.katz_centrality(G, alpha) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=4) @@ -45,8 +43,7 @@ class TestKatzCentrality: def test_beta_as_scalar(self): alpha = 0.1 beta = 0.1 - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = nx.path_graph(3) b = nx.katz_centrality(G, alpha, beta) for n in sorted(G): @@ -55,8 +52,7 @@ class TestKatzCentrality: def test_beta_as_dict(self): alpha = 0.1 beta = {0: 1.0, 1: 1.0, 2: 1.0} - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = nx.path_graph(3) b = nx.katz_centrality(G, alpha, beta) for n in sorted(G): @@ -65,18 +61,38 @@ class TestKatzCentrality: def test_multiple_alpha(self): alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] for alpha in alpha_list: - b_answer = {0.1: {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162}, - 0.2: {0: 0.5454545454545454, 1: 0.6363636363636365, - 2: 0.5454545454545454}, - 0.3: {0: 0.5333964609104419, 1: 0.6564879518897746, - 2: 0.5333964609104419}, - 0.4: {0: 0.5232045649263551, 1: 0.6726915834767423, - 2: 0.5232045649263551}, - 0.5: {0: 0.5144957746691622, 1: 0.6859943117075809, - 2: 0.5144957746691622}, - 0.6: {0: 0.5069794004195823, 1: 0.6970966755769258, - 2: 0.5069794004195823}} + b_answer = { + 0.1: { + 0: 0.5598852584152165, + 1: 0.6107839182711449, + 2: 0.5598852584152162, + }, + 0.2: { + 0: 0.5454545454545454, + 1: 0.6363636363636365, + 2: 0.5454545454545454, + }, + 0.3: { + 0: 0.5333964609104419, + 1: 0.6564879518897746, + 2: 0.5333964609104419, + }, + 0.4: { + 0: 0.5232045649263551, + 1: 0.6726915834767423, + 2: 0.5232045649263551, + }, + 0.5: { + 0: 0.5144957746691622, + 1: 0.6859943117075809, + 2: 0.5144957746691622, + }, + 0.6: { + 0: 0.5069794004195823, + 1: 0.6970966755769258, + 2: 0.5069794004195823, + }, + } G = nx.path_graph(3) b = nx.katz_centrality(G, alpha) for n in sorted(G): @@ -99,16 +115,15 @@ class TestKatzCentrality: def test_bad_beta_numbe(self): with pytest.raises(nx.NetworkXException): G = nx.Graph([(0, 1)]) - e = nx.katz_centrality(G, 0.1, beta='foo') + e = nx.katz_centrality(G, 0.1, beta="foo") class TestKatzCentralityNumpy: - @classmethod def setup_class(cls): global np - np = pytest.importorskip('numpy') - scipy = pytest.importorskip('scipy') + np = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") def test_K5(self): """Katz centrality: K5""" @@ -128,8 +143,7 @@ class TestKatzCentralityNumpy: """Katz centrality: P3""" alpha = 0.1 G = nx.path_graph(3) - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} b = nx.katz_centrality_numpy(G, alpha) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=4) @@ -137,8 +151,7 @@ class TestKatzCentralityNumpy: def test_beta_as_scalar(self): alpha = 0.1 beta = 0.1 - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = nx.path_graph(3) b = nx.katz_centrality_numpy(G, alpha, beta) for n in sorted(G): @@ -147,8 +160,7 @@ class TestKatzCentralityNumpy: def 
test_beta_as_dict(self): alpha = 0.1 beta = {0: 1.0, 1: 1.0, 2: 1.0} - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = nx.path_graph(3) b = nx.katz_centrality_numpy(G, alpha, beta) for n in sorted(G): @@ -157,18 +169,38 @@ class TestKatzCentralityNumpy: def test_multiple_alpha(self): alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] for alpha in alpha_list: - b_answer = {0.1: {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162}, - 0.2: {0: 0.5454545454545454, 1: 0.6363636363636365, - 2: 0.5454545454545454}, - 0.3: {0: 0.5333964609104419, 1: 0.6564879518897746, - 2: 0.5333964609104419}, - 0.4: {0: 0.5232045649263551, 1: 0.6726915834767423, - 2: 0.5232045649263551}, - 0.5: {0: 0.5144957746691622, 1: 0.6859943117075809, - 2: 0.5144957746691622}, - 0.6: {0: 0.5069794004195823, 1: 0.6970966755769258, - 2: 0.5069794004195823}} + b_answer = { + 0.1: { + 0: 0.5598852584152165, + 1: 0.6107839182711449, + 2: 0.5598852584152162, + }, + 0.2: { + 0: 0.5454545454545454, + 1: 0.6363636363636365, + 2: 0.5454545454545454, + }, + 0.3: { + 0: 0.5333964609104419, + 1: 0.6564879518897746, + 2: 0.5333964609104419, + }, + 0.4: { + 0: 0.5232045649263551, + 1: 0.6726915834767423, + 2: 0.5232045649263551, + }, + 0.5: { + 0: 0.5144957746691622, + 1: 0.6859943117075809, + 2: 0.5144957746691622, + }, + 0.6: { + 0: 0.5069794004195823, + 1: 0.6970966755769258, + 2: 0.5069794004195823, + }, + } G = nx.path_graph(3) b = nx.katz_centrality_numpy(G, alpha) for n in sorted(G): @@ -191,7 +223,7 @@ class TestKatzCentralityNumpy: def test_bad_beta_numbe(self): with pytest.raises(nx.NetworkXException): G = nx.Graph([(0, 1)]) - e = nx.katz_centrality_numpy(G, 0.1, beta='foo') + e = nx.katz_centrality_numpy(G, 0.1, beta="foo") def test_K5_unweighted(self): """Katz centrality: K5""" @@ -211,8 +243,7 @@ class TestKatzCentralityNumpy: """Katz centrality: P3""" alpha = 0.1 G = nx.path_graph(3) - b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, - 2: 0.5598852584152162} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} b = nx.katz_centrality_numpy(G, alpha, weight=None) for n in sorted(G): assert almost_equal(b[n], b_answer[n], places=4) @@ -222,9 +253,25 @@ class TestKatzCentralityDirected: @classmethod def setup_class(cls): G = nx.DiGraph() - edges = [(1, 2), (1, 3), (2, 4), (3, 2), (3, 5), (4, 2), (4, 5), - (4, 6), (5, 6), (5, 7), (5, 8), (6, 8), (7, 1), (7, 5), - (7, 8), (8, 6), (8, 7)] + edges = [ + (1, 2), + (1, 3), + (2, 4), + (3, 2), + (3, 5), + (4, 2), + (4, 5), + (4, 6), + (5, 6), + (5, 7), + (5, 8), + (6, 8), + (7, 1), + (7, 5), + (7, 8), + (8, 6), + (8, 7), + ] G.add_edges_from(edges, weight=2.0) cls.G = G.reverse() cls.G.alpha = 0.1 @@ -256,49 +303,47 @@ class TestKatzCentralityDirected: def test_katz_centrality_weighted(self): G = self.G alpha = self.G.alpha - p = nx.katz_centrality(G, alpha, weight='weight') + p = nx.katz_centrality(G, alpha, weight="weight") for (a, b) in zip(list(p.values()), self.G.evc): assert almost_equal(a, b) def test_katz_centrality_unweighted(self): H = self.H alpha = self.H.alpha - p = nx.katz_centrality(H, alpha, weight='weight') + p = nx.katz_centrality(H, alpha, weight="weight") for (a, b) in zip(list(p.values()), self.H.evc): assert almost_equal(a, b) class TestKatzCentralityDirectedNumpy(TestKatzCentralityDirected): - @classmethod def setup_class(cls): global np - np = pytest.importorskip('numpy') - scipy = 
pytest.importorskip('scipy') + np = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") def test_katz_centrality_weighted(self): G = self.G alpha = self.G.alpha - p = nx.katz_centrality_numpy(G, alpha, weight='weight') + p = nx.katz_centrality_numpy(G, alpha, weight="weight") for (a, b) in zip(list(p.values()), self.G.evc): assert almost_equal(a, b) def test_katz_centrality_unweighted(self): H = self.H alpha = self.H.alpha - p = nx.katz_centrality_numpy(H, alpha, weight='weight') + p = nx.katz_centrality_numpy(H, alpha, weight="weight") for (a, b) in zip(list(p.values()), self.H.evc): assert almost_equal(a, b) class TestKatzEigenvectorVKatz: - @classmethod def setup_class(cls): global np global eigvals - np = pytest.importorskip('numpy') - scipy = pytest.importorskip('scipy') + np = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") from numpy.linalg import eigvals def test_eigenvector_v_katz_random(self): diff --git a/networkx/algorithms/centrality/tests/test_load_centrality.py b/networkx/algorithms/centrality/tests/test_load_centrality.py index 6eacde61..66d0ea56 100644 --- a/networkx/algorithms/centrality/tests/test_load_centrality.py +++ b/networkx/algorithms/centrality/tests/test_load_centrality.py @@ -3,7 +3,6 @@ from networkx.testing import almost_equal class TestLoadCentrality: - @classmethod def setup_class(cls): @@ -28,8 +27,7 @@ class TestLoadCentrality: cls.C4 = nx.cycle_graph(4) cls.T = nx.balanced_tree(r=2, h=2) cls.Gb = nx.Graph() - cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), - (2, 4), (4, 5), (3, 5)]) + cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) cls.F = nx.florentine_families_graph() cls.LM = nx.les_miserables_graph() cls.D = nx.cycle_graph(3, create_using=nx.DiGraph()) @@ -37,37 +35,27 @@ class TestLoadCentrality: def test_not_strongly_connected(self): b = nx.load_centrality(self.D) - result = {0: 5. / 12, - 1: 1. / 4, - 2: 1. / 12, - 3: 1. 
/ 4, - 4: 0.000} + result = {0: 5.0 / 12, 1: 1.0 / 4, 2: 1.0 / 12, 3: 1.0 / 4, 4: 0.000} for n in sorted(self.D): assert almost_equal(result[n], b[n], places=3) assert almost_equal(result[n], nx.load_centrality(self.D, n), places=3) def test_weighted_load(self): - b = nx.load_centrality(self.G, weight='weight', normalized=False) + b = nx.load_centrality(self.G, weight="weight", normalized=False) for n in sorted(self.G): assert b[n] == self.exact_weighted[n] def test_k5_load(self): G = self.K5 c = nx.load_centrality(G) - d = {0: 0.000, - 1: 0.000, - 2: 0.000, - 3: 0.000, - 4: 0.000} + d = {0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000} for n in sorted(G): assert almost_equal(c[n], d[n], places=3) def test_p3_load(self): G = self.P3 c = nx.load_centrality(G) - d = {0: 0.000, - 1: 1.000, - 2: 0.000} + d = {0: 0.000, 1: 1.000, 2: 0.000} for n in sorted(G): assert almost_equal(c[n], d[n], places=3) c = nx.load_centrality(G, v=1) @@ -78,164 +66,165 @@ class TestLoadCentrality: def test_p2_load(self): G = nx.path_graph(2) c = nx.load_centrality(G) - d = {0: 0.000, - 1: 0.000} + d = {0: 0.000, 1: 0.000} for n in sorted(G): assert almost_equal(c[n], d[n], places=3) def test_krackhardt_load(self): G = self.K c = nx.load_centrality(G) - d = {0: 0.023, - 1: 0.023, - 2: 0.000, - 3: 0.102, - 4: 0.000, - 5: 0.231, - 6: 0.231, - 7: 0.389, - 8: 0.222, - 9: 0.000} + d = { + 0: 0.023, + 1: 0.023, + 2: 0.000, + 3: 0.102, + 4: 0.000, + 5: 0.231, + 6: 0.231, + 7: 0.389, + 8: 0.222, + 9: 0.000, + } for n in sorted(G): assert almost_equal(c[n], d[n], places=3) def test_florentine_families_load(self): G = self.F c = nx.load_centrality(G) - d = {'Acciaiuoli': 0.000, - 'Albizzi': 0.211, - 'Barbadori': 0.093, - 'Bischeri': 0.104, - 'Castellani': 0.055, - 'Ginori': 0.000, - 'Guadagni': 0.251, - 'Lamberteschi': 0.000, - 'Medici': 0.522, - 'Pazzi': 0.000, - 'Peruzzi': 0.022, - 'Ridolfi': 0.117, - 'Salviati': 0.143, - 'Strozzi': 0.106, - 'Tornabuoni': 0.090} + d = { + "Acciaiuoli": 0.000, + "Albizzi": 0.211, + "Barbadori": 0.093, + "Bischeri": 0.104, + "Castellani": 0.055, + "Ginori": 0.000, + "Guadagni": 0.251, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.117, + "Salviati": 0.143, + "Strozzi": 0.106, + "Tornabuoni": 0.090, + } for n in sorted(G): assert almost_equal(c[n], d[n], places=3) def test_les_miserables_load(self): G = self.LM c = nx.load_centrality(G) - d = {'Napoleon': 0.000, - 'Myriel': 0.177, - 'MlleBaptistine': 0.000, - 'MmeMagloire': 0.000, - 'CountessDeLo': 0.000, - 'Geborand': 0.000, - 'Champtercier': 0.000, - 'Cravatte': 0.000, - 'Count': 0.000, - 'OldMan': 0.000, - 'Valjean': 0.567, - 'Labarre': 0.000, - 'Marguerite': 0.000, - 'MmeDeR': 0.000, - 'Isabeau': 0.000, - 'Gervais': 0.000, - 'Listolier': 0.000, - 'Tholomyes': 0.043, - 'Fameuil': 0.000, - 'Blacheville': 0.000, - 'Favourite': 0.000, - 'Dahlia': 0.000, - 'Zephine': 0.000, - 'Fantine': 0.128, - 'MmeThenardier': 0.029, - 'Thenardier': 0.075, - 'Cosette': 0.024, - 'Javert': 0.054, - 'Fauchelevent': 0.026, - 'Bamatabois': 0.008, - 'Perpetue': 0.000, - 'Simplice': 0.009, - 'Scaufflaire': 0.000, - 'Woman1': 0.000, - 'Judge': 0.000, - 'Champmathieu': 0.000, - 'Brevet': 0.000, - 'Chenildieu': 0.000, - 'Cochepaille': 0.000, - 'Pontmercy': 0.007, - 'Boulatruelle': 0.000, - 'Eponine': 0.012, - 'Anzelma': 0.000, - 'Woman2': 0.000, - 'MotherInnocent': 0.000, - 'Gribier': 0.000, - 'MmeBurgon': 0.026, - 'Jondrette': 0.000, - 'Gavroche': 0.164, - 'Gillenormand': 0.021, - 'Magnon': 0.000, - 
'MlleGillenormand': 0.047, - 'MmePontmercy': 0.000, - 'MlleVaubois': 0.000, - 'LtGillenormand': 0.000, - 'Marius': 0.133, - 'BaronessT': 0.000, - 'Mabeuf': 0.028, - 'Enjolras': 0.041, - 'Combeferre': 0.001, - 'Prouvaire': 0.000, - 'Feuilly': 0.001, - 'Courfeyrac': 0.006, - 'Bahorel': 0.002, - 'Bossuet': 0.032, - 'Joly': 0.002, - 'Grantaire': 0.000, - 'MotherPlutarch': 0.000, - 'Gueulemer': 0.005, - 'Babet': 0.005, - 'Claquesous': 0.005, - 'Montparnasse': 0.004, - 'Toussaint': 0.000, - 'Child1': 0.000, - 'Child2': 0.000, - 'Brujon': 0.000, - 'MmeHucheloup': 0.000} + d = { + "Napoleon": 0.000, + "Myriel": 0.177, + "MlleBaptistine": 0.000, + "MmeMagloire": 0.000, + "CountessDeLo": 0.000, + "Geborand": 0.000, + "Champtercier": 0.000, + "Cravatte": 0.000, + "Count": 0.000, + "OldMan": 0.000, + "Valjean": 0.567, + "Labarre": 0.000, + "Marguerite": 0.000, + "MmeDeR": 0.000, + "Isabeau": 0.000, + "Gervais": 0.000, + "Listolier": 0.000, + "Tholomyes": 0.043, + "Fameuil": 0.000, + "Blacheville": 0.000, + "Favourite": 0.000, + "Dahlia": 0.000, + "Zephine": 0.000, + "Fantine": 0.128, + "MmeThenardier": 0.029, + "Thenardier": 0.075, + "Cosette": 0.024, + "Javert": 0.054, + "Fauchelevent": 0.026, + "Bamatabois": 0.008, + "Perpetue": 0.000, + "Simplice": 0.009, + "Scaufflaire": 0.000, + "Woman1": 0.000, + "Judge": 0.000, + "Champmathieu": 0.000, + "Brevet": 0.000, + "Chenildieu": 0.000, + "Cochepaille": 0.000, + "Pontmercy": 0.007, + "Boulatruelle": 0.000, + "Eponine": 0.012, + "Anzelma": 0.000, + "Woman2": 0.000, + "MotherInnocent": 0.000, + "Gribier": 0.000, + "MmeBurgon": 0.026, + "Jondrette": 0.000, + "Gavroche": 0.164, + "Gillenormand": 0.021, + "Magnon": 0.000, + "MlleGillenormand": 0.047, + "MmePontmercy": 0.000, + "MlleVaubois": 0.000, + "LtGillenormand": 0.000, + "Marius": 0.133, + "BaronessT": 0.000, + "Mabeuf": 0.028, + "Enjolras": 0.041, + "Combeferre": 0.001, + "Prouvaire": 0.000, + "Feuilly": 0.001, + "Courfeyrac": 0.006, + "Bahorel": 0.002, + "Bossuet": 0.032, + "Joly": 0.002, + "Grantaire": 0.000, + "MotherPlutarch": 0.000, + "Gueulemer": 0.005, + "Babet": 0.005, + "Claquesous": 0.005, + "Montparnasse": 0.004, + "Toussaint": 0.000, + "Child1": 0.000, + "Child2": 0.000, + "Brujon": 0.000, + "MmeHucheloup": 0.000, + } for n in sorted(G): assert almost_equal(c[n], d[n], places=3) def test_unnormalized_k5_load(self): G = self.K5 c = nx.load_centrality(G, normalized=False) - d = {0: 0.000, - 1: 0.000, - 2: 0.000, - 3: 0.000, - 4: 0.000} + d = {0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000} for n in sorted(G): assert almost_equal(c[n], d[n], places=3) def test_unnormalized_p3_load(self): G = self.P3 c = nx.load_centrality(G, normalized=False) - d = {0: 0.000, - 1: 2.000, - 2: 0.000} + d = {0: 0.000, 1: 2.000, 2: 0.000} for n in sorted(G): assert almost_equal(c[n], d[n], places=3) def test_unnormalized_krackhardt_load(self): G = self.K c = nx.load_centrality(G, normalized=False) - d = {0: 1.667, - 1: 1.667, - 2: 0.000, - 3: 7.333, - 4: 0.000, - 5: 16.667, - 6: 16.667, - 7: 28.000, - 8: 16.000, - 9: 0.000} + d = { + 0: 1.667, + 1: 1.667, + 2: 0.000, + 3: 7.333, + 4: 0.000, + 5: 16.667, + 6: 16.667, + 7: 28.000, + 8: 16.000, + 9: 0.000, + } for n in sorted(G): assert almost_equal(c[n], d[n], places=3) @@ -244,21 +233,23 @@ class TestLoadCentrality: G = self.F c = nx.load_centrality(G, normalized=False) - d = {'Acciaiuoli': 0.000, - 'Albizzi': 38.333, - 'Barbadori': 17.000, - 'Bischeri': 19.000, - 'Castellani': 10.000, - 'Ginori': 0.000, - 'Guadagni': 45.667, - 'Lamberteschi': 0.000, - 'Medici': 
95.000, - 'Pazzi': 0.000, - 'Peruzzi': 4.000, - 'Ridolfi': 21.333, - 'Salviati': 26.000, - 'Strozzi': 19.333, - 'Tornabuoni': 16.333} + d = { + "Acciaiuoli": 0.000, + "Albizzi": 38.333, + "Barbadori": 17.000, + "Bischeri": 19.000, + "Castellani": 10.000, + "Ginori": 0.000, + "Guadagni": 45.667, + "Lamberteschi": 0.000, + "Medici": 95.000, + "Pazzi": 0.000, + "Peruzzi": 4.000, + "Ridolfi": 21.333, + "Salviati": 26.000, + "Strozzi": 19.333, + "Tornabuoni": 16.333, + } for n in sorted(G): assert almost_equal(c[n], d[n], places=3) @@ -294,58 +285,52 @@ class TestLoadCentrality: B = nx.Graph() # ladder_graph(3) B.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) c = nx.load_centrality(B, normalized=False) - d = {0: 1.750, - 1: 1.750, - 2: 6.500, - 3: 6.500, - 4: 1.750, - 5: 1.750} + d = {0: 1.750, 1: 1.750, 2: 6.500, 3: 6.500, 4: 1.750, 5: 1.750} for n in sorted(B): assert almost_equal(c[n], d[n], places=3) def test_c4_edge_load(self): G = self.C4 c = nx.edge_load_centrality(G) - d = {(0, 1): 6.000, - (0, 3): 6.000, - (1, 2): 6.000, - (2, 3): 6.000} + d = {(0, 1): 6.000, (0, 3): 6.000, (1, 2): 6.000, (2, 3): 6.000} for n in G.edges(): assert almost_equal(c[n], d[n], places=3) def test_p4_edge_load(self): G = self.P4 c = nx.edge_load_centrality(G) - d = {(0, 1): 6.000, - (1, 2): 8.000, - (2, 3): 6.000} + d = {(0, 1): 6.000, (1, 2): 8.000, (2, 3): 6.000} for n in G.edges(): assert almost_equal(c[n], d[n], places=3) def test_k5_edge_load(self): G = self.K5 c = nx.edge_load_centrality(G) - d = {(0, 1): 5.000, - (0, 2): 5.000, - (0, 3): 5.000, - (0, 4): 5.000, - (1, 2): 5.000, - (1, 3): 5.000, - (1, 4): 5.000, - (2, 3): 5.000, - (2, 4): 5.000, - (3, 4): 5.000} + d = { + (0, 1): 5.000, + (0, 2): 5.000, + (0, 3): 5.000, + (0, 4): 5.000, + (1, 2): 5.000, + (1, 3): 5.000, + (1, 4): 5.000, + (2, 3): 5.000, + (2, 4): 5.000, + (3, 4): 5.000, + } for n in G.edges(): assert almost_equal(c[n], d[n], places=3) def test_tree_edge_load(self): G = self.T c = nx.edge_load_centrality(G) - d = {(0, 1): 24.000, - (0, 2): 24.000, - (1, 3): 12.000, - (1, 4): 12.000, - (2, 5): 12.000, - (2, 6): 12.000} + d = { + (0, 1): 24.000, + (0, 2): 24.000, + (1, 3): 12.000, + (1, 4): 12.000, + (2, 5): 12.000, + (2, 6): 12.000, + } for n in G.edges(): assert almost_equal(c[n], d[n], places=3) diff --git a/networkx/algorithms/centrality/tests/test_percolation_centrality.py b/networkx/algorithms/centrality/tests/test_percolation_centrality.py index 01ef2cd2..4311edb9 100644 --- a/networkx/algorithms/centrality/tests/test_percolation_centrality.py +++ b/networkx/algorithms/centrality/tests/test_percolation_centrality.py @@ -52,22 +52,23 @@ class TestPercolationCentrality: centrality when all nodes are percolated the same""" # taken from betweenness test test_florentine_families_graph G = nx.florentine_families_graph() - b_answer =\ - {'Acciaiuoli': 0.000, - 'Albizzi': 0.212, - 'Barbadori': 0.093, - 'Bischeri': 0.104, - 'Castellani': 0.055, - 'Ginori': 0.000, - 'Guadagni': 0.255, - 'Lamberteschi': 0.000, - 'Medici': 0.522, - 'Pazzi': 0.000, - 'Peruzzi': 0.022, - 'Ridolfi': 0.114, - 'Salviati': 0.143, - 'Strozzi': 0.103, - 'Tornabuoni': 0.092} + b_answer = { + "Acciaiuoli": 0.000, + "Albizzi": 0.212, + "Barbadori": 0.093, + "Bischeri": 0.104, + "Castellani": 0.055, + "Ginori": 0.000, + "Guadagni": 0.255, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.114, + "Salviati": 0.143, + "Strozzi": 0.103, + "Tornabuoni": 0.092, + } p_states = {k: 1.0 for k, v in 
b_answer.items()} p_answer = nx.percolation_centrality(G, states=p_states) diff --git a/networkx/algorithms/centrality/tests/test_reaching.py b/networkx/algorithms/centrality/tests/test_reaching.py index ef8770cd..7e9a3f89 100644 --- a/networkx/algorithms/centrality/tests/test_reaching.py +++ b/networkx/algorithms/centrality/tests/test_reaching.py @@ -11,19 +11,19 @@ class TestGlobalReachingCentrality: def test_non_positive_weights(self): with pytest.raises(nx.NetworkXError): G = nx.DiGraph() - nx.global_reaching_centrality(G, weight='weight') + nx.global_reaching_centrality(G, weight="weight") def test_negatively_weighted(self): with pytest.raises(nx.NetworkXError): G = nx.Graph() G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)]) - nx.global_reaching_centrality(G, weight='weight') + nx.global_reaching_centrality(G, weight="weight") def test_directed_star(self): G = nx.DiGraph() G.add_weighted_edges_from([(1, 2, 0.5), (1, 3, 0.5)]) grc = nx.global_reaching_centrality - assert grc(G, normalized=False, weight='weight') == 0.5 + assert grc(G, normalized=False, weight="weight") == 0.5 assert grc(G) == 1 def test_undirected_unweighted_star(self): @@ -35,7 +35,7 @@ class TestGlobalReachingCentrality: G = nx.Graph() G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)]) grc = nx.global_reaching_centrality - assert grc(G, normalized=False, weight='weight') == 0.375 + assert grc(G, normalized=False, weight="weight") == 0.375 def test_cycle_directed_unweighted(self): G = nx.DiGraph() @@ -77,7 +77,7 @@ class TestGlobalReachingCentrality: max_local = max(local_reach_ctrs) expected = sum(max_local - lrc for lrc in local_reach_ctrs) / denom grc = nx.global_reaching_centrality - actual = grc(G, normalized=False, weight='weight') + actual = grc(G, normalized=False, weight="weight") assert almost_equal(expected, actual, places=7) @@ -88,13 +88,13 @@ class TestLocalReachingCentrality: with pytest.raises(nx.NetworkXError): G = nx.DiGraph() G.add_weighted_edges_from([(0, 1, 0)]) - nx.local_reaching_centrality(G, 0, weight='weight') + nx.local_reaching_centrality(G, 0, weight="weight") def test_negatively_weighted(self): with pytest.raises(nx.NetworkXError): G = nx.Graph() G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)]) - nx.local_reaching_centrality(G, 0, weight='weight') + nx.local_reaching_centrality(G, 0, weight="weight") def test_undirected_unweighted_star(self): G = nx.star_graph(2) @@ -104,5 +104,7 @@ class TestLocalReachingCentrality: def test_undirected_weighted_star(self): G = nx.Graph() G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)]) - centrality = nx.local_reaching_centrality(G, 1, normalized=False, weight='weight') + centrality = nx.local_reaching_centrality( + G, 1, normalized=False, weight="weight" + ) assert centrality == 1.5 diff --git a/networkx/algorithms/centrality/tests/test_second_order_centrality.py b/networkx/algorithms/centrality/tests/test_second_order_centrality.py index e04a91ff..58eee18c 100644 --- a/networkx/algorithms/centrality/tests/test_second_order_centrality.py +++ b/networkx/algorithms/centrality/tests/test_second_order_centrality.py @@ -3,15 +3,15 @@ Tests for second order centrality. 
""" import pytest -np = pytest.importorskip('numpy') -scipy = pytest.importorskip('scipy') + +np = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") import networkx as nx from networkx.testing import almost_equal class TestSecondOrderCentrality: - def test_empty(self): with pytest.raises(nx.NetworkXException): G = nx.empty_graph() @@ -60,8 +60,7 @@ class TestSecondOrderCentrality: def test_ring_graph(self): """Second order centrality: ring graph, as defined in paper""" G = nx.cycle_graph(5) - b_answer = {0: 4.472, 1: 4.472, 2: 4.472, - 3: 4.472, 4: 4.472} + b_answer = {0: 4.472, 1: 4.472, 2: 4.472, 3: 4.472, 4: 4.472} b = nx.second_order_centrality(G) diff --git a/networkx/algorithms/centrality/tests/test_subgraph.py b/networkx/algorithms/centrality/tests/test_subgraph.py index a7896268..1a5f5c0c 100644 --- a/networkx/algorithms/centrality/tests/test_subgraph.py +++ b/networkx/algorithms/centrality/tests/test_subgraph.py @@ -1,32 +1,42 @@ import pytest -numpy = pytest.importorskip('numpy') -scipy = pytest.importorskip('scipy') + +numpy = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") import networkx as nx from networkx.algorithms.centrality.subgraph_alg import ( estrada_index, communicability_betweenness_centrality, subgraph_centrality, - subgraph_centrality_exp + subgraph_centrality_exp, ) from networkx.testing import almost_equal class TestSubgraph: - def test_subgraph_centrality(self): answer = {0: 1.5430806348152433, 1: 1.5430806348152433} result = subgraph_centrality(nx.path_graph(2)) for k, v in result.items(): assert almost_equal(answer[k], result[k], places=7) - answer1 = {'1': 1.6445956054135658, - 'Albert': 2.4368257358712189, - 'Aric': 2.4368257358712193, - 'Dan': 3.1306328496328168, - 'Franck': 2.3876142275231915} - G1 = nx.Graph([('Franck', 'Aric'), ('Aric', 'Dan'), ('Dan', 'Albert'), - ('Albert', 'Franck'), ('Dan', '1'), ('Franck', 'Albert')]) + answer1 = { + "1": 1.6445956054135658, + "Albert": 2.4368257358712189, + "Aric": 2.4368257358712193, + "Dan": 3.1306328496328168, + "Franck": 2.3876142275231915, + } + G1 = nx.Graph( + [ + ("Franck", "Aric"), + ("Aric", "Dan"), + ("Dan", "Albert"), + ("Albert", "Franck"), + ("Dan", "1"), + ("Franck", "Albert"), + ] + ) result1 = subgraph_centrality(G1) for k, v in result1.items(): assert almost_equal(answer1[k], result1[k], places=7) @@ -45,20 +55,33 @@ class TestSubgraph: comm200_exp = nx.subgraph_centrality_exp(g200) def test_communicability_betweenness_centrality(self): - answer = {0: 0.07017447951484615, 1: 0.71565598701107991, - 2: 0.71565598701107991, 3: 0.07017447951484615} + answer = { + 0: 0.07017447951484615, + 1: 0.71565598701107991, + 2: 0.71565598701107991, + 3: 0.07017447951484615, + } result = communicability_betweenness_centrality(nx.path_graph(4)) for k, v in result.items(): assert almost_equal(answer[k], result[k], places=7) - answer1 = {'1': 0.060039074193949521, - 'Albert': 0.315470761661372, - 'Aric': 0.31547076166137211, - 'Dan': 0.68297778678316201, - 'Franck': 0.21977926617449497} - G1 = nx.Graph([('Franck', 'Aric'), - ('Aric', 'Dan'), ('Dan', 'Albert'), ('Albert', 'Franck'), - ('Dan', '1'), ('Franck', 'Albert')]) + answer1 = { + "1": 0.060039074193949521, + "Albert": 0.315470761661372, + "Aric": 0.31547076166137211, + "Dan": 0.68297778678316201, + "Franck": 0.21977926617449497, + } + G1 = nx.Graph( + [ + ("Franck", "Aric"), + ("Aric", "Dan"), + ("Dan", "Albert"), + ("Albert", "Franck"), + ("Dan", "1"), + ("Franck", "Albert"), + ] + ) result1 = 
communicability_betweenness_centrality(G1) for k, v in result1.items(): assert almost_equal(answer1[k], result1[k], places=7) diff --git a/networkx/algorithms/centrality/tests/test_trophic.py b/networkx/algorithms/centrality/tests/test_trophic.py index c75235ad..176ea410 100644 --- a/networkx/algorithms/centrality/tests/test_trophic.py +++ b/networkx/algorithms/centrality/tests/test_trophic.py @@ -1,7 +1,8 @@ """Test trophic levels, trophic differences and trophic coherence """ import pytest -np = pytest.importorskip('numpy') + +np = pytest.importorskip("numpy") import networkx as nx from networkx.testing import almost_equal @@ -67,13 +68,7 @@ def test_trophic_levels_levine(): expected_y = np.array([1, 2.07906977, 1.46511628, 2.3255814]) assert np.allclose(y, expected_y) - expected_d = { - 1: 1, - 2: 2, - 3: 3.07906977, - 4: 2.46511628, - 5: 3.3255814 - } + expected_d = {1: 1, 2: 2, 3: 3.07906977, 4: 2.46511628, 5: 3.3255814} d = nx.trophic_levels(S2) @@ -158,9 +153,11 @@ def test_trophic_levels_singular_matrix(): G = nx.from_numpy_matrix(matrix, create_using=nx.DiGraph) with pytest.raises(nx.NetworkXError) as e: nx.trophic_levels(G) - msg = "Trophic levels are only defined for graphs where every node " + \ - "has a path from a basal node (basal nodes are nodes with no " + \ - "incoming edges)." + msg = ( + "Trophic levels are only defined for graphs where every node " + + "has a path from a basal node (basal nodes are nodes with no " + + "incoming edges)." + ) assert msg in str(e.value) @@ -170,33 +167,37 @@ def test_trophic_levels_singular_with_basal(): """ G = nx.DiGraph() # a has in-degree zero - G.add_edge('a', 'b') + G.add_edge("a", "b") # b is one level above a, c and d - G.add_edge('c', 'b') - G.add_edge('d', 'b') + G.add_edge("c", "b") + G.add_edge("d", "b") # c and d form a loop, neither are reachable from a - G.add_edge('c', 'd') - G.add_edge('d', 'c') + G.add_edge("c", "d") + G.add_edge("d", "c") with pytest.raises(nx.NetworkXError) as e: nx.trophic_levels(G) - msg = "Trophic levels are only defined for graphs where every node " + \ - "has a path from a basal node (basal nodes are nodes with no " + \ - "incoming edges)." + msg = ( + "Trophic levels are only defined for graphs where every node " + + "has a path from a basal node (basal nodes are nodes with no " + + "incoming edges)." + ) assert msg in str(e.value) # if self-loops are allowed, smaller example: G = nx.DiGraph() - G.add_edge('a', 'b') # a has in-degree zero - G.add_edge('c', 'b') # b is one level above a and c - G.add_edge('c', 'c') # c has a self-loop + G.add_edge("a", "b") # a has in-degree zero + G.add_edge("c", "b") # b is one level above a and c + G.add_edge("c", "c") # c has a self-loop with pytest.raises(nx.NetworkXError) as e: nx.trophic_levels(G) - msg = "Trophic levels are only defined for graphs where every node " + \ - "has a path from a basal node (basal nodes are nodes with no " + \ - "incoming edges)." + msg = ( + "Trophic levels are only defined for graphs where every node " + + "has a path from a basal node (basal nodes are nodes with no " + + "incoming edges)." 
+ ) assert msg in str(e.value) @@ -270,7 +271,6 @@ def test_trophic_incoherence_parameter_no_cannibalism(): assert almost_equal(q, np.std([1, 1.5, 0.5, 0.75, 1.25])) - def test_trophic_incoherence_parameter_cannibalism(): matrix_a = np.array([[0, 1], [0, 0]]) G = nx.from_numpy_matrix(matrix_a, create_using=nx.DiGraph) diff --git a/networkx/algorithms/centrality/tests/test_voterank.py b/networkx/algorithms/centrality/tests/test_voterank.py index 7548a43f..aa653ae9 100644 --- a/networkx/algorithms/centrality/tests/test_voterank.py +++ b/networkx/algorithms/centrality/tests/test_voterank.py @@ -10,15 +10,29 @@ class TestVoteRankCentrality: # Example Graph present in reference paper def test_voterank_centrality_1(self): G = nx.Graph() - G.add_edges_from([(7, 8), (7, 5), (7, 9), (5, 0), (0, 1), (0, 2), - (0, 3), (0, 4), (1, 6), (2, 6), (3, 6), (4, 6)]) + G.add_edges_from( + [ + (7, 8), + (7, 5), + (7, 9), + (5, 0), + (0, 1), + (0, 2), + (0, 3), + (0, 4), + (1, 6), + (2, 6), + (3, 6), + (4, 6), + ] + ) assert [0, 7, 6] == nx.voterank(G) # Graph unit test def test_voterank_centrality_2(self): G = nx.florentine_families_graph() d = nx.voterank(G, 4) - exact = ['Medici', 'Strozzi', 'Guadagni', 'Castellani'] + exact = ["Medici", "Strozzi", "Guadagni", "Castellani"] assert exact == d # DiGraph unit test @@ -31,17 +45,17 @@ class TestVoteRankCentrality: # MultiGraph unit test def test_voterank_centrality_4(self): G = nx.MultiGraph() - G.add_edges_from([(0, 1), (0, 1), (1, 2), - (2, 5), (2, 5), (5, 6), - (5, 6), (2, 4), (4, 3)]) + G.add_edges_from( + [(0, 1), (0, 1), (1, 2), (2, 5), (2, 5), (5, 6), (5, 6), (2, 4), (4, 3)] + ) exact = [2, 1, 5, 4] assert exact == nx.voterank(G) # MultiDiGraph unit test def test_voterank_centrality_5(self): G = nx.MultiDiGraph() - G.add_edges_from([(0, 1), (0, 1), (1, 2), - (2, 5), (2, 5), (5, 6), - (5, 6), (2, 4), (4, 3)]) + G.add_edges_from( + [(0, 1), (0, 1), (1, 2), (2, 5), (2, 5), (5, 6), (5, 6), (2, 4), (4, 3)] + ) exact = [2, 0, 5, 4] assert exact == nx.voterank(G) diff --git a/networkx/algorithms/centrality/trophic.py b/networkx/algorithms/centrality/trophic.py index bcd8508e..7b8ee261 100644 --- a/networkx/algorithms/centrality/trophic.py +++ b/networkx/algorithms/centrality/trophic.py @@ -3,12 +3,11 @@ import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['trophic_levels', 'trophic_differences', - 'trophic_incoherence_parameter'] +__all__ = ["trophic_levels", "trophic_differences", "trophic_incoherence_parameter"] -@not_implemented_for('undirected') -def trophic_levels(G, weight='weight'): +@not_implemented_for("undirected") +def trophic_levels(G, weight="weight"): r"""Compute the trophic levels of nodes. The trophic level of a node $i$ is @@ -44,8 +43,7 @@ def trophic_levels(G, weight='weight'): try: import numpy as np except ImportError as e: - raise ImportError( - "trophic_levels() requires NumPy: http://numpy.org/") from e + raise ImportError("trophic_levels() requires NumPy: http://numpy.org/") from e # find adjacency matrix a = nx.adjacency_matrix(G, weight=weight).T.toarray() @@ -63,9 +61,11 @@ def trophic_levels(G, weight='weight'): n = np.linalg.inv(i - p) except np.linalg.LinAlgError as err: # LinAlgError is raised when there is a non-basal node - msg = "Trophic levels are only defined for graphs where every " + \ - "node has a path from a basal node (basal nodes are nodes " + \ - "with no incoming edges)." 
+ msg = ( + "Trophic levels are only defined for graphs where every " + + "node has a path from a basal node (basal nodes are nodes " + + "with no incoming edges)." + ) raise nx.NetworkXError(msg) from err y = n.sum(axis=1) + 1 @@ -77,16 +77,15 @@ def trophic_levels(G, weight='weight'): levels[node_id] = 1 # all other nodes have levels as calculated - nonzero_node_ids = (node_id for node_id, degree in G.in_degree - if degree != 0) + nonzero_node_ids = (node_id for node_id, degree in G.in_degree if degree != 0) for i, node_id in enumerate(nonzero_node_ids): levels[node_id] = y[i] return levels -@not_implemented_for('undirected') -def trophic_differences(G, weight='weight'): +@not_implemented_for("undirected") +def trophic_differences(G, weight="weight"): r"""Compute the trophic differences of the edges of a directed graph. The trophic difference $x_ij$ for each edge is defined in Johnson et al. @@ -119,8 +118,8 @@ def trophic_differences(G, weight='weight'): return diffs -@not_implemented_for('undirected') -def trophic_incoherence_parameter(G, weight='weight', cannibalism=False): +@not_implemented_for("undirected") +def trophic_incoherence_parameter(G, weight="weight", cannibalism=False): r"""Compute the trophic incoherence parameter of a graph. Trophic coherence is defined as the homogeneity of the distribution of @@ -150,8 +149,8 @@ def trophic_incoherence_parameter(G, weight='weight', cannibalism=False): import numpy as np except ImportError as e: raise ImportError( - "trophic_incoherence_parameter() requires NumPy: " - "http://scipy.org/") from e + "trophic_incoherence_parameter() requires NumPy: " "http://scipy.org/" + ) from e if cannibalism: diffs = trophic_differences(G, weight=weight) diff --git a/networkx/algorithms/centrality/voterank_alg.py b/networkx/algorithms/centrality/voterank_alg.py index 75a0f568..6aab4089 100644 --- a/networkx/algorithms/centrality/voterank_alg.py +++ b/networkx/algorithms/centrality/voterank_alg.py @@ -1,6 +1,6 @@ """Algorithm to select influential nodes in a graph using VoteRank.""" -__all__ = ['voterank'] +__all__ = ["voterank"] def voterank(G, number_of_nodes=None): @@ -62,8 +62,7 @@ def voterank(G, number_of_nodes=None): for n in influential_nodes: voterank[n][0] = 0 # step 3 - select top node - n = max(G.nodes, - key=lambda x: voterank[x][0]) + n = max(G.nodes, key=lambda x: voterank[x][0]) if voterank[n][0] == 0: return influential_nodes influential_nodes.append(n) diff --git a/networkx/algorithms/chains.py b/networkx/algorithms/chains.py index 8796c7fe..a76941fb 100644 --- a/networkx/algorithms/chains.py +++ b/networkx/algorithms/chains.py @@ -4,8 +4,8 @@ import networkx as nx from networkx.utils import not_implemented_for -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def chain_decomposition(G, root=None): """Returns the chain decomposition of a graph. @@ -89,7 +89,7 @@ def chain_decomposition(G, root=None): H = nx.DiGraph() nodes = [] for u, v, d in nx.dfs_labeled_edges(G, source=root): - if d == 'forward': + if d == "forward": # `dfs_labeled_edges()` yields (root, root, 'forward') # if it is beginning the search on a new connected # component. @@ -103,7 +103,7 @@ def chain_decomposition(G, root=None): # `dfs_labeled_edges` considers nontree edges in both # orientations, so we need to not add the edge if it its # other orientation has been added. 
- elif d == 'nontree' and v not in H[u]: + elif d == "nontree" and v not in H[u]: H.add_edge(v, u, nontree=True) else: # Do nothing on 'reverse' edges; we only care about @@ -130,7 +130,7 @@ def chain_decomposition(G, root=None): while v not in visited: yield u, v visited.add(v) - u, v = v, G.nodes[v]['parent'] + u, v = v, G.nodes[v]["parent"] yield u, v # Create a directed version of H that has the DFS edges directed @@ -151,7 +151,7 @@ def chain_decomposition(G, root=None): for u in nodes: visited.add(u) # For each nontree edge going out of node u... - edges = ((u, v) for u, v, d in H.out_edges(u, data='nontree') if d) + edges = ((u, v) for u, v, d in H.out_edges(u, data="nontree") if d) for u, v in edges: # Create the cycle or cycle prefix starting with the # nontree edge. diff --git a/networkx/algorithms/chordal.py b/networkx/algorithms/chordal.py index e2b9a069..f5602a23 100644 --- a/networkx/algorithms/chordal.py +++ b/networkx/algorithms/chordal.py @@ -12,12 +12,14 @@ from networkx.algorithms.components import connected_components from networkx.utils import arbitrary_element, not_implemented_for -__all__ = ['is_chordal', - 'find_induced_nodes', - 'chordal_graph_cliques', - 'chordal_graph_treewidth', - 'NetworkXTreewidthBoundExceeded', - 'complete_to_chordal_graph'] +__all__ = [ + "is_chordal", + "find_induced_nodes", + "chordal_graph_cliques", + "chordal_graph_treewidth", + "NetworkXTreewidthBoundExceeded", + "complete_to_chordal_graph", +] class NetworkXTreewidthBoundExceeded(nx.NetworkXException): @@ -70,9 +72,9 @@ def is_chordal(G): pp. 566–579. """ if G.is_directed(): - raise nx.NetworkXError('Directed graphs not supported') + raise nx.NetworkXError("Directed graphs not supported") if G.is_multigraph(): - raise nx.NetworkXError('Multiply connected graphs not supported.') + raise nx.NetworkXError("Multiply connected graphs not supported.") if len(_find_chordality_breaker(G)) == 0: return True else: @@ -251,7 +253,7 @@ def _is_complete_graph(G): if n < 2: return True e = G.number_of_edges() - max_edges = ((n * (n - 1)) / 2) + max_edges = (n * (n - 1)) / 2 return e == max_edges @@ -302,7 +304,9 @@ def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): # The graph seems to be chordal by now. 
We update the treewidth current_treewidth = max(current_treewidth, len(clique_wanna_be)) if current_treewidth > treewidth_bound: - raise nx.NetworkXTreewidthBoundExceeded(f"treewidth_bound exceeded: {current_treewidth}") + raise nx.NetworkXTreewidthBoundExceeded( + f"treewidth_bound exceeded: {current_treewidth}" + ) else: # sg is not a clique, # look for an edge that is not included in sg @@ -340,7 +344,7 @@ def _connected_chordal_graph_cliques(G): return cliques -@not_implemented_for('directed') +@not_implemented_for("directed") def complete_to_chordal_graph(G): """Return a copy of G completed to a chordal graph @@ -401,8 +405,9 @@ def complete_to_chordal_graph(G): else: # y_weight will be bigger than node weights between y and z y_weight = weight[y] - lower_nodes = [node for node in unnumbered_nodes - if weight[node] < y_weight] + lower_nodes = [ + node for node in unnumbered_nodes if weight[node] < y_weight + ] if nx.has_path(H.subgraph(lower_nodes + [z, y]), y, z): update_nodes.append(y) chords.add((z, y)) diff --git a/networkx/algorithms/clique.py b/networkx/algorithms/clique.py index 1b784e60..f236603f 100644 --- a/networkx/algorithms/clique.py +++ b/networkx/algorithms/clique.py @@ -14,14 +14,21 @@ from itertools import islice import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['find_cliques', 'find_cliques_recursive', 'make_max_clique_graph', - 'make_clique_bipartite', 'graph_clique_number', - 'graph_number_of_cliques', 'node_clique_number', - 'number_of_cliques', 'cliques_containing_node', - 'enumerate_all_cliques'] - - -@not_implemented_for('directed') +__all__ = [ + "find_cliques", + "find_cliques_recursive", + "make_max_clique_graph", + "make_clique_bipartite", + "graph_clique_number", + "graph_number_of_cliques", + "node_clique_number", + "number_of_cliques", + "cliques_containing_node", + "enumerate_all_cliques", +] + + +@not_implemented_for("directed") def enumerate_all_cliques(G): """Returns all cliques in an undirected graph. @@ -84,12 +91,15 @@ def enumerate_all_cliques(G): yield base for i, u in enumerate(cnbrs): # Use generators to reduce memory consumption. - queue.append((chain(base, [u]), - filter(nbrs[u].__contains__, - islice(cnbrs, i + 1, None)))) + queue.append( + ( + chain(base, [u]), + filter(nbrs[u].__contains__, islice(cnbrs, i + 1, None)), + ) + ) -@not_implemented_for('directed') +@not_implemented_for("directed") def find_cliques(G): """Returns all maximal cliques in an undirected graph. 
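[Note on the clique.py hunks above and below: they are pure re-wrapping and quote changes, so find_cliques behaves exactly as its docstring says. As a minimal sketch of that behavior (assuming any networkx build from this era, nothing beyond what the diff shows):]

import networkx as nx

# A 4-clique plus one pendant edge gives exactly two maximal cliques.
G = nx.complete_graph(4)
G.add_edge(3, 4)

# find_cliques yields each maximal clique once, in no guaranteed order,
# so we sort for a stable comparison.
print(sorted(sorted(c) for c in nx.find_cliques(G)))
# [[0, 1, 2, 3], [3, 4]]
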
@@ -468,9 +478,9 @@ def node_clique_number(G, nodes=None, cliques=None): cliques = list(find_cliques(G)) if nodes is None: - nodes = list(G.nodes()) # none, get entire graph + nodes = list(G.nodes()) # none, get entire graph - if not isinstance(nodes, list): # check for a list + if not isinstance(nodes, list): # check for a list v = nodes # assume it is a single value d = max([len(c) for c in cliques if v in c]) @@ -506,9 +516,9 @@ def number_of_cliques(G, nodes=None, cliques=None): cliques = list(find_cliques(G)) if nodes is None: - nodes = list(G.nodes()) # none, get entire graph + nodes = list(G.nodes()) # none, get entire graph - if not isinstance(nodes, list): # check for a list + if not isinstance(nodes, list): # check for a list v = nodes # assume it is a single value numcliq = len([1 for c in cliques if v in c]) @@ -529,9 +539,9 @@ def cliques_containing_node(G, nodes=None, cliques=None): cliques = list(find_cliques(G)) if nodes is None: - nodes = list(G.nodes()) # none, get entire graph + nodes = list(G.nodes()) # none, get entire graph - if not isinstance(nodes, list): # check for a list + if not isinstance(nodes, list): # check for a list v = nodes # assume it is a single value vcliques = [c for c in cliques if v in c] diff --git a/networkx/algorithms/cluster.py b/networkx/algorithms/cluster.py index 1de29050..55637cae 100644 --- a/networkx/algorithms/cluster.py +++ b/networkx/algorithms/cluster.py @@ -6,11 +6,17 @@ from collections import Counter from networkx.utils import not_implemented_for -__all__ = ['triangles', 'average_clustering', 'clustering', 'transitivity', - 'square_clustering', 'generalized_degree'] +__all__ = [ + "triangles", + "average_clustering", + "clustering", + "transitivity", + "square_clustering", + "generalized_degree", +] -@not_implemented_for('directed') +@not_implemented_for("directed") def triangles(G, nodes=None): """Compute the number of triangles. @@ -53,7 +59,7 @@ def triangles(G, nodes=None): return {v: t // 2 for v, d, t, _ in _triangles_and_degree_iter(G, nodes)} -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def _triangles_and_degree_iter(G, nodes=None): """ Return an iterator of (node, degree, triangles, generalized degree). @@ -74,8 +80,8 @@ def _triangles_and_degree_iter(G, nodes=None): yield (v, len(vs), ntriangles, gen_degree) -@not_implemented_for('multigraph') -def _weighted_triangles_and_degree_iter(G, nodes=None, weight='weight'): +@not_implemented_for("multigraph") +def _weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"): """ Return an iterator of (node, degree, weighted_triangles). Used for weighted clustering. @@ -104,12 +110,13 @@ def _weighted_triangles_and_degree_iter(G, nodes=None, weight='weight'): # Only compute the edge weight once, before the inner inner # loop. wij = wt(i, j) - weighted_triangles += sum((wij * wt(j, k) * wt(k, i)) ** (1 / 3) - for k in inbrs & jnbrs) + weighted_triangles += sum( + (wij * wt(j, k) * wt(k, i)) ** (1 / 3) for k in inbrs & jnbrs + ) yield (i, len(inbrs), 2 * weighted_triangles) -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def _directed_triangles_and_degree_iter(G, nodes=None): """ Return an iterator of (node, total_degree, reciprocal_degree, directed_triangles). 
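[Note on the cluster.py hunks: the triangle iterators are only re-wrapped; the counts and the clustering coefficients built from them are unchanged. A small sketch of the user-facing entry points these private iterators feed (the directed value below is derived from the exact formula visible in the clustering() hunk, t / ((dt*(dt-1) - 2*db) * 2), and should be treated as an illustration):]

import networkx as nx

G = nx.complete_graph(4)

# Each node of K4 sits in 3 triangles, and every neighborhood is
# fully connected, so the clustering coefficient is 1.0 everywhere.
print(nx.triangles(G))   # {0: 3, 1: 3, 2: 3, 3: 3}
print(nx.clustering(G))  # {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}

# The directed iterators reformatted above back the same clustering()
# call; for a one-way 3-cycle each node gets t=2, dt=2, db=0 -> 0.5.
D = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
print(nx.clustering(D))  # {0: 0.5, 1: 0.5, 2: 0.5}
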
@@ -127,18 +134,22 @@ def _directed_triangles_and_degree_iter(G, nodes=None): for j in chain(ipreds, isuccs): jpreds = set(G._pred[j]) - {j} jsuccs = set(G._succ[j]) - {j} - directed_triangles += sum(1 for k in - chain((ipreds & jpreds), - (ipreds & jsuccs), - (isuccs & jpreds), - (isuccs & jsuccs))) + directed_triangles += sum( + 1 + for k in chain( + (ipreds & jpreds), + (ipreds & jsuccs), + (isuccs & jpreds), + (isuccs & jsuccs), + ) + ) dtotal = len(ipreds) + len(isuccs) dbidirectional = len(ipreds & isuccs) yield (i, dtotal, dbidirectional, directed_triangles) -@not_implemented_for('multigraph') -def _directed_weighted_triangles_and_degree_iter(G, nodes=None, weight='weight'): +@not_implemented_for("multigraph") +def _directed_weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"): """ Return an iterator of (node, total_degree, reciprocal_degree, directed_weighted_triangles). @@ -163,26 +174,34 @@ def _directed_weighted_triangles_and_degree_iter(G, nodes=None, weight='weight') for j in ipreds: jpreds = set(G._pred[j]) - {j} jsuccs = set(G._succ[j]) - {j} - directed_triangles += sum((wt(j, i) * wt(k, i) * wt(k, j))**(1 / 3) - for k in ipreds & jpreds) - directed_triangles += sum((wt(j, i) * wt(k, i) * wt(j, k))**(1 / 3) - for k in ipreds & jsuccs) - directed_triangles += sum((wt(j, i) * wt(i, k) * wt(k, j))**(1 / 3) - for k in isuccs & jpreds) - directed_triangles += sum((wt(j, i) * wt(i, k) * wt(j, k))**(1 / 3) - for k in isuccs & jsuccs) + directed_triangles += sum( + (wt(j, i) * wt(k, i) * wt(k, j)) ** (1 / 3) for k in ipreds & jpreds + ) + directed_triangles += sum( + (wt(j, i) * wt(k, i) * wt(j, k)) ** (1 / 3) for k in ipreds & jsuccs + ) + directed_triangles += sum( + (wt(j, i) * wt(i, k) * wt(k, j)) ** (1 / 3) for k in isuccs & jpreds + ) + directed_triangles += sum( + (wt(j, i) * wt(i, k) * wt(j, k)) ** (1 / 3) for k in isuccs & jsuccs + ) for j in isuccs: jpreds = set(G._pred[j]) - {j} jsuccs = set(G._succ[j]) - {j} - directed_triangles += sum((wt(i, j) * wt(k, i) * wt(k, j))**(1 / 3) - for k in ipreds & jpreds) - directed_triangles += sum((wt(i, j) * wt(k, i) * wt(j, k))**(1 / 3) - for k in ipreds & jsuccs) - directed_triangles += sum((wt(i, j) * wt(i, k) * wt(k, j))**(1 / 3) - for k in isuccs & jpreds) - directed_triangles += sum((wt(i, j) * wt(i, k) * wt(j, k))**(1 / 3) - for k in isuccs & jsuccs) + directed_triangles += sum( + (wt(i, j) * wt(k, i) * wt(k, j)) ** (1 / 3) for k in ipreds & jpreds + ) + directed_triangles += sum( + (wt(i, j) * wt(k, i) * wt(j, k)) ** (1 / 3) for k in ipreds & jsuccs + ) + directed_triangles += sum( + (wt(i, j) * wt(i, k) * wt(k, j)) ** (1 / 3) for k in isuccs & jpreds + ) + directed_triangles += sum( + (wt(i, j) * wt(i, k) * wt(j, k)) ** (1 / 3) for k in isuccs & jsuccs + ) dtotal = len(ipreds) + len(isuccs) dbidirectional = len(ipreds & isuccs) @@ -331,23 +350,24 @@ def clustering(G, nodes=None, weight=None): """ if G.is_directed(): if weight is not None: - td_iter = _directed_weighted_triangles_and_degree_iter( - G, nodes, weight) - clusterc = {v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2) - for v, dt, db, t in td_iter} + td_iter = _directed_weighted_triangles_and_degree_iter(G, nodes, weight) + clusterc = { + v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2) + for v, dt, db, t in td_iter + } else: td_iter = _directed_triangles_and_degree_iter(G, nodes) - clusterc = {v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2) - for v, dt, db, t in td_iter} + clusterc = { + v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * 
db) * 2) + for v, dt, db, t in td_iter + } else: if weight is not None: td_iter = _weighted_triangles_and_degree_iter(G, nodes, weight) - clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for - v, d, t in td_iter} + clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t in td_iter} else: td_iter = _triangles_and_degree_iter(G, nodes) - clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for - v, d, t, _ in td_iter} + clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t, _ in td_iter} if nodes in G: # Return the value of the sole entry in the dictionary. return clusterc[nodes] @@ -461,7 +481,7 @@ def square_clustering(G, nodes=None): return clustering -@not_implemented_for('directed') +@not_implemented_for("directed") def generalized_degree(G, nodes=None): r""" Compute the generalized degree for nodes. diff --git a/networkx/algorithms/coloring/__init__.py b/networkx/algorithms/coloring/__init__.py index 69b082ff..39381d9f 100644 --- a/networkx/algorithms/coloring/__init__.py +++ b/networkx/algorithms/coloring/__init__.py @@ -1,3 +1,4 @@ from networkx.algorithms.coloring.greedy_coloring import * from networkx.algorithms.coloring.equitable_coloring import equitable_color -__all__ = ['greedy_color', 'equitable_color'] + +__all__ = ["greedy_color", "equitable_color"] diff --git a/networkx/algorithms/coloring/equitable_coloring.py b/networkx/algorithms/coloring/equitable_coloring.py index b62e9cd2..711f4b48 100644 --- a/networkx/algorithms/coloring/equitable_coloring.py +++ b/networkx/algorithms/coloring/equitable_coloring.py @@ -5,7 +5,7 @@ Equitable coloring of graphs with bounded degree. import networkx as nx from collections import defaultdict -__all__ = ['equitable_color'] +__all__ = ["equitable_color"] def is_coloring(G, coloring): @@ -43,7 +43,7 @@ def is_equitable(G, coloring, num_colors=None): elif len(all_set_sizes) == 2: a, b = list(all_set_sizes) return abs(a - b) <= 1 - else: # len(all_set_sizes) > 2: + else: # len(all_set_sizes) > 2: return False @@ -58,13 +58,19 @@ def make_C_from_F(F): def make_N_from_L_C(L, C): nodes = L.keys() colors = C.keys() - return {(node, color): sum(1 for v in L[node] if v in C[color]) - for node in nodes for color in colors} + return { + (node, color): sum(1 for v in L[node] if v in C[color]) + for node in nodes + for color in colors + } def make_H_from_C_N(C, N): - return {(c1, c2): sum(1 for node in C[c1] if N[(node, c2)] == 0) - for c1 in C.keys() for c2 in C.keys()} + return { + (c1, c2): sum(1 for node in C[c1] if N[(node, c2)] == 0) + for c1 in C.keys() + for c2 in C.keys() + } def change_color(u, X, Y, N, H, F, C, L): @@ -127,8 +133,7 @@ def pad_graph(G, num_colors): s += 1 # Complete graph K_p between (imaginary) nodes [n_, ... , n_ + p] - K = nx.relabel_nodes(nx.complete_graph(p), - {idx: idx + n_ for idx in range(p)}) + K = nx.relabel_nodes(nx.complete_graph(p), {idx: idx + n_ for idx in range(p)}) G.add_edges_from(K.edges) return s @@ -161,10 +166,12 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None): # logarithmic factor. 
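    # For reference, the bookkeeping that procedure_P manipulates, as built
    # by the helpers above: F maps node -> color, C maps color -> set of
    # nodes, L is the adjacency list, N[(u, c)] counts u's neighbors colored
    # c, and H[(c1, c2)] counts nodes of color c1 with no neighbor colored
    # c2. The public entry point hides all of this; a hypothetical session
    # (num_colors must exceed the maximum degree, as test_num_colors checks):
    #
    #     >>> import networkx as nx
    #     >>> nx.coloring.equitable_color(nx.cycle_graph(4), num_colors=3)
    #     {0: 2, 1: 1, 2: 2, 3: 0}    # one valid output; labels may vary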
next_layer = [] for k in C.keys(): - if H[(k, pop)] > 0 and \ - k not in A_cal and \ - k not in excluded_colors and \ - k not in marked: + if ( + H[(k, pop)] > 0 + and k not in A_cal + and k not in excluded_colors + and k not in marked + ): next_layer.append(k) for dst in next_layer: @@ -175,7 +182,7 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None): reachable.extend(next_layer) # Variables for the algorithm - b = (len(C) - len(A_cal)) + b = len(C) - len(A_cal) if V_plus in A_cal: # Easy case: V+ is in A_cal @@ -212,8 +219,11 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None): w = v # Finding the solo neighbor of w in X_prime - y_candidates = [node for node in L[w] - if F[node] == X_prime and N[(node, W_1)] == 1] + y_candidates = [ + node + for node in L[w] + if F[node] == X_prime and N[(node, W_1)] == 1 + ] if len(y_candidates) > 0: y = y_candidates[0] @@ -224,16 +234,31 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None): # Move witness from X to V_minus, making the coloring # equitable. - move_witnesses(src_color=X, dst_color=V_minus, - N=N, H=H, F=F, C=C, T_cal=T_cal, L=L) + move_witnesses( + src_color=X, + dst_color=V_minus, + N=N, + H=H, + F=F, + C=C, + T_cal=T_cal, + L=L, + ) # Move y from X_prime to W, making W the correct size. change_color(y, X_prime, W, N=N, H=H, F=F, C=C, L=L) # Then call the procedure on G[B - y] - procedure_P(V_minus=X_prime, V_plus=V_plus, - N=N, H=H, C=C, F=F, L=L, - excluded_colors=excluded_colors.union(A_cal)) + procedure_P( + V_minus=X_prime, + V_plus=V_plus, + N=N, + H=H, + C=C, + F=F, + L=L, + excluded_colors=excluded_colors.union(A_cal), + ) made_equitable = True break @@ -265,10 +290,11 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None): # No need to check for excluded_colors here because # they only exclude colors from A_cal - next_layer = [k for k in C.keys() - if H[(pop, k)] > 0 and - k not in B_cal_prime and - k not in marked] + next_layer = [ + k + for k in C.keys() + if H[(pop, k)] > 0 and k not in B_cal_prime and k not in marked + ] for dst in next_layer: T_cal_prime[pop] = dst @@ -306,34 +332,51 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None): W = F[w] # shift nodes along W, V- - move_witnesses(W, V_minus, - N=N, H=H, F=F, C=C, - T_cal=T_cal, L=L) + move_witnesses( + W, V_minus, N=N, H=H, F=F, C=C, T_cal=T_cal, L=L + ) # shift nodes along V+ to Z - move_witnesses(V_plus, Z, - N=N, H=H, F=F, C=C, - T_cal=T_cal_prime, L=L) + move_witnesses( + V_plus, + Z, + N=N, + H=H, + F=F, + C=C, + T_cal=T_cal_prime, + L=L, + ) # change color of z_1 to W - change_color(z_1, Z, W, - N=N, H=H, F=F, C=C, L=L) + change_color(z_1, Z, W, N=N, H=H, F=F, C=C, L=L) # change color of w to some color in B_cal - W_plus = [k for k in C.keys() - if N[(w, k)] == 0 and - k not in A_cal][0] - change_color(w, W, W_plus, - N=N, H=H, F=F, C=C, L=L) + W_plus = [ + k + for k in C.keys() + if N[(w, k)] == 0 and k not in A_cal + ][0] + change_color(w, W, W_plus, N=N, H=H, F=F, C=C, L=L) # recurse with G[B \cup W*] - excluded_colors.update([ - k for k in C.keys() - if k != W and k not in B_cal_prime - ]) - procedure_P(V_minus=W, V_plus=W_plus, - N=N, H=H, C=C, F=F, L=L, - excluded_colors=excluded_colors) + excluded_colors.update( + [ + k + for k in C.keys() + if k != W and k not in B_cal_prime + ] + ) + procedure_P( + V_minus=W, + V_plus=W_plus, + N=N, + H=H, + C=C, + F=F, + L=L, + excluded_colors=excluded_colors, + ) made_equitable = True break @@ -341,8 +384,10 @@ def 
procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None): if made_equitable: break else: - assert False, "Must find a w which is the solo neighbor " \ - "of two vertices in B_cal_prime." + assert False, ( + "Must find a w which is the solo neighbor " + "of two vertices in B_cal_prime." + ) if made_equitable: break @@ -463,7 +508,6 @@ def equitable_color(G, num_colors): change_color(u, X, Y, N=N, H=H, F=F, C=C, L=L_) # Procedure P - procedure_P(V_minus=X, V_plus=Y, - N=N, H=H, F=F, C=C, L=L_) + procedure_P(V_minus=X, V_plus=Y, N=N, H=H, F=F, C=C, L=L_) return {int_to_nodes[x]: F[x] for x in int_to_nodes} diff --git a/networkx/algorithms/coloring/greedy_coloring.py b/networkx/algorithms/coloring/greedy_coloring.py index c0103eec..293bff0a 100644 --- a/networkx/algorithms/coloring/greedy_coloring.py +++ b/networkx/algorithms/coloring/greedy_coloring.py @@ -9,11 +9,17 @@ from networkx.utils import arbitrary_element from networkx.utils import py_random_state from . import greedy_coloring_with_interchange as _interchange -__all__ = ['greedy_color', 'strategy_connected_sequential', - 'strategy_connected_sequential_bfs', - 'strategy_connected_sequential_dfs', 'strategy_independent_set', - 'strategy_largest_first', 'strategy_random_sequential', - 'strategy_saturation_largest_first', 'strategy_smallest_last'] +__all__ = [ + "greedy_color", + "strategy_connected_sequential", + "strategy_connected_sequential_bfs", + "strategy_connected_sequential_dfs", + "strategy_independent_set", + "strategy_largest_first", + "strategy_random_sequential", + "strategy_saturation_largest_first", + "strategy_smallest_last", +] def strategy_largest_first(G, colors): @@ -65,7 +71,7 @@ def strategy_smallest_last(G, colors): # Build initial degree list (i.e. the bucket queue data structure) degrees = defaultdict(set) # set(), for fast random-access removals - lbound = float('inf') + lbound = float("inf") for node, d in H.degree(): degrees[d].add(node) lbound = min(lbound, d) # Lower bound on min-degree. @@ -148,7 +154,7 @@ def strategy_connected_sequential_bfs(G, colors): ``G`` is a NetworkX graph. ``colors`` is ignored. """ - return strategy_connected_sequential(G, colors, 'bfs') + return strategy_connected_sequential(G, colors, "bfs") def strategy_connected_sequential_dfs(G, colors): @@ -161,10 +167,10 @@ def strategy_connected_sequential_dfs(G, colors): ``G`` is a NetworkX graph. ``colors`` is ignored. """ - return strategy_connected_sequential(G, colors, 'dfs') + return strategy_connected_sequential(G, colors, "dfs") -def strategy_connected_sequential(G, colors, traversal='bfs'): +def strategy_connected_sequential(G, colors, traversal="bfs"): """Returns an iterable over nodes in ``G`` in the order given by a breadth-first or depth-first traversal. @@ -178,13 +184,15 @@ def strategy_connected_sequential(G, colors, traversal='bfs'): ``G`` is a NetworkX graph. ``colors`` is ignored. 
""" - if traversal == 'bfs': + if traversal == "bfs": traverse = nx.bfs_edges - elif traversal == 'dfs': + elif traversal == "dfs": traverse = nx.dfs_edges else: - raise nx.NetworkXError("Please specify one of the strings 'bfs' or" - " 'dfs' for connected sequential ordering") + raise nx.NetworkXError( + "Please specify one of the strings 'bfs' or" + " 'dfs' for connected sequential ordering" + ) for component in nx.connected_components(G): source = arbitrary_element(component) # Yield the source node, then all the nodes in the specified @@ -215,8 +223,9 @@ def strategy_saturation_largest_first(G, colors): else: # Compute the maximum saturation and the set of nodes that # achieve that saturation. - saturation = {v: len(c) for v, c in distinct_colors.items() - if v not in colors} + saturation = { + v: len(c) for v, c in distinct_colors.items() if v not in colors + } # Yield the node with the highest saturation, and break ties by # degree. node = max(saturation, key=lambda v: (saturation[v], G.degree(v))) @@ -229,19 +238,19 @@ def strategy_saturation_largest_first(G, colors): #: Dictionary mapping name of a strategy as a string to the strategy function. STRATEGIES = { - 'largest_first': strategy_largest_first, - 'random_sequential': strategy_random_sequential, - 'smallest_last': strategy_smallest_last, - 'independent_set': strategy_independent_set, - 'connected_sequential_bfs': strategy_connected_sequential_bfs, - 'connected_sequential_dfs': strategy_connected_sequential_dfs, - 'connected_sequential': strategy_connected_sequential, - 'saturation_largest_first': strategy_saturation_largest_first, - 'DSATUR': strategy_saturation_largest_first, + "largest_first": strategy_largest_first, + "random_sequential": strategy_random_sequential, + "smallest_last": strategy_smallest_last, + "independent_set": strategy_independent_set, + "connected_sequential_bfs": strategy_connected_sequential_bfs, + "connected_sequential_dfs": strategy_connected_sequential_dfs, + "connected_sequential": strategy_connected_sequential, + "saturation_largest_first": strategy_saturation_largest_first, + "DSATUR": strategy_saturation_largest_first, } -def greedy_color(G, strategy='largest_first', interchange=False): +def greedy_color(G, strategy="largest_first", interchange=False): """Color a graph using various strategies of greedy graph coloring. Attempts to color a graph using as few colors as possible, where no @@ -325,17 +334,17 @@ def greedy_color(G, strategy='largest_first', interchange=False): # Determine the strategy provided by the caller. strategy = STRATEGIES.get(strategy, strategy) if not callable(strategy): - raise nx.NetworkXError('strategy must be callable or a valid string. ' - f'{strategy} not valid.') + raise nx.NetworkXError( + "strategy must be callable or a valid string. " f"{strategy} not valid." + ) # Perform some validation on the arguments before executing any # strategy functions. 
if interchange: if strategy is strategy_independent_set: - msg = 'interchange cannot be used with independent_set' + msg = "interchange cannot be used with independent_set" raise nx.NetworkXPointlessConcept(msg) if strategy is strategy_saturation_largest_first: - msg = ('interchange cannot be used with' - ' saturation_largest_first') + msg = "interchange cannot be used with" " saturation_largest_first" raise nx.NetworkXPointlessConcept(msg) colors = {} nodes = strategy(G, colors) diff --git a/networkx/algorithms/coloring/greedy_coloring_with_interchange.py b/networkx/algorithms/coloring/greedy_coloring_with_interchange.py index 0d321126..1ef226bb 100644 --- a/networkx/algorithms/coloring/greedy_coloring_with_interchange.py +++ b/networkx/algorithms/coloring/greedy_coloring_with_interchange.py @@ -1,11 +1,11 @@ import itertools -__all__ = ['greedy_coloring_with_interchange'] +__all__ = ["greedy_coloring_with_interchange"] class Node: - __slots__ = ['node_id', 'color', 'adj_list', 'adj_color'] + __slots__ = ["node_id", "color", "adj_list", "adj_color"] def __init__(self, node_id, n): self.node_id = node_id @@ -49,7 +49,7 @@ class Node: class AdjEntry: - __slots__ = ['node_id', 'next', 'mate', 'col_next', 'col_prev'] + __slots__ = ["node_id", "next", "mate", "col_next", "col_prev"] def __init__(self, node_id): self.node_id = node_id @@ -105,8 +105,7 @@ def greedy_coloring_with_interchange(original_graph, nodes): neighbors = graph[node].iter_neighbors() col_used = {graph[adj_node.node_id].color for adj_node in neighbors} col_used.discard(-1) - k1 = next(itertools.dropwhile( - lambda x: x in col_used, itertools.count())) + k1 = next(itertools.dropwhile(lambda x: x in col_used, itertools.count())) # k1 is now the lowest available color if k1 > k: @@ -116,8 +115,7 @@ def greedy_coloring_with_interchange(original_graph, nodes): col2 = -1 while connected and col1 < k: col1 += 1 - neighbor_cols = ( - graph[node].iter_neighbors_color(col1)) + neighbor_cols = graph[node].iter_neighbors_color(col1) col1_adj = [it for it in neighbor_cols] col2 = col1 @@ -129,10 +127,8 @@ def greedy_coloring_with_interchange(original_graph, nodes): while i < len(frontier): search_node = frontier[i] i += 1 - col_opp = ( - col2 if graph[search_node].color == col1 else col1) - neighbor_cols = ( - graph[search_node].iter_neighbors_color(col_opp)) + col_opp = col2 if graph[search_node].color == col1 else col1 + neighbor_cols = graph[search_node].iter_neighbors_color(col_opp) for neighbor in neighbor_cols: if neighbor not in visited: @@ -140,18 +136,24 @@ def greedy_coloring_with_interchange(original_graph, nodes): frontier.append(neighbor) # Search if node is not adj to any col2 vertex - connected = len(visited.intersection( - graph[node].iter_neighbors_color(col2))) > 0 + connected = ( + len( + visited.intersection(graph[node].iter_neighbors_color(col2)) + ) + > 0 + ) # If connected is false then we can swap !!! 
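                # What the swap achieves: `visited` is the Kempe chain of
                # col1/col2-colored nodes reachable from `node`. If that chain
                # never touches a col2-colored neighbor of `node`, exchanging
                # col1 and col2 inside the chain leaves col1 free for `node`,
                # so the greedy pass avoids opening the new color k1. Callers
                # opt in with nx.coloring.greedy_color(G, interchange=True).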
if not connected: # Update all the nodes in the component for search_node in visited: graph[search_node].color = ( - col2 if graph[search_node].color == col1 else col1) + col2 if graph[search_node].color == col1 else col1 + ) col2_adj = graph[search_node].adj_color[col2] - graph[search_node].adj_color[col2] = ( - graph[search_node].adj_color[col1]) + graph[search_node].adj_color[col2] = graph[search_node].adj_color[ + col1 + ] graph[search_node].adj_color[col1] = col2_adj # Update all the neighboring nodes @@ -162,8 +164,7 @@ def greedy_coloring_with_interchange(original_graph, nodes): if graph[adj_node.node_id].color != col_opp: # Direct reference to entry adj_mate = adj_node.mate - graph[adj_node.node_id].clear_color( - adj_mate, col_opp) + graph[adj_node.node_id].clear_color(adj_mate, col_opp) graph[adj_node.node_id].assign_color(adj_mate, col) k1 = col1 diff --git a/networkx/algorithms/coloring/tests/test_coloring.py b/networkx/algorithms/coloring/tests/test_coloring.py index 47cf3be6..2558657e 100644 --- a/networkx/algorithms/coloring/tests/test_coloring.py +++ b/networkx/algorithms/coloring/tests/test_coloring.py @@ -11,32 +11,28 @@ is_equitable = nx.algorithms.coloring.equitable_coloring.is_equitable ALL_STRATEGIES = [ - 'largest_first', - 'random_sequential', - 'smallest_last', - 'independent_set', - 'connected_sequential_bfs', - 'connected_sequential_dfs', - 'connected_sequential', - 'saturation_largest_first', - 'DSATUR', + "largest_first", + "random_sequential", + "smallest_last", + "independent_set", + "connected_sequential_bfs", + "connected_sequential_dfs", + "connected_sequential", + "saturation_largest_first", + "DSATUR", ] # List of strategies where interchange=True results in an error -INTERCHANGE_INVALID = [ - 'independent_set', - 'saturation_largest_first', - 'DSATUR' -] +INTERCHANGE_INVALID = ["independent_set", "saturation_largest_first", "DSATUR"] class TestColoring: def test_basic_cases(self): def check_basic_case(graph_func, n_nodes, strategy, interchange): graph = graph_func() - coloring = nx.coloring.greedy_color(graph, - strategy=strategy, - interchange=interchange) + coloring = nx.coloring.greedy_color( + graph, strategy=strategy, interchange=interchange + ) assert verify_length(coloring, n_nodes) assert verify_coloring(graph, coloring) @@ -50,13 +46,12 @@ class TestColoring: def test_special_cases(self): def check_special_case(strategy, graph_func, interchange, colors): graph = graph_func() - coloring = nx.coloring.greedy_color(graph, - strategy=strategy, - interchange=interchange) - if not hasattr(colors, '__len__'): + coloring = nx.coloring.greedy_color( + graph, strategy=strategy, interchange=interchange + ) + if not hasattr(colors, "__len__"): colors = [colors] - assert any(verify_length(coloring, n_colors) - for n_colors in colors) + assert any(verify_length(coloring, n_colors) for n_colors in colors) assert verify_coloring(graph, coloring) for strategy, arglist in SPECIAL_TEST_CASES.items(): @@ -66,21 +61,27 @@ class TestColoring: def test_interchange_invalid(self): graph = one_node_graph() for strategy in INTERCHANGE_INVALID: - pytest.raises(nx.NetworkXPointlessConcept, - nx.coloring.greedy_color, - graph, strategy=strategy, interchange=True) + pytest.raises( + nx.NetworkXPointlessConcept, + nx.coloring.greedy_color, + graph, + strategy=strategy, + interchange=True, + ) def test_bad_inputs(self): graph = one_node_graph() - pytest.raises(nx.NetworkXError, nx.coloring.greedy_color, - graph, strategy='invalid strategy') + pytest.raises( + nx.NetworkXError, 
+ nx.coloring.greedy_color, + graph, + strategy="invalid strategy", + ) def test_strategy_as_function(self): graph = lf_shc() - colors_1 = nx.coloring.greedy_color(graph, - 'largest_first') - colors_2 = nx.coloring.greedy_color(graph, - nx.coloring.strategy_largest_first) + colors_1 = nx.coloring.greedy_color(graph, "largest_first") + colors_2 = nx.coloring.greedy_color(graph, nx.coloring.strategy_largest_first) assert colors_1 == colors_2 def test_seed_argument(self): @@ -116,8 +117,7 @@ class TestColoring: def test_num_colors(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (0, 3)]) - pytest.raises(nx.NetworkXAlgorithmError, - nx.coloring.equitable_color, G, 2) + pytest.raises(nx.NetworkXAlgorithmError, nx.coloring.equitable_color, G, 2) def test_equitable_color(self): G = nx.fast_gnp_random_graph(n=10, p=0.2, seed=42) @@ -152,13 +152,11 @@ class TestColoring: # Color 0 0: 0, 1: 0, - # Color 1 2: 1, 3: 1, 4: 1, 5: 1, - # Color 2 6: 2, 7: 2, @@ -178,15 +176,12 @@ class TestColoring: L = { 0: [8, 9], 1: [10, 11], - 2: [8], 3: [9], 4: [10, 11], - 5: [8], 6: [9], 7: [10, 11], - 8: [0, 2, 5], 9: [0, 3, 6], 10: [1, 4, 7], @@ -196,15 +191,12 @@ class TestColoring: F = { 0: 0, 1: 0, - 2: 2, 3: 2, 4: 2, - 5: 3, 6: 3, 7: 3, - 8: 1, 9: 1, 10: 1, @@ -226,14 +218,54 @@ class TestColoring: G = nx.Graph() G.add_edges_from( - [(0, 10), (0, 11), (0, 12), (0, 23), (10, 4), (10, 9), - (10, 20), (11, 4), (11, 8), (11, 16), (12, 9), (12, 22), - (12, 23), (23, 7), (1, 17), (1, 18), (1, 19), (1, 24), - (17, 5), (17, 13), (17, 22), (18, 5), (19, 5), (19, 6), - (19, 8), (24, 7), (24, 16), (2, 4), (2, 13), (2, 14), - (2, 15), (4, 6), (13, 5), (13, 21), (14, 6), (14, 15), - (15, 6), (15, 21), (3, 16), (3, 20), (3, 21), (3, 22), - (16, 8), (20, 8), (21, 9), (22, 7)] + [ + (0, 10), + (0, 11), + (0, 12), + (0, 23), + (10, 4), + (10, 9), + (10, 20), + (11, 4), + (11, 8), + (11, 16), + (12, 9), + (12, 22), + (12, 23), + (23, 7), + (1, 17), + (1, 18), + (1, 19), + (1, 24), + (17, 5), + (17, 13), + (17, 22), + (18, 5), + (19, 5), + (19, 6), + (19, 8), + (24, 7), + (24, 16), + (2, 4), + (2, 13), + (2, 14), + (2, 15), + (4, 6), + (13, 5), + (13, 21), + (14, 6), + (14, 15), + (15, 6), + (15, 21), + (3, 16), + (3, 20), + (3, 21), + (3, 22), + (16, 8), + (20, 8), + (21, 9), + (22, 7), + ] ) F = {node: node // s for node in range(num_colors * s)} F[s - 1] = num_colors - 1 @@ -251,30 +283,154 @@ class TestColoring: G = nx.Graph() G.add_edges_from( - [(0, 19), (0, 24), (0, 29), (0, 30), (0, 35), (19, 3), (19, 7), - (19, 9), (19, 15), (19, 21), (19, 24), (19, 30), (19, 38), - (24, 5), (24, 11), (24, 13), (24, 20), (24, 30), (24, 37), - (24, 38), (29, 6), (29, 10), (29, 13), (29, 15), (29, 16), - (29, 17), (29, 20), (29, 26), (30, 6), (30, 10), (30, 15), - (30, 22), (30, 23), (30, 39), (35, 6), (35, 9), (35, 14), - (35, 18), (35, 22), (35, 23), (35, 25), (35, 27), (1, 20), - (1, 26), (1, 31), (1, 34), (1, 38), (20, 4), (20, 8), (20, 14), - (20, 18), (20, 28), (20, 33), (26, 7), (26, 10), (26, 14), - (26, 18), (26, 21), (26, 32), (26, 39), (31, 5), (31, 8), - (31, 13), (31, 16), (31, 17), (31, 21), (31, 25), (31, 27), - (34, 7), (34, 8), (34, 13), (34, 18), (34, 22), (34, 23), - (34, 25), (34, 27), (38, 4), (38, 9), (38, 12), (38, 14), - (38, 21), (38, 27), (2, 3), (2, 18), (2, 21), (2, 28), (2, 32), - (2, 33), (2, 36), (2, 37), (2, 39), (3, 5), (3, 9), (3, 13), - (3, 22), (3, 23), (3, 25), (3, 27), (18, 6), (18, 11), (18, 15), - (18, 39), (21, 4), (21, 10), (21, 14), (21, 36), (28, 6), - (28, 10), (28, 14), (28, 16), (28, 
17), (28, 25), (28, 27), - (32, 5), (32, 10), (32, 12), (32, 16), (32, 17), (32, 22), - (32, 23), (33, 7), (33, 10), (33, 12), (33, 16), (33, 17), - (33, 25), (33, 27), (36, 5), (36, 8), (36, 15), (36, 16), - (36, 17), (36, 25), (36, 27), (37, 5), (37, 11), (37, 15), - (37, 16), (37, 17), (37, 22), (37, 23), (39, 7), (39, 8), - (39, 15), (39, 22), (39, 23)] + [ + (0, 19), + (0, 24), + (0, 29), + (0, 30), + (0, 35), + (19, 3), + (19, 7), + (19, 9), + (19, 15), + (19, 21), + (19, 24), + (19, 30), + (19, 38), + (24, 5), + (24, 11), + (24, 13), + (24, 20), + (24, 30), + (24, 37), + (24, 38), + (29, 6), + (29, 10), + (29, 13), + (29, 15), + (29, 16), + (29, 17), + (29, 20), + (29, 26), + (30, 6), + (30, 10), + (30, 15), + (30, 22), + (30, 23), + (30, 39), + (35, 6), + (35, 9), + (35, 14), + (35, 18), + (35, 22), + (35, 23), + (35, 25), + (35, 27), + (1, 20), + (1, 26), + (1, 31), + (1, 34), + (1, 38), + (20, 4), + (20, 8), + (20, 14), + (20, 18), + (20, 28), + (20, 33), + (26, 7), + (26, 10), + (26, 14), + (26, 18), + (26, 21), + (26, 32), + (26, 39), + (31, 5), + (31, 8), + (31, 13), + (31, 16), + (31, 17), + (31, 21), + (31, 25), + (31, 27), + (34, 7), + (34, 8), + (34, 13), + (34, 18), + (34, 22), + (34, 23), + (34, 25), + (34, 27), + (38, 4), + (38, 9), + (38, 12), + (38, 14), + (38, 21), + (38, 27), + (2, 3), + (2, 18), + (2, 21), + (2, 28), + (2, 32), + (2, 33), + (2, 36), + (2, 37), + (2, 39), + (3, 5), + (3, 9), + (3, 13), + (3, 22), + (3, 23), + (3, 25), + (3, 27), + (18, 6), + (18, 11), + (18, 15), + (18, 39), + (21, 4), + (21, 10), + (21, 14), + (21, 36), + (28, 6), + (28, 10), + (28, 14), + (28, 16), + (28, 17), + (28, 25), + (28, 27), + (32, 5), + (32, 10), + (32, 12), + (32, 16), + (32, 17), + (32, 22), + (32, 23), + (33, 7), + (33, 10), + (33, 12), + (33, 16), + (33, 17), + (33, 25), + (33, 27), + (36, 5), + (36, 8), + (36, 15), + (36, 16), + (36, 17), + (36, 25), + (36, 27), + (37, 5), + (37, 11), + (37, 15), + (37, 16), + (37, 17), + (37, 22), + (37, 23), + (39, 7), + (39, 8), + (39, 15), + (39, 22), + (39, 23), + ] ) F = {node: node // s for node in range(num_colors * s)} F[s - 1] = num_colors - 1 # V- = 0, V+ = num_colors - 1 @@ -318,6 +474,7 @@ def dict_to_sets(colors): return sets + # ############################ Graph Generation ############################ @@ -347,324 +504,268 @@ def three_node_clique(): def disconnected(): graph = nx.Graph() - graph.add_edges_from([ - (1, 2), - (2, 3), - (4, 5), - (5, 6) - ]) + graph.add_edges_from([(1, 2), (2, 3), (4, 5), (5, 6)]) return graph def rs_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4]) - graph.add_edges_from([ - (1, 2), - (2, 3), - (3, 4) - ]) + graph.add_edges_from([(1, 2), (2, 3), (3, 4)]) return graph def slf_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) - graph.add_edges_from([ - (1, 2), - (1, 5), - (1, 6), - (2, 3), - (2, 7), - (3, 4), - (3, 7), - (4, 5), - (4, 6), - (5, 6) - ]) + graph.add_edges_from( + [(1, 2), (1, 5), (1, 6), (2, 3), (2, 7), (3, 4), (3, 7), (4, 5), (4, 6), (5, 6)] + ) return graph def slf_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) - graph.add_edges_from([ - (1, 2), - (1, 3), - (1, 4), - (1, 5), - (2, 3), - (2, 4), - (2, 6), - (5, 7), - (5, 8), - (6, 7), - (6, 8), - (7, 8) - ]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 4), + (2, 6), + (5, 7), + (5, 8), + (6, 7), + (6, 8), + (7, 8), + ] + ) return graph def lf_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6]) - 
graph.add_edges_from([ - (6, 1), - (1, 4), - (4, 3), - (3, 2), - (2, 5) - ]) + graph.add_edges_from([(6, 1), (1, 4), (4, 3), (3, 2), (2, 5)]) return graph def lf_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) - graph.add_edges_from([ - (1, 7), - (1, 6), - (1, 3), - (1, 4), - (7, 2), - (2, 6), - (2, 3), - (2, 5), - (5, 3), - (5, 4), - (4, 3) - ]) + graph.add_edges_from( + [ + (1, 7), + (1, 6), + (1, 3), + (1, 4), + (7, 2), + (2, 6), + (2, 3), + (2, 5), + (5, 3), + (5, 4), + (4, 3), + ] + ) return graph def sl_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6]) - graph.add_edges_from([ - (1, 2), - (1, 3), - (2, 3), - (1, 4), - (2, 5), - (3, 6), - (4, 5), - (4, 6), - (5, 6) - ]) + graph.add_edges_from( + [(1, 2), (1, 3), (2, 3), (1, 4), (2, 5), (3, 6), (4, 5), (4, 6), (5, 6)] + ) return graph def sl_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) - graph.add_edges_from([ - (1, 2), - (1, 3), - (1, 5), - (1, 7), - (2, 3), - (2, 4), - (2, 8), - (8, 4), - (8, 6), - (8, 7), - (7, 5), - (7, 6), - (3, 4), - (4, 6), - (6, 5), - (5, 3) - ]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 5), + (1, 7), + (2, 3), + (2, 4), + (2, 8), + (8, 4), + (8, 6), + (8, 7), + (7, 5), + (7, 6), + (3, 4), + (4, 6), + (6, 5), + (5, 3), + ] + ) return graph def gis_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4]) - graph.add_edges_from([ - (1, 2), - (2, 3), - (3, 4) - ]) + graph.add_edges_from([(1, 2), (2, 3), (3, 4)]) return graph def gis_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6]) - graph.add_edges_from([ - (1, 5), - (2, 5), - (3, 6), - (4, 6), - (5, 6) - ]) + graph.add_edges_from([(1, 5), (2, 5), (3, 6), (4, 6), (5, 6)]) return graph def cs_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5]) - graph.add_edges_from([ - (1, 2), - (1, 5), - (2, 3), - (2, 4), - (2, 5), - (3, 4), - (4, 5) - ]) + graph.add_edges_from([(1, 2), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (4, 5)]) return graph def rsi_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6]) - graph.add_edges_from([ - (1, 2), - (1, 5), - (1, 6), - (2, 3), - (3, 4), - (4, 5), - (4, 6), - (5, 6) - ]) + graph.add_edges_from( + [(1, 2), (1, 5), (1, 6), (2, 3), (3, 4), (4, 5), (4, 6), (5, 6)] + ) return graph def lfi_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) - graph.add_edges_from([ - (1, 2), - (1, 5), - (1, 6), - (2, 3), - (2, 7), - (3, 4), - (3, 7), - (4, 5), - (4, 6), - (5, 6) - ]) + graph.add_edges_from( + [(1, 2), (1, 5), (1, 6), (2, 3), (2, 7), (3, 4), (3, 7), (4, 5), (4, 6), (5, 6)] + ) return graph def lfi_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9]) - graph.add_edges_from([ - (1, 2), - (1, 5), - (1, 6), - (1, 7), - (2, 3), - (2, 8), - (2, 9), - (3, 4), - (3, 8), - (3, 9), - (4, 5), - (4, 6), - (4, 7), - (5, 6) - ]) + graph.add_edges_from( + [ + (1, 2), + (1, 5), + (1, 6), + (1, 7), + (2, 3), + (2, 8), + (2, 9), + (3, 4), + (3, 8), + (3, 9), + (4, 5), + (4, 6), + (4, 7), + (5, 6), + ] + ) return graph def sli_shc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) - graph.add_edges_from([ - (1, 2), - (1, 3), - (1, 5), - (1, 7), - (2, 3), - (2, 6), - (3, 4), - (4, 5), - (4, 6), - (5, 7), - (6, 7) - ]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 5), + (1, 7), + (2, 3), + (2, 6), + (3, 4), + (4, 5), + (4, 6), + (5, 7), + (6, 7), + ] + ) return graph def sli_hc(): graph = nx.Graph() graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9]) - graph.add_edges_from([ - (1, 
2), - (1, 3), - (1, 4), - (1, 5), - (2, 3), - (2, 7), - (2, 8), - (2, 9), - (3, 6), - (3, 7), - (3, 9), - (4, 5), - (4, 6), - (4, 8), - (4, 9), - (5, 6), - (5, 7), - (5, 8), - (6, 7), - (6, 9), - (7, 8), - (8, 9) - ]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 7), + (2, 8), + (2, 9), + (3, 6), + (3, 7), + (3, 9), + (4, 5), + (4, 6), + (4, 8), + (4, 9), + (5, 6), + (5, 7), + (5, 8), + (6, 7), + (6, 9), + (7, 8), + (8, 9), + ] + ) return graph # -------------------------------------------------------------------------- # Basic tests for all strategies # For each basic graph function, specify the number of expected colors. -BASIC_TEST_CASES = {empty_graph: 0, - one_node_graph: 1, - two_node_graph: 2, - disconnected: 2, - three_node_clique: 3} +BASIC_TEST_CASES = { + empty_graph: 0, + one_node_graph: 1, + two_node_graph: 2, + disconnected: 2, + three_node_clique: 3, +} # -------------------------------------------------------------------------- # Special test cases. Each strategy has a list of tuples of the form # (graph function, interchange, valid # of colors) SPECIAL_TEST_CASES = { - 'random_sequential': [ + "random_sequential": [ (rs_shc, False, (2, 3)), (rs_shc, True, 2), - (rsi_shc, True, (3, 4))], - 'saturation_largest_first': [ - (slf_shc, False, (3, 4)), - (slf_hc, False, 4)], - 'largest_first': [ + (rsi_shc, True, (3, 4)), + ], + "saturation_largest_first": [(slf_shc, False, (3, 4)), (slf_hc, False, 4)], + "largest_first": [ (lf_shc, False, (2, 3)), (lf_hc, False, 4), (lf_shc, True, 2), (lf_hc, True, 3), (lfi_shc, True, (3, 4)), - (lfi_hc, True, 4)], - 'smallest_last': [ + (lfi_hc, True, 4), + ], + "smallest_last": [ (sl_shc, False, (3, 4)), (sl_hc, False, 5), (sl_shc, True, 3), (sl_hc, True, 4), (sli_shc, True, (3, 4)), - (sli_hc, True, 5)], - 'independent_set': [ - (gis_shc, False, (2, 3)), - (gis_hc, False, 3)], - 'connected_sequential': [ - (cs_shc, False, (3, 4)), - (cs_shc, True, 3)], - 'connected_sequential_dfs': [ - (cs_shc, False, (3, 4))], + (sli_hc, True, 5), + ], + "independent_set": [(gis_shc, False, (2, 3)), (gis_hc, False, 3)], + "connected_sequential": [(cs_shc, False, (3, 4)), (cs_shc, True, 3)], + "connected_sequential_dfs": [(cs_shc, False, (3, 4))], } @@ -703,9 +804,9 @@ def make_params_from_graph(G, F): H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N) return { - 'N': N, - 'F': F, - 'C': C, - 'H': H, - 'L': L, + "N": N, + "F": F, + "C": C, + "H": H, + "L": L, } diff --git a/networkx/algorithms/communicability_alg.py b/networkx/algorithms/communicability_alg.py index 5f45055a..e4ce2935 100644 --- a/networkx/algorithms/communicability_alg.py +++ b/networkx/algorithms/communicability_alg.py @@ -4,13 +4,14 @@ Communicability. import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['communicability', - 'communicability_exp', - ] +__all__ = [ + "communicability", + "communicability_exp", +] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def communicability(G): r"""Returns communicability between all pairs of nodes in G. 
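Communicability is the matrix exponential of the adjacency matrix, so the eigendecomposition route in communicability() above and the scipy.linalg.expm route in communicability_exp() later in this file should agree. A small cross-check sketch, assuming the public API and an installed SciPy; the two-node graph is illustrative:

    import networkx as nx
    import numpy as np
    from scipy.linalg import expm

    G = nx.path_graph(2)              # a single edge: A = [[0, 1], [1, 0]]
    c = nx.communicability(G)         # dict of dicts, spectral route
    A = nx.to_numpy_array(G)
    # entry (0, 1) of expm(A) is sinh(1), roughly 1.1752
    assert np.isclose(c[0][1], expm(A)[0, 1])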
@@ -68,6 +69,7 @@ def communicability(G): >>> c = nx.communicability(G) """ import numpy + nodelist = list(G) # ordering of nodes in matrix A = nx.to_numpy_array(G, nodelist) # convert to 0-1 matrix @@ -89,8 +91,8 @@ def communicability(G): return c -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def communicability_exp(G): r"""Returns communicability between all pairs of nodes in G. @@ -145,6 +147,7 @@ def communicability_exp(G): >>> c = nx.communicability_exp(G) """ import scipy.linalg + nodelist = list(G) # ordering of nodes in matrix A = nx.to_numpy_array(G, nodelist) # convert to 0-1 matrix diff --git a/networkx/algorithms/community/asyn_fluid.py b/networkx/algorithms/community/asyn_fluid.py index 72f7f7e6..e57078e9 100644 --- a/networkx/algorithms/community/asyn_fluid.py +++ b/networkx/algorithms/community/asyn_fluid.py @@ -7,11 +7,11 @@ from networkx.utils import groups from networkx.utils import not_implemented_for from networkx.utils import py_random_state -__all__ = ['asyn_fluidc'] +__all__ = ["asyn_fluidc"] @py_random_state(3) -@not_implemented_for('directed', 'multigraph') +@not_implemented_for("directed", "multigraph") def asyn_fluidc(G, k, max_iter=100, seed=None): """Returns communities in `G` as detected by Fluid Communities algorithm. @@ -97,23 +97,24 @@ def asyn_fluidc(G, k, max_iter=100, seed=None): com_counter = Counter() # Take into account self vertex community try: - com_counter.update({communities[vertex]: - density[communities[vertex]]}) + com_counter.update({communities[vertex]: density[communities[vertex]]}) except KeyError: pass # Gather neighbour vertex communities for v in G[vertex]: try: - com_counter.update({communities[v]: - density[communities[v]]}) + com_counter.update({communities[v]: density[communities[v]]}) except KeyError: continue # Check which is the community with highest density new_com = -1 if len(com_counter.keys()) > 0: max_freq = max(com_counter.values()) - best_communities = [com for com, freq in com_counter.items() - if (max_freq - freq) < 0.0001] + best_communities = [ + com + for com, freq in com_counter.items() + if (max_freq - freq) < 0.0001 + ] # If actual vertex com in best communities, it is preserved try: if communities[vertex] in best_communities: @@ -129,15 +130,17 @@ def asyn_fluidc(G, k, max_iter=100, seed=None): # Update previous community status try: com_to_numvertices[communities[vertex]] -= 1 - density[communities[vertex]] = max_density / \ - com_to_numvertices[communities[vertex]] + density[communities[vertex]] = ( + max_density / com_to_numvertices[communities[vertex]] + ) except KeyError: pass # Update new community status communities[vertex] = new_com com_to_numvertices[communities[vertex]] += 1 - density[communities[vertex]] = max_density / \ - com_to_numvertices[communities[vertex]] + density[communities[vertex]] = ( + max_density / com_to_numvertices[communities[vertex]] + ) # If maximum iterations reached --> output actual results if iter_count > max_iter: break diff --git a/networkx/algorithms/community/centrality.py b/networkx/algorithms/community/centrality.py index e7a9e2ce..a7f32710 100644 --- a/networkx/algorithms/community/centrality.py +++ b/networkx/algorithms/community/centrality.py @@ -2,7 +2,7 @@ import networkx as nx -__all__ = ['girvan_newman'] +__all__ = ["girvan_newman"] def girvan_newman(G, most_valuable_edge=None): @@ -127,6 +127,7 @@ def girvan_newman(G, most_valuable_edge=None): # If no function is provided 
for computing the most valuable edge, # use the edge betweenness centrality. if most_valuable_edge is None: + def most_valuable_edge(G): """Returns the edge with the highest betweenness centrality in the graph `G`. @@ -136,6 +137,7 @@ def girvan_newman(G, most_valuable_edge=None): # dictionary will never be empty. betweenness = nx.edge_betweenness_centrality(G) return max(betweenness, key=betweenness.get) + # The copy of G here must include the edge weight data. g = G.copy().to_undirected() # Self-loops must be removed because their removal has no effect on diff --git a/networkx/algorithms/community/community_utils.py b/networkx/algorithms/community/community_utils.py index aa7c1032..f06fcf49 100644 --- a/networkx/algorithms/community/community_utils.py +++ b/networkx/algorithms/community/community_utils.py @@ -1,6 +1,6 @@ """Helper functions for community-finding algorithms.""" -__all__ = ['is_partition'] +__all__ = ["is_partition"] def is_partition(G, communities): diff --git a/networkx/algorithms/community/kclique.py b/networkx/algorithms/community/kclique.py index 997cb669..045f47c8 100644 --- a/networkx/algorithms/community/kclique.py +++ b/networkx/algorithms/community/kclique.py @@ -1,7 +1,7 @@ from collections import defaultdict import networkx as nx -__all__ = ['k_clique_communities'] +__all__ = ["k_clique_communities"] def k_clique_communities(G, k, cliques=None): @@ -66,7 +66,7 @@ def k_clique_communities(G, k, cliques=None): # Connected components of clique graph with perc edges # are the percolated cliques for component in nx.connected_components(perc_graph): - yield(frozenset.union(*component)) + yield (frozenset.union(*component)) def _get_adjacent_cliques(clique, membership_dict): diff --git a/networkx/algorithms/community/kernighan_lin.py b/networkx/algorithms/community/kernighan_lin.py index 71a7d222..23233719 100644 --- a/networkx/algorithms/community/kernighan_lin.py +++ b/networkx/algorithms/community/kernighan_lin.py @@ -5,7 +5,7 @@ from itertools import count from networkx.utils import not_implemented_for, py_random_state, BinaryHeap from networkx.algorithms.community.community_utils import is_partition -__all__ = ['kernighan_lin_bisection'] +__all__ = ["kernighan_lin_bisection"] def _kernighan_lin_sweep(edges, side): @@ -38,9 +38,8 @@ def _kernighan_lin_sweep(edges, side): @py_random_state(4) -@not_implemented_for('directed') -def kernighan_lin_bisection(G, partition=None, max_iter=10, weight='weight', - seed=None): +@not_implemented_for("directed") +def kernighan_lin_bisection(G, partition=None, max_iter=10, weight="weight", seed=None): """Partition a graph into two blocks using the Kernighan–Lin algorithm. 
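Between these hunks, a usage sketch of the girvan_newman generator reformatted above; the default most_valuable_edge is the edge-betweenness closure shown in the diff, and the barbell graph here is illustrative:

    import networkx as nx
    from networkx.algorithms.community import girvan_newman

    G = nx.barbell_graph(5, 0)        # two K5 cliques joined by one bridge
    levels = girvan_newman(G)         # yields successively finer partitions
    first = next(levels)              # the bridge has maximal betweenness
    assert {frozenset(c) for c in first} == {
        frozenset(range(5)),
        frozenset(range(5, 10)),
    }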
@@ -100,19 +99,25 @@ def kernighan_lin_bisection(G, partition=None, max_iter=10, weight='weight',
     try:
         A, B = partition
     except (TypeError, ValueError) as e:
-        raise nx.NetworkXError('partition must be two sets') from e
+        raise nx.NetworkXError("partition must be two sets") from e
     if not is_partition(G, (A, B)):
-        raise nx.NetworkXError('partition invalid')
+        raise nx.NetworkXError("partition invalid")
     side = [0] * n
     for a in A:
         side[a] = 1

     if G.is_multigraph():
-        edges = [[(index[u], sum(e.get(weight, 1) for e in d.values()))
-                  for u, d in G[v].items()] for v in labels]
+        edges = [
+            [
+                (index[u], sum(e.get(weight, 1) for e in d.values()))
+                for u, d in G[v].items()
+            ]
+            for v in labels
+        ]
     else:
-        edges = [[(index[u], e.get(weight, 1)) for u, e in G[v].items()]
-                 for v in labels]
+        edges = [
+            [(index[u], e.get(weight, 1)) for u, e in G[v].items()] for v in labels
+        ]

     for i in range(max_iter):
         costs = list(_kernighan_lin_sweep(edges, side))
@@ -120,7 +125,7 @@ def kernighan_lin_bisection(G, partition=None, max_iter=10, weight='weight',
         if min_cost >= 0:
             break

-        for _, _, (u, v) in costs[:min_i + 1]:
+        for _, _, (u, v) in costs[: min_i + 1]:
             side[u] = 1
             side[v] = 0

diff --git a/networkx/algorithms/community/label_propagation.py b/networkx/algorithms/community/label_propagation.py
index 981905ee..5afd43f8 100644
--- a/networkx/algorithms/community/label_propagation.py
+++ b/networkx/algorithms/community/label_propagation.py
@@ -8,7 +8,7 @@ from networkx.utils import groups
 from networkx.utils import not_implemented_for
 from networkx.utils import py_random_state

-__all__ = ['label_propagation_communities', 'asyn_lpa_communities']
+__all__ = ["label_propagation_communities", "asyn_lpa_communities"]


 @py_random_state(2)
@@ -78,13 +78,15 @@ def asyn_lpa_communities(G, weight=None, seed=None):
             # algorithm asynchronous.
             label_freq = Counter()
             for v in G[node]:
-                label_freq.update({labels[v]: G.edges[node, v][weight]
-                                   if weight else 1})
+                label_freq.update(
+                    {labels[v]: G.edges[node, v][weight] if weight else 1}
+                )
             # Choose the label with the highest frequency. If more than 1 label
             # has the highest frequency, choose one randomly.
             max_freq = max(label_freq.values())
-            best_labels = [label for label, freq in label_freq.items()
-                           if freq == max_freq]
+            best_labels = [
+                label for label, freq in label_freq.items() if freq == max_freq
+            ]

             # Continue until all nodes have a majority label
             if labels[node] not in best_labels:
@@ -94,7 +96,7 @@ def asyn_lpa_communities(G, weight=None, seed=None):
     yield from groups(labels).values()


-@not_implemented_for('directed')
+@not_implemented_for("directed")
 def label_propagation_communities(G):
     """Generates community sets determined by label propagation

@@ -160,8 +162,9 @@ def _labeling_complete(labeling, G):

     Nodes with no neighbors are considered complete.
""" - return all(labeling[v] in _most_frequent_labels(v, labeling, G) - for v in G if len(G[v]) > 0) + return all( + labeling[v] in _most_frequent_labels(v, labeling, G) for v in G if len(G[v]) > 0 + ) def _most_frequent_labels(node, labeling, G): diff --git a/networkx/algorithms/community/lukes.py b/networkx/algorithms/community/lukes.py index 73be8614..ea4c12f8 100644 --- a/networkx/algorithms/community/lukes.py +++ b/networkx/algorithms/community/lukes.py @@ -7,13 +7,13 @@ from random import choice import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['lukes_partitioning'] +__all__ = ["lukes_partitioning"] -D_EDGE_W = 'weight' +D_EDGE_W = "weight" D_EDGE_VALUE = 1.0 -D_NODE_W = 'weight' +D_NODE_W = "weight" D_NODE_VALUE = 1 -PKEY = 'partitions' +PKEY = "partitions" CLUSTER_EVAL_CACHE_SIZE = 2048 @@ -25,10 +25,7 @@ def _split_n_from(n: int, min_size_of_first_part: int): yield p1, n - p1 -def lukes_partitioning(G, - max_size: int, - node_weight=None, - edge_weight=None) -> list: +def lukes_partitioning(G, max_size: int, node_weight=None, edge_weight=None) -> list: """Optimal partitioning of a weighted tree using the Lukes algorithm. @@ -76,7 +73,7 @@ def lukes_partitioning(G, """ # First sanity check and tree preparation if not nx.is_tree(G): - raise nx.NotATree('lukes_partitioning works only on trees') + raise nx.NotATree("lukes_partitioning works only on trees") else: if nx.is_directed(G): root = [n for n, d in G.in_degree() if d == 0] @@ -108,22 +105,24 @@ def lukes_partitioning(G, all_n_attr = nx.get_node_attributes(safe_G, node_weight).values() for x in all_n_attr: if not isinstance(x, int): - raise TypeError('lukes_partitioning needs integer ' - f'values for node_weight ({node_weight})') + raise TypeError( + "lukes_partitioning needs integer " + f"values for node_weight ({node_weight})" + ) # SUBROUTINES ----------------------- # these functions are defined here for two reasons: # - brevity: we can leverage global "safe_G" # - caching: signatures are hashable - @not_implemented_for('undirected') + @not_implemented_for("undirected") # this is intended to be called only on t_G def _leaves(gr): for x in gr.nodes: if not nx.descendants(gr, x): yield x - @not_implemented_for('undirected') + @not_implemented_for("undirected") def _a_parent_of_leaves_only(gr): tleaves = set(_leaves(gr)) for n in set(gr.nodes) - tleaves: @@ -132,8 +131,7 @@ def lukes_partitioning(G, @lru_cache(CLUSTER_EVAL_CACHE_SIZE) def _value_of_cluster(cluster: frozenset): - valid_edges = [e for e in safe_G.edges - if e[0] in cluster and e[1] in cluster] + valid_edges = [e for e in safe_G.edges if e[0] in cluster and e[1] in cluster] return sum([safe_G.edges[e][edge_weight] for e in valid_edges]) def _value_of_partition(partition: list): @@ -148,8 +146,7 @@ def lukes_partitioning(G, assert len(ccx) == 1 return ccx[0] - def _concatenate_or_merge(partition_1: list, partition_2: list, - x, i, ref_weigth): + def _concatenate_or_merge(partition_1: list, partition_2: list, x, i, ref_weigth): ccx = _pivot(partition_1, x) cci = _pivot(partition_2, i) @@ -191,15 +188,16 @@ def lukes_partitioning(G, for i_node in x_descendants: for j in range(weight_of_x, max_size + 1): for a, b in _split_n_from(j, weight_of_x): - if a not in t_G.nodes[x_node][PKEY].keys() \ - or b not in t_G.nodes[i_node][PKEY].keys(): + if ( + a not in t_G.nodes[x_node][PKEY].keys() + or b not in t_G.nodes[i_node][PKEY].keys() + ): # it's not possible to form this particular weight sum continue part1 = t_G.nodes[x_node][PKEY][a] part2 
= t_G.nodes[i_node][PKEY][b] - part, value = _concatenate_or_merge(part1, part2, - x_node, i_node, j) + part, value = _concatenate_or_merge(part1, part2, x_node, i_node, j) if j not in bp_buffer.keys() or bp_buffer[j][1] < value: # we annotate in the buffer the best partition for j diff --git a/networkx/algorithms/community/modularity_max.py b/networkx/algorithms/community/modularity_max.py index 380e0b0c..f4ca2466 100644 --- a/networkx/algorithms/community/modularity_max.py +++ b/networkx/algorithms/community/modularity_max.py @@ -8,9 +8,7 @@ from networkx.algorithms.community.quality import modularity from networkx.utils.mapped_queue import MappedQueue -__all__ = [ - 'greedy_modularity_communities', - '_naive_greedy_modularity_communities'] +__all__ = ["greedy_modularity_communities", "_naive_greedy_modularity_communities"] def greedy_modularity_communities(G, weight=None): @@ -49,8 +47,8 @@ def greedy_modularity_communities(G, weight=None): # Count nodes and edges N = len(G.nodes()) - m = sum([d.get('weight', 1) for u, v, d in G.edges(data=True)]) - q0 = 1.0 / (2.0*m) + m = sum([d.get("weight", 1) for u, v, d in G.edges(data=True)]) + q0 = 1.0 / (2.0 * m) # Map node labels to contiguous integers label_for_node = {i: v for i, v in enumerate(G.nodes())} @@ -74,24 +72,19 @@ def greedy_modularity_communities(G, weight=None): # dq_dict[i][j]: dQ for merging community i, j # dq_heap[i][n] : (-dq, i, j) for communitiy i nth largest dQ # H[n]: (-dq, i, j) for community with nth largest max_j(dQ_ij) - a = [k[i]*q0 for i in range(N)] + a = [k[i] * q0 for i in range(N)] dq_dict = { i: { - j: 2*q0 - 2*k[i]*k[j]*q0*q0 - for j in [ - node_for_label[u] - for u in G.neighbors(label_for_node[i])] - if j != i} - for i in range(N)} - dq_heap = [ - MappedQueue([ - (-dq, i, j) - for j, dq in dq_dict[i].items()]) - for i in range(N)] - H = MappedQueue([ - dq_heap[i].h[0] + j: 2 * q0 - 2 * k[i] * k[j] * q0 * q0 + for j in [node_for_label[u] for u in G.neighbors(label_for_node[i])] + if j != i + } for i in range(N) - if len(dq_heap[i]) > 0]) + } + dq_heap = [ + MappedQueue([(-dq, i, j) for j, dq in dq_dict[i].items()]) for i in range(N) + ] + H = MappedQueue([dq_heap[i].h[0] for i in range(N) if len(dq_heap[i]) > 0]) # Merge communities until we can't improve modularity while len(H) > 1: @@ -141,10 +134,10 @@ def greedy_modularity_communities(G, weight=None): if k in both_set: dq_jk = dq_dict[j][k] + dq_dict[i][k] elif k in j_set: - dq_jk = dq_dict[j][k] - 2.0*a[i]*a[k] + dq_jk = dq_dict[j][k] - 2.0 * a[i] * a[k] else: # k in i_set - dq_jk = dq_dict[i][k] - 2.0*a[j]*a[k] + dq_jk = dq_dict[i][k] - 2.0 * a[j] * a[k] # Update rows j and k for row, col in [(j, k), (k, j)]: # Save old value for finding heap index @@ -207,8 +200,8 @@ def greedy_modularity_communities(G, weight=None): a[i] = 0 communities = [ - frozenset([label_for_node[i] for i in c]) - for c in communities.values()] + frozenset([label_for_node[i] for i in c]) for c in communities.values() + ] return sorted(communities, key=len, reverse=True) @@ -245,10 +238,7 @@ def _naive_greedy_modularity_communities(G): # Found new best, save modularity and group indexes new_modularity = trial_modularity to_merge = (i, j, new_modularity - old_modularity) - elif ( - to_merge and - min(i, j) < min(to_merge[0], to_merge[1]) - ): + elif to_merge and min(i, j) < min(to_merge[0], to_merge[1]): # Break ties by choosing pair with lowest min id new_modularity = trial_modularity to_merge = (i, j, new_modularity - old_modularity) diff --git 
a/networkx/algorithms/community/quality.py b/networkx/algorithms/community/quality.py index ce44aecc..a4618f4c 100644 --- a/networkx/algorithms/community/quality.py +++ b/networkx/algorithms/community/quality.py @@ -11,13 +11,14 @@ from networkx import NetworkXError from networkx.utils import not_implemented_for from networkx.algorithms.community.community_utils import is_partition -__all__ = ['coverage', 'modularity', 'performance'] +__all__ = ["coverage", "modularity", "performance"] class NotAPartition(NetworkXError): """Raised if a given collection is not a partition. """ + def __init__(self, G, collection): msg = f"{G} is not a valid partition of the graph {collection}" super().__init__(msg) @@ -52,13 +53,16 @@ def require_partition(func): networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G """ + @wraps(func) def new_func(*args, **kw): # Here we assume that the first two arguments are (G, partition). if not is_partition(*args[:2]): - raise nx.NetworkXError('`partition` is not a valid partition of' - ' the nodes of G') + raise nx.NetworkXError( + "`partition` is not a valid partition of" " the nodes of G" + ) return func(*args, **kw) + return new_func @@ -139,7 +143,7 @@ def inter_community_non_edges(G, partition): return inter_community_edges(nx.complement(G), partition) -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") @require_partition def performance(G, partition): """Returns the performance of a partition. @@ -236,7 +240,7 @@ def coverage(G, partition): return intra_edges / total_edges -def modularity(G, communities, weight='weight'): +def modularity(G, communities, weight="weight"): r"""Returns the modularity of the given partition of the graph. Modularity is defined in [1]_ as diff --git a/networkx/algorithms/community/tests/test_asyn_fluid.py b/networkx/algorithms/community/tests/test_asyn_fluid.py index f25b33a8..258991ae 100644 --- a/networkx/algorithms/community/tests/test_asyn_fluid.py +++ b/networkx/algorithms/community/tests/test_asyn_fluid.py @@ -5,21 +5,21 @@ from networkx.algorithms.community.asyn_fluid import asyn_fluidc def test_exceptions(): test = Graph() - test.add_node('a') - pytest.raises(NetworkXError, asyn_fluidc, test, 'hi') + test.add_node("a") + pytest.raises(NetworkXError, asyn_fluidc, test, "hi") pytest.raises(NetworkXError, asyn_fluidc, test, -1) pytest.raises(NetworkXError, asyn_fluidc, test, 3) - test.add_node('b') + test.add_node("b") pytest.raises(NetworkXError, asyn_fluidc, test, 1) def test_single_node(): test = Graph() - test.add_node('a') + test.add_node("a") # ground truth - ground_truth = {frozenset(['a'])} + ground_truth = {frozenset(["a"])} communities = asyn_fluidc(test, 1) result = {frozenset(c) for c in communities} @@ -29,10 +29,10 @@ def test_single_node(): def test_two_nodes(): test = Graph() - test.add_edge('a', 'b') + test.add_edge("a", "b") # ground truth - ground_truth = {frozenset(['a']), frozenset(['b'])} + ground_truth = {frozenset(["a"]), frozenset(["b"])} communities = asyn_fluidc(test, 2) result = {frozenset(c) for c in communities} @@ -43,21 +43,20 @@ def test_two_clique_communities(): test = Graph() # c1 - test.add_edge('a', 'b') - test.add_edge('a', 'c') - test.add_edge('b', 'c') + test.add_edge("a", "b") + test.add_edge("a", "c") + test.add_edge("b", "c") # connection - test.add_edge('c', 'd') + test.add_edge("c", "d") # c2 - test.add_edge('d', 'e') - test.add_edge('d', 'f') - test.add_edge('f', 'e') + test.add_edge("d", "e") + test.add_edge("d", "f") + 
test.add_edge("f", "e") # ground truth - ground_truth = {frozenset(['a', 'c', 'b']), - frozenset(['e', 'd', 'f'])} + ground_truth = {frozenset(["a", "c", "b"]), frozenset(["e", "d", "f"])} communities = asyn_fluidc(test, 2, seed=7) result = {frozenset(c) for c in communities} @@ -68,58 +67,60 @@ def test_five_clique_ring(): test = Graph() # c1 - test.add_edge('1a', '1b') - test.add_edge('1a', '1c') - test.add_edge('1a', '1d') - test.add_edge('1b', '1c') - test.add_edge('1b', '1d') - test.add_edge('1c', '1d') + test.add_edge("1a", "1b") + test.add_edge("1a", "1c") + test.add_edge("1a", "1d") + test.add_edge("1b", "1c") + test.add_edge("1b", "1d") + test.add_edge("1c", "1d") # c2 - test.add_edge('2a', '2b') - test.add_edge('2a', '2c') - test.add_edge('2a', '2d') - test.add_edge('2b', '2c') - test.add_edge('2b', '2d') - test.add_edge('2c', '2d') + test.add_edge("2a", "2b") + test.add_edge("2a", "2c") + test.add_edge("2a", "2d") + test.add_edge("2b", "2c") + test.add_edge("2b", "2d") + test.add_edge("2c", "2d") # c3 - test.add_edge('3a', '3b') - test.add_edge('3a', '3c') - test.add_edge('3a', '3d') - test.add_edge('3b', '3c') - test.add_edge('3b', '3d') - test.add_edge('3c', '3d') + test.add_edge("3a", "3b") + test.add_edge("3a", "3c") + test.add_edge("3a", "3d") + test.add_edge("3b", "3c") + test.add_edge("3b", "3d") + test.add_edge("3c", "3d") # c4 - test.add_edge('4a', '4b') - test.add_edge('4a', '4c') - test.add_edge('4a', '4d') - test.add_edge('4b', '4c') - test.add_edge('4b', '4d') - test.add_edge('4c', '4d') + test.add_edge("4a", "4b") + test.add_edge("4a", "4c") + test.add_edge("4a", "4d") + test.add_edge("4b", "4c") + test.add_edge("4b", "4d") + test.add_edge("4c", "4d") # c5 - test.add_edge('5a', '5b') - test.add_edge('5a', '5c') - test.add_edge('5a', '5d') - test.add_edge('5b', '5c') - test.add_edge('5b', '5d') - test.add_edge('5c', '5d') + test.add_edge("5a", "5b") + test.add_edge("5a", "5c") + test.add_edge("5a", "5d") + test.add_edge("5b", "5c") + test.add_edge("5b", "5d") + test.add_edge("5c", "5d") # connections - test.add_edge('1a', '2c') - test.add_edge('2a', '3c') - test.add_edge('3a', '4c') - test.add_edge('4a', '5c') - test.add_edge('5a', '1c') + test.add_edge("1a", "2c") + test.add_edge("2a", "3c") + test.add_edge("3a", "4c") + test.add_edge("4a", "5c") + test.add_edge("5a", "1c") # ground truth - ground_truth = {frozenset(['1a', '1b', '1c', '1d']), - frozenset(['2a', '2b', '2c', '2d']), - frozenset(['3a', '3b', '3c', '3d']), - frozenset(['4a', '4b', '4c', '4d']), - frozenset(['5a', '5b', '5c', '5d'])} + ground_truth = { + frozenset(["1a", "1b", "1c", "1d"]), + frozenset(["2a", "2b", "2c", "2d"]), + frozenset(["3a", "3b", "3c", "3d"]), + frozenset(["4a", "4b", "4c", "4d"]), + frozenset(["5a", "5b", "5c", "5d"]), + } communities = asyn_fluidc(test, 5, seed=9) result = {frozenset(c) for c in communities} diff --git a/networkx/algorithms/community/tests/test_centrality.py b/networkx/algorithms/community/tests/test_centrality.py index 033fa729..5e710887 100644 --- a/networkx/algorithms/community/tests/test_centrality.py +++ b/networkx/algorithms/community/tests/test_centrality.py @@ -43,8 +43,9 @@ class TestGirvanNewman: validate_communities(communities[0], [{0, 1}, {2, 3}]) # After the next, we get the graph .-. . ., but there are two # symmetric possible versions. 
- validate_possible_communities(communities[1], [{0}, {1}, {2, 3}], - [{0, 1}, {2}, {3}]) + validate_possible_communities( + communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}] + ) # After the last removal, we always get the empty graph. validate_communities(communities[2], [{0}, {1}, {2}, {3}]) @@ -53,8 +54,9 @@ class TestGirvanNewman: communities = list(girvan_newman(G)) assert len(communities) == 3 validate_communities(communities[0], [{0, 1}, {2, 3}]) - validate_possible_communities(communities[1], [{0}, {1}, {2, 3}], - [{0, 1}, {2}, {3}]) + validate_possible_communities( + communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}] + ) validate_communities(communities[2], [{0}, {1}, {2}, {3}]) def test_selfloops(self): @@ -64,8 +66,9 @@ class TestGirvanNewman: communities = list(girvan_newman(G)) assert len(communities) == 3 validate_communities(communities[0], [{0, 1}, {2, 3}]) - validate_possible_communities(communities[1], [{0}, {1}, {2, 3}], - [{0, 1}, {2}, {3}]) + validate_possible_communities( + communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}] + ) validate_communities(communities[2], [{0}, {1}, {2}, {3}]) def test_most_valuable_edge(self): @@ -73,7 +76,9 @@ class TestGirvanNewman: G.add_weighted_edges_from([(0, 1, 3), (1, 2, 2), (2, 3, 1)]) # Let the most valuable edge be the one with the highest weight. - def heaviest(G): return max(G.edges(data='weight'), key=itemgetter(2))[:2] + def heaviest(G): + return max(G.edges(data="weight"), key=itemgetter(2))[:2] + communities = list(girvan_newman(G, heaviest)) assert len(communities) == 3 validate_communities(communities[0], [{0}, {1, 2, 3}]) diff --git a/networkx/algorithms/community/tests/test_kclique.py b/networkx/algorithms/community/tests/test_kclique.py index bda69dea..ffac175d 100644 --- a/networkx/algorithms/community/tests/test_kclique.py +++ b/networkx/algorithms/community/tests/test_kclique.py @@ -25,7 +25,6 @@ def test_isolated_K5(): class TestZacharyKarateClub: - def setup(self): self.G = nx.karate_club_graph() @@ -39,16 +38,44 @@ class TestZacharyKarateClub: self._check_communities(2, expected) def test_k3(self): - comm1 = [0, 1, 2, 3, 7, 8, 12, 13, 14, 15, 17, 18, 19, 20, 21, 22, 23, - 26, 27, 28, 29, 30, 31, 32, 33] + comm1 = [ + 0, + 1, + 2, + 3, + 7, + 8, + 12, + 13, + 14, + 15, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + ] comm2 = [0, 4, 5, 6, 10, 16] comm3 = [24, 25, 31] expected = {frozenset(comm1), frozenset(comm2), frozenset(comm3)} self._check_communities(3, expected) def test_k4(self): - expected = {frozenset([0, 1, 2, 3, 7, 13]), frozenset([8, 32, 30, 33]), - frozenset([32, 33, 29, 23])} + expected = { + frozenset([0, 1, 2, 3, 7, 13]), + frozenset([8, 32, 30, 33]), + frozenset([32, 33, 29, 23]), + } self._check_communities(4, expected) def test_k5(self): diff --git a/networkx/algorithms/community/tests/test_kernighan_lin.py b/networkx/algorithms/community/tests/test_kernighan_lin.py index 3038fa58..3cd0f0e1 100644 --- a/networkx/algorithms/community/tests/test_kernighan_lin.py +++ b/networkx/algorithms/community/tests/test_kernighan_lin.py @@ -8,6 +8,7 @@ import networkx as nx from networkx.algorithms.community import kernighan_lin_bisection from itertools import permutations + def assert_partition_equal(x, y): assert set(map(frozenset, x)) == set(map(frozenset, y)) @@ -53,6 +54,6 @@ def test_multigraph(): for labels in permutations(range(4)): mapping = dict(zip(M, labels)) A, B = kernighan_lin_bisection(nx.relabel_nodes(M, mapping), seed=0) - 
assert_partition_equal([A, B], - [{mapping[0], mapping[1]}, - {mapping[2], mapping[3]}]) + assert_partition_equal( + [A, B], [{mapping[0], mapping[1]}, {mapping[2], mapping[3]}] + ) diff --git a/networkx/algorithms/community/tests/test_label_propagation.py b/networkx/algorithms/community/tests/test_label_propagation.py index 0d64cbc0..9e496880 100644 --- a/networkx/algorithms/community/tests/test_label_propagation.py +++ b/networkx/algorithms/community/tests/test_label_propagation.py @@ -12,18 +12,18 @@ def test_directed_not_supported(): with pytest.raises(nx.NetworkXNotImplemented): # not supported for directed graphs test = nx.DiGraph() - test.add_edge('a', 'b') - test.add_edge('a', 'c') - test.add_edge('b', 'd') + test.add_edge("a", "b") + test.add_edge("a", "c") + test.add_edge("b", "d") result = label_propagation_communities(test) def test_one_node(): test = nx.Graph() - test.add_node('a') + test.add_node("a") # The expected communities are: - ground_truth = {frozenset(['a'])} + ground_truth = {frozenset(["a"])} communities = label_propagation_communities(test) result = {frozenset(c) for c in communities} @@ -33,17 +33,16 @@ def test_one_node(): def test_unconnected_communities(): test = nx.Graph() # community 1 - test.add_edge('a', 'c') - test.add_edge('a', 'd') - test.add_edge('d', 'c') + test.add_edge("a", "c") + test.add_edge("a", "d") + test.add_edge("d", "c") # community 2 - test.add_edge('b', 'e') - test.add_edge('e', 'f') - test.add_edge('f', 'b') + test.add_edge("b", "e") + test.add_edge("e", "f") + test.add_edge("f", "b") # The expected communities are: - ground_truth = {frozenset(['a', 'c', 'd']), - frozenset(['b', 'e', 'f'])} + ground_truth = {frozenset(["a", "c", "d"]), frozenset(["b", "e", "f"])} communities = label_propagation_communities(test) result = {frozenset(c) for c in communities} @@ -53,43 +52,46 @@ def test_unconnected_communities(): def test_connected_communities(): test = nx.Graph() # community 1 - test.add_edge('a', 'b') - test.add_edge('c', 'a') - test.add_edge('c', 'b') - test.add_edge('d', 'a') - test.add_edge('d', 'b') - test.add_edge('d', 'c') - test.add_edge('e', 'a') - test.add_edge('e', 'b') - test.add_edge('e', 'c') - test.add_edge('e', 'd') + test.add_edge("a", "b") + test.add_edge("c", "a") + test.add_edge("c", "b") + test.add_edge("d", "a") + test.add_edge("d", "b") + test.add_edge("d", "c") + test.add_edge("e", "a") + test.add_edge("e", "b") + test.add_edge("e", "c") + test.add_edge("e", "d") # community 2 - test.add_edge('1', '2') - test.add_edge('3', '1') - test.add_edge('3', '2') - test.add_edge('4', '1') - test.add_edge('4', '2') - test.add_edge('4', '3') - test.add_edge('5', '1') - test.add_edge('5', '2') - test.add_edge('5', '3') - test.add_edge('5', '4') + test.add_edge("1", "2") + test.add_edge("3", "1") + test.add_edge("3", "2") + test.add_edge("4", "1") + test.add_edge("4", "2") + test.add_edge("4", "3") + test.add_edge("5", "1") + test.add_edge("5", "2") + test.add_edge("5", "3") + test.add_edge("5", "4") # edge between community 1 and 2 - test.add_edge('a', '1') + test.add_edge("a", "1") # community 3 - test.add_edge('x', 'y') + test.add_edge("x", "y") # community 4 with only a single node - test.add_node('z') + test.add_node("z") # The expected communities are: - ground_truth1 = {frozenset(['a', 'b', 'c', 'd', 'e']), - frozenset(['1', '2', '3', '4', '5']), - frozenset(['x', 'y']), - frozenset(['z'])} - ground_truth2 = {frozenset(['a', 'b', 'c', 'd', 'e', - '1', '2', '3', '4', '5']), - frozenset(['x', 'y']), - frozenset(['z'])} + 
ground_truth1 = { + frozenset(["a", "b", "c", "d", "e"]), + frozenset(["1", "2", "3", "4", "5"]), + frozenset(["x", "y"]), + frozenset(["z"]), + } + ground_truth2 = { + frozenset(["a", "b", "c", "d", "e", "1", "2", "3", "4", "5"]), + frozenset(["x", "y"]), + frozenset(["z"]), + } ground_truth = (ground_truth1, ground_truth2) communities = label_propagation_communities(test) @@ -133,13 +135,13 @@ class TestAsynLpaCommunities: def test_simple_communities(self): # This graph is the disjoint union of two triangles. - G = nx.Graph(['ab', 'ac', 'bc', 'de', 'df', 'fe']) - ground_truth = {frozenset('abc'), frozenset('def')} + G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"]) + ground_truth = {frozenset("abc"), frozenset("def")} self._check_communities(G, ground_truth) def test_seed_argument(self): - G = nx.Graph(['ab', 'ac', 'bc', 'de', 'df', 'fe']) - ground_truth = {frozenset('abc'), frozenset('def')} + G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"]) + ground_truth = {frozenset("abc"), frozenset("def")} communities = asyn_lpa_communities(G, seed=1) result = {frozenset(c) for c in communities} assert result == ground_truth diff --git a/networkx/algorithms/community/tests/test_lukes.py b/networkx/algorithms/community/tests/test_lukes.py index 29d330c0..80e2de34 100644 --- a/networkx/algorithms/community/tests/test_lukes.py +++ b/networkx/algorithms/community/tests/test_lukes.py @@ -5,8 +5,8 @@ import pytest import networkx as nx from networkx.algorithms.community import lukes_partitioning -EWL = 'e_weight' -NWL = 'n_weight' +EWL = "e_weight" +NWL = "n_weight" # first test from the Lukes original paper @@ -40,9 +40,10 @@ def paper_1_case(float_edge_wt=False, explicit_node_wt=True, directed=False): wtu = None # partitioning - clusters_1 = {frozenset(x) for x in - lukes_partitioning(example_1, limit, - node_weight=wtu, edge_weight=EWL)} + clusters_1 = { + frozenset(x) + for x in lukes_partitioning(example_1, limit, node_weight=wtu, edge_weight=EWL) + } return clusters_1 @@ -67,42 +68,42 @@ def paper_2_case(explicit_edge_wt=True, directed=False): wtu = None # graph creation - example_2.add_edge('name', 'home_address', **edic) - example_2.add_edge('name', 'education', **edic) - example_2.add_edge('education', 'bs', **edic) - example_2.add_edge('education', 'ms', **edic) - example_2.add_edge('education', 'phd', **edic) - example_2.add_edge('name', 'telephone', **edic) - example_2.add_edge('telephone', 'home', **edic) - example_2.add_edge('telephone', 'office', **edic) - example_2.add_edge('office', 'no1', **edic) - example_2.add_edge('office', 'no2', **edic) - - example_2.nodes['name'][NWL] = 20 - example_2.nodes['education'][NWL] = 10 - example_2.nodes['bs'][NWL] = 1 - example_2.nodes['ms'][NWL] = 1 - example_2.nodes['phd'][NWL] = 1 - example_2.nodes['home_address'][NWL] = 8 - example_2.nodes['telephone'][NWL] = 8 - example_2.nodes['home'][NWL] = 8 - example_2.nodes['office'][NWL] = 4 - example_2.nodes['no1'][NWL] = 1 - example_2.nodes['no2'][NWL] = 1 + example_2.add_edge("name", "home_address", **edic) + example_2.add_edge("name", "education", **edic) + example_2.add_edge("education", "bs", **edic) + example_2.add_edge("education", "ms", **edic) + example_2.add_edge("education", "phd", **edic) + example_2.add_edge("name", "telephone", **edic) + example_2.add_edge("telephone", "home", **edic) + example_2.add_edge("telephone", "office", **edic) + example_2.add_edge("office", "no1", **edic) + example_2.add_edge("office", "no2", **edic) + + example_2.nodes["name"][NWL] = 20 + 
example_2.nodes["education"][NWL] = 10 + example_2.nodes["bs"][NWL] = 1 + example_2.nodes["ms"][NWL] = 1 + example_2.nodes["phd"][NWL] = 1 + example_2.nodes["home_address"][NWL] = 8 + example_2.nodes["telephone"][NWL] = 8 + example_2.nodes["home"][NWL] = 8 + example_2.nodes["office"][NWL] = 4 + example_2.nodes["no1"][NWL] = 1 + example_2.nodes["no2"][NWL] = 1 # partitioning - clusters_2 = {frozenset(x) for x in - lukes_partitioning(example_2, - byte_block_size, - node_weight=NWL, - edge_weight=wtu)} + clusters_2 = { + frozenset(x) + for x in lukes_partitioning( + example_2, byte_block_size, node_weight=NWL, edge_weight=wtu + ) + } return clusters_2 def test_paper_1_case(): - ground_truth = {frozenset([1, 4]), - frozenset([2, 3, 5])} + ground_truth = {frozenset([1, 4]), frozenset([2, 3, 5])} tf = (True, False) for flt, nwt, drc in product(tf, tf, tf): @@ -111,10 +112,11 @@ def test_paper_1_case(): def test_paper_2_case(): - ground_truth = {frozenset(['education', 'bs', 'ms', 'phd']), - frozenset(['name', 'home_address']), - frozenset(['telephone', 'home', 'office', 'no1', 'no2']), - } + ground_truth = { + frozenset(["education", "bs", "ms", "phd"]), + frozenset(["name", "home_address"]), + frozenset(["telephone", "home", "office", "no1", "no2"]), + } tf = (True, False) for ewt, drc in product(tf, tf): @@ -147,7 +149,6 @@ def test_mandatory_integrality(): ex_1_broken.nodes[5][NWL] = 2 with pytest.raises(TypeError): - lukes_partitioning(ex_1_broken, - byte_block_size, - node_weight=NWL, - edge_weight=EWL) + lukes_partitioning( + ex_1_broken, byte_block_size, node_weight=NWL, edge_weight=EWL + ) diff --git a/networkx/algorithms/community/tests/test_modularity_max.py b/networkx/algorithms/community/tests/test_modularity_max.py index 54ecff45..3a3b6a20 100644 --- a/networkx/algorithms/community/tests/test_modularity_max.py +++ b/networkx/algorithms/community/tests/test_modularity_max.py @@ -1,11 +1,11 @@ import networkx as nx from networkx.algorithms.community import ( greedy_modularity_communities, - _naive_greedy_modularity_communities) + _naive_greedy_modularity_communities, +) class TestCNM: - def setup(self): self.G = nx.karate_club_graph() @@ -14,15 +14,15 @@ class TestCNM: assert communities == expected def test_karate_club(self): - john_a = frozenset([ - 8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]) + john_a = frozenset( + [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33] + ) mr_hi = frozenset([0, 4, 5, 6, 10, 11, 16, 19]) overlap = frozenset([1, 2, 3, 7, 9, 12, 13, 17, 21]) self._check_communities({john_a, overlap, mr_hi}) class TestNaive: - def setup(self): self.G = nx.karate_club_graph() @@ -31,8 +31,9 @@ class TestNaive: assert communities == expected def test_karate_club(self): - john_a = frozenset([ - 8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]) + john_a = frozenset( + [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33] + ) mr_hi = frozenset([0, 4, 5, 6, 10, 11, 16, 19]) overlap = frozenset([1, 2, 3, 7, 9, 12, 13, 17, 21]) - self._check_communities({john_a, overlap, mr_hi}) + self._check_communities({john_a, overlap, mr_hi}) diff --git a/networkx/algorithms/components/attracting.py b/networkx/algorithms/components/attracting.py index 2df25967..8d2cd8b6 100644 --- a/networkx/algorithms/components/attracting.py +++ b/networkx/algorithms/components/attracting.py @@ -2,13 +2,14 @@ import networkx as nx from networkx.utils.decorators import not_implemented_for -__all__ = ['number_attracting_components', - 
'attracting_components', - 'is_attracting_component', - ] +__all__ = [ + "number_attracting_components", + "attracting_components", + "is_attracting_component", +] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def attracting_components(G): """Generates the attracting components in `G`. @@ -51,7 +52,7 @@ def attracting_components(G): yield scc[n] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def number_attracting_components(G): """Returns the number of attracting components in `G`. @@ -79,7 +80,7 @@ def number_attracting_components(G): return sum(1 for ac in attracting_components(G)) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def is_attracting_component(G): """Returns True if `G` consists of a single attracting component. diff --git a/networkx/algorithms/components/biconnected.py b/networkx/algorithms/components/biconnected.py index 4beca866..bbd085cb 100644 --- a/networkx/algorithms/components/biconnected.py +++ b/networkx/algorithms/components/biconnected.py @@ -3,14 +3,14 @@ from itertools import chain from networkx.utils.decorators import not_implemented_for __all__ = [ - 'biconnected_components', - 'biconnected_component_edges', - 'is_biconnected', - 'articulation_points', + "biconnected_components", + "biconnected_component_edges", + "is_biconnected", + "articulation_points", ] -@not_implemented_for('directed') +@not_implemented_for("directed") def is_biconnected(G): """Returns True if the graph is biconnected, False otherwise. @@ -78,12 +78,14 @@ def is_biconnected(G): if len(bcc) == 1: return len(bcc[0]) == len(G) return False # Multiple bicomponents or No bicomponents (empty graph?) + + # if len(bcc) == 0: # No bicomponents (it could be an empty graph) # return False # return len(bcc[0]) == len(G) -@not_implemented_for('directed') +@not_implemented_for("directed") def biconnected_component_edges(G): """Returns a generator of lists of edges, one list for each biconnected component of the input graph. @@ -155,7 +157,7 @@ def biconnected_component_edges(G): yield from _biconnected_dfs(G, components=True) -@not_implemented_for('directed') +@not_implemented_for("directed") def biconnected_components(G): """Returns a generator of sets of nodes, one set for each biconnected component of the graph @@ -251,7 +253,7 @@ def biconnected_components(G): yield set(chain.from_iterable(comp)) -@not_implemented_for('directed') +@not_implemented_for("directed") def articulation_points(G): """Yield the articulation points, or cut vertices, of a graph. @@ -325,7 +327,7 @@ def articulation_points(G): yield articulation -@not_implemented_for('directed') +@not_implemented_for("directed") def _biconnected_dfs(G, components=True): # depth-first search algorithm to generate articulation points # and biconnected components diff --git a/networkx/algorithms/components/connected.py b/networkx/algorithms/components/connected.py index 4eedf835..95b169c8 100644 --- a/networkx/algorithms/components/connected.py +++ b/networkx/algorithms/components/connected.py @@ -4,14 +4,14 @@ from networkx.utils.decorators import not_implemented_for from ...utils import arbitrary_element __all__ = [ - 'number_connected_components', - 'connected_components', - 'is_connected', - 'node_connected_component', + "number_connected_components", + "connected_components", + "is_connected", + "node_connected_component", ] -@not_implemented_for('directed') +@not_implemented_for("directed") def connected_components(G): """Generate connected components. 
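As a quick reference for the components API being reformatted here, a short sketch (illustrative graphs; the printed comments show the expected results, assuming NetworkX 2.x):

import networkx as nx

# connected_components yields one set of nodes per component.
G = nx.Graph([(0, 1), (1, 2), (3, 4)])
print([sorted(c) for c in nx.connected_components(G)])   # [[0, 1, 2], [3, 4]]
print(nx.number_connected_components(G))                 # 2

# In a directed graph, an attracting component is an SCC with no out-edges.
D = nx.DiGraph([(0, 1), (1, 2), (2, 1)])
print([sorted(c) for c in nx.attracting_components(D)])  # [[1, 2]]

# A cycle has no articulation points, hence is biconnected.
print(nx.is_biconnected(nx.cycle_graph(4)))              # True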
@@ -93,7 +93,7 @@ def number_connected_components(G): return sum(1 for cc in connected_components(G)) -@not_implemented_for('directed') +@not_implemented_for("directed") def is_connected(G): """Returns True if the graph is connected, False otherwise. @@ -132,12 +132,13 @@ def is_connected(G): """ if len(G) == 0: - raise nx.NetworkXPointlessConcept('Connectivity is undefined ', - 'for the null graph.') + raise nx.NetworkXPointlessConcept( + "Connectivity is undefined ", "for the null graph." + ) return sum(1 for node in _plain_bfs(G, arbitrary_element(G))) == len(G) -@not_implemented_for('directed') +@not_implemented_for("directed") def node_connected_component(G, n): """Returns the set of nodes in the component of graph containing node n. diff --git a/networkx/algorithms/components/semiconnected.py b/networkx/algorithms/components/semiconnected.py index 8d19f678..e06704da 100644 --- a/networkx/algorithms/components/semiconnected.py +++ b/networkx/algorithms/components/semiconnected.py @@ -2,10 +2,10 @@ import networkx as nx from networkx.utils import not_implemented_for, pairwise -__all__ = ['is_semiconnected'] +__all__ = ["is_semiconnected"] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def is_semiconnected(G, topo_order=None): """Returns True if the graph is semiconnected, False otherwise. @@ -51,7 +51,8 @@ def is_semiconnected(G, topo_order=None): """ if len(G) == 0: raise nx.NetworkXPointlessConcept( - 'Connectivity is undefined for the null graph.') + "Connectivity is undefined for the null graph." + ) if not nx.is_weakly_connected(G): return False diff --git a/networkx/algorithms/components/strongly_connected.py b/networkx/algorithms/components/strongly_connected.py index a4347704..0ff43bae 100644 --- a/networkx/algorithms/components/strongly_connected.py +++ b/networkx/algorithms/components/strongly_connected.py @@ -2,15 +2,17 @@ import networkx as nx from networkx.utils.decorators import not_implemented_for -__all__ = ['number_strongly_connected_components', - 'strongly_connected_components', - 'is_strongly_connected', - 'strongly_connected_components_recursive', - 'kosaraju_strongly_connected_components', - 'condensation'] +__all__ = [ + "number_strongly_connected_components", + "strongly_connected_components", + "is_strongly_connected", + "strongly_connected_components_recursive", + "kosaraju_strongly_connected_components", + "condensation", +] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def strongly_connected_components(G): """Generate nodes in strongly connected components of graph. @@ -70,7 +72,7 @@ def strongly_connected_components(G): lowlink = {} scc_found = set() scc_queue = [] - i = 0 # Preorder counter + i = 0 # Preorder counter for source in G: if source not in scc_found: queue = [source] @@ -105,7 +107,7 @@ def strongly_connected_components(G): scc_queue.append(v) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def kosaraju_strongly_connected_components(G, source=None): """Generate nodes in strongly connected components of graph. @@ -162,7 +164,7 @@ def kosaraju_strongly_connected_components(G, source=None): seen.update(new) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def strongly_connected_components_recursive(G): """Generate nodes in strongly connected components of graph. @@ -220,6 +222,7 @@ def strongly_connected_components_recursive(G): Information Processing Letters 49(1): 9-14, (1994).. 
""" + def visit(v, cnt): root[v] = cnt visited[v] = cnt @@ -250,7 +253,7 @@ def strongly_connected_components_recursive(G): yield from visit(source, cnt) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def number_strongly_connected_components(G): """Returns number of strongly connected components in graph. @@ -282,7 +285,7 @@ def number_strongly_connected_components(G): return sum(1 for scc in strongly_connected_components(G)) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def is_strongly_connected(G): """Test directed graph for strong connectivity. @@ -318,12 +321,13 @@ def is_strongly_connected(G): """ if len(G) == 0: raise nx.NetworkXPointlessConcept( - """Connectivity is undefined for the null graph.""") + """Connectivity is undefined for the null graph.""" + ) return len(list(strongly_connected_components(G))[0]) == len(G) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def condensation(G, scc=None): """Returns the condensation of G. @@ -368,7 +372,7 @@ def condensation(G, scc=None): members = {} C = nx.DiGraph() # Add mapping dict as graph attribute - C.graph['mapping'] = mapping + C.graph["mapping"] = mapping if len(G) == 0: return C for i, component in enumerate(scc): @@ -376,8 +380,9 @@ def condensation(G, scc=None): mapping.update((n, i) for n in component) number_of_components = i + 1 C.add_nodes_from(range(number_of_components)) - C.add_edges_from((mapping[u], mapping[v]) for u, v in G.edges() - if mapping[u] != mapping[v]) + C.add_edges_from( + (mapping[u], mapping[v]) for u, v in G.edges() if mapping[u] != mapping[v] + ) # Add a list of members (ie original nodes) to each node (ie scc) in C. - nx.set_node_attributes(C, members, 'members') + nx.set_node_attributes(C, members, "members") return C diff --git a/networkx/algorithms/components/tests/test_attracting.py b/networkx/algorithms/components/tests/test_attracting.py index 2be723aa..aee49e05 100644 --- a/networkx/algorithms/components/tests/test_attracting.py +++ b/networkx/algorithms/components/tests/test_attracting.py @@ -7,8 +7,19 @@ class TestAttractingComponents: @classmethod def setup_class(cls): cls.G1 = nx.DiGraph() - cls.G1.add_edges_from([(5, 11), (11, 2), (11, 9), (11, 10), - (7, 11), (7, 8), (8, 9), (3, 8), (3, 10)]) + cls.G1.add_edges_from( + [ + (5, 11), + (11, 2), + (11, 9), + (11, 10), + (7, 11), + (7, 8), + (8, 9), + (3, 8), + (3, 10), + ] + ) cls.G2 = nx.DiGraph() cls.G2.add_edges_from([(0, 1), (0, 2), (1, 1), (1, 2), (2, 1)]) diff --git a/networkx/algorithms/components/tests/test_biconnected.py b/networkx/algorithms/components/tests/test_biconnected.py index 1bc7e302..c21c0a8e 100644 --- a/networkx/algorithms/components/tests/test_biconnected.py +++ b/networkx/algorithms/components/tests/test_biconnected.py @@ -80,9 +80,30 @@ def test_biconnected_components1(): # graph example from # http://www.ibluemojo.com/school/articul_algorithm.html edges = [ - (0, 1), (0, 5), (0, 6), (0, 14), (1, 5), (1, 6), (1, 14), (2, 4), - (2, 10), (3, 4), (3, 15), (4, 6), (4, 7), (4, 10), (5, 14), (6, 14), - (7, 9), (8, 9), (8, 12), (8, 13), (10, 15), (11, 12), (11, 13), (12, 13) + (0, 1), + (0, 5), + (0, 6), + (0, 14), + (1, 5), + (1, 6), + (1, 14), + (2, 4), + (2, 10), + (3, 4), + (3, 15), + (4, 6), + (4, 7), + (4, 10), + (5, 14), + (6, 14), + (7, 9), + (8, 9), + (8, 12), + (8, 13), + (10, 15), + (11, 12), + (11, 13), + (12, 13), ] G = nx.Graph(edges) pts = set(nx.articulation_points(G)) @@ -102,18 +123,25 @@ def test_biconnected_components1(): def 
test_biconnected_components2(): G = nx.Graph() - nx.add_cycle(G, 'ABC') - nx.add_cycle(G, 'CDE') - nx.add_cycle(G, 'FIJHG') - nx.add_cycle(G, 'GIJ') - G.add_edge('E', 'G') + nx.add_cycle(G, "ABC") + nx.add_cycle(G, "CDE") + nx.add_cycle(G, "FIJHG") + nx.add_cycle(G, "GIJ") + G.add_edge("E", "G") comps = list(nx.biconnected_component_edges(G)) answer = [ - [tuple('GF'), tuple('FI'), tuple('IG'), tuple('IJ'), - tuple('JG'), tuple('JH'), tuple('HG')], - [tuple('EG')], - [tuple('CD'), tuple('DE'), tuple('CE')], - [tuple('AB'), tuple('BC'), tuple('AC')] + [ + tuple("GF"), + tuple("FI"), + tuple("IG"), + tuple("IJ"), + tuple("JG"), + tuple("JH"), + tuple("HG"), + ], + [tuple("EG")], + [tuple("CD"), tuple("DE"), tuple("CE")], + [tuple("AB"), tuple("BC"), tuple("AC")], ] assert_components_edges_equal(comps, answer) @@ -128,10 +156,40 @@ def test_biconnected_davis(): def test_biconnected_karate(): K = nx.karate_club_graph() - answer = [{0, 1, 2, 3, 7, 8, 9, 12, 13, 14, 15, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33}, - {0, 4, 5, 6, 10, 16}, - {0, 11}] + answer = [ + { + 0, + 1, + 2, + 3, + 7, + 8, + 9, + 12, + 13, + 14, + 15, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + }, + {0, 4, 5, 6, 10, 16}, + {0, 11}, + ] bcc = list(nx.biconnected_components(K)) assert_components_equal(bcc, answer) assert set(nx.articulation_points(K)) == {0} @@ -139,26 +197,30 @@ def test_biconnected_karate(): def test_biconnected_eppstein(): # tests from http://www.ics.uci.edu/~eppstein/PADS/Biconnectivity.py - G1 = nx.Graph({ - 0: [1, 2, 5], - 1: [0, 5], - 2: [0, 3, 4], - 3: [2, 4, 5, 6], - 4: [2, 3, 5, 6], - 5: [0, 1, 3, 4], - 6: [3, 4], - }) - G2 = nx.Graph({ - 0: [2, 5], - 1: [3, 8], - 2: [0, 3, 5], - 3: [1, 2, 6, 8], - 4: [7], - 5: [0, 2], - 6: [3, 8], - 7: [4], - 8: [1, 3, 6], - }) + G1 = nx.Graph( + { + 0: [1, 2, 5], + 1: [0, 5], + 2: [0, 3, 4], + 3: [2, 4, 5, 6], + 4: [2, 3, 5, 6], + 5: [0, 1, 3, 4], + 6: [3, 4], + } + ) + G2 = nx.Graph( + { + 0: [2, 5], + 1: [3, 8], + 2: [0, 3, 5], + 3: [1, 2, 6, 8], + 4: [7], + 5: [0, 2], + 6: [3, 8], + 7: [4], + 8: [1, 3, 6], + } + ) assert nx.is_biconnected(G1) assert not nx.is_biconnected(G2) answer_G2 = [{1, 3, 6, 8}, {0, 2, 5}, {2, 3}, {4, 7}] diff --git a/networkx/algorithms/components/tests/test_connected.py b/networkx/algorithms/components/tests/test_connected.py index cbab9819..ebe30ac6 100644 --- a/networkx/algorithms/components/tests/test_connected.py +++ b/networkx/algorithms/components/tests/test_connected.py @@ -5,7 +5,6 @@ from networkx import NetworkXNotImplemented class TestConnected: - @classmethod def setup_class(cls): G1 = cnlti(nx.grid_2d_graph(2, 2), first_label=0, ordering="sorted") @@ -18,8 +17,22 @@ class TestConnected: cls.gc = [] G = nx.DiGraph() - G.add_edges_from([(1, 2), (2, 3), (2, 8), (3, 4), (3, 7), (4, 5), - (5, 3), (5, 6), (7, 4), (7, 6), (8, 1), (8, 7)]) + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 8), + (3, 4), + (3, 7), + (4, 5), + (5, 3), + (5, 6), + (7, 4), + (7, 6), + (8, 1), + (8, 7), + ] + ) C = [[3, 4, 5, 7], [1, 2, 8], [6]] cls.gc.append((G, C)) @@ -52,7 +65,7 @@ class TestConnected: C = { frozenset([0, 1, 2, 3]), frozenset([4, 5, 6, 7, 8, 9]), - frozenset([10, 11, 12, 13, 14]) + frozenset([10, 11, 12, 13, 14]), } assert {frozenset(g) for g in cc(G)} == C diff --git a/networkx/algorithms/components/tests/test_semiconnected.py b/networkx/algorithms/components/tests/test_semiconnected.py index 6859ddea..d9a8c7c3 100644 --- 
a/networkx/algorithms/components/tests/test_semiconnected.py +++ b/networkx/algorithms/components/tests/test_semiconnected.py @@ -4,18 +4,15 @@ import pytest class TestIsSemiconnected: - def test_undirected(self): - pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, - nx.Graph()) - pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, - nx.MultiGraph()) + pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.Graph()) + pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.MultiGraph()) def test_empty(self): - pytest.raises(nx.NetworkXPointlessConcept, nx.is_semiconnected, - nx.DiGraph()) - pytest.raises(nx.NetworkXPointlessConcept, nx.is_semiconnected, - nx.MultiDiGraph()) + pytest.raises(nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.DiGraph()) + pytest.raises( + nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.MultiDiGraph() + ) def test_single_node_graph(self): G = nx.DiGraph() @@ -37,8 +34,9 @@ class TestIsSemiconnected: def test_tree(self): G = nx.DiGraph() - G.add_edges_from(chain.from_iterable([(i, 2 * i + 1), (i, 2 * i + 2)] - for i in range(100))) + G.add_edges_from( + chain.from_iterable([(i, 2 * i + 1), (i, 2 * i + 2)] for i in range(100)) + ) assert not nx.is_semiconnected(G) def test_dumbbell(self): @@ -49,6 +47,7 @@ class TestIsSemiconnected: assert nx.is_semiconnected(G) def test_alternating_path(self): - G = nx.DiGraph(chain.from_iterable([(i, i - 1), (i, i + 1)] - for i in range(0, 100, 2))) + G = nx.DiGraph( + chain.from_iterable([(i, i - 1), (i, i + 1)] for i in range(0, 100, 2)) + ) assert not nx.is_semiconnected(G) diff --git a/networkx/algorithms/components/tests/test_strongly_connected.py b/networkx/algorithms/components/tests/test_strongly_connected.py index f42b0e5d..959b333c 100644 --- a/networkx/algorithms/components/tests/test_strongly_connected.py +++ b/networkx/algorithms/components/tests/test_strongly_connected.py @@ -4,13 +4,26 @@ from networkx import NetworkXNotImplemented class TestStronglyConnected: - @classmethod def setup_class(cls): cls.gc = [] G = nx.DiGraph() - G.add_edges_from([(1, 2), (2, 3), (2, 8), (3, 4), (3, 7), (4, 5), - (5, 3), (5, 6), (7, 4), (7, 6), (8, 1), (8, 7)]) + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 8), + (3, 4), + (3, 7), + (4, 5), + (5, 3), + (5, 6), + (7, 4), + (7, 6), + (8, 1), + (8, 7), + ] + ) C = {frozenset([3, 4, 5, 7]), frozenset([1, 2, 8]), frozenset([6])} cls.gc.append((G, C)) @@ -70,11 +83,31 @@ class TestStronglyConnected: def test_contract_scc1(self): G = nx.DiGraph() - G.add_edges_from([ - (1, 2), (2, 3), (2, 11), (2, 12), (3, 4), (4, 3), (4, 5), (5, 6), - (6, 5), (6, 7), (7, 8), (7, 9), (7, 10), (8, 9), (9, 7), (10, 6), - (11, 2), (11, 4), (11, 6), (12, 6), (12, 11), - ]) + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 11), + (2, 12), + (3, 4), + (4, 3), + (4, 5), + (5, 6), + (6, 5), + (6, 7), + (7, 8), + (7, 9), + (7, 10), + (8, 9), + (9, 7), + (10, 6), + (11, 2), + (11, 4), + (11, 6), + (12, 6), + (12, 11), + ] + ) scc = list(nx.strongly_connected_components(G)) cG = nx.condensation(G, scc) # DAG @@ -123,12 +156,12 @@ class TestStronglyConnected: G, C = self.gc[1] C = sorted(C, key=len, reverse=True) cG = nx.condensation(G) - mapping = cG.graph['mapping'] + mapping = cG.graph["mapping"] assert all(n in G for n in mapping) assert all(0 == cN for n, cN in mapping.items() if n in C[0]) assert all(1 == cN for n, cN in mapping.items() if n in C[1]) for n, d in cG.nodes(data=True): - assert set(C[n]) == cG.nodes[n]['members'] + assert set(C[n]) == 
cG.nodes[n]["members"] def test_null_graph(self): G = nx.DiGraph() @@ -136,17 +169,26 @@ class TestStronglyConnected: assert list(nx.kosaraju_strongly_connected_components(G)) == [] assert list(nx.strongly_connected_components_recursive(G)) == [] assert len(nx.condensation(G)) == 0 - pytest.raises(nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph()) + pytest.raises( + nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph() + ) def test_connected_raise(self): G = nx.Graph() pytest.raises(NetworkXNotImplemented, nx.strongly_connected_components, G) - pytest.raises(NetworkXNotImplemented, nx.kosaraju_strongly_connected_components, G) - pytest.raises(NetworkXNotImplemented, nx.strongly_connected_components_recursive, G) + pytest.raises( + NetworkXNotImplemented, nx.kosaraju_strongly_connected_components, G + ) + pytest.raises( + NetworkXNotImplemented, nx.strongly_connected_components_recursive, G + ) pytest.raises(NetworkXNotImplemented, nx.is_strongly_connected, G) - pytest.raises(nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph()) + pytest.raises( + nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph() + ) pytest.raises(NetworkXNotImplemented, nx.condensation, G) + # Commented out due to variability on Travis-CI hardware/operating systems # def test_linear_time(self): # # See Issue #2831 diff --git a/networkx/algorithms/components/tests/test_weakly_connected.py b/networkx/algorithms/components/tests/test_weakly_connected.py index 20e5b413..393f688d 100644 --- a/networkx/algorithms/components/tests/test_weakly_connected.py +++ b/networkx/algorithms/components/tests/test_weakly_connected.py @@ -4,13 +4,26 @@ from networkx import NetworkXNotImplemented class TestWeaklyConnected: - @classmethod def setup_class(cls): cls.gc = [] G = nx.DiGraph() - G.add_edges_from([(1, 2), (2, 3), (2, 8), (3, 4), (3, 7), (4, 5), - (5, 3), (5, 6), (7, 4), (7, 6), (8, 1), (8, 7)]) + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 8), + (3, 4), + (3, 7), + (4, 5), + (5, 3), + (5, 6), + (7, 4), + (7, 6), + (8, 1), + (8, 7), + ] + ) C = [[3, 4, 5, 7], [1, 2, 8], [6]] cls.gc.append((G, C)) diff --git a/networkx/algorithms/components/weakly_connected.py b/networkx/algorithms/components/weakly_connected.py index b584b8c8..e95a3978 100644 --- a/networkx/algorithms/components/weakly_connected.py +++ b/networkx/algorithms/components/weakly_connected.py @@ -3,13 +3,13 @@ import networkx as nx from networkx.utils.decorators import not_implemented_for __all__ = [ - 'number_weakly_connected_components', - 'weakly_connected_components', - 'is_weakly_connected', + "number_weakly_connected_components", + "weakly_connected_components", + "is_weakly_connected", ] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def weakly_connected_components(G): """Generate weakly connected components of G. @@ -62,7 +62,7 @@ def weakly_connected_components(G): seen.update(c) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def number_weakly_connected_components(G): """Returns the number of weakly connected components in G. @@ -95,7 +95,7 @@ def number_weakly_connected_components(G): return sum(1 for wcc in weakly_connected_components(G)) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def is_weakly_connected(G): """Test directed graph for weak connectivity. 
@@ -136,7 +136,8 @@ def is_weakly_connected(G): """ if len(G) == 0: raise nx.NetworkXPointlessConcept( - """Connectivity is undefined for the null graph.""") + """Connectivity is undefined for the null graph.""" + ) return len(list(weakly_connected_components(G))[0]) == len(G) diff --git a/networkx/algorithms/connectivity/__init__.py b/networkx/algorithms/connectivity/__init__.py index fd77d4a3..65490c00 100644 --- a/networkx/algorithms/connectivity/__init__.py +++ b/networkx/algorithms/connectivity/__init__.py @@ -10,13 +10,17 @@ from .kcutsets import * from .stoerwagner import * from .utils import * -__all__ = sum([connectivity.__all__, - cuts.__all__, - edge_augmentation.__all__, - edge_kcomponents.__all__, - disjoint_paths.__all__, - kcomponents.__all__, - kcutsets.__all__, - stoerwagner.__all__, - utils.__all__, - ], []) +__all__ = sum( + [ + connectivity.__all__, + cuts.__all__, + edge_augmentation.__all__, + edge_kcomponents.__all__, + disjoint_paths.__all__, + kcomponents.__all__, + kcutsets.__all__, + stoerwagner.__all__, + utils.__all__, + ], + [], +) diff --git a/networkx/algorithms/connectivity/connectivity.py b/networkx/algorithms/connectivity/connectivity.py index 037fc8ed..2a2e8ec9 100644 --- a/networkx/algorithms/connectivity/connectivity.py +++ b/networkx/algorithms/connectivity/connectivity.py @@ -29,7 +29,9 @@ __all__ = [ ] -def local_node_connectivity(G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None): +def local_node_connectivity( + G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None +): r"""Computes local node connectivity for nodes s and t. Local node connectivity for two non adjacent nodes s and t is the @@ -483,7 +485,9 @@ def all_pairs_node_connectivity(G, nbunch=None, flow_func=None): return all_pairs -def local_edge_connectivity(G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None): +def local_edge_connectivity( + G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None +): r"""Returns local edge connectivity for nodes s and t in G. Local edge connectivity for two nodes s and t is the minimum number diff --git a/networkx/algorithms/connectivity/cuts.py b/networkx/algorithms/connectivity/cuts.py index dd59e3db..7b8a993f 100644 --- a/networkx/algorithms/connectivity/cuts.py +++ b/networkx/algorithms/connectivity/cuts.py @@ -8,19 +8,20 @@ import networkx as nx # cut algorithms. from networkx.algorithms.flow import edmonds_karp from networkx.algorithms.flow import build_residual_network + default_flow_func = edmonds_karp -from .utils import (build_auxiliary_node_connectivity, - build_auxiliary_edge_connectivity) +from .utils import build_auxiliary_node_connectivity, build_auxiliary_edge_connectivity -__all__ = ['minimum_st_node_cut', - 'minimum_node_cut', - 'minimum_st_edge_cut', - 'minimum_edge_cut'] +__all__ = [ + "minimum_st_node_cut", + "minimum_node_cut", + "minimum_st_edge_cut", + "minimum_edge_cut", +] -def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, - residual=None): +def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): """Returns the edges of the cut-set of a minimum (s, t)-cut. 
This function returns the set of edges of minimum cardinality that, @@ -140,7 +141,7 @@ def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, else: H = auxiliary - kwargs = dict(capacity='capacity', flow_func=flow_func, residual=residual) + kwargs = dict(capacity="capacity", flow_func=flow_func, residual=residual) cut_value, partition = nx.minimum_cut(H, s, t, **kwargs) reachable, non_reachable = partition @@ -277,19 +278,18 @@ def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): else: H = auxiliary - mapping = H.graph.get('mapping', None) + mapping = H.graph.get("mapping", None) if mapping is None: - raise nx.NetworkXError('Invalid auxiliary digraph.') + raise nx.NetworkXError("Invalid auxiliary digraph.") if G.has_edge(s, t) or G.has_edge(t, s): return {} kwargs = dict(flow_func=flow_func, residual=residual, auxiliary=H) # The edge cut in the auxiliary digraph corresponds to the node cut in the # original graph. - edge_cut = minimum_st_edge_cut(H, f'{mapping[s]}B', f'{mapping[t]}A', - **kwargs) + edge_cut = minimum_st_edge_cut(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs) # Each node in the original graph maps to two nodes of the auxiliary graph - node_cut = {H.nodes[node]['id'] for edge in edge_cut for node in edge} + node_cut = {H.nodes[node]["id"] for edge in edge_cut for node in edge} return node_cut - {s, t} @@ -387,7 +387,7 @@ def minimum_node_cut(G, s=None, t=None, flow_func=None): """ if (s is not None and t is None) or (s is None and t is not None): - raise nx.NetworkXError('Both source and target must be specified.') + raise nx.NetworkXError("Both source and target must be specified.") # Local minimum node cut. if s is not None and t is not None: @@ -401,21 +401,21 @@ def minimum_node_cut(G, s=None, t=None, flow_func=None): # Analog to the algorithm 11 for global node connectivity in [1]. if G.is_directed(): if not nx.is_weakly_connected(G): - raise nx.NetworkXError('Input graph is not connected') + raise nx.NetworkXError("Input graph is not connected") iter_func = itertools.permutations def neighbors(v): - return itertools.chain.from_iterable([G.predecessors(v), - G.successors(v)]) + return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)]) + else: if not nx.is_connected(G): - raise nx.NetworkXError('Input graph is not connected') + raise nx.NetworkXError("Input graph is not connected") iter_func = itertools.combinations neighbors = G.neighbors # Reuse the auxiliary digraph and the residual network. H = build_auxiliary_node_connectivity(G) - R = build_residual_network(H, 'capacity') + R = build_residual_network(H, "capacity") kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R) # Choose a node with minimum degree. 
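A minimal sketch of the cut functions touched above (illustrative; which particular node or edge is returned depends on the underlying flow computation):

import networkx as nx

# On a path, a single interior node (or a single edge) separates the endpoints.
G = nx.path_graph(5)
print(nx.minimum_node_cut(G, s=0, t=4))  # one interior node, e.g. {2}
print(nx.minimum_edge_cut(G, s=0, t=4))  # one edge, e.g. {(0, 1)}

# Without s and t, the global minimum cut is computed instead.
print(nx.minimum_node_cut(nx.cycle_graph(5)))  # a pair of non-adjacent nodes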
@@ -534,11 +534,11 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None): """ if (s is not None and t is None) or (s is None and t is not None): - raise nx.NetworkXError('Both source and target must be specified.') + raise nx.NetworkXError("Both source and target must be specified.") # reuse auxiliary digraph and residual network H = build_auxiliary_edge_connectivity(G) - R = build_residual_network(H, 'capacity') + R = build_residual_network(H, "capacity") kwargs = dict(flow_func=flow_func, residual=R, auxiliary=H) # Local minimum edge cut if s and t are not None @@ -554,7 +554,7 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None): if G.is_directed(): # Based on algorithm 8 in [1] if not nx.is_weakly_connected(G): - raise nx.NetworkXError('Input graph is not connected') + raise nx.NetworkXError("Input graph is not connected") # Initial cutset is all edges of a node with minimum degree node = min(G, key=G.degree) @@ -576,7 +576,7 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None): else: # undirected # Based on algorithm 6 in [1] if not nx.is_connected(G): - raise nx.NetworkXError('Input graph is not connected') + raise nx.NetworkXError("Input graph is not connected") # Initial cutset is all edges of a node with minimum degree node = min(G, key=G.degree) diff --git a/networkx/algorithms/connectivity/disjoint_paths.py b/networkx/algorithms/connectivity/disjoint_paths.py index 86db6bf0..d788a6cb 100644 --- a/networkx/algorithms/connectivity/disjoint_paths.py +++ b/networkx/algorithms/connectivity/disjoint_paths.py @@ -1,11 +1,13 @@ """Flow based node and edge disjoint paths.""" import networkx as nx from networkx.exception import NetworkXNoPath + # Define the default maximum flow function to use for the undelying # maximum flow computations from networkx.algorithms.flow import edmonds_karp from networkx.algorithms.flow import preflow_push from networkx.algorithms.flow import shortest_augmenting_path + default_flow_func = edmonds_karp # Functions to build auxiliary data structures. from .utils import build_auxiliary_node_connectivity @@ -14,13 +16,14 @@ from .utils import build_auxiliary_edge_connectivity from itertools import filterfalse as _filterfalse __all__ = [ - 'edge_disjoint_paths', - 'node_disjoint_paths', + "edge_disjoint_paths", + "node_disjoint_paths", ] -def edge_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, - residual=None): +def edge_disjoint_paths( + G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None +): """Returns the edges disjoint paths between source and target. Edge disjoint paths are paths that do not share any edge. The @@ -175,21 +178,25 @@ def edge_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, # Compute maximum flow between source and target. Flow functions in # NetworkX return a residual network. 
- kwargs = dict(capacity='capacity', residual=residual, cutoff=cutoff, - value_only=True) + kwargs = dict( + capacity="capacity", residual=residual, cutoff=cutoff, value_only=True + ) if flow_func is preflow_push: - del kwargs['cutoff'] + del kwargs["cutoff"] if flow_func is shortest_augmenting_path: - kwargs['two_phase'] = True + kwargs["two_phase"] = True R = flow_func(H, s, t, **kwargs) - if R.graph['flow_value'] == 0: + if R.graph["flow_value"] == 0: raise NetworkXNoPath # Saturated edges in the residual network form the edge disjoint paths # between source and target - cutset = [(u, v) for u, v, d in R.edges(data=True) - if d['capacity'] == d['flow'] and d['flow'] > 0] + cutset = [ + (u, v) + for u, v, d in R.edges(data=True) + if d["capacity"] == d["flow"] and d["flow"] > 0 + ] # This is equivalent of what flow.utils.build_flow_dict returns, but # only for the nodes with saturated edges and without reporting 0 flows. flow_dict = {n: {} for edge in cutset for n in edge} @@ -221,8 +228,9 @@ def edge_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, paths_found += 1 -def node_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, - residual=None): +def node_disjoint_paths( + G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None +): r"""Computes node disjoint paths between source and target. Node disjoint paths are paths that only share their first and last @@ -355,13 +363,12 @@ def node_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, else: H = auxiliary - mapping = H.graph.get('mapping', None) + mapping = H.graph.get("mapping", None) if mapping is None: - raise nx.NetworkXError('Invalid auxiliary digraph.') + raise nx.NetworkXError("Invalid auxiliary digraph.") # Maximum possible edge disjoint paths - possible = min(H.out_degree(f'{mapping[s]}B'), - H.in_degree(f'{mapping[t]}A')) + possible = min(H.out_degree(f"{mapping[s]}B"), H.in_degree(f"{mapping[t]}A")) if not possible: raise NetworkXNoPath @@ -370,16 +377,14 @@ def node_disjoint_paths(G, s, t, flow_func=None, cutoff=None, auxiliary=None, else: cutoff = min(cutoff, possible) - kwargs = dict(flow_func=flow_func, residual=residual, auxiliary=H, - cutoff=cutoff) + kwargs = dict(flow_func=flow_func, residual=residual, auxiliary=H, cutoff=cutoff) # The edge disjoint paths in the auxiliary digraph correspond to the node # disjoint paths in the original graph. 
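For orientation, how edge_disjoint_paths and node_disjoint_paths are typically called (a sketch on an assumed complete graph; the order of the returned paths is implementation-dependent):

import networkx as nx

# K5 has connectivity 4, so there are 4 node-disjoint paths between any pair.
G = nx.complete_graph(5)
paths = list(nx.node_disjoint_paths(G, 0, 4))
print(len(paths))                                  # 4
print(len(list(nx.edge_disjoint_paths(G, 0, 4))))  # 4 as well in K5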
- paths_edges = edge_disjoint_paths(H, f'{mapping[s]}B', f'{mapping[t]}A', - **kwargs) + paths_edges = edge_disjoint_paths(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs) for path in paths_edges: # Each node in the original graph maps to two nodes in auxiliary graph - yield list(_unique_everseen(H.nodes[node]['id'] for node in path)) + yield list(_unique_everseen(H.nodes[node]["id"] for node in path)) def _unique_everseen(iterable): diff --git a/networkx/algorithms/connectivity/edge_augmentation.py b/networkx/algorithms/connectivity/edge_augmentation.py index b831a409..a0eee416 100644 --- a/networkx/algorithms/connectivity/edge_augmentation.py +++ b/networkx/algorithms/connectivity/edge_augmentation.py @@ -19,14 +19,14 @@ from networkx.utils import not_implemented_for, py_random_state from collections import defaultdict, namedtuple __all__ = [ - 'k_edge_augmentation', - 'is_k_edge_connected', - 'is_locally_k_edge_connected', + "k_edge_augmentation", + "is_k_edge_connected", + "is_locally_k_edge_connected", ] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def is_k_edge_connected(G, k): """Tests to see if a graph is k-edge-connected. @@ -59,7 +59,7 @@ def is_k_edge_connected(G, k): False """ if k < 1: - raise ValueError(f'k must be positive, not {k}') + raise ValueError(f"k must be positive, not {k}") # First try to quickly determine if G is not k-edge-connected if G.number_of_nodes() < k + 1: return False @@ -75,8 +75,8 @@ def is_k_edge_connected(G, k): return nx.edge_connectivity(G, cutoff=k) >= k -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def is_locally_k_edge_connected(G, s, t, k): """Tests to see if an edge in a graph is locally k-edge-connected. @@ -118,7 +118,7 @@ def is_locally_k_edge_connected(G, s, t, k): True """ if k < 1: - raise ValueError(f'k must be positive, not {k}') + raise ValueError(f"k must be positive, not {k}") # First try to quickly determine s, t is not k-locally-edge-connected in G if G.degree(s) < k or G.degree(t) < k: @@ -132,8 +132,8 @@ def is_locally_k_edge_connected(G, s, t, k): return localk >= k -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): """Finds set of edges to k-edge-connect G. @@ -248,23 +248,25 @@ def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): """ try: if k <= 0: - raise ValueError(f'k must be a positive integer, not {k}') + raise ValueError(f"k must be a positive integer, not {k}") elif G.number_of_nodes() < k + 1: msg = f"impossible to {k} connect in graph with less than {k + 1} nodes" raise nx.NetworkXUnfeasible(msg) elif avail is not None and len(avail) == 0: if not nx.is_k_edge_connected(G, k): - raise nx.NetworkXUnfeasible('no available edges') + raise nx.NetworkXUnfeasible("no available edges") aug_edges = [] elif k == 1: - aug_edges = one_edge_augmentation(G, avail=avail, weight=weight, - partial=partial) + aug_edges = one_edge_augmentation( + G, avail=avail, weight=weight, partial=partial + ) elif k == 2: aug_edges = bridge_augmentation(G, avail=avail, weight=weight) else: # raise NotImplementedError(f'not implemented for k>2. 
k={k}') aug_edges = greedy_k_edge_augmentation( - G, k=k, avail=avail, weight=weight, seed=0) + G, k=k, avail=avail, weight=weight, seed=0 + ) # Do eager evaulation so we can catch any exceptions # Before executing partial code. yield from list(aug_edges) @@ -276,8 +278,9 @@ def k_edge_augmentation(G, k, avail=None, weight=None, partial=False): else: # If we can't k-edge-connect the entire graph, try to # k-edge-connect as much as possible - aug_edges = partial_k_edge_augmentation(G, k=k, avail=avail, - weight=weight) + aug_edges = partial_k_edge_augmentation( + G, k=k, avail=avail, weight=weight + ) yield from aug_edges else: raise @@ -335,6 +338,7 @@ def partial_k_edge_augmentation(G, k, avail, weight=None): >>> sorted(partial_k_edge_augmentation(G, k=2, avail=avail)) [(1, 5), (1, 8)] """ + def _edges_between_disjoint(H, only1, only2): """ finds edges between disjoint nodes """ only1_adj = {u: set(H.adj[u]) for u in only1} @@ -349,8 +353,11 @@ def partial_k_edge_augmentation(G, k, avail, weight=None): # Find which parts of the graph can be k-edge-connected H = G.copy() H.add_edges_from( - ((u, v, {'weight': w, 'generator': (u, v)}) - for (u, v), w in zip(avail, avail_w))) + ( + (u, v, {"weight": w, "generator": (u, v)}) + for (u, v), w in zip(avail, avail_w) + ) + ) k_edge_subgraphs = list(nx.k_edge_subgraphs(H, k=k)) # Generate edges to k-edge-connect internal subgraphs @@ -360,9 +367,9 @@ def partial_k_edge_augmentation(G, k, avail, weight=None): C = H.subgraph(nodes).copy() # Find the internal edges that were available sub_avail = { - d['generator']: d['weight'] + d["generator"]: d["weight"] for (u, v, d) in C.edges(data=True) - if 'generator' in d + if "generator" in d } # Remove potential augmenting edges C.remove_edges_from(sub_avail.keys()) @@ -374,13 +381,13 @@ def partial_k_edge_augmentation(G, k, avail, weight=None): for cc1, cc2 in it.combinations(k_edge_subgraphs, 2): for (u, v) in _edges_between_disjoint(H, cc1, cc2): d = H.get_edge_data(u, v) - edge = d.get('generator', None) + edge = d.get("generator", None) if edge is not None: yield edge -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def one_edge_augmentation(G, avail=None, weight=None, partial=False): """Finds minimum weight set of edges to connect G. @@ -428,12 +435,13 @@ def one_edge_augmentation(G, avail=None, weight=None, partial=False): if avail is None: return unconstrained_one_edge_augmentation(G) else: - return weighted_one_edge_augmentation(G, avail=avail, weight=weight, - partial=partial) + return weighted_one_edge_augmentation( + G, avail=avail, weight=weight, partial=partial + ) -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def bridge_augmentation(G, avail=None, weight=None): """Finds the a set of edges that bridge connects G. 
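A small sketch of the augmentation entry point reformatted above (illustrative; the particular augmenting edge chosen may differ):

import networkx as nx
from networkx.algorithms.connectivity import k_edge_augmentation

# Bridge-augment a path so no single edge failure disconnects it (k=2).
G = nx.path_graph(4)
aug = list(k_edge_augmentation(G, k=2))
G.add_edges_from(aug)
print(aug)                # e.g. [(0, 3)]
print(nx.has_bridges(G))  # False once the augmenting edges are added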
@@ -476,8 +484,7 @@ def bridge_augmentation(G, avail=None, weight=None): :func:`k_edge_augmentation` """ if G.number_of_nodes() < 3: - raise nx.NetworkXUnfeasible( - 'impossible to bridge connect less than 3 nodes') + raise nx.NetworkXUnfeasible("impossible to bridge connect less than 3 nodes") if avail is None: return unconstrained_bridge_augmentation(G) else: @@ -486,6 +493,7 @@ def bridge_augmentation(G, avail=None, weight=None): # --- Algorithms and Helpers --- + def _ordered(u, v): """Returns the nodes in an undirected edge in lower-triangular order""" return (u, v) if u < v else (v, u) @@ -494,19 +502,20 @@ def _ordered(u, v): def _unpack_available_edges(avail, weight=None, G=None): """Helper to separate avail into edges and corresponding weights""" if weight is None: - weight = 'weight' + weight = "weight" if isinstance(avail, dict): avail_uv = list(avail.keys()) avail_w = list(avail.values()) else: + def _try_getitem(d): try: return d[weight] except TypeError: return d + avail_uv = [tup[0:2] for tup in avail] - avail_w = [1 if len(tup) == 2 else _try_getitem(tup[-1]) - for tup in avail] + avail_w = [1 if len(tup) == 2 else _try_getitem(tup[-1]) for tup in avail] if G is not None: # Edges already in the graph are filtered @@ -516,7 +525,7 @@ def _unpack_available_edges(avail, weight=None, G=None): return avail_uv, avail_w -MetaEdge = namedtuple('MetaEdge', ('meta_uv', 'uv', 'w')) +MetaEdge = namedtuple("MetaEdge", ("meta_uv", "uv", "w")) def _lightest_meta_edges(mapping, avail_uv, avail_w): @@ -604,7 +613,7 @@ def unconstrained_one_edge_augmentation(G): meta_aug = list(zip(meta_nodes, meta_nodes[1:])) # map that path to the original graph inverse = defaultdict(list) - for k, v in C.graph['mapping'].items(): + for k, v in C.graph["mapping"].items(): inverse[v].append(k) for mu, mv in meta_aug: yield (inverse[mu][0], inverse[mv][0]) @@ -659,23 +668,22 @@ def weighted_one_edge_augmentation(G, avail, weight=None, partial=False): # Collapse CCs in the original graph into nodes in a metagraph # Then find an MST of the metagraph instead of the original graph C = collapse(G, nx.connected_components(G)) - mapping = C.graph['mapping'] + mapping = C.graph["mapping"] # Assign each available edge to an edge in the metagraph candidate_mapping = _lightest_meta_edges(mapping, avail_uv, avail_w) # nx.set_edge_attributes(C, name='weight', values=0) C.add_edges_from( - (mu, mv, {'weight': w, 'generator': uv}) + (mu, mv, {"weight": w, "generator": uv}) for (mu, mv), uv, w in candidate_mapping ) # Find MST of the meta graph meta_mst = nx.minimum_spanning_tree(C) if not partial and not nx.is_connected(meta_mst): - raise nx.NetworkXUnfeasible( - 'Not possible to connect G with available edges') + raise nx.NetworkXUnfeasible("Not possible to connect G with available edges") # Yield the edge that generated the meta-edge for mu, mv, d in meta_mst.edges(data=True): - if 'generator' in d: - edge = d['generator'] + if "generator" in d: + edge = d["generator"] yield edge @@ -774,9 +782,9 @@ def unconstrained_bridge_augmentation(G): # Choose pairs of distinct leaf nodes in each tree. If this is not # possible then make a pair using the single isolated node in the tree. 
vset1 = [ - tuple(cc) * 2 # case1: an isolated node - if len(cc) == 1 else - sorted(cc, key=C.degree)[0:2] # case2: pair of leaf nodes + tuple(cc) * 2 # case1: an isolated node + if len(cc) == 1 + else sorted(cc, key=C.degree)[0:2] # case2: pair of leaf nodes for cc in nx.connected_components(C) ] if len(vset1) > 1: @@ -814,11 +822,13 @@ def unconstrained_bridge_augmentation(G): # Construct the mapping (beta) from meta-nodes to regular nodes inverse = defaultdict(list) - for k, v in C.graph['mapping'].items(): + for k, v in C.graph["mapping"].items(): inverse[v].append(k) # sort so we choose minimum degree nodes first - inverse = {mu: sorted(mapped, key=lambda u: (G.degree(u), u)) - for mu, mapped in inverse.items()} + inverse = { + mu: sorted(mapped, key=lambda u: (G.degree(u), u)) + for mu, mapped in inverse.items() + } # For each meta-edge, map back to an arbitrary pair in the original graph G2 = G.copy() @@ -897,7 +907,7 @@ def weighted_bridge_augmentation(G, avail, weight=None): """ if weight is None: - weight = 'weight' + weight = "weight" # If input G is not connected the approximation factor increases to 3 if not nx.is_connected(G): @@ -912,7 +922,7 @@ def weighted_bridge_augmentation(G, avail, weight=None): if len(avail) == 0: if nx.has_bridges(H): - raise nx.NetworkXUnfeasible('no augmentation possible') + raise nx.NetworkXUnfeasible("no augmentation possible") avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=H) @@ -921,7 +931,7 @@ def weighted_bridge_augmentation(G, avail, weight=None): C = collapse(H, bridge_ccs) # Use the meta graph to shrink avail to a small feasible subset - mapping = C.graph['mapping'] + mapping = C.graph["mapping"] # Choose the minimum weight feasible edge in each group meta_to_wuv = { (mu, mv): (w, uv) @@ -953,12 +963,13 @@ def weighted_bridge_augmentation(G, avail, weight=None): # This indicates that it costs nothing to use edges that were given. D = nx.reverse(TR).copy() - nx.set_edge_attributes(D, name='weight', values=0) + nx.set_edge_attributes(D, name="weight", values=0) # The LCA of mu and mv in T is the shared ancestor of mu and mv that is # located farthest from the root. lca_gen = nx.tree_all_pairs_lowest_common_ancestor( - TR, root=root, pairs=meta_to_wuv.keys()) + TR, root=root, pairs=meta_to_wuv.keys() + ) for (mu, mv), lca in lca_gen: w, uv = meta_to_wuv[(mu, mv)] @@ -982,7 +993,7 @@ def weighted_bridge_augmentation(G, avail, weight=None): A = _minimum_rooted_branching(D, root) except nx.NetworkXException as e: # If there is no branching then augmentation is not possible - raise nx.NetworkXUnfeasible('no 2-edge-augmentation possible') from e + raise nx.NetworkXUnfeasible("no 2-edge-augmentation possible") from e # For each edge e, in the branching that did not belong to the directed # tree T, add the corresponding edge that **GENERATED** it (this is not @@ -992,9 +1003,9 @@ def weighted_bridge_augmentation(G, avail, weight=None): bridge_connectors = set() for mu, mv in A.edges(): data = D.get_edge_data(mu, mv) - if 'generator' in data: + if "generator" in data: # Add the avail edge that generated the branching edge. 
- edge = data['generator'] + edge = data["generator"] bridge_connectors.add(edge) yield from bridge_connectors @@ -1072,8 +1083,9 @@ def collapse(G, grouped_nodes): remaining = set(G.nodes()) for i, group in enumerate(grouped_nodes): group = set(group) - assert remaining.issuperset(group), ( - 'grouped nodes must exist in G and be disjoint') + assert remaining.issuperset( + group + ), "grouped nodes must exist in G and be disjoint" remaining.difference_update(group) members[i] = group mapping.update((n, i) for n in group) @@ -1084,12 +1096,13 @@ def collapse(G, grouped_nodes): mapping.update((n, i) for n in group) number_of_groups = i + 1 C.add_nodes_from(range(number_of_groups)) - C.add_edges_from((mapping[u], mapping[v]) for u, v in G.edges() - if mapping[u] != mapping[v]) + C.add_edges_from( + (mapping[u], mapping[v]) for u, v in G.edges() if mapping[u] != mapping[v] + ) # Add a list of members (ie original nodes) to each node (ie scc) in C. - nx.set_node_attributes(C, name='members', values=members) + nx.set_node_attributes(C, name="members", values=members) # Add mapping dict as graph attribute - C.graph['mapping'] = mapping + C.graph["mapping"] = mapping return C @@ -1135,8 +1148,8 @@ def _compat_shuffle(rng, input): @py_random_state(4) -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None): """Greedy algorithm for finding a k-edge-augmentation @@ -1228,8 +1241,7 @@ def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None): # Check for feasibility if not done: - raise nx.NetworkXUnfeasible( - 'not able to k-edge-connect with available edges') + raise nx.NetworkXUnfeasible("not able to k-edge-connect with available edges") # Randomized attempt to reduce the size of the solution _compat_shuffle(seed, aug_edges) diff --git a/networkx/algorithms/connectivity/edge_kcomponents.py b/networkx/algorithms/connectivity/edge_kcomponents.py index 64f321c2..b46c721d 100644 --- a/networkx/algorithms/connectivity/edge_kcomponents.py +++ b/networkx/algorithms/connectivity/edge_kcomponents.py @@ -16,14 +16,14 @@ from functools import partial import itertools as it __all__ = [ - 'k_edge_components', - 'k_edge_subgraphs', - 'bridge_components', - 'EdgeComponentAuxGraph', + "k_edge_components", + "k_edge_subgraphs", + "bridge_components", + "EdgeComponentAuxGraph", ] -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def k_edge_components(G, k): """Generates nodes in each maximal k-edge-connected component in G. @@ -88,7 +88,7 @@ def k_edge_components(G, k): """ # Compute k-edge-ccs using the most efficient algorithms available. if k < 1: - raise ValueError('k cannot be less than 1') + raise ValueError("k cannot be less than 1") if G.is_directed(): if k == 1: return nx.strongly_connected_components(G) @@ -106,7 +106,7 @@ def k_edge_components(G, k): return aux_graph.k_edge_components(k) -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def k_edge_subgraphs(G, k): """Generates nodes in each maximal k-edge-connected subgraph in G. 
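To illustrate the k-edge-component queries above (a sketch; the two-triangle graph is an assumed fixture):

import networkx as nx
from networkx.algorithms.connectivity import EdgeComponentAuxGraph

# Two triangles joined by a bridge: the triangles are the 2-edge-connected parts.
G = nx.Graph([(0, 1), (1, 2), (2, 0), (3, 4), (4, 5), (5, 3), (2, 3)])
print(sorted(map(sorted, nx.k_edge_components(G, k=2))))  # [[0, 1, 2], [3, 4, 5]]

# The auxiliary graph is constructed once, then answers queries for any k.
aux = EdgeComponentAuxGraph.construct(G)
print(sorted(map(sorted, aux.k_edge_components(k=1))))    # one set: all six nodes
print(sorted(map(sorted, aux.k_edge_components(k=2))))    # the two triangles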
@@ -167,7 +167,7 @@ def k_edge_subgraphs(G, k): https://openproceedings.org/2012/conf/edbt/ZhouLYLCL12.pdf """ if k < 1: - raise ValueError('k cannot be less than 1') + raise ValueError("k cannot be less than 1") if G.is_directed(): if k <= 1: # For directed graphs, @@ -193,8 +193,8 @@ def _k_edge_subgraphs_nodes(G, k): yield set(C.nodes()) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def bridge_components(G): """Finds all bridge-connected components of G. @@ -332,7 +332,7 @@ class EdgeComponentAuxGraph: G : NetworkX graph """ # workaround for classmethod decorator - not_implemented_for('multigraph')(lambda G: G)(G) + not_implemented_for("multigraph")(lambda G: G)(G) def _recursive_build(H, A, source, avail): # Terminate once the flow has been computed to every node. @@ -398,11 +398,11 @@ class EdgeComponentAuxGraph: k-edge-ccs in the original graph. """ if k < 1: - raise ValueError('k cannot be less than 1') + raise ValueError("k cannot be less than 1") A = self.A # "traverse the auxiliary graph A and delete all edges with weights less # than k" - aux_weights = nx.get_edge_attributes(A, 'weight') + aux_weights = nx.get_edge_attributes(A, "weight") # Create a relevant graph with the auxiliary edges with weights >= k R = nx.Graph() R.add_nodes_from(A.nodes()) @@ -433,12 +433,12 @@ class EdgeComponentAuxGraph: then use this method. """ if k < 1: - raise ValueError('k cannot be less than 1') + raise ValueError("k cannot be less than 1") H = self.H A = self.A # "traverse the auxiliary graph A and delete all edges with weights less # than k" - aux_weights = nx.get_edge_attributes(A, 'weight') + aux_weights = nx.get_edge_attributes(A, "weight") # Create a relevant graph with the auxiliary edges with weights >= k R = nx.Graph() R.add_nodes_from(A.nodes()) @@ -549,7 +549,7 @@ def general_k_edge_subgraphs(G, k): [1, 1, 1, 4, 4] """ if k < 1: - raise ValueError('k cannot be less than 1') + raise ValueError("k cannot be less than 1") # Node pruning optimization (incorporates early return) # find_ccs is either connected_components/strongly_connected_components diff --git a/networkx/algorithms/connectivity/kcomponents.py b/networkx/algorithms/connectivity/kcomponents.py index 4f0eb85e..a9fe783c 100644 --- a/networkx/algorithms/connectivity/kcomponents.py +++ b/networkx/algorithms/connectivity/kcomponents.py @@ -7,14 +7,16 @@ from operator import itemgetter import networkx as nx from networkx.utils import not_implemented_for + # Define the default maximum flow function. from networkx.algorithms.flow import edmonds_karp + default_flow_func = edmonds_karp -__all__ = ['k_components'] +__all__ = ["k_components"] -@not_implemented_for('directed') +@not_implemented_for("directed") def k_components(G, flow_func=None): r"""Returns the k-component structure of a graph G.
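k_components computes the whole k-component structure in one call, returning a dict that maps each connectivity level k to its list of node sets; a minimal sketch on an assumed toy graph:

import networkx as nx

# A 5-clique with a pendant path hanging off node 4.
G = nx.complete_graph(5)
nx.add_path(G, [4, 5, 6])

structure = nx.k_components(G)  # {k: [set of nodes, ...], ...}
print(structure[1])  # the whole connected graph
print(structure[4])  # only the clique survives at k=4: [{0, 1, 2, 3, 4}]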
@@ -167,8 +169,9 @@ def _consolidate(sets, k): G = nx.Graph() nodes = {i: s for i, s in enumerate(sets)} G.add_nodes_from(nodes) - G.add_edges_from((u, v) for u, v in combinations(nodes, 2) - if len(nodes[u] & nodes[v]) >= k) + G.add_edges_from( + (u, v) for u, v in combinations(nodes, 2) if len(nodes[u] & nodes[v]) >= k + ) for component in nx.connected_components(G): yield set.union(*[nodes[n] for n in component]) @@ -179,9 +182,9 @@ def _generate_partition(G, cuts, k): if n in partition: return True return False + components = [] - nodes = ({n for n, d in G.degree() if d > k} - - {n for cut in cuts for n in cut}) + nodes = {n for n, d in G.degree() if d > k} - {n for cut in cuts for n in cut} H = G.subgraph(nodes) for cc in nx.connected_components(H): component = set(cc) diff --git a/networkx/algorithms/connectivity/kcutsets.py b/networkx/algorithms/connectivity/kcutsets.py index 54d60255..0d806b7e 100644 --- a/networkx/algorithms/connectivity/kcutsets.py +++ b/networkx/algorithms/connectivity/kcutsets.py @@ -13,10 +13,11 @@ from networkx.algorithms.flow import ( edmonds_karp, shortest_augmenting_path, ) + default_flow_func = edmonds_karp -__all__ = ['all_node_cuts'] +__all__ = ["all_node_cuts"] def all_node_cuts(G, k=None, flow_func=None): @@ -85,7 +86,7 @@ def all_node_cuts(G, k=None, flow_func=None): """ if not nx.is_connected(G): - raise nx.NetworkXError('Input graph is disconnected.') + raise nx.NetworkXError("Input graph is disconnected.") # Address some corner cases first. # For complete Graphs @@ -100,17 +101,17 @@ def all_node_cuts(G, k=None, flow_func=None): # for node connectivity. H = build_auxiliary_node_connectivity(G) H_nodes = H.nodes # for speed - mapping = H.graph['mapping'] + mapping = H.graph["mapping"] # Keep a copy of original predecessors, H will be modified later. # Shallow copy is enough. original_H_pred = copy.copy(H._pred) - R = build_residual_network(H, 'capacity') - kwargs = dict(capacity='capacity', residual=R) + R = build_residual_network(H, "capacity") + kwargs = dict(capacity="capacity", residual=R) # Define default flow function if flow_func is None: flow_func = default_flow_func if flow_func is shortest_augmenting_path: - kwargs['two_phase'] = True + kwargs["two_phase"] = True # Begin the actual algorithm # step 1: Find node connectivity k of G if k is None: @@ -130,28 +131,29 @@ def all_node_cuts(G, k=None, flow_func=None): for v in non_adjacent: # step 4: compute maximum flow in an Even-Tarjan reduction H of G # and step 5: build the associated residual network R - R = flow_func(H, f'{mapping[x]}B', f'{mapping[v]}A', **kwargs) - flow_value = R.graph['flow_value'] + R = flow_func(H, f"{mapping[x]}B", f"{mapping[v]}A", **kwargs) + flow_value = R.graph["flow_value"] if flow_value == k: # Find the nodes incident to the flow. - E1 = flowed_edges = [(u, w) for (u, w, d) in - R.edges(data=True) - if d['flow'] != 0] + E1 = flowed_edges = [ + (u, w) for (u, w, d) in R.edges(data=True) if d["flow"] != 0 + ] VE1 = incident_nodes = {n for edge in E1 for n in edge} # Remove saturated edges form the residual network. # Note that reversed edges are introduced with capacity 0 # in the residual graph and they need to be removed too. 
- saturated_edges = [(u, w, d) for (u, w, d) in - R.edges(data=True) - if d['capacity'] == d['flow'] - or d['capacity'] == 0] + saturated_edges = [ + (u, w, d) + for (u, w, d) in R.edges(data=True) + if d["capacity"] == d["flow"] or d["capacity"] == 0 + ] R.remove_edges_from(saturated_edges) R_closure = nx.transitive_closure(R) # step 6: shrink the strongly connected components of # residual flow network R and call it L. L = nx.condensation(R) - cmap = L.graph['mapping'] + cmap = L.graph["mapping"] inv_cmap = defaultdict(list) for n, scc in cmap.items(): inv_cmap[scc].append(n) @@ -177,19 +179,17 @@ def all_node_cuts(G, k=None, flow_func=None): for n in S: S_ancestors.update(R_closure._pred[n]) S.update(S_ancestors) - if f'{mapping[x]}B' not in S or f'{mapping[v]}A' in S: + if f"{mapping[x]}B" not in S or f"{mapping[v]}A" in S: continue # Find the cutset that links the node partition (S,~S) in H cutset = set() for u in S: - cutset.update((u, w) - for w in original_H_pred[u] if w not in S) + cutset.update((u, w) for w in original_H_pred[u] if w not in S) # The edges in H that form the cutset are internal edges # (ie edges that represent a node of the original graph G) - if any([H_nodes[u]['id'] != H_nodes[w]['id'] - for u, w in cutset]): + if any([H_nodes[u]["id"] != H_nodes[w]["id"] for u, w in cutset]): continue - node_cut = {H_nodes[u]['id'] for u, _ in cutset} + node_cut = {H_nodes[u]["id"] for u, _ in cutset} if len(node_cut) == k: # The cut is invalid if it includes internal edges of @@ -207,19 +207,13 @@ def all_node_cuts(G, k=None, flow_func=None): # Add edges to the auxiliary digraph. # See build_residual_network for convention we used # in residual graphs. - H.add_edge(f'{mapping[x]}B', f'{mapping[v]}A', - capacity=1) - H.add_edge(f'{mapping[v]}B', f'{mapping[x]}A', - capacity=1) + H.add_edge(f"{mapping[x]}B", f"{mapping[v]}A", capacity=1) + H.add_edge(f"{mapping[v]}B", f"{mapping[x]}A", capacity=1) # Add edges to the residual network. - R.add_edge(f'{mapping[x]}B', f'{mapping[v]}A', - capacity=1) - R.add_edge(f'{mapping[v]}A', f'{mapping[x]}B', - capacity=0) - R.add_edge(f'{mapping[v]}B', f'{mapping[x]}A', - capacity=1) - R.add_edge(f'{mapping[x]}A', f'{mapping[v]}B', - capacity=0) + R.add_edge(f"{mapping[x]}B", f"{mapping[v]}A", capacity=1) + R.add_edge(f"{mapping[v]}A", f"{mapping[x]}B", capacity=0) + R.add_edge(f"{mapping[v]}B", f"{mapping[x]}A", capacity=1) + R.add_edge(f"{mapping[x]}A", f"{mapping[v]}B", capacity=0) # Add again the saturated edges to reuse the residual network R.add_edges_from(saturated_edges) diff --git a/networkx/algorithms/connectivity/stoerwagner.py b/networkx/algorithms/connectivity/stoerwagner.py index 93afc543..912f97f6 100644 --- a/networkx/algorithms/connectivity/stoerwagner.py +++ b/networkx/algorithms/connectivity/stoerwagner.py @@ -8,12 +8,12 @@ from ...utils import BinaryHeap from ...utils import not_implemented_for from ...utils import arbitrary_element -__all__ = ['stoer_wagner'] +__all__ = ["stoer_wagner"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') -def stoer_wagner(G, weight='weight', heap=BinaryHeap): +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def stoer_wagner(G, weight="weight", heap=BinaryHeap): r"""Returns the weighted minimum edge cut using the Stoer-Wagner algorithm. 
Determine the minimum edge cut of a connected graph using the @@ -86,19 +86,20 @@ def stoer_wagner(G, weight='weight', heap=BinaryHeap): """ n = len(G) if n < 2: - raise nx.NetworkXError('graph has less than two nodes.') + raise nx.NetworkXError("graph has less than two nodes.") if not nx.is_connected(G): - raise nx.NetworkXError('graph is not connected.') + raise nx.NetworkXError("graph is not connected.") # Make a copy of the graph for internal use. - G = nx.Graph((u, v, {'weight': e.get(weight, 1)}) - for u, v, e in G.edges(data=True) if u != v) + G = nx.Graph( + (u, v, {"weight": e.get(weight, 1)}) for u, v, e in G.edges(data=True) if u != v + ) for u, v, e, in G.edges(data=True): - if e['weight'] < 0: - raise nx.NetworkXError('graph has a negative-weighted edge.') + if e["weight"] < 0: + raise nx.NetworkXError("graph has a negative-weighted edge.") - cut_value = float('inf') + cut_value = float("inf") nodes = set(G) contractions = [] # contracted node pairs @@ -112,14 +113,14 @@ def stoer_wagner(G, weight='weight', heap=BinaryHeap): # of edges connecting it to nodes in A. h = heap() # min-heap emulating a max-heap for v, e in G[u].items(): - h.insert(v, -e['weight']) + h.insert(v, -e["weight"]) # Repeat until all but one node has been added to A. for j in range(n - i - 2): u = h.pop()[0] A.add(u) for v, e, in G[u].items(): if v not in A: - h.insert(v, h.get(v, 0) - e['weight']) + h.insert(v, h.get(v, 0) - e["weight"]) # A and the remaining node v define a "cut of the phase". There is a # minimum cut of the original graph that is also a cut of the phase. # Due to contractions in earlier phases, v may in fact represent @@ -134,9 +135,9 @@ def stoer_wagner(G, weight='weight', heap=BinaryHeap): for w, e in G[v].items(): if w != u: if w not in G[u]: - G.add_edge(u, w, weight=e['weight']) + G.add_edge(u, w, weight=e["weight"]) else: - G[u][w]['weight'] += e['weight'] + G[u][w]["weight"] += e["weight"] G.remove_node(v) # Recover the optimal partitioning from the contractions. 
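As a standalone illustration of the entry point reformatted above, the following sketch reuses the weighted graph from test_graph1 in test_stoer_wagner.py further down this patch (expected cut value 4):

import networkx as nx

G = nx.Graph()
for u, v, w in [("x", "a", 3), ("x", "b", 1), ("a", "c", 3), ("b", "c", 5),
                ("b", "d", 4), ("d", "e", 2), ("c", "y", 2), ("e", "y", 3)]:
    G.add_edge(u, v, weight=w)

# Returns the cut value and the two sides of the partition as node lists.
cut_value, (left, right) = nx.stoer_wagner(G)
print(cut_value)  # 4, matching the expected answer in test_graph1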
diff --git a/networkx/algorithms/connectivity/tests/test_connectivity.py b/networkx/algorithms/connectivity/tests/test_connectivity.py index a492c0ee..51642b69 100644 --- a/networkx/algorithms/connectivity/tests/test_connectivity.py +++ b/networkx/algorithms/connectivity/tests/test_connectivity.py @@ -71,9 +71,29 @@ def test_brandes_erlebach(): # Figure 1 chapter 7: Connectivity # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf G = nx.Graph() - G.add_edges_from([(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4), - (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8), - (7, 10), (8, 11), (9, 10), (9, 11), (10, 11)]) + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 6), + (3, 4), + (3, 6), + (4, 6), + (4, 7), + (5, 7), + (6, 8), + (6, 9), + (7, 8), + (7, 10), + (8, 11), + (9, 10), + (9, 11), + (10, 11), + ] + ) for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) errmsg = f"Assertion failed in function: {flow_func.__name__}" @@ -127,11 +147,13 @@ def test_complete_graphs(): G = nx.complete_graph(n) errmsg = f"Assertion failed in function: {flow_func.__name__}" assert n - 1 == nx.node_connectivity(G, flow_func=flow_func), errmsg - assert n - 1 == nx.node_connectivity(G.to_directed(), - flow_func=flow_func), errmsg + assert n - 1 == nx.node_connectivity( + G.to_directed(), flow_func=flow_func + ), errmsg assert n - 1 == nx.edge_connectivity(G, flow_func=flow_func), errmsg - assert n - 1 == nx.edge_connectivity(G.to_directed(), - flow_func=flow_func), errmsg + assert n - 1 == nx.edge_connectivity( + G.to_directed(), flow_func=flow_func + ), errmsg def test_empty_graphs(): @@ -186,29 +208,33 @@ def test_icosahedral(): def test_missing_source(): G = nx.path_graph(4) for flow_func in flow_funcs: - pytest.raises(nx.NetworkXError, nx.node_connectivity, G, 10, 1, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, nx.node_connectivity, G, 10, 1, flow_func=flow_func + ) def test_missing_target(): G = nx.path_graph(4) for flow_func in flow_funcs: - pytest.raises(nx.NetworkXError, nx.node_connectivity, G, 1, 10, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, nx.node_connectivity, G, 1, 10, flow_func=flow_func + ) def test_edge_missing_source(): G = nx.path_graph(4) for flow_func in flow_funcs: - pytest.raises(nx.NetworkXError, nx.edge_connectivity, G, 10, 1, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, nx.edge_connectivity, G, 10, 1, flow_func=flow_func + ) def test_edge_missing_target(): G = nx.path_graph(4) for flow_func in flow_funcs: - pytest.raises(nx.NetworkXError, nx.edge_connectivity, G, 1, 10, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, nx.edge_connectivity, G, 1, 10, flow_func=flow_func + ) def test_not_weakly_connected(): @@ -258,8 +284,7 @@ def test_cutoff(): def test_invalid_auxiliary(): G = nx.complete_graph(5) - pytest.raises(nx.NetworkXError, local_node_connectivity, G, 0, 3, - auxiliary=G) + pytest.raises(nx.NetworkXError, local_node_connectivity, G, 0, 3, auxiliary=G) def test_interface_only_source(): @@ -286,7 +311,6 @@ def test_edge_connectivity_flow_vs_stoer_wagner(): class TestAllPairsNodeConnectivity: - @classmethod def setup_class(cls): cls.path = nx.path_graph(7) @@ -298,9 +322,17 @@ class TestAllPairsNodeConnectivity: cls.K20 = nx.complete_graph(20) cls.K10 = nx.complete_graph(10) cls.K5 = nx.complete_graph(5) - cls.G_list = [cls.path, cls.directed_path, cls.cycle, - cls.directed_cycle, cls.gnp, cls.directed_gnp, - cls.K10, cls.K5, cls.K20] + 
cls.G_list = [ + cls.path, + cls.directed_path, + cls.cycle, + cls.directed_cycle, + cls.gnp, + cls.directed_gnp, + cls.K10, + cls.K5, + cls.K20, + ] def test_cycles(self): K_undir = nx.all_pairs_node_connectivity(self.cycle) @@ -351,8 +383,9 @@ class TestAllPairsNodeConnectivity: for u, v in itertools.combinations(nodes, 2): A[u][v] = A[v][u] = nx.node_connectivity(G, u, v) C = nx.all_pairs_node_connectivity(G) - assert (sorted((k, sorted(v)) for k, v in A.items()) == - sorted((k, sorted(v)) for k, v in C.items())) + assert sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) def test_all_pairs_connectivity_directed(self): G = nx.DiGraph() @@ -362,8 +395,9 @@ class TestAllPairsNodeConnectivity: for u, v in itertools.permutations(nodes, 2): A[u][v] = nx.node_connectivity(G, u, v) C = nx.all_pairs_node_connectivity(G) - assert (sorted((k, sorted(v)) for k, v in A.items()) == - sorted((k, sorted(v)) for k, v in C.items())) + assert sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) def test_all_pairs_connectivity_nbunch_combinations(self): G = nx.complete_graph(5) @@ -372,8 +406,9 @@ class TestAllPairsNodeConnectivity: for u, v in itertools.combinations(nbunch, 2): A[u][v] = A[v][u] = nx.node_connectivity(G, u, v) C = nx.all_pairs_node_connectivity(G, nbunch=nbunch) - assert (sorted((k, sorted(v)) for k, v in A.items()) == - sorted((k, sorted(v)) for k, v in C.items())) + assert sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) def test_all_pairs_connectivity_nbunch_iter(self): G = nx.complete_graph(5) @@ -382,5 +417,6 @@ class TestAllPairsNodeConnectivity: for u, v in itertools.combinations(nbunch, 2): A[u][v] = A[v][u] = nx.node_connectivity(G, u, v) C = nx.all_pairs_node_connectivity(G, nbunch=iter(nbunch)) - assert (sorted((k, sorted(v)) for k, v in A.items()) == - sorted((k, sorted(v)) for k, v in C.items())) + assert sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) diff --git a/networkx/algorithms/connectivity/tests/test_cuts.py b/networkx/algorithms/connectivity/tests/test_cuts.py index 257797a6..becad52b 100644 --- a/networkx/algorithms/connectivity/tests/test_cuts.py +++ b/networkx/algorithms/connectivity/tests/test_cuts.py @@ -47,9 +47,29 @@ def test_brandes_erlebach_book(): # Figure 1 chapter 7: Connectivity # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf G = nx.Graph() - G.add_edges_from([(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4), - (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8), - (7, 10), (8, 11), (9, 10), (9, 11), (10, 11)]) + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 6), + (3, 4), + (3, 6), + (4, 6), + (4, 7), + (5, 7), + (6, 8), + (6, 9), + (7, 8), + (7, 10), + (8, 11), + (9, 10), + (9, 11), + (10, 11), + ] + ) for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) errmsg = f"Assertion failed in function: {flow_func.__name__}" @@ -200,10 +220,12 @@ def test_empty_graphs(): D = nx.DiGraph() for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]: for flow_func in flow_funcs: - pytest.raises(nx.NetworkXPointlessConcept, interface_func, G, - flow_func=flow_func) - pytest.raises(nx.NetworkXPointlessConcept, interface_func, D, - flow_func=flow_func) + pytest.raises( + nx.NetworkXPointlessConcept, interface_func, G, flow_func=flow_func + ) + pytest.raises( + 
nx.NetworkXPointlessConcept, interface_func, D, flow_func=flow_func + ) def test_unbounded(): @@ -216,16 +238,18 @@ def test_missing_source(): G = nx.path_graph(4) for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: for flow_func in flow_funcs: - pytest.raises(nx.NetworkXError, interface_func, G, 10, 1, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, interface_func, G, 10, 1, flow_func=flow_func + ) def test_missing_target(): G = nx.path_graph(4) for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: for flow_func in flow_funcs: - pytest.raises(nx.NetworkXError, interface_func, G, 1, 10, - flow_func=flow_func) + pytest.raises( + nx.NetworkXError, interface_func, G, 1, 10, flow_func=flow_func + ) def test_not_weakly_connected(): @@ -234,8 +258,7 @@ def test_not_weakly_connected(): nx.add_path(G, [4, 5]) for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: for flow_func in flow_funcs: - pytest.raises(nx.NetworkXError, interface_func, G, - flow_func=flow_func) + pytest.raises(nx.NetworkXError, interface_func, G, flow_func=flow_func) def test_not_connected(): @@ -244,8 +267,7 @@ def test_not_connected(): nx.add_path(G, [4, 5]) for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: for flow_func in flow_funcs: - pytest.raises(nx.NetworkXError, interface_func, G, - flow_func=flow_func) + pytest.raises(nx.NetworkXError, interface_func, G, flow_func=flow_func) def tests_min_cut_complete(): @@ -268,13 +290,12 @@ def tests_minimum_st_node_cut(): G.add_nodes_from([0, 1, 2, 3, 7, 8, 11, 12]) G.add_edges_from([(7, 11), (1, 11), (1, 12), (12, 8), (0, 1)]) nodelist = minimum_st_node_cut(G, 7, 11) - assert(nodelist == {}) + assert nodelist == {} def test_invalid_auxiliary(): G = nx.complete_graph(5) - pytest.raises(nx.NetworkXError, minimum_st_node_cut, G, 0, 3, - auxiliary=G) + pytest.raises(nx.NetworkXError, minimum_st_node_cut, G, 0, 3, auxiliary=G) def test_interface_only_source(): diff --git a/networkx/algorithms/connectivity/tests/test_disjoint_paths.py b/networkx/algorithms/connectivity/tests/test_disjoint_paths.py index f9bcfc85..74bb3f2d 100644 --- a/networkx/algorithms/connectivity/tests/test_disjoint_paths.py +++ b/networkx/algorithms/connectivity/tests/test_disjoint_paths.py @@ -46,22 +46,37 @@ def are_node_disjoint_paths(G, paths): def test_graph_from_pr_2053(): G = nx.Graph() - G.add_edges_from([ - ('A', 'B'), ('A', 'D'), ('A', 'F'), ('A', 'G'), - ('B', 'C'), ('B', 'D'), ('B', 'G'), ('C', 'D'), - ('C', 'E'), ('C', 'Z'), ('D', 'E'), ('D', 'F'), - ('E', 'F'), ('E', 'Z'), ('F', 'Z'), ('G', 'Z')]) + G.add_edges_from( + [ + ("A", "B"), + ("A", "D"), + ("A", "F"), + ("A", "G"), + ("B", "C"), + ("B", "D"), + ("B", "G"), + ("C", "D"), + ("C", "E"), + ("C", "Z"), + ("D", "E"), + ("D", "F"), + ("E", "F"), + ("E", "Z"), + ("F", "Z"), + ("G", "Z"), + ] + ) for flow_func in flow_funcs: kwargs = dict(flow_func=flow_func) errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge disjoint paths - edge_paths = list(nx.edge_disjoint_paths(G, 'A', 'Z', **kwargs)) + edge_paths = list(nx.edge_disjoint_paths(G, "A", "Z", **kwargs)) assert are_edge_disjoint_paths(G, edge_paths), errmsg - assert nx.edge_connectivity(G, 'A', 'Z') == len(edge_paths), errmsg + assert nx.edge_connectivity(G, "A", "Z") == len(edge_paths), errmsg # node disjoint paths - node_paths = list(nx.node_disjoint_paths(G, 'A', 'Z', **kwargs)) + node_paths = list(nx.node_disjoint_paths(G, "A", "Z", **kwargs)) assert are_node_disjoint_paths(G, node_paths), errmsg - assert 
nx.node_connectivity(G, 'A', 'Z') == len(node_paths), errmsg + assert nx.node_connectivity(G, "A", "Z") == len(node_paths), errmsg def test_florentine_families(): @@ -70,13 +85,13 @@ def test_florentine_families(): kwargs = dict(flow_func=flow_func) errmsg = f"Assertion failed in function: {flow_func.__name__}" # edge disjoint paths - edge_dpaths = list(nx.edge_disjoint_paths(G, 'Medici', 'Strozzi', **kwargs)) + edge_dpaths = list(nx.edge_disjoint_paths(G, "Medici", "Strozzi", **kwargs)) assert are_edge_disjoint_paths(G, edge_dpaths), errmsg - assert nx.edge_connectivity(G, 'Medici', 'Strozzi') == len(edge_dpaths), errmsg + assert nx.edge_connectivity(G, "Medici", "Strozzi") == len(edge_dpaths), errmsg # node disjoint paths - node_dpaths = list(nx.node_disjoint_paths(G, 'Medici', 'Strozzi', **kwargs)) + node_dpaths = list(nx.node_disjoint_paths(G, "Medici", "Strozzi", **kwargs)) assert are_node_disjoint_paths(G, node_dpaths), errmsg - assert nx.node_connectivity(G, 'Medici', 'Strozzi') == len(node_dpaths), errmsg + assert nx.node_connectivity(G, "Medici", "Strozzi") == len(node_dpaths), errmsg def test_karate(): @@ -145,7 +160,7 @@ def test_cutoff_disjoint_paths(): kwargs = dict(flow_func=flow_func) errmsg = f"Assertion failed in function: {flow_func.__name__}" for cutoff in [2, 4]: - kwargs['cutoff'] = cutoff + kwargs["cutoff"] = cutoff # edge disjoint paths edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs)) assert are_edge_disjoint_paths(G, edge_dpaths), errmsg diff --git a/networkx/algorithms/connectivity/tests/test_edge_augmentation.py b/networkx/algorithms/connectivity/tests/test_edge_augmentation.py index d33ffcf4..dfef3635 100644 --- a/networkx/algorithms/connectivity/tests/test_edge_augmentation.py +++ b/networkx/algorithms/connectivity/tests/test_edge_augmentation.py @@ -3,9 +3,7 @@ import networkx as nx import itertools as it from networkx.utils import pairwise import pytest -from networkx.algorithms.connectivity import ( - k_edge_augmentation, -) +from networkx.algorithms.connectivity import k_edge_augmentation from networkx.algorithms.connectivity.edge_augmentation import ( collapse, complement_edges, @@ -25,8 +23,13 @@ def tarjan_bridge_graph(): # Information Processing Letters, 1974 - Elsevier # doi:10.1016/0020-0190(74)90003-9. 
# define 2-connected components and bridges - ccs = [(1, 2, 4, 3, 1, 4), (5, 6, 7, 5), (8, 9, 10, 8), - (17, 18, 16, 15, 17), (11, 12, 14, 13, 11, 14)] + ccs = [ + (1, 2, 4, 3, 1, 4), + (5, 6, 7, 5), + (8, 9, 10, 8), + (17, 18, 16, 15, 17), + (11, 12, 14, 13, 11, 14), + ] bridges = [(4, 8), (3, 5), (3, 17)] G = nx.Graph(it.chain(*(pairwise(path) for path in ccs + bridges))) return G @@ -34,30 +37,24 @@ def tarjan_bridge_graph(): def test_weight_key(): G = nx.Graph() - G.add_nodes_from([ - 1, 2, 3, 4, 5, 6, 7, 8, 9]) + G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9]) G.add_edges_from([(3, 8), (1, 2), (2, 3)]) impossible = {(3, 6), (3, 9)} rng = random.Random(0) avail_uv = list(set(complement_edges(G)) - impossible) - avail = [(u, v, {'cost': rng.random()}) for u, v in avail_uv] + avail = [(u, v, {"cost": rng.random()}) for u, v in avail_uv] _augment_and_check(G, k=1) _augment_and_check(G, k=1, avail=avail_uv) - _augment_and_check(G, k=1, avail=avail, weight='cost') + _augment_and_check(G, k=1, avail=avail, weight="cost") - _check_augmentations(G, avail, weight='cost') + _check_augmentations(G, avail, weight="cost") def test_is_locally_k_edge_connected_exceptions(): - pytest.raises(nx.NetworkXNotImplemented, - is_k_edge_connected, - nx.DiGraph(), k=0) - pytest.raises(nx.NetworkXNotImplemented, - is_k_edge_connected, - nx.MultiGraph(), k=0) - pytest.raises(ValueError, is_k_edge_connected, - nx.Graph(), k=0) + pytest.raises(nx.NetworkXNotImplemented, is_k_edge_connected, nx.DiGraph(), k=0) + pytest.raises(nx.NetworkXNotImplemented, is_k_edge_connected, nx.MultiGraph(), k=0) + pytest.raises(ValueError, is_k_edge_connected, nx.Graph(), k=0) def test_is_k_edge_connected(): @@ -78,15 +75,18 @@ def test_is_k_edge_connected(): def test_is_k_edge_connected_exceptions(): - pytest.raises(nx.NetworkXNotImplemented, - is_locally_k_edge_connected, - nx.DiGraph(), 1, 2, k=0) - pytest.raises(nx.NetworkXNotImplemented, - is_locally_k_edge_connected, - nx.MultiGraph(), 1, 2, k=0) - pytest.raises(ValueError, - is_locally_k_edge_connected, - nx.Graph(), 1, 2, k=0) + pytest.raises( + nx.NetworkXNotImplemented, is_locally_k_edge_connected, nx.DiGraph(), 1, 2, k=0 + ) + pytest.raises( + nx.NetworkXNotImplemented, + is_locally_k_edge_connected, + nx.MultiGraph(), + 1, + 2, + k=0, + ) + pytest.raises(ValueError, is_locally_k_edge_connected, nx.Graph(), 1, 2, k=0) def test_is_locally_k_edge_connected(): @@ -137,14 +137,13 @@ def test_invalid_k(): def test_unfeasible(): G = tarjan_bridge_graph() - pytest.raises(nx.NetworkXUnfeasible, list, - k_edge_augmentation(G, k=1, avail=[])) + pytest.raises(nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=1, avail=[])) - pytest.raises(nx.NetworkXUnfeasible, list, - k_edge_augmentation(G, k=2, avail=[])) + pytest.raises(nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=2, avail=[])) - pytest.raises(nx.NetworkXUnfeasible, list, - k_edge_augmentation(G, k=2, avail=[(7, 9)])) + pytest.raises( + nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=2, avail=[(7, 9)]) + ) # partial solutions should not error if real solutions are infeasible aug_edges = list(k_edge_augmentation(G, k=2, avail=[(7, 9)], partial=True)) @@ -159,13 +158,23 @@ def test_tarjan(): G = tarjan_bridge_graph() aug_edges = set(_augment_and_check(G, k=2)[0]) - print(f'aug_edges = {aug_edges!r}') + print(f"aug_edges = {aug_edges!r}") # can't assert edge exactly equality due to non-determinant edge order # but we do know the size of the solution must be 3 assert len(aug_edges) == 3 - avail = [(9, 7), (8, 5), (2, 
10), (6, 13), (11, 18), (1, 17), (2, 3), - (16, 17), (18, 14), (15, 14)] + avail = [ + (9, 7), + (8, 5), + (2, 10), + (6, 13), + (11, 18), + (1, 17), + (2, 3), + (16, 17), + (18, 14), + (15, 14), + ] aug_edges = set(_augment_and_check(G, avail=avail, k=2)[0]) # Can't assert exact length since approximation depends on the order of a @@ -233,28 +242,33 @@ def test_gnp_augmentation(): rng = random.Random(0) G = nx.gnp_random_graph(30, 0.005, seed=0) # Randomly make edges available - avail = {(u, v): 1 + rng.random() - for u, v in complement_edges(G) - if rng.random() < .25} + avail = { + (u, v): 1 + rng.random() for u, v in complement_edges(G) if rng.random() < 0.25 + } _check_augmentations(G, avail) def _assert_solution_properties(G, aug_edges, avail_dict=None): """ Checks that aug_edges are consistently formatted """ if avail_dict is not None: - assert all(e in avail_dict for e in aug_edges), 'when avail is specified aug-edges should be in avail' + assert all( + e in avail_dict for e in aug_edges + ), "when avail is specified aug-edges should be in avail" unique_aug = set(map(tuple, map(sorted, aug_edges))) unique_aug = list(map(tuple, map(sorted, aug_edges))) - assert len(aug_edges) == len(unique_aug), 'edges should be unique' + assert len(aug_edges) == len(unique_aug), "edges should be unique" - assert not any(u == v for u, v in unique_aug), 'should be no self-edges' + assert not any(u == v for u, v in unique_aug), "should be no self-edges" - assert not any(G.has_edge(u, v) for u, v in unique_aug), 'aug edges and G.edges should be disjoint' + assert not any( + G.has_edge(u, v) for u, v in unique_aug + ), "aug edges and G.edges should be disjoint" -def _augment_and_check(G, k, avail=None, weight=None, verbose=False, - orig_k=None, max_aug_k=None): +def _augment_and_check( + G, k, avail=None, weight=None, verbose=False, orig_k=None, max_aug_k=None +): """ Does one specific augmentation and checks for properties of the result """ @@ -267,28 +281,26 @@ def _augment_and_check(G, k, avail=None, weight=None, verbose=False, try: if avail is not None: # ensure avail is in dict form - avail_dict = dict(zip(*_unpack_available_edges(avail, - weight=weight))) + avail_dict = dict(zip(*_unpack_available_edges(avail, weight=weight))) else: avail_dict = None try: # Find the augmentation if possible - generator = nx.k_edge_augmentation(G, k=k, weight=weight, - avail=avail) - assert not isinstance(generator, list), 'should always return an iter' + generator = nx.k_edge_augmentation(G, k=k, weight=weight, avail=avail) + assert not isinstance(generator, list), "should always return an iter" aug_edges = [] for edge in generator: aug_edges.append(edge) except nx.NetworkXUnfeasible: infeasible = True - info['infeasible'] = True - assert len(aug_edges) == 0, 'should not generate anything if unfeasible' + info["infeasible"] = True + assert len(aug_edges) == 0, "should not generate anything if unfeasible" if avail is None: n_nodes = G.number_of_nodes() assert n_nodes <= k, ( - 'unconstrained cases are only unfeasible if |V| <= k. ' - f'Got |V|={n_nodes} and k={k}' + "unconstrained cases are only unfeasible if |V| <= k. 
" + f"Got |V|={n_nodes} and k={k}" ) else: if max_aug_k is None: @@ -300,18 +312,21 @@ def _augment_and_check(G, k, avail=None, weight=None, verbose=False, max_aug_k = 0 assert max_aug_k < k, ( - 'avail should only be unfeasible if using all edges ' - 'does not achieve k-edge-connectivity') + "avail should only be unfeasible if using all edges " + "does not achieve k-edge-connectivity" + ) # Test for a partial solution - partial_edges = list(nx.k_edge_augmentation( - G, k=k, weight=weight, partial=True, avail=avail)) + partial_edges = list( + nx.k_edge_augmentation(G, k=k, weight=weight, partial=True, avail=avail) + ) - info['n_partial_edges'] = len(partial_edges) + info["n_partial_edges"] = len(partial_edges) if avail_dict is None: - assert set(partial_edges) == set(complement_edges(G)), ( - 'unweighted partial solutions should be the complement') + assert set(partial_edges) == set( + complement_edges(G) + ), "unweighted partial solutions should be the complement" elif len(avail_dict) > 0: H = G.copy() @@ -324,7 +339,9 @@ def _augment_and_check(G, k, avail=None, weight=None, verbose=False, # Full connectivity should be no better than our partial # solution. - assert partial_conn == full_conn, 'adding more edges should not increase k-conn' + assert ( + partial_conn == full_conn + ), "adding more edges should not increase k-conn" # Find the new edge-connectivity after adding the augmenting edges aug_edges = partial_edges @@ -338,8 +355,8 @@ def _augment_and_check(G, k, avail=None, weight=None, verbose=False, else: total_weight = num_edges - info['total_weight'] = total_weight - info['num_edges'] = num_edges + info["total_weight"] = total_weight + info["num_edges"] = num_edges # Find the new edge-connectivity after adding the augmenting edges G_aug = G.copy() @@ -348,20 +365,18 @@ def _augment_and_check(G, k, avail=None, weight=None, verbose=False, aug_k = nx.edge_connectivity(G_aug) except nx.NetworkXPointlessConcept: aug_k = 0 - info['aug_k'] = aug_k + info["aug_k"] = aug_k # Do checks if not infeasible and orig_k < k: - assert info['aug_k'] >= k, ( - f'connectivity should increase to k={k} or more') + assert info["aug_k"] >= k, f"connectivity should increase to k={k} or more" - assert info['aug_k'] >= orig_k, ( - 'augmenting should never reduce connectivity') + assert info["aug_k"] >= orig_k, "augmenting should never reduce connectivity" _assert_solution_properties(G, aug_edges, avail_dict) except Exception: - info['failed'] = True + info["failed"] = True print(f"edges = {list(G.edges())}") print(f"nodes = {list(G.nodes())}") print(f"aug_edges = {list(aug_edges)}") @@ -369,15 +384,14 @@ def _augment_and_check(G, k, avail=None, weight=None, verbose=False, raise else: if verbose: - print(f'info = {info}') + print(f"info = {info}") if infeasible: aug_edges = None return aug_edges, info -def _check_augmentations(G, avail=None, max_k=None, weight=None, - verbose=False): +def _check_augmentations(G, avail=None, max_k=None, weight=None, verbose=False): """ Helper to check weighted/unweighted cases with multiple values of k """ # Using all available edges, find the maximum edge-connectivity try: @@ -402,7 +416,7 @@ def _check_augmentations(G, avail=None, max_k=None, weight=None, avail_uniform = {e: 1 for e in complement_edges(G)} if verbose: - print('\n=== CHECK_AUGMENTATION ===') + print("\n=== CHECK_AUGMENTATION ===") print(f"G.number_of_nodes = {G.number_of_nodes()!r}") print(f"G.number_of_edges = {G.number_of_edges()!r}") print(f"max_k = {max_k!r}") @@ -412,53 +426,61 @@ def 
_check_augmentations(G, avail=None, max_k=None, weight=None, # check augmentation for multiple values of k for k in range(1, max_k + 1): if verbose: - print('---------------') - print(f'Checking k = {k}') + print("---------------") + print(f"Checking k = {k}") # Check the unweighted version if verbose: - print('unweighted case') - aug_edges1, info1 = _augment_and_check( - G, k=k, verbose=verbose, orig_k=orig_k) + print("unweighted case") + aug_edges1, info1 = _augment_and_check(G, k=k, verbose=verbose, orig_k=orig_k) # Check that the weighted version with all available edges and uniform # weights gives a similar solution to the unweighted case. if verbose: - print('weighted uniform case') + print("weighted uniform case") aug_edges2, info2 = _augment_and_check( - G, k=k, avail=avail_uniform, verbose=verbose, + G, + k=k, + avail=avail_uniform, + verbose=verbose, orig_k=orig_k, - max_aug_k=G.number_of_nodes() - 1) + max_aug_k=G.number_of_nodes() - 1, + ) # Check the weighted version if avail is not None: if verbose: - print('weighted case') + print("weighted case") aug_edges3, info3 = _augment_and_check( - G, k=k, avail=avail, weight=weight, verbose=verbose, - max_aug_k=max_aug_k, orig_k=orig_k) + G, + k=k, + avail=avail, + weight=weight, + verbose=verbose, + max_aug_k=max_aug_k, + orig_k=orig_k, + ) if aug_edges1 is not None: # Check approximation ratios if k == 1: # when k=1, both solutions should be optimal - assert info2['total_weight'] == info1['total_weight'] + assert info2["total_weight"] == info1["total_weight"] if k == 2: # when k=2, the weighted version is an approximation if orig_k == 0: # the approximation ratio is 3 if G is not connected - assert (info2['total_weight'] <= - info1['total_weight'] * 3) + assert info2["total_weight"] <= info1["total_weight"] * 3 else: # the approximation ratio is 2 if G was connected - assert (info2['total_weight'] <= - info1['total_weight'] * 2) + assert info2["total_weight"] <= info1["total_weight"] * 2 _check_unconstrained_bridge_property(G, info1) def _check_unconstrained_bridge_property(G, info1): # Check Theorem 5 from Eswaran and Tarjan.
(1975) Augmentation problems import math + bridge_ccs = list(nx.connectivity.bridge_components(G)) # condense G into an forest C C = collapse(G, bridge_ccs) @@ -467,6 +489,7 @@ def _check_unconstrained_bridge_property(G, info1): q = len([n for n, d in C.degree() if d == 0]) # isolated if p + q > 1: size_target = int(math.ceil(p / 2.0)) + q - size_aug = info1['num_edges'] - assert size_aug == size_target, ( - 'augmentation size is different from what theory predicts') + size_aug = info1["num_edges"] + assert ( + size_aug == size_target + ), "augmentation size is different from what theory predicts" diff --git a/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py b/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py index a757dd04..0ff2b08e 100644 --- a/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py +++ b/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py @@ -6,15 +6,14 @@ from networkx.algorithms.connectivity import ( bridge_components, EdgeComponentAuxGraph, ) -from networkx.algorithms.connectivity.edge_kcomponents import ( - general_k_edge_subgraphs, -) +from networkx.algorithms.connectivity.edge_kcomponents import general_k_edge_subgraphs # ---------------- # Helper functions # ---------------- + def fset(list_of_sets): """ allows == to be used for list of sets """ return set(map(frozenset, list_of_sets)) @@ -98,30 +97,33 @@ def _check_edge_connectivity(G): _assert_subgraph_edge_connectivity(G, ccs_subgraph, k) if k == 1 or k == 2 and not G.is_directed(): - assert ccs_local == ccs_subgraph, 'Subgraphs and components should be the same when k == 1 or (k == 2 and not G.directed())' + assert ( + ccs_local == ccs_subgraph + ), "Subgraphs and components should be the same when k == 1 or (k == 2 and not G.directed())" if G.is_directed(): # Test special case methods are the same as the aux graph if k == 1: alt_sccs = fset(nx.strongly_connected_components(G)) - assert alt_sccs == ccs_local, 'k=1 failed alt' - assert alt_sccs == ccs_subgraph, 'k=1 failed alt' + assert alt_sccs == ccs_local, "k=1 failed alt" + assert alt_sccs == ccs_subgraph, "k=1 failed alt" else: # Test special case methods are the same as the aux graph if k == 1: alt_ccs = fset(nx.connected_components(G)) - assert alt_ccs == ccs_local, 'k=1 failed alt' - assert alt_ccs == ccs_subgraph, 'k=1 failed alt' + assert alt_ccs == ccs_local, "k=1 failed alt" + assert alt_ccs == ccs_subgraph, "k=1 failed alt" elif k == 2: alt_bridge_ccs = fset(bridge_components(G)) - assert alt_bridge_ccs == ccs_local, 'k=2 failed alt' - assert alt_bridge_ccs == ccs_subgraph, 'k=2 failed alt' + assert alt_bridge_ccs == ccs_local, "k=2 failed alt" + assert alt_bridge_ccs == ccs_subgraph, "k=2 failed alt" # if new methods for k == 3 or k == 4 are implemented add them here # Check the general subgraph method works by itself - alt_subgraph_ccs = fset([set(C.nodes()) for C in - general_k_edge_subgraphs(G, k=k)]) - assert alt_subgraph_ccs == ccs_subgraph, 'alt subgraph method failed' + alt_subgraph_ccs = fset( + [set(C.nodes()) for C in general_k_edge_subgraphs(G, k=k)] + ) + assert alt_subgraph_ccs == ccs_subgraph, "alt subgraph method failed" # Stop once k is larger than all special case methods # and we cannot break down ccs any further. 
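The checks above query a single auxiliary graph at several values of k, which is the intended usage pattern of EdgeComponentAuxGraph: construct once (one round of max-flow computations), then query cheaply. A minimal sketch on an illustrative random graph:

import networkx as nx
from networkx.algorithms.connectivity import EdgeComponentAuxGraph

G = nx.gnp_random_graph(12, 0.3, seed=0)
aux = EdgeComponentAuxGraph.construct(G)  # one-time construction cost
for k in (1, 2, 3):
    print(k, sorted(map(sorted, aux.k_edge_components(k))))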
@@ -133,6 +135,7 @@ def _check_edge_connectivity(G): # Misc tests # ---------------- + def test_zero_k_exception(): G = nx.Graph() # functions that return generators error immediately @@ -186,6 +189,7 @@ def test_general_k_edge_subgraph_quick_return(): # Undirected tests # ---------------- + def test_random_gnp(): # seeds = [1550709854, 1309423156, 4208992358, 2785630813, 1915069929] seeds = [12, 13] @@ -225,8 +229,13 @@ def test_tarjan_bridge(): # Information Processing Letters, 1974 - Elsevier # doi:10.1016/0020-0190(74)90003-9. # define 2-connected components and bridges - ccs = [(1, 2, 4, 3, 1, 4), (5, 6, 7, 5), (8, 9, 10, 8), - (17, 18, 16, 15, 17), (11, 12, 14, 13, 11, 14)] + ccs = [ + (1, 2, 4, 3, 1, 4), + (5, 6, 7, 5), + (8, 9, 10, 8), + (17, 18, 16, 15, 17), + (11, 12, 14, 13, 11, 14), + ] bridges = [(4, 8), (3, 5), (3, 17)] G = nx.Graph(it.chain(*(pairwise(path) for path in ccs + bridges))) _check_edge_connectivity(G) @@ -238,10 +247,9 @@ def test_bridge_cc(): bridges = [(4, 8), (3, 5), (20, 21), (22, 23, 24)] G = nx.Graph(it.chain(*(pairwise(path) for path in cc2 + bridges))) bridge_ccs = fset(bridge_components(G)) - target_ccs = fset([ - {1, 2, 3, 4}, {5}, {8, 9, 10}, {11, 12, 13}, {20}, - {21}, {22}, {23}, {24} - ]) + target_ccs = fset( + [{1, 2, 3, 4}, {5}, {8, 9, 10}, {11, 12, 13}, {20}, {21}, {22}, {23}, {24}] + ) assert bridge_ccs == target_ccs _check_edge_connectivity(G) @@ -249,14 +257,14 @@ def test_bridge_cc(): def test_undirected_aux_graph(): # Graph similar to the one in # http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264 - a, b, c, d, e, f, g, h, i = 'abcdefghi' + a, b, c, d, e, f, g, h, i = "abcdefghi" paths = [ (a, d, b, f, c), (a, e, b), (a, e, b, c, g, b, a), (c, b), (f, g, f), - (h, i) + (h, i), ] G = nx.Graph(it.chain(*[pairwise(path) for path in paths])) aux_graph = EdgeComponentAuxGraph.construct(G) @@ -303,15 +311,15 @@ def test_local_subgraph_difference(): # Each clique is returned separately in k-edge-subgraphs subgraph_ccs = fset(aux_graph.k_edge_subgraphs(3)) - subgraph_target = fset([{101}, {102}, {103}, {104}, - {21, 22, 23, 24}, {11, 12, 13, 14}]) + subgraph_target = fset( + [{101}, {102}, {103}, {104}, {21, 22, 23, 24}, {11, 12, 13, 14}] + ) assert subgraph_ccs == subgraph_target # But in k-edge-ccs they are returned together # because they are locally 3-edge-connected local_ccs = fset(aux_graph.k_edge_components(3)) - local_target = fset([{101}, {102}, {103}, {104}, - {11, 12, 13, 14, 21, 22, 23, 24}]) + local_target = fset([{101}, {102}, {103}, {104}, {11, 12, 13, 14, 21, 22, 23, 24}]) assert local_ccs == local_target @@ -322,20 +330,14 @@ def test_local_subgraph_difference_directed(): ] G = nx.DiGraph(it.chain(*[pairwise(path) for path in dipaths])) - assert ( - fset(nx.k_edge_components(G, k=1)) == - fset(nx.k_edge_subgraphs(G, k=1))) + assert fset(nx.k_edge_components(G, k=1)) == fset(nx.k_edge_subgraphs(G, k=1)) # Unlike undirected graphs, when k=2, for directed graphs there is a case # where the k-edge-ccs are not the same as the k-edge-subgraphs. 
# (in undirected graphs ccs and subgraphs are the same when k=2) - assert ( - fset(nx.k_edge_components(G, k=2)) != - fset(nx.k_edge_subgraphs(G, k=2))) + assert fset(nx.k_edge_components(G, k=2)) != fset(nx.k_edge_subgraphs(G, k=2)) - assert ( - fset(nx.k_edge_components(G, k=3)) == - fset(nx.k_edge_subgraphs(G, k=3))) + assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3)) _check_edge_connectivity(G) @@ -349,17 +351,11 @@ def test_triangles(): G = nx.Graph(it.chain(*[pairwise(path) for path in paths])) # subgraph and ccs are the same in all cases here - assert ( - fset(nx.k_edge_components(G, k=1)) == - fset(nx.k_edge_subgraphs(G, k=1))) + assert fset(nx.k_edge_components(G, k=1)) == fset(nx.k_edge_subgraphs(G, k=1)) - assert ( - fset(nx.k_edge_components(G, k=2)) == - fset(nx.k_edge_subgraphs(G, k=2))) + assert fset(nx.k_edge_components(G, k=2)) == fset(nx.k_edge_subgraphs(G, k=2)) - assert ( - fset(nx.k_edge_components(G, k=3)) == - fset(nx.k_edge_subgraphs(G, k=3))) + assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3)) _check_edge_connectivity(G) @@ -403,31 +399,26 @@ def test_five_clique(): G = nx.disjoint_union(nx.complete_graph(5), nx.complete_graph(5)) paths = [ # add aux-connections - (1, 100, 6), (2, 100, 7), (3, 200, 8), (4, 200, 100), + (1, 100, 6), + (2, 100, 7), + (3, 200, 8), + (4, 200, 100), ] G.add_edges_from(it.chain(*[pairwise(path) for path in paths])) assert min(dict(nx.degree(G)).values()) == 4 # For k=3 they are the same - assert ( - fset(nx.k_edge_components(G, k=3)) == - fset(nx.k_edge_subgraphs(G, k=3))) + assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3)) # For k=4 they are different # the aux nodes are in the same CC as clique 1 but not the same subgraph - assert ( - fset(nx.k_edge_components(G, k=4)) != - fset(nx.k_edge_subgraphs(G, k=4))) + assert fset(nx.k_edge_components(G, k=4)) != fset(nx.k_edge_subgraphs(G, k=4)) # For k=5 they are not the same - assert ( - fset(nx.k_edge_components(G, k=5)) != - fset(nx.k_edge_subgraphs(G, k=5))) + assert fset(nx.k_edge_components(G, k=5)) != fset(nx.k_edge_subgraphs(G, k=5)) # For k=6 they are the same - assert ( - fset(nx.k_edge_components(G, k=6)) == - fset(nx.k_edge_subgraphs(G, k=6))) + assert fset(nx.k_edge_components(G, k=6)) == fset(nx.k_edge_subgraphs(G, k=6)) _check_edge_connectivity(G) @@ -435,17 +426,18 @@ def test_five_clique(): # Directed tests # ---------------- + def test_directed_aux_graph(): # Graph similar to the one in # http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264 - a, b, c, d, e, f, g, h, i = 'abcdefghi' + a, b, c, d, e, f, g, h, i = "abcdefghi" dipaths = [ (a, d, b, f, c), (a, e, b), (a, e, b, c, g, b, a), (c, b), (f, g, f), - (h, i) + (h, i), ] G = nx.DiGraph(it.chain(*[pairwise(path) for path in dipaths])) aux_graph = EdgeComponentAuxGraph.construct(G) diff --git a/networkx/algorithms/connectivity/tests/test_stoer_wagner.py b/networkx/algorithms/connectivity/tests/test_stoer_wagner.py index 3ae74499..68360817 100644 --- a/networkx/algorithms/connectivity/tests/test_stoer_wagner.py +++ b/networkx/algorithms/connectivity/tests/test_stoer_wagner.py @@ -20,40 +20,38 @@ def _check_partition(G, cut_value, partition, weight): assert w == cut_value -def _test_stoer_wagner(G, answer, weight='weight'): - cut_value, partition = nx.stoer_wagner(G, weight,
heap=nx.utils.PairingHeap) assert cut_value == answer _check_partition(G, cut_value, partition, weight) - cut_value, partition = nx.stoer_wagner(G, weight, - heap=nx.utils.BinaryHeap) + cut_value, partition = nx.stoer_wagner(G, weight, heap=nx.utils.BinaryHeap) assert cut_value == answer _check_partition(G, cut_value, partition, weight) def test_graph1(): G = nx.Graph() - G.add_edge('x', 'a', weight=3) - G.add_edge('x', 'b', weight=1) - G.add_edge('a', 'c', weight=3) - G.add_edge('b', 'c', weight=5) - G.add_edge('b', 'd', weight=4) - G.add_edge('d', 'e', weight=2) - G.add_edge('c', 'y', weight=2) - G.add_edge('e', 'y', weight=3) + G.add_edge("x", "a", weight=3) + G.add_edge("x", "b", weight=1) + G.add_edge("a", "c", weight=3) + G.add_edge("b", "c", weight=5) + G.add_edge("b", "d", weight=4) + G.add_edge("d", "e", weight=2) + G.add_edge("c", "y", weight=2) + G.add_edge("e", "y", weight=3) _test_stoer_wagner(G, 4) def test_graph2(): G = nx.Graph() - G.add_edge('x', 'a') - G.add_edge('x', 'b') - G.add_edge('a', 'c') - G.add_edge('b', 'c') - G.add_edge('b', 'd') - G.add_edge('d', 'e') - G.add_edge('c', 'y') - G.add_edge('e', 'y') + G.add_edge("x", "a") + G.add_edge("x", "b") + G.add_edge("a", "c") + G.add_edge("b", "c") + G.add_edge("b", "d") + G.add_edge("d", "e") + G.add_edge("c", "y") + G.add_edge("e", "y") _test_stoer_wagner(G, 2) @@ -82,7 +80,7 @@ def test_weight_name(): G.add_edge(1, 2, weight=1, cost=8) G.add_edge(1, 3, cost=2) G.add_edge(2, 3, cost=4) - _test_stoer_wagner(G, 6, weight='cost') + _test_stoer_wagner(G, 6, weight="cost") def test_exceptions(): diff --git a/networkx/algorithms/core.py b/networkx/algorithms/core.py index 94ce6300..f95ba792 100644 --- a/networkx/algorithms/core.py +++ b/networkx/algorithms/core.py @@ -32,11 +32,19 @@ import networkx as nx from networkx.exception import NetworkXError from networkx.utils import not_implemented_for -__all__ = ['core_number', 'find_cores', 'k_core', 'k_shell', - 'k_crust', 'k_corona', 'k_truss', 'onion_layers'] - - -@not_implemented_for('multigraph') +__all__ = [ + "core_number", + "find_cores", + "k_core", + "k_shell", + "k_crust", + "k_corona", + "k_truss", + "onion_layers", +] + + +@not_implemented_for("multigraph") def core_number(G): """Returns the core number for each vertex. @@ -75,8 +83,10 @@ def core_number(G): https://arxiv.org/abs/cs.DS/0310049 """ if nx.number_of_selfloops(G) > 0: - msg = ('Input graph has self loops which is not permitted; ' - 'Consider using G.remove_edges_from(nx.selfloop_edges(G)).') + msg = ( + "Input graph has self loops which is not permitted; " + "Consider using G.remove_edges_from(nx.selfloop_edges(G))." + ) raise NetworkXError(msg) degrees = dict(G.degree()) # Sort nodes by degree. @@ -180,8 +190,10 @@ def k_core(G, k=None, core_number=None): Vladimir Batagelj and Matjaz Zaversnik, 2003. https://arxiv.org/abs/cs.DS/0310049 """ + def k_filter(v, k, c): return c[v] >= k + return _core_subgraph(G, k_filter, k, core_number) @@ -237,8 +249,10 @@ def k_shell(G, k=None, core_number=None): and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154 http://www.pnas.org/content/104/27/11150.full """ + def k_filter(v, k, c): return c[v] == k + return _core_subgraph(G, k_filter, k, core_number) @@ -347,13 +361,15 @@ def k_corona(G, k, core_number=None): Phys. Rev. 
E 73, 056101 (2006) http://link.aps.org/doi/10.1103/PhysRevE.73.056101 """ + def func(v, k, c): return c[v] == k and k == sum(1 for w in G[v] if c[w] >= k) + return _core_subgraph(G, func, k, core_number) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def k_truss(G, k): """Returns the k-truss of `G`. @@ -412,7 +428,7 @@ def k_truss(G, k): seen.add(u) new_nbrs = [v for v in nbrs_u if v not in seen] for v in new_nbrs: - if (len(nbrs_u & set(H[v])) < (k - 2)): + if len(nbrs_u & set(H[v])) < (k - 2): to_drop.append((u, v)) H.remove_edges_from(to_drop) n_dropped = len(to_drop) @@ -421,8 +437,8 @@ def k_truss(G, k): return H -@not_implemented_for('multigraph') -@not_implemented_for('directed') +@not_implemented_for("multigraph") +@not_implemented_for("directed") def onion_layers(G): """Returns the layer of each vertex in an onion decomposition of the graph. @@ -470,8 +486,10 @@ def onion_layers(G): http://doi.org/10.1103/PhysRevX.9.011023 """ if nx.number_of_selfloops(G) > 0: - msg = ('Input graph contains self loops which is not permitted; ' - 'Consider using G.remove_edges_from(nx.selfloop_edges(G)).') + msg = ( + "Input graph contains self loops which is not permitted; " + "Consider using G.remove_edges_from(nx.selfloop_edges(G))." + ) raise NetworkXError(msg) # Dictionaries to register the k-core/onion decompositions. od_layers = {} diff --git a/networkx/algorithms/covering.py b/networkx/algorithms/covering.py index ed65cf3b..17404427 100644 --- a/networkx/algorithms/covering.py +++ b/networkx/algorithms/covering.py @@ -6,11 +6,11 @@ from functools import partial from itertools import chain -__all__ = ['min_edge_cover', 'is_edge_cover'] +__all__ = ["min_edge_cover", "is_edge_cover"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def min_edge_cover(G, matching_algorithm=None): """Returns a set of edges which constitutes the minimum edge cover of the graph. @@ -59,15 +59,16 @@ def min_edge_cover(G, matching_algorithm=None): if nx.number_of_isolates(G) > 0: # ``min_cover`` does not exist as there is an isolated node raise nx.NetworkXException( - "Graph has a node with no edge incident on it, " - "so no edge cover exists.") + "Graph has a node with no edge incident on it, " "so no edge cover exists." + ) if matching_algorithm is None: - matching_algorithm = partial(nx.max_weight_matching, - maxcardinality=True) + matching_algorithm = partial(nx.max_weight_matching, maxcardinality=True) maximum_matching = matching_algorithm(G) # ``min_cover`` is superset of ``maximum_matching`` try: - min_cover = set(maximum_matching.items()) # bipartite matching case returns dict + min_cover = set( + maximum_matching.items() + ) # bipartite matching case returns dict except AttributeError: min_cover = maximum_matching # iterate for uncovered nodes @@ -85,7 +86,7 @@ def min_edge_cover(G, matching_algorithm=None): return min_cover -@not_implemented_for('directed') +@not_implemented_for("directed") def is_edge_cover(G, cover): """Decides whether a set of edges is a valid edge cover of the graph. 
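A minimal sketch of the two covering helpers reformatted above, using an illustrative path graph (not taken from the patch):

import networkx as nx

G = nx.path_graph(4)  # 0 - 1 - 2 - 3
cover = nx.min_edge_cover(G)
print(nx.is_edge_cover(G, cover))  # True
print(len(cover))  # 2: one edge covering {0, 1} and one covering {2, 3}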
diff --git a/networkx/algorithms/cuts.py b/networkx/algorithms/cuts.py index 9fd3d02f..379c47d1 100644 --- a/networkx/algorithms/cuts.py +++ b/networkx/algorithms/cuts.py @@ -6,13 +6,21 @@ from itertools import chain import networkx as nx -__all__ = ['boundary_expansion', 'conductance', 'cut_size', 'edge_expansion', - 'mixing_expansion', 'node_expansion', 'normalized_cut_size', - 'volume'] +__all__ = [ + "boundary_expansion", + "conductance", + "cut_size", + "edge_expansion", + "mixing_expansion", + "node_expansion", + "normalized_cut_size", + "volume", +] # TODO STILL NEED TO UPDATE ALL THE DOCUMENTATION! + def cut_size(G, S, T=None, weight=None): """Returns the size of the cut between two sets of nodes. diff --git a/networkx/algorithms/cycles.py b/networkx/algorithms/cycles.py index 87eb453a..1701995b 100644 --- a/networkx/algorithms/cycles.py +++ b/networkx/algorithms/cycles.py @@ -10,14 +10,16 @@ import networkx as nx from networkx.utils import not_implemented_for, pairwise __all__ = [ - 'cycle_basis', 'simple_cycles', - 'recursive_simple_cycles', 'find_cycle', - 'minimum_cycle_basis', + "cycle_basis", + "simple_cycles", + "recursive_simple_cycles", + "find_cycle", + "minimum_cycle_basis", ] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def cycle_basis(G, root=None): """ Returns a list of cycles which form a basis for cycles of G. @@ -72,11 +74,11 @@ def cycle_basis(G, root=None): z = stack.pop() # use last-in so cycles easier to find zused = used[z] for nbr in G[z]: - if nbr not in used: # new node + if nbr not in used: # new node pred[nbr] = z stack.append(nbr) used[nbr] = {z} - elif nbr == z: # self loops + elif nbr == z: # self loops cycles.append([z]) elif nbr not in zused: # found a cycle pn = used[nbr] @@ -93,7 +95,7 @@ def cycle_basis(G, root=None): return cycles -@not_implemented_for('undirected') +@not_implemented_for("undirected") def simple_cycles(G): """Find simple cycles (elementary circuits) of a directed graph. @@ -154,6 +156,7 @@ def simple_cycles(G): -------- cycle_basis """ + def _unblock(thisnode, blocked, B): stack = {thisnode} while stack: @@ -169,8 +172,7 @@ def simple_cycles(G): # Also we save the actual graph so we can mutate it. We only take the # edges because we do not want to copy edge and node attributes here. subG = type(G)(G.edges()) - sccs = [scc for scc in nx.strongly_connected_components(subG) - if len(scc) > 1] + sccs = [scc for scc in nx.strongly_connected_components(subG) if len(scc) > 1] # Johnson's algorithm exclude self cycle edges like (v, v) # To be backward compatible, we record those cycles in advance @@ -188,7 +190,7 @@ def simple_cycles(G): # Processing node runs "circuit" routine from recursive version path = [startnode] blocked = set() # vertex: blocked from search? 
- closed = set() # nodes involved in a cycle + closed = set() # nodes involved in a cycle blocked.add(startnode) B = defaultdict(set) # graph portions that yield no elementary circuit stack = [(startnode, list(sccG[startnode]))] # sccG gives comp nbrs @@ -199,7 +201,7 @@ def simple_cycles(G): if nextnode == startnode: yield path[:] closed.update(path) -# print "Found a cycle", path, closed + # print "Found a cycle", path, closed elif nextnode not in blocked: path.append(nextnode) stack.append((nextnode, list(sccG[nextnode]))) @@ -215,15 +217,14 @@ def simple_cycles(G): if thisnode not in B[nbr]: B[nbr].add(thisnode) stack.pop() -# assert path[-1] == thisnode + # assert path[-1] == thisnode path.pop() # done processing this node H = subG.subgraph(scc) # make smaller to avoid work in SCC routine - sccs.extend(scc for scc in nx.strongly_connected_components(H) - if len(scc) > 1) + sccs.extend(scc for scc in nx.strongly_connected_components(H) if len(scc) > 1) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def recursive_simple_cycles(G): """Find simple cycles (elementary circuits) of a directed graph. @@ -301,10 +302,10 @@ def recursive_simple_cycles(G): path.pop() # remove thisnode from path return closed - path = [] # stack of nodes in current path + path = [] # stack of nodes in current path blocked = defaultdict(bool) # vertex: blocked from search? B = defaultdict(list) # graph portions that yield no elementary circuit - result = [] # list to accumulate the circuits found + result = [] # list to accumulate the circuits found # Johnson's algorithm exclude self cycle edges like (v, v) # To be backward compatible, we record those cycles in advance @@ -319,8 +320,7 @@ def recursive_simple_cycles(G): ordering = dict(zip(G, range(len(G)))) for s in ordering: # Build the subgraph induced by s and following nodes in the ordering - subgraph = G.subgraph(node for node in G - if ordering[node] >= ordering[s]) + subgraph = G.subgraph(node for node in G if ordering[node] >= ordering[s]) # Find the strongly connected component in the subgraph # that contains the least node according to the ordering strongcomp = nx.strongly_connected_components(subgraph) @@ -405,15 +405,20 @@ def find_cycle(G, source=None, orientation=None): -------- simple_cycles """ - if not G.is_directed() or orientation in (None, 'original'): + if not G.is_directed() or orientation in (None, "original"): + def tailhead(edge): return edge[:2] - elif orientation == 'reverse': + + elif orientation == "reverse": + def tailhead(edge): return edge[1], edge[0] - elif orientation == 'ignore': + + elif orientation == "ignore": + def tailhead(edge): - if edge[-1] == 'reverse': + if edge[-1] == "reverse": return edge[1], edge[0] return edge[:2] @@ -478,8 +483,8 @@ def find_cycle(G, source=None, orientation=None): explored.update(seen) else: - assert(len(cycle) == 0) - raise nx.exception.NetworkXNoCycle('No cycle found.') + assert len(cycle) == 0 + raise nx.exception.NetworkXNoCycle("No cycle found.") # We now have a list of edges which ends on a cycle. # So we need to remove from the beginning edges that are not relevant. 
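For orientation, a minimal sketch of the two cycle searches touched in this file, on an illustrative digraph containing a single 3-cycle:

import networkx as nx

D = nx.DiGraph([(0, 1), (1, 2), (2, 0), (2, 3)])

# Elementary circuits (Johnson's algorithm): the one 3-cycle, up to rotation.
print(list(nx.simple_cycles(D)))  # e.g. [[0, 1, 2]]

# find_cycle reports the edges of a cycle reachable from `source`.
print(nx.find_cycle(D, source=0))  # [(0, 1), (1, 2), (2, 0)]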
@@ -492,8 +497,8 @@ def find_cycle(G, source=None, orientation=None): return cycle[i:] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def minimum_cycle_basis(G, weight=None): """ Returns a minimum weight cycle basis for G @@ -532,8 +537,10 @@ def minimum_cycle_basis(G, weight=None): simple_cycles, cycle_basis """ # We first split the graph in commected subgraphs - return sum((_min_cycle_basis(G.subgraph(c), weight) for c in - nx.connected_components(G)), []) + return sum( + (_min_cycle_basis(G.subgraph(c), weight) for c in nx.connected_components(G)), + [], + ) def _min_cycle_basis(comp, weight): @@ -541,10 +548,8 @@ def _min_cycle_basis(comp, weight): # We extract the edges not in a spanning tree. We do not really need a # *minimum* spanning tree. That is why we call the next function with # weight=None. Depending on implementation, it may be faster as well - spanning_tree_edges = list(nx.minimum_spanning_edges(comp, weight=None, - data=False)) - edges_excl = [frozenset(e) for e in comp.edges() - if e not in spanning_tree_edges] + spanning_tree_edges = list(nx.minimum_spanning_edges(comp, weight=None, data=False)) + edges_excl = [frozenset(e) for e in comp.edges() if e not in spanning_tree_edges] N = len(edges_excl) # We maintain a set of vectors orthogonal to sofar found cycles @@ -556,8 +561,10 @@ def _min_cycle_basis(comp, weight): # now update set_orth so that k+1,k+2... th elements are # orthogonal to the newly found cycle, as per [p. 336, 1] base = set_orth[k] - set_orth[k + 1:] = [orth ^ base if len(orth & new_cycle) % 2 else orth - for orth in set_orth[k + 1:]] + set_orth[k + 1 :] = [ + orth ^ base if len(orth & new_cycle) % 2 else orth + for orth in set_orth[k + 1 :] + ] return cb @@ -580,23 +587,25 @@ def _min_cycle(G, orth, weight=None): edge_w = data.get(weight, 1) if frozenset((u, v)) in orth: T.add_edges_from( - [(uidx, nnodes + vidx), (nnodes + uidx, vidx)], weight=edge_w) + [(uidx, nnodes + vidx), (nnodes + uidx, vidx)], weight=edge_w + ) else: T.add_edges_from( - [(uidx, vidx), (nnodes + uidx, nnodes + vidx)], weight=edge_w) + [(uidx, vidx), (nnodes + uidx, nnodes + vidx)], weight=edge_w + ) all_shortest_pathlens = dict(nx.shortest_path_length(T, weight=weight)) - cross_paths_w_lens = {n: all_shortest_pathlens[n][nnodes + n] - for n in range(nnodes)} + cross_paths_w_lens = { + n: all_shortest_pathlens[n][nnodes + n] for n in range(nnodes) + } # Now compute shortest paths in T, which translates to cyles in G start = min(cross_paths_w_lens, key=cross_paths_w_lens.get) end = nnodes + start - min_path = nx.shortest_path(T, source=start, target=end, weight='weight') + min_path = nx.shortest_path(T, source=start, target=end, weight="weight") # Now we obtain the actual path, re-map nodes in T to those in G - min_path_nodes = [node if node < nnodes else node - nnodes - for node in min_path] + min_path_nodes = [node if node < nnodes else node - nnodes for node in min_path] # Now remove the edges that occur two times mcycle_pruned = _path_to_cycle(min_path_nodes) diff --git a/networkx/algorithms/d_separation.py b/networkx/algorithms/d_separation.py index 30d76098..d7e71735 100644 --- a/networkx/algorithms/d_separation.py +++ b/networkx/algorithms/d_separation.py @@ -51,8 +51,7 @@ __all__ = ["d_separated"] @not_implemented_for("undirected") -def d_separated(G: nx.DiGraph, x: AbstractSet, y: AbstractSet, - z: AbstractSet) -> bool: +def d_separated(G: nx.DiGraph, x: AbstractSet, y: 
AbstractSet, z: AbstractSet) -> bool: """ Return whether node sets ``x`` and ``y`` are d-separated by ``z``. @@ -94,8 +93,7 @@ def d_separated(G: nx.DiGraph, x: AbstractSet, y: AbstractSet, union_xyz = x.union(y).union(z) if any(n not in G.nodes for n in union_xyz): - raise nx.NodeNotFound( - "one or more specified nodes not found in the graph") + raise nx.NodeNotFound("one or more specified nodes not found in the graph") G_copy = G.copy() diff --git a/networkx/algorithms/dag.py b/networkx/algorithms/dag.py index 59aa35bf..4c3d809b 100644 --- a/networkx/algorithms/dag.py +++ b/networkx/algorithms/dag.py @@ -14,28 +14,29 @@ from itertools import starmap import heapq import networkx as nx -from networkx.algorithms.traversal.breadth_first_search import \ - descendants_at_distance +from networkx.algorithms.traversal.breadth_first_search import descendants_at_distance from networkx.generators.trees import NIL from networkx.utils import arbitrary_element from networkx.utils import consume from networkx.utils import pairwise from networkx.utils import not_implemented_for -__all__ = ['descendants', - 'ancestors', - 'topological_sort', - 'lexicographical_topological_sort', - 'all_topological_sorts', - 'is_directed_acyclic_graph', - 'is_aperiodic', - 'transitive_closure', - 'transitive_closure_dag', - 'transitive_reduction', - 'antichains', - 'dag_longest_path', - 'dag_longest_path_length', - 'dag_to_branching'] +__all__ = [ + "descendants", + "ancestors", + "topological_sort", + "lexicographical_topological_sort", + "all_topological_sorts", + "is_directed_acyclic_graph", + "is_aperiodic", + "transitive_closure", + "transitive_closure_dag", + "transitive_reduction", + "antichains", + "dag_longest_path", + "dag_longest_path_length", + "dag_to_branching", +] chaini = chain.from_iterable @@ -170,8 +171,7 @@ def topological_sort(G): *Introduction to Algorithms - A Creative Approach.* Addison-Wesley. """ if not G.is_directed(): - raise nx.NetworkXError( - "Topological sort not defined on undirected graphs.") + raise nx.NetworkXError("Topological sort not defined on undirected graphs.") indegree_map = {v: d for v, d in G.in_degree() if d > 0} # These nodes have zero indegree and ready to be returned. @@ -193,8 +193,9 @@ def topological_sort(G): yield node if indegree_map: - raise nx.NetworkXUnfeasible("Graph contains a cycle or graph changed " - "during iteration") + raise nx.NetworkXUnfeasible( + "Graph contains a cycle or graph changed " "during iteration" + ) def lexicographical_topological_sort(G, key=None): @@ -252,6 +253,7 @@ def lexicographical_topological_sort(G, key=None): raise nx.NetworkXError(msg) if key is None: + def key(node): return node @@ -286,7 +288,7 @@ def lexicographical_topological_sort(G, key=None): raise nx.NetworkXUnfeasible(msg) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def all_topological_sorts(G): """Returns a generator of _all_ topological sorts of the directed graph G. @@ -333,8 +335,7 @@ def all_topological_sorts(G): Elsevier (North-Holland), Amsterdam """ if not G.is_directed(): - raise nx.NetworkXError( - "Topological sort not defined on undirected graphs.") + raise nx.NetworkXError("Topological sort not defined on undirected graphs.") # the names of count and D are chosen to match the global variables in [1] # number of edges originating in a vertex v @@ -441,8 +442,7 @@ def is_aperiodic(G): A Multidisciplinary Approach, CRC Press. 
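The reformatted d_separated and topological_sort can be exercised together on one small chain DAG, a minimal sketch for readers skimming this hunk:

import networkx as nx

G = nx.DiGraph([(1, 2), (2, 3)])
print(list(nx.topological_sort(G)))        # [1, 2, 3]

# d-separation on the same chain: conditioning on {2} blocks the only 1 -> 3 path.
print(nx.d_separated(G, {1}, {3}, {2}))    # True
print(nx.d_separated(G, {1}, {3}, set()))  # False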
""" if not G.is_directed(): - raise nx.NetworkXError( - "is_aperiodic not defined for undirected graphs") + raise nx.NetworkXError("is_aperiodic not defined for undirected graphs") s = arbitrary_element(G) levels = {s: 0} @@ -466,7 +466,7 @@ def is_aperiodic(G): return g == 1 and nx.is_aperiodic(G.subgraph(set(G) - set(levels))) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def transitive_closure(G, reflexive=False): """ Returns transitive closure of a directed graph @@ -528,7 +528,7 @@ def transitive_closure(G, reflexive=False): return TC -@not_implemented_for('undirected') +@not_implemented_for("undirected") def transitive_closure_dag(G, topo_order=None): """ Returns the transitive closure of a directed acyclic graph. @@ -577,7 +577,7 @@ def transitive_closure_dag(G, topo_order=None): return TC -@not_implemented_for('undirected') +@not_implemented_for("undirected") def transitive_reduction(G): """ Returns transitive reduction of a directed graph @@ -628,7 +628,7 @@ def transitive_reduction(G): return TR -@not_implemented_for('undirected') +@not_implemented_for("undirected") def antichains(G, topo_order=None): """Generates antichains from a directed acyclic graph (DAG). @@ -683,13 +683,12 @@ def antichains(G, topo_order=None): while stack: x = stack.pop() new_antichain = antichain + [x] - new_stack = [ - t for t in stack if not ((t in TC[x]) or (x in TC[t]))] + new_stack = [t for t in stack if not ((t in TC[x]) or (x in TC[t]))] antichains_stacks.append((new_antichain, new_stack)) -@not_implemented_for('undirected') -def dag_longest_path(G, weight='weight', default_weight=1, topo_order=None): +@not_implemented_for("undirected") +def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None): """Returns the longest path in a directed acyclic graph (DAG). If `G` has edges with `weight` attribute the edge data are used as @@ -732,8 +731,10 @@ def dag_longest_path(G, weight='weight', default_weight=1, topo_order=None): dist = {} # stores {v : (length, u)} for v in topo_order: - us = [(dist[u][0] + data.get(weight, default_weight), u) - for u, data in G.pred[v].items()] + us = [ + (dist[u][0] + data.get(weight, default_weight), u) + for u, data in G.pred[v].items() + ] # Use the best predecessor if there is one and its distance is # non-negative, otherwise terminate. @@ -752,8 +753,8 @@ def dag_longest_path(G, weight='weight', default_weight=1, topo_order=None): return path -@not_implemented_for('undirected') -def dag_longest_path_length(G, weight='weight', default_weight=1): +@not_implemented_for("undirected") +def dag_longest_path_length(G, weight="weight", default_weight=1): """Returns the longest path length in a DAG Parameters @@ -807,8 +808,8 @@ def root_to_leaf_paths(G): return chaini(starmap(all_paths, product(roots, leaves))) -@not_implemented_for('multigraph') -@not_implemented_for('undirected') +@not_implemented_for("multigraph") +@not_implemented_for("undirected") def dag_to_branching(G): """Returns a branching representing all (overlapping) paths from root nodes to leaf nodes in the given directed acyclic graph. 
@@ -895,7 +896,7 @@ def dag_to_branching(G): """ if has_cycle(G): - msg = 'dag_to_branching is only defined for acyclic graphs' + msg = "dag_to_branching is only defined for acyclic graphs" raise nx.HasACycle(msg) paths = root_to_leaf_paths(G) B, root = nx.prefix_tree(paths) diff --git a/networkx/algorithms/distance_measures.py b/networkx/algorithms/distance_measures.py index 294296e3..6e97279d 100644 --- a/networkx/algorithms/distance_measures.py +++ b/networkx/algorithms/distance_measures.py @@ -3,9 +3,16 @@ import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['extrema_bounding', 'eccentricity', 'diameter', - 'radius', 'periphery', 'center', 'barycenter', - 'resistance_distance'] +__all__ = [ + "extrema_bounding", + "eccentricity", + "diameter", + "radius", + "periphery", + "center", + "barycenter", + "resistance_distance", +] def extrema_bounding(G, compute="diameter"): @@ -84,15 +91,15 @@ def extrema_bounding(G, compute="diameter"): # get distances from/to current node and derive eccentricity dist = dict(nx.single_source_shortest_path_length(G, current)) if len(dist) != N: - msg = ('Cannot compute metric because graph is not connected.') + msg = "Cannot compute metric because graph is not connected." raise nx.NetworkXError(msg) current_ecc = max(dist.values()) # print status update -# print ("ecc of " + str(current) + " (" + str(ecc_lower[current]) + "/" -# + str(ecc_upper[current]) + ", deg: " + str(dist[current]) + ") is " -# + str(current_ecc)) -# print(ecc_upper) + # print ("ecc of " + str(current) + " (" + str(ecc_lower[current]) + "/" + # + str(ecc_upper[current]) + ", deg: " + str(dist[current]) + ") is " + # + str(current_ecc)) + # print(ecc_upper) # (re)set bound extremes maxuppernode = None @@ -112,73 +119,95 @@ def extrema_bounding(G, compute="diameter"): maxupper = max(ecc_upper[i], maxupper) # update candidate set - if compute == 'diameter': - ruled_out = {i for i in candidates if ecc_upper[i] <= maxlower and - 2 * ecc_lower[i] >= maxupper} - - elif compute == 'radius': - ruled_out = {i for i in candidates if ecc_lower[i] >= minupper and - ecc_upper[i] + 1 <= 2 * minlower} - - elif compute == 'periphery': - ruled_out = {i for i in candidates if ecc_upper[i] < maxlower and - (maxlower == maxupper or ecc_lower[i] > maxupper)} - - elif compute == 'center': - ruled_out = {i for i in candidates if ecc_lower[i] > minupper and - (minlower == minupper or ecc_upper[i] + 1 < 2 * minlower)} - - elif compute == 'eccentricities': + if compute == "diameter": + ruled_out = { + i + for i in candidates + if ecc_upper[i] <= maxlower and 2 * ecc_lower[i] >= maxupper + } + + elif compute == "radius": + ruled_out = { + i + for i in candidates + if ecc_lower[i] >= minupper and ecc_upper[i] + 1 <= 2 * minlower + } + + elif compute == "periphery": + ruled_out = { + i + for i in candidates + if ecc_upper[i] < maxlower + and (maxlower == maxupper or ecc_lower[i] > maxupper) + } + + elif compute == "center": + ruled_out = { + i + for i in candidates + if ecc_lower[i] > minupper + and (minlower == minupper or ecc_upper[i] + 1 < 2 * minlower) + } + + elif compute == "eccentricities": ruled_out = {} ruled_out.update(i for i in candidates if ecc_lower[i] == ecc_upper[i]) candidates -= ruled_out -# for i in ruled_out: -# print("removing %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% -# (i,ecc_upper[i],maxlower,ecc_lower[i],maxupper)) -# print("node %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% -# (4,ecc_upper[4],maxlower,ecc_lower[4],maxupper)) -# print("NODE 4: %g"%(ecc_upper[4] 
<= maxlower)) -# print("NODE 4: %g"%(2 * ecc_lower[4] >= maxupper)) -# print("NODE 4: %g"%(ecc_upper[4] <= maxlower -# and 2 * ecc_lower[4] >= maxupper)) + # for i in ruled_out: + # print("removing %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% + # (i,ecc_upper[i],maxlower,ecc_lower[i],maxupper)) + # print("node %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% + # (4,ecc_upper[4],maxlower,ecc_lower[4],maxupper)) + # print("NODE 4: %g"%(ecc_upper[4] <= maxlower)) + # print("NODE 4: %g"%(2 * ecc_lower[4] >= maxupper)) + # print("NODE 4: %g"%(ecc_upper[4] <= maxlower + # and 2 * ecc_lower[4] >= maxupper)) # updating maxuppernode and minlowernode for selection in next round for i in candidates: - if minlowernode is None \ - or (ecc_lower[i] == ecc_lower[minlowernode] - and degrees[i] > degrees[minlowernode]) \ - or (ecc_lower[i] < ecc_lower[minlowernode]): + if ( + minlowernode is None + or ( + ecc_lower[i] == ecc_lower[minlowernode] + and degrees[i] > degrees[minlowernode] + ) + or (ecc_lower[i] < ecc_lower[minlowernode]) + ): minlowernode = i - if maxuppernode is None \ - or (ecc_upper[i] == ecc_upper[maxuppernode] - and degrees[i] > degrees[maxuppernode]) \ - or (ecc_upper[i] > ecc_upper[maxuppernode]): + if ( + maxuppernode is None + or ( + ecc_upper[i] == ecc_upper[maxuppernode] + and degrees[i] > degrees[maxuppernode] + ) + or (ecc_upper[i] > ecc_upper[maxuppernode]) + ): maxuppernode = i # print status update -# print (" min=" + str(minlower) + "/" + str(minupper) + -# " max=" + str(maxlower) + "/" + str(maxupper) + -# " candidates: " + str(len(candidates))) -# print("cand:",candidates) -# print("ecc_l",ecc_lower) -# print("ecc_u",ecc_upper) -# wait = input("press Enter to continue") + # print (" min=" + str(minlower) + "/" + str(minupper) + + # " max=" + str(maxlower) + "/" + str(maxupper) + + # " candidates: " + str(len(candidates))) + # print("cand:",candidates) + # print("ecc_l",ecc_lower) + # print("ecc_u",ecc_upper) + # wait = input("press Enter to continue") # return the correct value of the requested metric - if compute == 'diameter': + if compute == "diameter": return maxlower - elif compute == 'radius': + elif compute == "radius": return minupper - elif compute == 'periphery': + elif compute == "periphery": p = [v for v in G if ecc_lower[v] == maxlower] return p - elif compute == 'center': + elif compute == "center": c = [v for v in G if ecc_upper[v] == minupper] return c - elif compute == 'eccentricities': + elif compute == "eccentricities": return ecc_lower return None @@ -205,12 +234,12 @@ def eccentricity(G, v=None, sp=None): ecc : dictionary A dictionary of eccentricity values keyed by node. 
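A minimal sketch of the extrema_bounding entry points touched above, using a path graph where the answers are easy to verify by hand:

import networkx as nx

G = nx.path_graph(10)
print(nx.extrema_bounding(G, compute="diameter"))  # 9
print(nx.extrema_bounding(G, compute="radius"))    # 5
print(nx.extrema_bounding(G, compute="center"))    # [4, 5]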
""" -# if v is None: # none, use entire graph -# nodes=G.nodes() -# elif v in G: # is v a single node -# nodes=[v] -# else: # assume v is a container of nodes -# nodes=v + # if v is None: # none, use entire graph + # nodes=G.nodes() + # elif v in G: # is v a single node + # nodes=[v] + # else: # assume v is a container of nodes + # nodes=v order = G.order() e = {} @@ -226,11 +255,12 @@ def eccentricity(G, v=None, sp=None): raise nx.NetworkXError('Format of "sp" is invalid.') from e if L != order: if G.is_directed(): - msg = ('Found infinite path length because the digraph is not' - ' strongly connected') + msg = ( + "Found infinite path length because the digraph is not" + " strongly connected" + ) else: - msg = ('Found infinite path length because the graph is not' - ' connected') + msg = "Found infinite path length because the graph is not" " connected" raise nx.NetworkXError(msg) e[n] = max(length.values()) @@ -412,14 +442,14 @@ def barycenter(G, weight=None, attr=None, sp=None): else: sp = sp.items() if weight is not None: - raise ValueError('Cannot use both sp, weight arguments together') - smallest, barycenter_vertices, n = float('inf'), [], len(G) + raise ValueError("Cannot use both sp, weight arguments together") + smallest, barycenter_vertices, n = float("inf"), [], len(G) for v, dists in sp: if len(dists) < n: raise nx.NetworkXNoPath( f"Input graph {G} is disconnected, so every induced subgraph " "has infinite barycentricity." - ) + ) barycentricity = sum(dists.values()) if attr is not None: G.nodes[v][attr] = barycentricity @@ -439,7 +469,7 @@ def _laplacian_submatrix(node, mat, node_list): n.pop(j) if mat.shape[0] != mat.shape[1]: - raise nx.NetworkXError('Matrix must be square') + raise nx.NetworkXError("Matrix must be square") elif len(node_list) != mat.shape[0]: msg = "Node list length does not match matrix dimentions" raise nx.NetworkXError(msg) @@ -470,7 +500,7 @@ def _count_lu_permutations(perm_array): return perm_cnt -@not_implemented_for('directed') +@not_implemented_for("directed") def resistance_distance(G, nodeA, nodeB, weight=None, invert_weight=True): """Returns the resistance distance between node A and node B on graph G. @@ -520,16 +550,16 @@ def resistance_distance(G, nodeA, nodeB, weight=None, invert_weight=True): import scipy.sparse if not nx.is_connected(G): - msg = ('Graph G must be strongly connected.') + msg = "Graph G must be strongly connected." raise nx.NetworkXError(msg) elif nodeA not in G: - msg = ('Node A is not in graph G.') + msg = "Node A is not in graph G." raise nx.NetworkXError(msg) elif nodeB not in G: - msg = ('Node B is not in graph G.') + msg = "Node B is not in graph G." raise nx.NetworkXError(msg) elif nodeA == nodeB: - msg = ('Node A and Node B cannot be the same.') + msg = "Node A and Node B cannot be the same." raise nx.NetworkXError(msg) G = G.copy() @@ -538,20 +568,18 @@ def resistance_distance(G, nodeA, nodeB, weight=None, invert_weight=True): if invert_weight and weight is not None: if G.is_multigraph(): for (u, v, k, d) in G.edges(keys=True, data=True): - d[weight] = 1/d[weight] + d[weight] = 1 / d[weight] else: for (u, v, d) in G.edges(data=True): - d[weight] = 1/d[weight] + d[weight] = 1 / d[weight] # Replace with collapsing topology or approximated zero? 
# Using determinants to compute the effective resistance is more memory # efficent than directly calculating the psuedo-inverse L = nx.laplacian_matrix(G, node_list, weight=weight) - Lsub_a, node_list_a = _laplacian_submatrix(nodeA, L.copy(), - node_list[:]) - Lsub_ab, node_list_ab = _laplacian_submatrix(nodeB, Lsub_a.copy(), - node_list_a[:]) + Lsub_a, node_list_a = _laplacian_submatrix(nodeA, L.copy(), node_list[:]) + Lsub_ab, node_list_ab = _laplacian_submatrix(nodeB, Lsub_a.copy(), node_list_a[:]) # Factorize Laplacian submatrixes and extract diagonals # Order the diagonals to minimize the likelihood over overflows @@ -559,16 +587,16 @@ def resistance_distance(G, nodeA, nodeB, weight=None, invert_weight=True): lu_a = scipy.sparse.linalg.splu(Lsub_a, options=dict(SymmetricMode=True)) LdiagA = lu_a.U.diagonal() LdiagA_s = np.product(np.sign(LdiagA)) * np.product(lu_a.L.diagonal()) - LdiagA_s *= (-1)**_count_lu_permutations(lu_a.perm_r) - LdiagA_s *= (-1)**_count_lu_permutations(lu_a.perm_c) + LdiagA_s *= (-1) ** _count_lu_permutations(lu_a.perm_r) + LdiagA_s *= (-1) ** _count_lu_permutations(lu_a.perm_c) LdiagA = np.absolute(LdiagA) LdiagA = np.sort(LdiagA) lu_ab = scipy.sparse.linalg.splu(Lsub_ab, options=dict(SymmetricMode=True)) LdiagAB = lu_ab.U.diagonal() LdiagAB_s = np.product(np.sign(LdiagAB)) * np.product(lu_ab.L.diagonal()) - LdiagAB_s *= (-1)**_count_lu_permutations(lu_ab.perm_r) - LdiagAB_s *= (-1)**_count_lu_permutations(lu_ab.perm_c) + LdiagAB_s *= (-1) ** _count_lu_permutations(lu_ab.perm_r) + LdiagAB_s *= (-1) ** _count_lu_permutations(lu_ab.perm_c) LdiagAB = np.absolute(LdiagAB) LdiagAB = np.sort(LdiagAB) diff --git a/networkx/algorithms/distance_regular.py b/networkx/algorithms/distance_regular.py index 43e1e818..48546823 100644 --- a/networkx/algorithms/distance_regular.py +++ b/networkx/algorithms/distance_regular.py @@ -8,8 +8,12 @@ import networkx as nx from networkx.utils import not_implemented_for from .distance_measures import diameter -__all__ = ['is_distance_regular', 'is_strongly_regular', - 'intersection_array', 'global_parameters'] +__all__ = [ + "is_distance_regular", + "is_strongly_regular", + "intersection_array", + "global_parameters", +] def is_distance_regular(G): @@ -103,7 +107,7 @@ def global_parameters(b, c): return ((y, b[0] - x - y, x) for x, y in zip(b + [0], [0] + c)) -@not_implemented_for('directed', 'multigraph') +@not_implemented_for("directed", "multigraph") def intersection_array(G): """Returns the intersection array of a distance-regular graph. 
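The distance-regular predicates reformatted next are easiest to see on a classical example:

import networkx as nx

G = nx.icosahedral_graph()
print(nx.is_distance_regular(G))   # True
print(nx.intersection_array(G))    # ([5, 2, 1], [1, 2, 5])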
@@ -144,7 +148,7 @@ def intersection_array(G): (_, k) = next(degree) for _, knext in degree: if knext != k: - raise nx.NetworkXError('Graph is not distance regular.') + raise nx.NetworkXError("Graph is not distance regular.") k = knext path_length = dict(nx.all_pairs_shortest_path_length(G)) diameter = max([max(path_length[n].values()) for n in path_length]) @@ -155,22 +159,24 @@ def intersection_array(G): try: i = path_length[u][v] except KeyError as e: # graph must be connected - raise nx.NetworkXError('Graph is not distance regular.') from e + raise nx.NetworkXError("Graph is not distance regular.") from e # number of neighbors of v at a distance of i-1 from u c = len([n for n in G[v] if path_length[n][u] == i - 1]) # number of neighbors of v at a distance of i+1 from u b = len([n for n in G[v] if path_length[n][u] == i + 1]) # b,c are independent of u and v if cint.get(i, c) != c or bint.get(i, b) != b: - raise nx.NetworkXError('Graph is not distance regular') + raise nx.NetworkXError("Graph is not distance regular") bint[i] = b cint[i] = c - return ([bint.get(j, 0) for j in range(diameter)], - [cint.get(j + 1, 0) for j in range(diameter)]) + return ( + [bint.get(j, 0) for j in range(diameter)], + [cint.get(j + 1, 0) for j in range(diameter)], + ) # TODO There is a definition for directed strongly regular graphs. -@not_implemented_for('directed', 'multigraph') +@not_implemented_for("directed", "multigraph") def is_strongly_regular(G): """Returns True if and only if the given graph is strongly regular. diff --git a/networkx/algorithms/dominance.py b/networkx/algorithms/dominance.py index c38ecda3..63ed7a8c 100644 --- a/networkx/algorithms/dominance.py +++ b/networkx/algorithms/dominance.py @@ -6,10 +6,10 @@ from functools import reduce import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['immediate_dominators', 'dominance_frontiers'] +__all__ = ["immediate_dominators", "dominance_frontiers"] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def immediate_dominators(G, start): """Returns the immediate dominators of all nodes of a directed graph. @@ -53,7 +53,7 @@ def immediate_dominators(G, start): Software Practice & Experience, 4:110, 2001. 
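A compact sketch of the dominance routines on a small control-flow-style digraph (the same shape used in the module's own docstrings):

import networkx as nx

G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)])
print(sorted(nx.immediate_dominators(G, 1).items()))
# [(1, 1), (2, 1), (3, 1), (4, 3), (5, 1)]
print(sorted((u, sorted(df)) for u, df in nx.dominance_frontiers(G, 1).items()))
# [(1, []), (2, [5]), (3, [5]), (4, [5]), (5, [])]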
""" if start not in G: - raise nx.NetworkXError('start is not in G') + raise nx.NetworkXError("start is not in G") idom = {start: start} diff --git a/networkx/algorithms/dominating.py b/networkx/algorithms/dominating.py index ae77f20d..32fff4d9 100644 --- a/networkx/algorithms/dominating.py +++ b/networkx/algorithms/dominating.py @@ -4,7 +4,7 @@ from itertools import chain import networkx as nx from networkx.utils import arbitrary_element -__all__ = ['dominating_set', 'is_dominating_set'] +__all__ = ["dominating_set", "is_dominating_set"] def dominating_set(G, start_with=None): @@ -47,7 +47,7 @@ def dominating_set(G, start_with=None): if start_with is None: start_with = arbitrary_element(all_nodes) if start_with not in G: - raise nx.NetworkXError(f'node {start_with} is not in G') + raise nx.NetworkXError(f"node {start_with} is not in G") dominating_set = {start_with} dominated_nodes = set(G[start_with]) remaining_nodes = all_nodes - dominated_nodes - dominating_set diff --git a/networkx/algorithms/efficiency_measures.py b/networkx/algorithms/efficiency_measures.py index 603c8776..b50b7515 100644 --- a/networkx/algorithms/efficiency_measures.py +++ b/networkx/algorithms/efficiency_measures.py @@ -4,10 +4,10 @@ import networkx as nx from networkx.exception import NetworkXNoPath from ..utils import not_implemented_for -__all__ = ['efficiency', 'local_efficiency', 'global_efficiency'] +__all__ = ["efficiency", "local_efficiency", "global_efficiency"] -@not_implemented_for('directed') +@not_implemented_for("directed") def efficiency(G, u, v): """Returns the efficiency of a pair of nodes in a graph. @@ -51,7 +51,7 @@ def efficiency(G, u, v): return eff -@not_implemented_for('directed') +@not_implemented_for("directed") def global_efficiency(G): """Returns the average global efficiency of the graph. @@ -105,7 +105,7 @@ def global_efficiency(G): return g_eff -@not_implemented_for('directed') +@not_implemented_for("directed") def local_efficiency(G): """Returns the average local efficiency of the graph. diff --git a/networkx/algorithms/euler.py b/networkx/algorithms/euler.py index c6ccd4e5..6f8283f2 100644 --- a/networkx/algorithms/euler.py +++ b/networkx/algorithms/euler.py @@ -6,9 +6,14 @@ from itertools import combinations import networkx as nx from ..utils import arbitrary_element, not_implemented_for -__all__ = ['is_eulerian', 'eulerian_circuit', 'eulerize', - 'is_semieulerian', 'has_eulerian_path', 'eulerian_path', - ] +__all__ = [ + "is_eulerian", + "eulerian_circuit", + "eulerize", + "is_semieulerian", + "has_eulerian_path", + "eulerian_path", +] def is_eulerian(G): @@ -41,8 +46,9 @@ def is_eulerian(G): if G.is_directed(): # Every node must have equal in degree and out degree and the # graph must be strongly connected - return (all(G.in_degree(n) == G.out_degree(n) for n in G) and - nx.is_strongly_connected(G)) + return all( + G.in_degree(n) == G.out_degree(n) for n in G + ) and nx.is_strongly_connected(G) # An undirected Eulerian graph has no vertices of odd degree and # must be connected. 
return all(d % 2 == 0 for v, d in G.degree()) and nx.is_connected(G) @@ -244,13 +250,14 @@ def has_eulerian_path(G): outs = G.out_degree semibalanced_ins = sum(ins(v) - outs(v) == 1 for v in G) semibalanced_outs = sum(outs(v) - ins(v) == 1 for v in G) - return (semibalanced_ins <= 1 and - semibalanced_outs <= 1 and - sum(G.in_degree(v) != G.out_degree(v) for v in G) <= 2 and - nx.is_weakly_connected(G)) + return ( + semibalanced_ins <= 1 + and semibalanced_outs <= 1 + and sum(G.in_degree(v) != G.out_degree(v) for v in G) <= 2 + and nx.is_weakly_connected(G) + ) else: - return (sum(d % 2 == 1 for v, d in G.degree()) in (0, 2) - and nx.is_connected(G)) + return sum(d % 2 == 1 for v, d in G.degree()) in (0, 2) and nx.is_connected(G) def eulerian_path(G, source=None, keys=False): @@ -292,7 +299,7 @@ def eulerian_path(G, source=None, keys=False): yield from _simplegraph_eulerian_circuit(G, source) -@not_implemented_for('directed') +@not_implemented_for("directed") def eulerize(G): """Transforms a graph into an Eulerian graph @@ -341,10 +348,10 @@ def eulerize(G): return G # get all shortest paths between vertices of odd degree - odd_deg_pairs_paths = [(m, - {n: nx.shortest_path(G, source=m, target=n)} - ) - for m, n in combinations(odd_degree_nodes, 2)] + odd_deg_pairs_paths = [ + (m, {n: nx.shortest_path(G, source=m, target=n)}) + for m, n in combinations(odd_degree_nodes, 2) + ] # use inverse path lengths as edge-weights in a new graph # store the paths in the graph for easy indexing later @@ -352,7 +359,7 @@ def eulerize(G): for n, Ps in odd_deg_pairs_paths: for m, P in Ps.items(): if n != m: - Gp.add_edge(m, n, weight=1/len(P), path=P) + Gp.add_edge(m, n, weight=1 / len(P), path=P) # find the minimum weight matching of edges in the weighted graph best_matching = nx.Graph(list(nx.max_weight_matching(Gp))) diff --git a/networkx/algorithms/flow/boykovkolmogorov.py b/networkx/algorithms/flow/boykovkolmogorov.py index a71c9ee0..579aead6 100644 --- a/networkx/algorithms/flow/boykovkolmogorov.py +++ b/networkx/algorithms/flow/boykovkolmogorov.py @@ -7,11 +7,12 @@ from operator import itemgetter import networkx as nx from networkx.algorithms.flow.utils import build_residual_network -__all__ = ['boykov_kolmogorov'] +__all__ = ["boykov_kolmogorov"] -def boykov_kolmogorov(G, s, t, capacity='capacity', residual=None, - value_only=False, cutoff=None): +def boykov_kolmogorov( + G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None +): r"""Find a maximum single-commodity flow using Boykov-Kolmogorov algorithm. This function returns the residual network resulting after computing @@ -154,7 +155,7 @@ def boykov_kolmogorov(G, s, t, capacity='capacity', residual=None, """ R = boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff) - R.graph['algorithm'] = 'boykov_kolmogorov' + R.graph["algorithm"] = "boykov_kolmogorov" return R @@ -164,7 +165,7 @@ def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff): if t not in G: raise nx.NetworkXError(f"node {str(t)} not in graph") if s == t: - raise nx.NetworkXError('source and sink are the same node') + raise nx.NetworkXError("source and sink are the same node") if residual is None: R = build_residual_network(G, capacity) @@ -176,11 +177,11 @@ def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff): # nx.set_edge_attributes(R, 0, 'flow') for u in R: for e in R[u].values(): - e['flow'] = 0 + e["flow"] = 0 # Use an arbitrary high value as infinite. It is computed # when building the residual network. 
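Before the flow code, a short sketch of the Euler predicates and eulerize (toy graphs, assumed for illustration):

import networkx as nx

print(nx.is_eulerian(nx.complete_graph(5)))       # True -- every degree is even
print(nx.has_eulerian_path(nx.path_graph(3)))     # True -- exactly two odd-degree nodes
H = nx.eulerize(nx.complete_graph(4))             # duplicates edges along matched shortest paths
print(nx.is_eulerian(H))                          # True (H is a MultiGraph)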
- INF = R.graph['inf'] + INF = R.graph["inf"] if cutoff is None: cutoff = INF @@ -208,7 +209,7 @@ def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff): other_tree = source_tree neighbors = R_pred for v, attr in neighbors[u].items(): - if attr['capacity'] - attr['flow'] > 0: + if attr["capacity"] - attr["flow"] > 0: if v not in this_tree: if v in other_tree: return (u, v) if this_tree is source_tree else (v, u) @@ -233,7 +234,7 @@ def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff): and the input of this function. """ attr = R_succ[u][v] - flow = min(INF, attr['capacity'] - attr['flow']) + flow = min(INF, attr["capacity"] - attr["flow"]) path = [u] # Trace a path from u to s in source_tree. w = u @@ -241,7 +242,7 @@ def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff): n = w w = source_tree[n] attr = R_pred[n][w] - flow = min(flow, attr['capacity'] - attr['flow']) + flow = min(flow, attr["capacity"] - attr["flow"]) path.append(w) path.reverse() # Trace a path from v to t in target_tree. @@ -251,16 +252,16 @@ def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff): n = w w = target_tree[n] attr = R_succ[n][w] - flow = min(flow, attr['capacity'] - attr['flow']) + flow = min(flow, attr["capacity"] - attr["flow"]) path.append(w) # Augment flow along the path and check for saturated edges. it = iter(path) u = next(it) these_orphans = [] for v in it: - R_succ[u][v]['flow'] += flow - R_succ[v][u]['flow'] -= flow - if R_succ[u][v]['flow'] == R_succ[u][v]['capacity']: + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow + if R_succ[u][v]["flow"] == R_succ[u][v]["capacity"]: if v in source_tree: source_tree[v] = None these_orphans.append(v) @@ -289,20 +290,20 @@ def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff): else: tree = target_tree neighbors = R_succ - nbrs = ((n, attr, dist[n]) for n, attr in neighbors[u].items() - if n in tree) + nbrs = ((n, attr, dist[n]) for n, attr in neighbors[u].items() if n in tree) for v, attr, d in sorted(nbrs, key=itemgetter(2)): - if attr['capacity'] - attr['flow'] > 0: + if attr["capacity"] - attr["flow"] > 0: if _has_valid_root(v, tree): tree[u] = v dist[u] = dist[v] + 1 timestamp[u] = time break else: - nbrs = ((n, attr, dist[n]) for n, attr in neighbors[u].items() - if n in tree) + nbrs = ( + (n, attr, dist[n]) for n, attr in neighbors[u].items() if n in tree + ) for v, attr, d in sorted(nbrs, key=itemgetter(2)): - if attr['capacity'] - attr['flow'] > 0: + if attr["capacity"] - attr["flow"] > 0: if v not in active: active.append(v) if tree[v] == u: @@ -356,12 +357,12 @@ def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff): adopt() if flow_value * 2 > INF: - raise nx.NetworkXUnbounded('Infinite capacity path, flow unbounded above.') + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") # Add source and target tree in a graph attribute. # A partition that defines a minimum cut can be directly # computed from the search trees as explained in the docstrings. - R.graph['trees'] = (source_tree, target_tree) + R.graph["trees"] = (source_tree, target_tree) # Add the standard flow_value graph attribute. 
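To make the residual-network attributes above concrete, a sketch mirroring the function's docstring example:

import networkx as nx
from networkx.algorithms.flow import boykov_kolmogorov

G = nx.DiGraph()
G.add_edge("x", "a", capacity=3.0)
G.add_edge("x", "b", capacity=1.0)
G.add_edge("a", "c", capacity=3.0)
G.add_edge("b", "c", capacity=5.0)
G.add_edge("b", "d", capacity=4.0)
G.add_edge("d", "e", capacity=2.0)
G.add_edge("c", "y", capacity=2.0)
G.add_edge("e", "y", capacity=3.0)
R = boykov_kolmogorov(G, "x", "y")
print(R.graph["flow_value"])                   # 3.0
source_tree, target_tree = R.graph["trees"]    # the search trees define a minimum cut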
- R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R diff --git a/networkx/algorithms/flow/capacityscaling.py b/networkx/algorithms/flow/capacityscaling.py index dd57d9f6..c2582be0 100644 --- a/networkx/algorithms/flow/capacityscaling.py +++ b/networkx/algorithms/flow/capacityscaling.py @@ -2,7 +2,7 @@ Capacity scaling minimum cost flow algorithm. """ -__all__ = ['capacity_scaling'] +__all__ = ["capacity_scaling"] from itertools import chain from math import log @@ -21,26 +21,27 @@ def _detect_unboundedness(R): G.add_nodes_from(R) # Value simulating infinity. - inf = R.graph['inf'] + inf = R.graph["inf"] # True infinity. - f_inf = float('inf') + f_inf = float("inf") for u in R: for v, e in R[u].items(): # Compute the minimum weight of infinite-capacity (u, v) edges. w = f_inf for k, e in e.items(): - if e['capacity'] == inf: - w = min(w, e['weight']) + if e["capacity"] == inf: + w = min(w, e["weight"]) if w != f_inf: G.add_edge(u, v, weight=w) if nx.negative_edge_cycle(G): raise nx.NetworkXUnbounded( - 'Negative cost cycle of infinite capacity found. ' - 'Min cost flow may be unbounded below.') + "Negative cost cycle of infinite capacity found. " + "Min cost flow may be unbounded below." + ) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def _build_residual_network(G, demand, capacity, weight): """Build a residual network and initialize a zero flow. """ @@ -48,33 +49,49 @@ def _build_residual_network(G, demand, capacity, weight): raise nx.NetworkXUnfeasible("Sum of the demands should be 0.") R = nx.MultiDiGraph() - R.add_nodes_from((u, {'excess': -G.nodes[u].get(demand, 0), - 'potential': 0}) for u in G) + R.add_nodes_from( + (u, {"excess": -G.nodes[u].get(demand, 0), "potential": 0}) for u in G + ) - inf = float('inf') + inf = float("inf") # Detect selfloops with infinite capacities and negative weights. for u, v, e in nx.selfloop_edges(G, data=True): if e.get(weight, 0) < 0 and e.get(capacity, inf) == inf: raise nx.NetworkXUnbounded( - 'Negative cost cycle of infinite capacity found. ' - 'Min cost flow may be unbounded below.') + "Negative cost cycle of infinite capacity found. " + "Min cost flow may be unbounded below." + ) # Extract edges with positive capacities. Self loops excluded. if G.is_multigraph(): - edge_list = [(u, v, k, e) - for u, v, k, e in G.edges(data=True, keys=True) - if u != v and e.get(capacity, inf) > 0] + edge_list = [ + (u, v, k, e) + for u, v, k, e in G.edges(data=True, keys=True) + if u != v and e.get(capacity, inf) > 0 + ] else: - edge_list = [(u, v, 0, e) for u, v, e in G.edges(data=True) - if u != v and e.get(capacity, inf) > 0] + edge_list = [ + (u, v, 0, e) + for u, v, e in G.edges(data=True) + if u != v and e.get(capacity, inf) > 0 + ] # Simulate infinity with the larger of the sum of absolute node imbalances # the sum of finite edge capacities or any positive value if both sums are # zero. This allows the infinite-capacity edges to be distinguished for # unboundedness detection and directly participate in residual capacity # calculation. 
- inf = max(sum(abs(R.nodes[u]['excess']) for u in R), - 2 * sum(e[capacity] for u, v, k, e in edge_list - if capacity in e and e[capacity] != inf)) or 1 + inf = ( + max( + sum(abs(R.nodes[u]["excess"]) for u in R), + 2 + * sum( + e[capacity] + for u, v, k, e in edge_list + if capacity in e and e[capacity] != inf + ), + ) + or 1 + ) for u, v, k, e in edge_list: r = min(e.get(capacity, inf), inf) w = e.get(weight, 0) @@ -85,7 +102,7 @@ def _build_residual_network(G, demand, capacity, weight): R.add_edge(v, u, key=(k, False), capacity=0, weight=-w, flow=0) # Record the value simulating infinity. - R.graph['inf'] = inf + R.graph["inf"] = inf _detect_unboundedness(R) @@ -95,7 +112,7 @@ def _build_residual_network(G, demand, capacity, weight): def _build_flow_dict(G, R, capacity, weight): """Build a flow dictionary from a residual network. """ - inf = float('inf') + inf = float("inf") flow_dict = {} if G.is_multigraph(): for u in G: @@ -103,28 +120,43 @@ def _build_flow_dict(G, R, capacity, weight): for v, es in G[u].items(): flow_dict[u][v] = { # Always saturate negative selfloops. - k: (0 if (u != v or e.get(capacity, inf) <= 0 or - e.get(weight, 0) >= 0) else e[capacity]) - for k, e in es.items()} + k: ( + 0 + if ( + u != v or e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0 + ) + else e[capacity] + ) + for k, e in es.items() + } for v, es in R[u].items(): if v in flow_dict[u]: - flow_dict[u][v].update((k[0], e['flow']) - for k, e in es.items() - if e['flow'] > 0) + flow_dict[u][v].update( + (k[0], e["flow"]) for k, e in es.items() if e["flow"] > 0 + ) else: for u in G: flow_dict[u] = { # Always saturate negative selfloops. - v: (0 if (u != v or e.get(capacity, inf) <= 0 or - e.get(weight, 0) >= 0) else e[capacity]) - for v, e in G[u].items()} - flow_dict[u].update((v, e['flow']) for v, es in R[u].items() - for e in es.values() if e['flow'] > 0) + v: ( + 0 + if (u != v or e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0) + else e[capacity] + ) + for v, e in G[u].items() + } + flow_dict[u].update( + (v, e["flow"]) + for v, es in R[u].items() + for e in es.values() + if e["flow"] > 0 + ) return flow_dict -def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', - heap=BinaryHeap): +def capacity_scaling( + G, demand="demand", capacity="capacity", weight="weight", heap=BinaryHeap +): r"""Find a minimum cost flow satisfying all demands in digraph G. This is a capacity scaling successive shortest augmenting path algorithm. @@ -255,16 +287,17 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', """ R = _build_residual_network(G, demand, capacity, weight) - inf = float('inf') + inf = float("inf") # Account cost of negative selfloops. flow_cost = sum( - 0 if e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0 + 0 + if e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0 else e[capacity] * e[weight] - for u, v, e in nx.selfloop_edges(G, data=True)) + for u, v, e in nx.selfloop_edges(G, data=True) + ) # Determine the maxmimum edge capacity. - wmax = max(chain([-inf], - (e['capacity'] for u, v, e in R.edges(data=True)))) + wmax = max(chain([-inf], (e["capacity"] for u, v, e in R.edges(data=True)))) if wmax == -inf: # Residual network has no edges. return flow_cost, _build_flow_dict(G, R, capacity, weight) @@ -277,17 +310,17 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', # Saturate Δ-residual edges with negative reduced costs to achieve # Δ-optimality. 
for u in R: - p_u = R_nodes[u]['potential'] + p_u = R_nodes[u]["potential"] for v, es in R_succ[u].items(): for k, e in es.items(): - flow = e['capacity'] - e['flow'] - if e['weight'] - p_u + R_nodes[v]['potential'] < 0: - flow = e['capacity'] - e['flow'] + flow = e["capacity"] - e["flow"] + if e["weight"] - p_u + R_nodes[v]["potential"] < 0: + flow = e["capacity"] - e["flow"] if flow >= delta: - e['flow'] += flow - R_succ[v][u][(k[0], not k[1])]['flow'] -= flow - R_nodes[u]['excess'] -= flow - R_nodes[v]['excess'] += flow + e["flow"] += flow + R_succ[v][u][(k[0], not k[1])]["flow"] -= flow + R_nodes[u]["excess"] -= flow + R_nodes[v]["excess"] += flow # Determine the Δ-active nodes. S = set() T = set() @@ -296,7 +329,7 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', T_add = T.add T_remove = T.remove for u in R: - excess = R_nodes[u]['excess'] + excess = R_nodes[u]["excess"] if excess >= delta: S_add(u) elif excess <= -delta: @@ -321,15 +354,15 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', # Path found. t = u break - p_u = R_nodes[u]['potential'] + p_u = R_nodes[u]["potential"] for v, es in R_succ[u].items(): if v in d: continue wmin = inf # Find the minimum-weighted (u, v) Δ-residual edge. for k, e in es.items(): - if e['capacity'] - e['flow'] >= delta: - w = e['weight'] + if e["capacity"] - e["flow"] >= delta: + w = e["weight"] if w < wmin: wmin = w kmin = k @@ -337,7 +370,7 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', if wmin == inf: continue # Update the distance label of v. - d_v = d_u + wmin - p_u + R_nodes[v]['potential'] + d_v = d_u + wmin - p_u + R_nodes[v]["potential"] if h_insert(v, d_v): pred[v] = (u, kmin, emin) if t is not None: @@ -345,33 +378,33 @@ def capacity_scaling(G, demand='demand', capacity='capacity', weight='weight', while u != s: v = u u, k, e = pred[v] - e['flow'] += delta - R_succ[v][u][(k[0], not k[1])]['flow'] -= delta + e["flow"] += delta + R_succ[v][u][(k[0], not k[1])]["flow"] -= delta # Account node excess and deficit. - R_nodes[s]['excess'] -= delta - R_nodes[t]['excess'] += delta - if R_nodes[s]['excess'] < delta: + R_nodes[s]["excess"] -= delta + R_nodes[t]["excess"] += delta + if R_nodes[s]["excess"] < delta: S_remove(s) - if R_nodes[t]['excess'] > -delta: + if R_nodes[t]["excess"] > -delta: T_remove(t) # Update node potentials. d_t = d[t] for u, d_u in d.items(): - R_nodes[u]['potential'] -= d_u - d_t + R_nodes[u]["potential"] -= d_u - d_t else: # Path not found. S_remove(s) delta //= 2 - if any(R.nodes[u]['excess'] != 0 for u in R): - raise nx.NetworkXUnfeasible('No flow satisfying all demands.') + if any(R.nodes[u]["excess"] != 0 for u in R): + raise nx.NetworkXUnfeasible("No flow satisfying all demands.") # Calculate the flow cost. 
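The scaling loop above ultimately serves a simple interface; a feasibility sketch with one supply and one demand node:

import networkx as nx

G = nx.DiGraph()
G.add_node("a", demand=-5)               # supplies 5 units
G.add_node("d", demand=5)                # absorbs 5 units
G.add_edge("a", "b", weight=3, capacity=4)
G.add_edge("a", "c", weight=6, capacity=10)
G.add_edge("b", "d", weight=1, capacity=9)
G.add_edge("c", "d", weight=2, capacity=5)
flowCost, flowDict = nx.capacity_scaling(G)
print(flowCost)                          # 24
print(flowDict["a"]["b"])                # 4 -- the cheap arc saturates first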
for u in R: for v, es in R_succ[u].items(): for e in es.values(): - flow = e['flow'] + flow = e["flow"] if flow > 0: - flow_cost += flow * e['weight'] + flow_cost += flow * e["weight"] return flow_cost, _build_flow_dict(G, R, capacity, weight) diff --git a/networkx/algorithms/flow/dinitz_alg.py b/networkx/algorithms/flow/dinitz_alg.py index 7387c815..e0869d97 100644 --- a/networkx/algorithms/flow/dinitz_alg.py +++ b/networkx/algorithms/flow/dinitz_alg.py @@ -7,10 +7,10 @@ import networkx as nx from networkx.algorithms.flow.utils import build_residual_network from networkx.utils import pairwise -__all__ = ['dinitz'] +__all__ = ["dinitz"] -def dinitz(G, s, t, capacity='capacity', residual=None, value_only=False, cutoff=None): +def dinitz(G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None): """Find a maximum single-commodity flow using Dinitz' algorithm. This function returns the residual network resulting after computing @@ -134,7 +134,7 @@ def dinitz(G, s, t, capacity='capacity', residual=None, value_only=False, cutoff """ R = dinitz_impl(G, s, t, capacity, residual, cutoff) - R.graph['algorithm'] = 'dinitz' + R.graph["algorithm"] = "dinitz" return R @@ -144,7 +144,7 @@ def dinitz_impl(G, s, t, capacity, residual, cutoff): if t not in G: raise nx.NetworkXError(f"node {str(t)} not in graph") if s == t: - raise nx.NetworkXError('source and sink are the same node') + raise nx.NetworkXError("source and sink are the same node") if residual is None: R = build_residual_network(G, capacity) @@ -154,11 +154,11 @@ def dinitz_impl(G, s, t, capacity, residual, cutoff): # Initialize/reset the residual network. for u in R: for e in R[u].values(): - e['flow'] = 0 + e["flow"] = 0 # Use an arbitrary high value as infinite. It is computed # when building the residual network. - INF = R.graph['inf'] + INF = R.graph["inf"] if cutoff is None: cutoff = INF @@ -175,7 +175,7 @@ def dinitz_impl(G, s, t, capacity, residual, cutoff): u = queue.popleft() for v in R_succ[u]: attr = R_succ[u][v] - if v not in parents and attr['capacity'] - attr['flow'] > 0: + if v not in parents and attr["capacity"] - attr["flow"] > 0: parents[v] = u queue.append(v) return parents @@ -188,14 +188,14 @@ def dinitz_impl(G, s, t, capacity, residual, cutoff): while u != s: path.append(u) v = parents[u] - flow = min(flow, R_pred[u][v]['capacity'] - R_pred[u][v]['flow']) + flow = min(flow, R_pred[u][v]["capacity"] - R_pred[u][v]["flow"]) u = v path.append(s) # Augment the flow along the path found if flow > 0: for u, v in pairwise(path): - R_pred[u][v]['flow'] += flow - R_pred[v][u]['flow'] -= flow + R_pred[u][v]["flow"] += flow + R_pred[v][u]["flow"] -= flow return flow flow_value = 0 @@ -205,9 +205,8 @@ def dinitz_impl(G, s, t, capacity, residual, cutoff): break this_flow = depth_first_search(parents) if this_flow * 2 > INF: - raise nx.NetworkXUnbounded( - 'Infinite capacity path, flow unbounded above.') + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") flow_value += this_flow - R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R diff --git a/networkx/algorithms/flow/edmondskarp.py b/networkx/algorithms/flow/edmondskarp.py index 02bfae87..b4da2bd9 100644 --- a/networkx/algorithms/flow/edmondskarp.py +++ b/networkx/algorithms/flow/edmondskarp.py @@ -5,7 +5,7 @@ Edmonds-Karp algorithm for maximum flow problems. 
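Dinitz' algorithm, just reformatted, follows the same residual-network contract; a minimal sketch on a toy network:

import networkx as nx
from networkx.algorithms.flow import dinitz

G = nx.DiGraph()
G.add_edge("s", "a", capacity=2)
G.add_edge("s", "b", capacity=2)
G.add_edge("a", "t", capacity=3)
G.add_edge("b", "t", capacity=1)
R = dinitz(G, "s", "t")
print(R.graph["flow_value"])             # 3
# Passing residual=R on a later call reuses the network instead of rebuilding it.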
import networkx as nx from networkx.algorithms.flow.utils import build_residual_network -__all__ = ['edmonds_karp'] +__all__ = ["edmonds_karp"] def edmonds_karp_core(R, s, t, cutoff): @@ -15,7 +15,7 @@ def edmonds_karp_core(R, s, t, cutoff): R_pred = R.pred R_succ = R.succ - inf = R.graph['inf'] + inf = R.graph["inf"] def augment(path): """Augment flow along a path from s to t. @@ -26,17 +26,16 @@ def edmonds_karp_core(R, s, t, cutoff): u = next(it) for v in it: attr = R_succ[u][v] - flow = min(flow, attr['capacity'] - attr['flow']) + flow = min(flow, attr["capacity"] - attr["flow"]) u = v if flow * 2 > inf: - raise nx.NetworkXUnbounded( - 'Infinite capacity path, flow unbounded above.') + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") # Augment flow along the path. it = iter(path) u = next(it) for v in it: - R_succ[u][v]['flow'] += flow - R_succ[v][u]['flow'] -= flow + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow u = v return flow @@ -52,7 +51,7 @@ def edmonds_karp_core(R, s, t, cutoff): if len(q_s) <= len(q_t): for u in q_s: for v, attr in R_succ[u].items(): - if v not in pred and attr['flow'] < attr['capacity']: + if v not in pred and attr["flow"] < attr["capacity"]: pred[v] = u if v in succ: return v, pred, succ @@ -63,7 +62,7 @@ def edmonds_karp_core(R, s, t, cutoff): else: for u in q_t: for v, attr in R_pred[u].items(): - if v not in succ and attr['flow'] < attr['capacity']: + if v not in succ and attr["flow"] < attr["capacity"]: succ[v] = u if v in pred: return v, pred, succ @@ -103,7 +102,7 @@ def edmonds_karp_impl(G, s, t, capacity, residual, cutoff): if t not in G: raise nx.NetworkXError(f"node {str(t)} not in graph") if s == t: - raise nx.NetworkXError('source and sink are the same node') + raise nx.NetworkXError("source and sink are the same node") if residual is None: R = build_residual_network(G, capacity) @@ -113,17 +112,18 @@ def edmonds_karp_impl(G, s, t, capacity, residual, cutoff): # Initialize/reset the residual network. for u in R: for e in R[u].values(): - e['flow'] = 0 + e["flow"] = 0 if cutoff is None: - cutoff = float('inf') - R.graph['flow_value'] = edmonds_karp_core(R, s, t, cutoff) + cutoff = float("inf") + R.graph["flow_value"] = edmonds_karp_core(R, s, t, cutoff) return R -def edmonds_karp(G, s, t, capacity='capacity', residual=None, value_only=False, - cutoff=None): +def edmonds_karp( + G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None +): """Find a maximum single-commodity flow using the Edmonds-Karp algorithm. This function returns the residual network resulting after computing @@ -240,5 +240,5 @@ def edmonds_karp(G, s, t, capacity='capacity', residual=None, value_only=False, """ R = edmonds_karp_impl(G, s, t, capacity, residual, cutoff) - R.graph['algorithm'] = 'edmonds_karp' + R.graph["algorithm"] = "edmonds_karp" return R diff --git a/networkx/algorithms/flow/gomory_hu.py b/networkx/algorithms/flow/gomory_hu.py index bf60376f..d199e277 100644 --- a/networkx/algorithms/flow/gomory_hu.py +++ b/networkx/algorithms/flow/gomory_hu.py @@ -9,11 +9,11 @@ from .utils import build_residual_network default_flow_func = edmonds_karp -__all__ = ['gomory_hu_tree'] +__all__ = ["gomory_hu_tree"] -@not_implemented_for('directed') -def gomory_hu_tree(G, capacity='capacity', flow_func=None): +@not_implemented_for("directed") +def gomory_hu_tree(G, capacity="capacity", flow_func=None): r"""Returns the Gomory-Hu tree of an undirected graph G. 
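A small sketch of how the tree is typically consumed (toy capacities, assumed for illustration): every s-t minimum cut value of G can be read off T as the lightest edge on the unique s-t tree path.

import networkx as nx

G = nx.Graph()
G.add_edge(0, 1, capacity=10)
G.add_edge(1, 2, capacity=4)
G.add_edge(2, 3, capacity=7)
T = nx.gomory_hu_tree(G)
path = nx.shortest_path(T, 0, 3)
print(min(T[u][v]["weight"] for u, v in zip(path, path[1:])))   # 4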
A Gomory-Hu tree of an undirected graph with capacities is a @@ -133,7 +133,7 @@ def gomory_hu_tree(G, capacity='capacity', flow_func=None): flow_func = default_flow_func if len(G) == 0: # empty graph - msg = 'Empty Graph does not have a Gomory-Hu tree representation' + msg = "Empty Graph does not have a Gomory-Hu tree representation" raise nx.NetworkXError(msg) # Start the tree as a star graph with an arbitrary node at the center @@ -152,10 +152,9 @@ def gomory_hu_tree(G, capacity='capacity', flow_func=None): # Find neighbor in the tree target = tree[source] # compute minimum cut - cut_value, partition = nx.minimum_cut(G, source, target, - capacity=capacity, - flow_func=flow_func, - residual=R) + cut_value, partition = nx.minimum_cut( + G, source, target, capacity=capacity, flow_func=flow_func, residual=R + ) labels[(source, target)] = cut_value # Update the tree # Source will always be in partition[0] and target in partition[1] diff --git a/networkx/algorithms/flow/maxflow.py b/networkx/algorithms/flow/maxflow.py index c341146f..263bb3cb 100644 --- a/networkx/algorithms/flow/maxflow.py +++ b/networkx/algorithms/flow/maxflow.py @@ -9,6 +9,7 @@ from .edmondskarp import edmonds_karp from .preflowpush import preflow_push from .shortestaugmentingpath import shortest_augmenting_path from .utils import build_flow_dict + # Define the default flow function for computing maximum flow. default_flow_func = preflow_push # Functions that don't support cutoff for minimum cut computations. @@ -20,14 +21,10 @@ flow_funcs = [ shortest_augmenting_path, ] -__all__ = ['maximum_flow', - 'maximum_flow_value', - 'minimum_cut', - 'minimum_cut_value'] +__all__ = ["maximum_flow", "maximum_flow_value", "minimum_cut", "minimum_cut_value"] -def maximum_flow(flowG, _s, _t, - capacity='capacity', flow_func=None, **kwargs): +def maximum_flow(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Find a maximum single-commodity flow. Parameters @@ -158,8 +155,10 @@ def maximum_flow(flowG, _s, _t, """ if flow_func is None: if kwargs: - raise nx.NetworkXError("You have to explicitly set a flow_func if" - " you need to pass parameters via kwargs.") + raise nx.NetworkXError( + "You have to explicitly set a flow_func if" + " you need to pass parameters via kwargs." + ) flow_func = default_flow_func if not callable(flow_func): @@ -168,11 +167,10 @@ def maximum_flow(flowG, _s, _t, R = flow_func(flowG, _s, _t, capacity=capacity, value_only=False, **kwargs) flow_dict = build_flow_dict(flowG, R) - return (R.graph['flow_value'], flow_dict) + return (R.graph["flow_value"], flow_dict) -def maximum_flow_value(flowG, _s, _t, - capacity='capacity', flow_func=None, **kwargs): +def maximum_flow_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Find the value of maximum single-commodity flow. Parameters @@ -297,8 +295,10 @@ def maximum_flow_value(flowG, _s, _t, """ if flow_func is None: if kwargs: - raise nx.NetworkXError("You have to explicitly set a flow_func if" - " you need to pass parameters via kwargs.") + raise nx.NetworkXError( + "You have to explicitly set a flow_func if" + " you need to pass parameters via kwargs." 
+ ) flow_func = default_flow_func if not callable(flow_func): @@ -306,11 +306,10 @@ def maximum_flow_value(flowG, _s, _t, R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs) - return R.graph['flow_value'] + return R.graph["flow_value"] -def minimum_cut(flowG, _s, _t, - capacity='capacity', flow_func=None, **kwargs): +def minimum_cut(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Compute the value and the node partition of a minimum (s, t)-cut. Use the max-flow min-cut theorem, i.e., the capacity of a minimum @@ -446,20 +445,21 @@ def minimum_cut(flowG, _s, _t, """ if flow_func is None: if kwargs: - raise nx.NetworkXError("You have to explicitly set a flow_func if" - " you need to pass parameters via kwargs.") + raise nx.NetworkXError( + "You have to explicitly set a flow_func if" + " you need to pass parameters via kwargs." + ) flow_func = default_flow_func if not callable(flow_func): raise nx.NetworkXError("flow_func has to be callable.") - if kwargs.get('cutoff') is not None and flow_func in flow_funcs: + if kwargs.get("cutoff") is not None and flow_func in flow_funcs: raise nx.NetworkXError("cutoff should not be specified.") R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs) # Remove saturated edges from the residual network - cutset = [(u, v, d) for u, v, d in R.edges(data=True) - if d['flow'] == d['capacity']] + cutset = [(u, v, d) for u, v, d in R.edges(data=True) if d["flow"] == d["capacity"]] R.remove_edges_from(cutset) # Then, reachable and non reachable nodes from source in the @@ -471,11 +471,10 @@ def minimum_cut(flowG, _s, _t, # sure that it is reusable. if cutset is not None: R.add_edges_from(cutset) - return (R.graph['flow_value'], partition) + return (R.graph["flow_value"], partition) -def minimum_cut_value(flowG, _s, _t, - capacity='capacity', flow_func=None, **kwargs): +def minimum_cut_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): """Compute the value of a minimum (s, t)-cut. Use the max-flow min-cut theorem, i.e., the capacity of a minimum @@ -597,16 +596,18 @@ def minimum_cut_value(flowG, _s, _t, """ if flow_func is None: if kwargs: - raise nx.NetworkXError("You have to explicitly set a flow_func if" - " you need to pass parameters via kwargs.") + raise nx.NetworkXError( + "You have to explicitly set a flow_func if" + " you need to pass parameters via kwargs." + ) flow_func = default_flow_func if not callable(flow_func): raise nx.NetworkXError("flow_func has to be callable.") - if kwargs.get('cutoff') is not None and flow_func in flow_funcs: + if kwargs.get("cutoff") is not None and flow_func in flow_funcs: raise nx.NetworkXError("cutoff should not be specified.") R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs) - return R.graph['flow_value'] + return R.graph["flow_value"] diff --git a/networkx/algorithms/flow/mincost.py b/networkx/algorithms/flow/mincost.py index 1f9e415e..1260d9b7 100644 --- a/networkx/algorithms/flow/mincost.py +++ b/networkx/algorithms/flow/mincost.py @@ -2,16 +2,12 @@ Minimum cost flow algorithms on directed connected graphs. 
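The four maxflow.py wrappers reformatted above share one calling pattern; a minimal sketch on a two-edge path:

import networkx as nx

G = nx.DiGraph()
G.add_edge("s", "a", capacity=3.0)
G.add_edge("a", "t", capacity=2.0)
flow_value, flow_dict = nx.maximum_flow(G, "s", "t")
print(flow_value)                        # 2.0
cut_value, (S, T) = nx.minimum_cut(G, "s", "t")
print(cut_value, sorted(S), sorted(T))   # 2.0 ['a', 's'] ['t']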
""" -__all__ = ['min_cost_flow_cost', - 'min_cost_flow', - 'cost_of_flow', - 'max_flow_min_cost'] +__all__ = ["min_cost_flow_cost", "min_cost_flow", "cost_of_flow", "max_flow_min_cost"] import networkx as nx -def min_cost_flow_cost(G, demand='demand', capacity='capacity', - weight='weight'): +def min_cost_flow_cost(G, demand="demand", capacity="capacity", weight="weight"): r"""Find the cost of a minimum cost flow satisfying all demands in digraph G. G is a digraph with edge costs and capacities and in which nodes @@ -98,12 +94,10 @@ def min_cost_flow_cost(G, demand='demand', capacity='capacity', >>> flowCost 24 """ - return nx.network_simplex(G, demand=demand, capacity=capacity, - weight=weight)[0] + return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[0] -def min_cost_flow(G, demand='demand', capacity='capacity', - weight='weight'): +def min_cost_flow(G, demand="demand", capacity="capacity", weight="weight"): r"""Returns a minimum cost flow satisfying all demands in digraph G. G is a digraph with edge costs and capacities and in which nodes @@ -189,11 +183,10 @@ def min_cost_flow(G, demand='demand', capacity='capacity', >>> G.add_edge('c', 'd', weight = 2, capacity = 5) >>> flowDict = nx.min_cost_flow(G) """ - return nx.network_simplex(G, demand=demand, capacity=capacity, - weight=weight)[1] + return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[1] -def cost_of_flow(G, flowDict, weight='weight'): +def cost_of_flow(G, flowDict, weight="weight"): """Compute the cost of the flow given by flowDict on graph G. Note that this function does not check for the validity of the @@ -234,11 +227,10 @@ def cost_of_flow(G, flowDict, weight='weight'): multiplying the relevant edge attributes by a convenient constant factor (eg 100). """ - return sum((flowDict[u][v] * d.get(weight, 0) - for u, v, d in G.edges(data=True))) + return sum((flowDict[u][v] * d.get(weight, 0) for u, v, d in G.edges(data=True))) -def max_flow_min_cost(G, s, t, capacity='capacity', weight='weight'): +def max_flow_min_cost(G, s, t, capacity="capacity", weight="weight"): """Returns a maximum (s, t)-flow of minimum cost. G is a digraph with edge costs and capacities. There is a source diff --git a/networkx/algorithms/flow/networksimplex.py b/networkx/algorithms/flow/networksimplex.py index 4242690b..d8285063 100644 --- a/networkx/algorithms/flow/networksimplex.py +++ b/networkx/algorithms/flow/networksimplex.py @@ -2,7 +2,7 @@ Minimum cost flow algorithms on directed connected graphs. """ -__all__ = ['network_simplex'] +__all__ = ["network_simplex"] from itertools import chain, islice, repeat from math import ceil, sqrt @@ -10,8 +10,8 @@ import networkx as nx from networkx.utils import not_implemented_for -@not_implemented_for('undirected') -def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): +@not_implemented_for("undirected") +def network_simplex(G, demand="demand", capacity="capacity", weight="weight"): r"""Find a minimum cost flow satisfying all demands in digraph G. 
This is a primal network simplex algorithm that uses the leaving @@ -170,19 +170,19 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): ########################################################################### if len(G) == 0: - raise nx.NetworkXError('graph has no nodes') + raise nx.NetworkXError("graph has no nodes") # Number all nodes and edges and hereafter reference them using ONLY their # numbers - N = list(G) # nodes - I = {u: i for i, u in enumerate(N)} # node indices + N = list(G) # nodes + I = {u: i for i, u in enumerate(N)} # node indices D = [G.nodes[u].get(demand, 0) for u in N] # node demands - inf = float('inf') + inf = float("inf") for p, b in zip(N, D): if abs(b) == inf: - raise nx.NetworkXError(f'node {p!r} has infinite demand') + raise nx.NetworkXError(f"node {p!r} has infinite demand") multigraph = G.is_multigraph() S = [] # edge sources @@ -197,8 +197,7 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): edges = G.edges(data=True) else: edges = G.edges(data=True, keys=True) - edges = (e for e in edges - if e[0] != e[1] and e[-1].get(capacity, inf) != 0) + edges = (e for e in edges if e[0] != e[1] and e[-1].get(capacity, inf) != 0) for i, e in enumerate(edges): S.append(I[e[0]]) T.append(I[e[1]]) @@ -210,31 +209,31 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): for e, c in zip(E, C): if abs(c) == inf: - raise nx.NetworkXError(f'edge {e!r} has infinite weight') + raise nx.NetworkXError(f"edge {e!r} has infinite weight") if not multigraph: edges = nx.selfloop_edges(G, data=True) else: edges = nx.selfloop_edges(G, data=True, keys=True) for e in edges: if abs(e[-1].get(weight, 0)) == inf: - raise nx.NetworkXError(f'edge {e[:-1]!r} has infinite weight') + raise nx.NetworkXError(f"edge {e[:-1]!r} has infinite weight") ########################################################################### # Quick infeasibility detection ########################################################################### if sum(D) != 0: - raise nx.NetworkXUnfeasible('total node demand is not zero') + raise nx.NetworkXUnfeasible("total node demand is not zero") for e, u in zip(E, U): if u < 0: - raise nx.NetworkXUnfeasible(f'edge {e!r} has negative capacity') + raise nx.NetworkXUnfeasible(f"edge {e!r} has negative capacity") if not multigraph: edges = nx.selfloop_edges(G, data=True) else: edges = nx.selfloop_edges(G, data=True, keys=True) for e in edges: if e[-1].get(capacity, inf) < 0: - raise nx.NetworkXUnfeasible(f'edge {e[:-1]!r} has negative capacity') + raise nx.NetworkXUnfeasible(f"edge {e[:-1]!r} has negative capacity") ########################################################################### # Initialization @@ -256,22 +255,29 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): else: S.append(p) T.append(-1) - faux_inf = 3 * max(chain([sum(u for u in U if u < inf), - sum(abs(c) for c in C)], - (abs(d) for d in D))) or 1 + faux_inf = ( + 3 + * max( + chain( + [sum(u for u in U if u < inf), sum(abs(c) for c in C)], + (abs(d) for d in D), + ) + ) + or 1 + ) C.extend(repeat(faux_inf, n)) U.extend(repeat(faux_inf, n)) # Construct the initial spanning tree. 
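    # Annotation (not part of this commit): the structures below encode an
    # artificial starting basis. Every original node i hangs off an
    # artificial root (index -1) via artificial edge e + i, which initially
    # carries the node's full demand |D[i]|, so the initial spanning tree is
    # feasible by construction; the pivot loop then only has to price these
    # faux_inf-cost artificial edges out of the basis, and any nonzero
    # artificial flow left at the end signals infeasibility.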
-    e = len(E) # number of edges
-    x = list(chain(repeat(0, e), (abs(d) for d in D))) # edge flows
+    e = len(E)  # number of edges
+    x = list(chain(repeat(0, e), (abs(d) for d in D)))  # edge flows
     pi = [faux_inf if d <= 0 else -faux_inf for d in D] # node potentials
     parent = list(chain(repeat(-1, n), [None])) # parent nodes
-    edge = list(range(e, e + n)) # edges to parents
-    size = list(chain(repeat(1, n), [n + 1])) # subtree sizes
-    next = list(chain(range(1, n), [-1, 0])) # next nodes in depth-first thread
-    prev = list(range(-1, n)) # previous nodes in depth-first thread
-    last = list(chain(range(n), [n - 1])) # last descendants in depth-first thread
+    edge = list(range(e, e + n))  # edges to parents
+    size = list(chain(repeat(1, n), [n + 1]))  # subtree sizes
+    next = list(chain(range(1, n), [-1, 0]))  # next nodes in depth-first thread
+    prev = list(range(-1, n))  # previous nodes in depth-first thread
+    last = list(chain(range(n), [n - 1]))  # last descendants in depth-first thread

     ###########################################################################
     # Pivot loop
@@ -294,10 +300,10 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'):
     # each block, Dantzig's rule is applied to find an entering edge. The
     # blocks to search are determined following Bland's rule.
     B = int(ceil(sqrt(e))) # pivot block size
-    M = (e + B - 1) // B # number of blocks needed to cover all edges
-    m = 0 # number of consecutive blocks without eligible
+    M = (e + B - 1) // B  # number of blocks needed to cover all edges
+    m = 0  # number of consecutive blocks without eligible
     # entering edges
-    f = 0 # first edge in block
+    f = 0  # first edge in block
     while m < M:
     # Determine the next block of edges.
     l = f + B
@@ -387,8 +393,9 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'):
     def find_leaving_edge(Wn, We):
     """Returns the leaving edge in a cycle represented by Wn and We.
""" - j, s = min(zip(reversed(We), reversed(Wn)), - key=lambda i_p: residual_capacity(*i_p)) + j, s = min( + zip(reversed(We), reversed(Wn)), key=lambda i_p: residual_capacity(*i_p) + ) t = T[j] if S[j] == s else S[j] return j, s, t @@ -528,13 +535,13 @@ def network_simplex(G, demand='demand', capacity='capacity', weight='weight'): ########################################################################### if any(x[i] != 0 for i in range(-n, 0)): - raise nx.NetworkXUnfeasible('no flow satisfies all node demands') + raise nx.NetworkXUnfeasible("no flow satisfies all node demands") - if (any(x[i] * 2 >= faux_inf for i in range(e)) or - any(e[-1].get(capacity, inf) == inf and e[-1].get(weight, 0) < 0 - for e in nx.selfloop_edges(G, data=True))): - raise nx.NetworkXUnbounded( - 'negative cycle with infinite capacity found') + if any(x[i] * 2 >= faux_inf for i in range(e)) or any( + e[-1].get(capacity, inf) == inf and e[-1].get(weight, 0) < 0 + for e in nx.selfloop_edges(G, data=True) + ): + raise nx.NetworkXUnbounded("negative cycle with infinite capacity found") ########################################################################### # Flow cost calculation and flow dict construction diff --git a/networkx/algorithms/flow/preflowpush.py b/networkx/algorithms/flow/preflowpush.py index 910d2ad6..8ec844d9 100644 --- a/networkx/algorithms/flow/preflowpush.py +++ b/networkx/algorithms/flow/preflowpush.py @@ -12,11 +12,10 @@ from .utils import detect_unboundedness from .utils import GlobalRelabelThreshold from .utils import Level -__all__ = ['preflow_push'] +__all__ = ["preflow_push"] -def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, - value_only): +def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, value_only): """Implementation of the highest-label preflow-push algorithm. """ if s not in G: @@ -24,12 +23,12 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, if t not in G: raise nx.NetworkXError(f"node {str(t)} not in graph") if s == t: - raise nx.NetworkXError('source and sink are the same node') + raise nx.NetworkXError("source and sink are the same node") if global_relabel_freq is None: global_relabel_freq = 0 if global_relabel_freq < 0: - raise nx.NetworkXError('global_relabel_freq must be nonnegative.') + raise nx.NetworkXError("global_relabel_freq must be nonnegative.") if residual is None: R = build_residual_network(G, capacity) @@ -44,9 +43,9 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, # Initialize/reset the residual network. for u in R: - R_nodes[u]['excess'] = 0 + R_nodes[u]["excess"] = 0 for e in R_succ[u].values(): - e['flow'] = 0 + e["flow"] = 0 def reverse_bfs(src): """Perform a reverse breadth-first search from src in the residual @@ -58,7 +57,7 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, u, height = q.popleft() height += 1 for v, attr in R_pred[u].items(): - if v not in heights and attr['flow'] < attr['capacity']: + if v not in heights and attr["flow"] < attr["capacity"]: heights[v] = height q.append((v, height)) return heights @@ -69,7 +68,7 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, if s not in heights: # t is not reachable from s in the residual network. The maximum flow # must be zero. 
- R.graph['flow_value'] = 0 + R.graph["flow_value"] = 0 return R n = len(R) @@ -82,21 +81,21 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, # Initialize heights and 'current edge' data structures of the nodes. for u in R: - R_nodes[u]['height'] = heights[u] if u in heights else n + 1 - R_nodes[u]['curr_edge'] = CurrentEdge(R_succ[u]) + R_nodes[u]["height"] = heights[u] if u in heights else n + 1 + R_nodes[u]["curr_edge"] = CurrentEdge(R_succ[u]) def push(u, v, flow): """Push flow units of flow from u to v. """ - R_succ[u][v]['flow'] += flow - R_succ[v][u]['flow'] -= flow - R_nodes[u]['excess'] -= flow - R_nodes[v]['excess'] += flow + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow + R_nodes[u]["excess"] -= flow + R_nodes[v]["excess"] += flow # The maximum flow must be nonzero now. Initialize the preflow by # saturating all edges emanating from s. for u, attr in R_succ[s].items(): - flow = attr['capacity'] + flow = attr["capacity"] if flow > 0: push(s, u, flow) @@ -104,8 +103,8 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, levels = [Level() for i in range(2 * n)] for u in R: if u != s and u != t: - level = levels[R_nodes[u]['height']] - if R_nodes[u]['excess'] > 0: + level = levels[R_nodes[u]["height"]] + if R_nodes[u]["excess"] > 0: level.active.add(u) else: level.inactive.add(u) @@ -114,7 +113,7 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, """Move a node from the inactive set to the active set of its level. """ if v != s and v != t: - level = levels[R_nodes[v]['height']] + level = levels[R_nodes[v]["height"]] if v in level.inactive: level.inactive.remove(v) level.active.add(v) @@ -123,29 +122,33 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, """Relabel a node to create an admissible edge. """ grt.add_work(len(R_succ[u])) - return min(R_nodes[v]['height'] for v, attr in R_succ[u].items() - if attr['flow'] < attr['capacity']) + 1 + return ( + min( + R_nodes[v]["height"] + for v, attr in R_succ[u].items() + if attr["flow"] < attr["capacity"] + ) + + 1 + ) def discharge(u, is_phase1): """Discharge a node until it becomes inactive or, during phase 1 (see below), its height reaches at least n. The node is known to have the largest height among active nodes. """ - height = R_nodes[u]['height'] - curr_edge = R_nodes[u]['curr_edge'] + height = R_nodes[u]["height"] + curr_edge = R_nodes[u]["curr_edge"] # next_height represents the next height to examine after discharging # the current node. During phase 1, it is capped to below n. next_height = height levels[height].active.remove(u) while True: v, attr = curr_edge.get() - if (height == R_nodes[v]['height'] + 1 and - attr['flow'] < attr['capacity']): - flow = min(R_nodes[u]['excess'], - attr['capacity'] - attr['flow']) + if height == R_nodes[v]["height"] + 1 and attr["flow"] < attr["capacity"]: + flow = min(R_nodes[u]["excess"], attr["capacity"] - attr["flow"]) push(u, v, flow) activate(v) - if R_nodes[u]['excess'] == 0: + if R_nodes[u]["excess"] == 0: # The node has become inactive. levels[height].inactive.add(u) break @@ -166,7 +169,7 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, # structure is not rewound. Use height instead of (height - 1) # in case other active nodes at the same level are missed. 
next_height = height - R_nodes[u]['height'] = height + R_nodes[u]["height"] = height return next_height def gap_heuristic(height): @@ -175,9 +178,9 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, # Move all nodes at levels (height + 1) to max_height to level n + 1. for level in islice(levels, height + 1, max_height + 1): for u in level.active: - R_nodes[u]['height'] = n + 1 + R_nodes[u]["height"] = n + 1 for u in level.inactive: - R_nodes[u]['height'] = n + 1 + R_nodes[u]["height"] = n + 1 levels[n + 1].active.update(level.active) level.active.clear() levels[n + 1].inactive.update(level.inactive) @@ -196,7 +199,7 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, # Also mark nodes from which t is unreachable for relabeling. This # serves the same purpose as the gap heuristic. for u in R: - if u not in heights and R_nodes[u]['height'] < n: + if u not in heights and R_nodes[u]["height"] < n: heights[u] = n + 1 else: # Shift the computed heights because the height of s is n. @@ -205,7 +208,7 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, max_height += n del heights[src] for u, new_height in heights.items(): - old_height = R_nodes[u]['height'] + old_height = R_nodes[u]["height"] if new_height != old_height: if u in levels[old_height].active: levels[old_height].active.remove(u) @@ -213,7 +216,7 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, else: levels[old_height].inactive.remove(u) levels[new_height].inactive.add(u) - R_nodes[u]['height'] = new_height + R_nodes[u]["height"] = new_height return max_height # Phase 1: Find the maximum preflow by pushing as much flow as possible to @@ -256,7 +259,7 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, # A maximum preflow has been found. The excess at t is the maximum flow # value. if value_only: - R.graph['flow_value'] = R_nodes[t]['excess'] + R.graph["flow_value"] = R_nodes[t]["excess"] return R # Phase 2: Convert the maximum preflow into a maximum flow by returning the @@ -283,12 +286,13 @@ def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, height = global_relabel(False) grt.clear_work() - R.graph['flow_value'] = R_nodes[t]['excess'] + R.graph["flow_value"] = R_nodes[t]["excess"] return R -def preflow_push(G, s, t, capacity='capacity', residual=None, - global_relabel_freq=1, value_only=False): +def preflow_push( + G, s, t, capacity="capacity", residual=None, global_relabel_freq=1, value_only=False +): r"""Find a maximum single-commodity flow using the highest-label preflow-push algorithm. 
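For reference between these hunks, a minimal usage sketch of the interface being reformatted here (the one-edge graph is illustrative and not part of this patch; the call pattern mirrors the tests in test_maxflow.py further down):

>>> import networkx as nx
>>> from networkx.algorithms.flow import preflow_push
>>> G = nx.DiGraph()
>>> G.add_edge("s", "t", capacity=5)
>>> R = preflow_push(G, "s", "t", value_only=True)
>>> R.graph["flow_value"]
5

With value_only=False the returned residual network also carries per-edge "flow" attributes, which build_flow_dict in utils.py converts into a flow dictionary.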
@@ -417,7 +421,6 @@ def preflow_push(G, s, t, capacity='capacity', residual=None, True """ - R = preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, - value_only) - R.graph['algorithm'] = 'preflow_push' + R = preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, value_only) + R.graph["algorithm"] = "preflow_push" return R diff --git a/networkx/algorithms/flow/shortestaugmentingpath.py b/networkx/algorithms/flow/shortestaugmentingpath.py index fd7e0ee9..6af134f1 100644 --- a/networkx/algorithms/flow/shortestaugmentingpath.py +++ b/networkx/algorithms/flow/shortestaugmentingpath.py @@ -7,11 +7,10 @@ import networkx as nx from .utils import build_residual_network, CurrentEdge from .edmondskarp import edmonds_karp_core -__all__ = ['shortest_augmenting_path'] +__all__ = ["shortest_augmenting_path"] -def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, - cutoff): +def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, cutoff): """Implementation of the shortest augmenting path algorithm. """ if s not in G: @@ -19,7 +18,7 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, if t not in G: raise nx.NetworkXError(f"node {str(t)} not in graph") if s == t: - raise nx.NetworkXError('source and sink are the same node') + raise nx.NetworkXError("source and sink are the same node") if residual is None: R = build_residual_network(G, capacity) @@ -33,7 +32,7 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, # Initialize/reset the residual network. for u in R: for e in R_succ[u].values(): - e['flow'] = 0 + e["flow"] = 0 # Initialize heights of the nodes. heights = {t: 0} @@ -42,14 +41,14 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, u, height = q.popleft() height += 1 for v, attr in R_pred[u].items(): - if v not in heights and attr['flow'] < attr['capacity']: + if v not in heights and attr["flow"] < attr["capacity"]: heights[v] = height q.append((v, height)) if s not in heights: # t is not reachable from s in the residual network. The maximum flow # must be zero. - R.graph['flow_value'] = 0 + R.graph["flow_value"] = 0 return R n = len(G) @@ -57,15 +56,15 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, # Initialize heights and 'current edge' data structures of the nodes. for u in R: - R_nodes[u]['height'] = heights[u] if u in heights else n - R_nodes[u]['curr_edge'] = CurrentEdge(R_succ[u]) + R_nodes[u]["height"] = heights[u] if u in heights else n + R_nodes[u]["curr_edge"] = CurrentEdge(R_succ[u]) # Initialize counts of nodes in each level. counts = [0] * (2 * n - 1) for u in R: - counts[R_nodes[u]['height']] += 1 + counts[R_nodes[u]["height"]] += 1 - inf = R.graph['inf'] + inf = R.graph["inf"] def augment(path): """Augment flow along a path from s to t. @@ -76,17 +75,16 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, u = next(it) for v in it: attr = R_succ[u][v] - flow = min(flow, attr['capacity'] - attr['flow']) + flow = min(flow, attr["capacity"] - attr["flow"]) u = v if flow * 2 > inf: - raise nx.NetworkXUnbounded( - 'Infinite capacity path, flow unbounded above.') + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") # Augment flow along the path. 
it = iter(path) u = next(it) for v in it: - R_succ[u][v]['flow'] += flow - R_succ[v][u]['flow'] -= flow + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow u = v return flow @@ -95,28 +93,27 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, """ height = n - 1 for v, attr in R_succ[u].items(): - if attr['flow'] < attr['capacity']: - height = min(height, R_nodes[v]['height']) + if attr["flow"] < attr["capacity"]: + height = min(height, R_nodes[v]["height"]) return height + 1 if cutoff is None: - cutoff = float('inf') + cutoff = float("inf") # Phase 1: Look for shortest augmenting paths using depth-first search. flow_value = 0 path = [s] u = s - d = n if not two_phase else int(min(m ** 0.5, 2 * n ** (2. / 3))) - done = R_nodes[s]['height'] >= d + d = n if not two_phase else int(min(m ** 0.5, 2 * n ** (2.0 / 3))) + done = R_nodes[s]["height"] >= d while not done: - height = R_nodes[u]['height'] - curr_edge = R_nodes[u]['curr_edge'] + height = R_nodes[u]["height"] + curr_edge = R_nodes[u]["curr_edge"] # Depth-first search for the next node on the path to t. while True: v, attr = curr_edge.get() - if (height == R_nodes[v]['height'] + 1 and - attr['flow'] < attr['capacity']): + if height == R_nodes[v]["height"] + 1 and attr["flow"] < attr["capacity"]: # Advance to the next node following an admissible edge. path.append(v) u = v @@ -129,21 +126,21 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, # Gap heuristic: If relabeling causes a level to become # empty, a minimum cut has been identified. The algorithm # can now be terminated. - R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R height = relabel(u) if u == s and height >= d: if not two_phase: # t is disconnected from s in the residual network. No # more augmenting paths exist. - R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R else: # t is at least d steps away from s. End of phase 1. done = True break counts[height] += 1 - R_nodes[u]['height'] = height + R_nodes[u]["height"] = height if u != s: # After relabeling, the last edge on the path is no longer # admissible. Retreat one step to look for an alternative. @@ -155,7 +152,7 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, # depth-first search. flow_value += augment(path) if flow_value >= cutoff: - R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R path = [s] u = s @@ -163,12 +160,20 @@ def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, # Phase 2: Look for shortest augmenting paths using breadth-first search. flow_value += edmonds_karp_core(R, s, t, cutoff - flow_value) - R.graph['flow_value'] = flow_value + R.graph["flow_value"] = flow_value return R -def shortest_augmenting_path(G, s, t, capacity='capacity', residual=None, - value_only=False, two_phase=False, cutoff=None): +def shortest_augmenting_path( + G, + s, + t, + capacity="capacity", + residual=None, + value_only=False, + two_phase=False, + cutoff=None, +): r"""Find a maximum single-commodity flow using the shortest augmenting path algorithm. 
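For reference, a minimal usage sketch of this interface (illustrative graph, not part of this patch; two_phase and cutoff are exercised the same way in TestCutoff further down):

>>> import networkx as nx
>>> from networkx.algorithms.flow import shortest_augmenting_path
>>> G = nx.DiGraph()
>>> nx.add_path(G, ["s", "a", "t"], capacity=2)
>>> R = shortest_augmenting_path(G, "s", "t", two_phase=True)
>>> R.graph["flow_value"]
2

Because a cutoff only guarantees the search stops once the accumulated flow reaches it, the cutoff tests below assert k <= flow_value <= 2 * k rather than an exact value.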
@@ -290,7 +295,6 @@ def shortest_augmenting_path(G, s, t, capacity='capacity', residual=None, True """ - R = shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, - cutoff) - R.graph['algorithm'] = 'shortest_augmenting_path' + R = shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, cutoff) + R.graph["algorithm"] = "shortest_augmenting_path" return R diff --git a/networkx/algorithms/flow/tests/test_maxflow.py b/networkx/algorithms/flow/tests/test_maxflow.py index 01a4d4dc..17b82b4d 100644 --- a/networkx/algorithms/flow/tests/test_maxflow.py +++ b/networkx/algorithms/flow/tests/test_maxflow.py @@ -10,7 +10,13 @@ from networkx.algorithms.flow import preflow_push from networkx.algorithms.flow import shortest_augmenting_path from networkx.algorithms.flow import dinitz -flow_funcs = [boykov_kolmogorov, dinitz, edmonds_karp, preflow_push, shortest_augmenting_path] +flow_funcs = [ + boykov_kolmogorov, + dinitz, + edmonds_karp, + preflow_push, + shortest_augmenting_path, +] max_min_funcs = [nx.maximum_flow, nx.minimum_cut] flow_value_funcs = [nx.maximum_flow_value, nx.minimum_cut_value] interface_funcs = sum([max_min_funcs, flow_value_funcs], []) @@ -62,30 +68,29 @@ def validate_cuts(G, s, t, solnValue, partition, capacity, flow_func): assert not nx.is_strongly_connected(H), errmsg -def compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity='capacity'): +def compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity="capacity"): for flow_func in flow_funcs: errmsg = f"Assertion failed in function: {flow_func.__name__}" R = flow_func(G, s, t, capacity) # Test both legacy and new implementations. - flow_value = R.graph['flow_value'] + flow_value = R.graph["flow_value"] flow_dict = build_flow_dict(G, R) assert flow_value == solnValue, errmsg validate_flows(G, s, t, flow_dict, solnValue, capacity, flow_func) # Minimum cut - cut_value, partition = nx.minimum_cut(G, s, t, capacity=capacity, - flow_func=flow_func) + cut_value, partition = nx.minimum_cut( + G, s, t, capacity=capacity, flow_func=flow_func + ) validate_cuts(G, s, t, solnValue, partition, capacity, flow_func) class TestMaxflowMinCutCommon: - def test_graph1(self): # Trivial undirected graph G = nx.Graph() G.add_edge(1, 2, capacity=1.0) - solnFlows = {1: {2: 1.0}, - 2: {1: 1.0}} + solnFlows = {1: {2: 1.0}, 2: {1: 1.0}} compare_flows_and_cuts(G, 1, 2, solnFlows, 1.0) @@ -93,239 +98,260 @@ class TestMaxflowMinCutCommon: # A more complex undirected graph # adapted from www.topcoder.com/tc?module=Statc&d1=tutorials&d2=maxFlow G = nx.Graph() - G.add_edge('x', 'a', capacity=3.0) - G.add_edge('x', 'b', capacity=1.0) - G.add_edge('a', 'c', capacity=3.0) - G.add_edge('b', 'c', capacity=5.0) - G.add_edge('b', 'd', capacity=4.0) - G.add_edge('d', 'e', capacity=2.0) - G.add_edge('c', 'y', capacity=2.0) - G.add_edge('e', 'y', capacity=3.0) - - H = {'x': {'a': 3, 'b': 1}, - 'a': {'c': 3, 'x': 3}, - 'b': {'c': 1, 'd': 2, 'x': 1}, - 'c': {'a': 3, 'b': 1, 'y': 2}, - 'd': {'b': 2, 'e': 2}, - 'e': {'d': 2, 'y': 2}, - 'y': {'c': 2, 'e': 2}} - - compare_flows_and_cuts(G, 'x', 'y', H, 4.0) + G.add_edge("x", "a", capacity=3.0) + G.add_edge("x", "b", capacity=1.0) + G.add_edge("a", "c", capacity=3.0) + G.add_edge("b", "c", capacity=5.0) + G.add_edge("b", "d", capacity=4.0) + G.add_edge("d", "e", capacity=2.0) + G.add_edge("c", "y", capacity=2.0) + G.add_edge("e", "y", capacity=3.0) + + H = { + "x": {"a": 3, "b": 1}, + "a": {"c": 3, "x": 3}, + "b": {"c": 1, "d": 2, "x": 1}, + "c": {"a": 3, "b": 1, "y": 2}, + "d": {"b": 2, 
"e": 2}, + "e": {"d": 2, "y": 2}, + "y": {"c": 2, "e": 2}, + } + + compare_flows_and_cuts(G, "x", "y", H, 4.0) def test_digraph1(self): # The classic directed graph example G = nx.DiGraph() - G.add_edge('a', 'b', capacity=1000.0) - G.add_edge('a', 'c', capacity=1000.0) - G.add_edge('b', 'c', capacity=1.0) - G.add_edge('b', 'd', capacity=1000.0) - G.add_edge('c', 'd', capacity=1000.0) + G.add_edge("a", "b", capacity=1000.0) + G.add_edge("a", "c", capacity=1000.0) + G.add_edge("b", "c", capacity=1.0) + G.add_edge("b", "d", capacity=1000.0) + G.add_edge("c", "d", capacity=1000.0) - H = {'a': {'b': 1000.0, 'c': 1000.0}, - 'b': {'c': 0, 'd': 1000.0}, - 'c': {'d': 1000.0}, - 'd': {}} + H = { + "a": {"b": 1000.0, "c": 1000.0}, + "b": {"c": 0, "d": 1000.0}, + "c": {"d": 1000.0}, + "d": {}, + } - compare_flows_and_cuts(G, 'a', 'd', H, 2000.0) + compare_flows_and_cuts(G, "a", "d", H, 2000.0) def test_digraph2(self): # An example in which some edges end up with zero flow. G = nx.DiGraph() - G.add_edge('s', 'b', capacity=2) - G.add_edge('s', 'c', capacity=1) - G.add_edge('c', 'd', capacity=1) - G.add_edge('d', 'a', capacity=1) - G.add_edge('b', 'a', capacity=2) - G.add_edge('a', 't', capacity=2) - - H = {'s': {'b': 2, 'c': 0}, - 'c': {'d': 0}, - 'd': {'a': 0}, - 'b': {'a': 2}, - 'a': {'t': 2}, - 't': {}} - - compare_flows_and_cuts(G, 's', 't', H, 2) + G.add_edge("s", "b", capacity=2) + G.add_edge("s", "c", capacity=1) + G.add_edge("c", "d", capacity=1) + G.add_edge("d", "a", capacity=1) + G.add_edge("b", "a", capacity=2) + G.add_edge("a", "t", capacity=2) + + H = { + "s": {"b": 2, "c": 0}, + "c": {"d": 0}, + "d": {"a": 0}, + "b": {"a": 2}, + "a": {"t": 2}, + "t": {}, + } + + compare_flows_and_cuts(G, "s", "t", H, 2) def test_digraph3(self): # A directed graph example from Cormen et al. 
G = nx.DiGraph() - G.add_edge('s', 'v1', capacity=16.0) - G.add_edge('s', 'v2', capacity=13.0) - G.add_edge('v1', 'v2', capacity=10.0) - G.add_edge('v2', 'v1', capacity=4.0) - G.add_edge('v1', 'v3', capacity=12.0) - G.add_edge('v3', 'v2', capacity=9.0) - G.add_edge('v2', 'v4', capacity=14.0) - G.add_edge('v4', 'v3', capacity=7.0) - G.add_edge('v3', 't', capacity=20.0) - G.add_edge('v4', 't', capacity=4.0) - - H = {'s': {'v1': 12.0, 'v2': 11.0}, - 'v2': {'v1': 0, 'v4': 11.0}, - 'v1': {'v2': 0, 'v3': 12.0}, - 'v3': {'v2': 0, 't': 19.0}, - 'v4': {'v3': 7.0, 't': 4.0}, - 't': {}} - - compare_flows_and_cuts(G, 's', 't', H, 23.0) + G.add_edge("s", "v1", capacity=16.0) + G.add_edge("s", "v2", capacity=13.0) + G.add_edge("v1", "v2", capacity=10.0) + G.add_edge("v2", "v1", capacity=4.0) + G.add_edge("v1", "v3", capacity=12.0) + G.add_edge("v3", "v2", capacity=9.0) + G.add_edge("v2", "v4", capacity=14.0) + G.add_edge("v4", "v3", capacity=7.0) + G.add_edge("v3", "t", capacity=20.0) + G.add_edge("v4", "t", capacity=4.0) + + H = { + "s": {"v1": 12.0, "v2": 11.0}, + "v2": {"v1": 0, "v4": 11.0}, + "v1": {"v2": 0, "v3": 12.0}, + "v3": {"v2": 0, "t": 19.0}, + "v4": {"v3": 7.0, "t": 4.0}, + "t": {}, + } + + compare_flows_and_cuts(G, "s", "t", H, 23.0) def test_digraph4(self): # A more complex directed graph # from www.topcoder.com/tc?module=Statc&d1=tutorials&d2=maxFlow G = nx.DiGraph() - G.add_edge('x', 'a', capacity=3.0) - G.add_edge('x', 'b', capacity=1.0) - G.add_edge('a', 'c', capacity=3.0) - G.add_edge('b', 'c', capacity=5.0) - G.add_edge('b', 'd', capacity=4.0) - G.add_edge('d', 'e', capacity=2.0) - G.add_edge('c', 'y', capacity=2.0) - G.add_edge('e', 'y', capacity=3.0) - - H = {'x': {'a': 2.0, 'b': 1.0}, - 'a': {'c': 2.0}, - 'b': {'c': 0, 'd': 1.0}, - 'c': {'y': 2.0}, - 'd': {'e': 1.0}, - 'e': {'y': 1.0}, - 'y': {}} - - compare_flows_and_cuts(G, 'x', 'y', H, 3.0) + G.add_edge("x", "a", capacity=3.0) + G.add_edge("x", "b", capacity=1.0) + G.add_edge("a", "c", capacity=3.0) + G.add_edge("b", "c", capacity=5.0) + G.add_edge("b", "d", capacity=4.0) + G.add_edge("d", "e", capacity=2.0) + G.add_edge("c", "y", capacity=2.0) + G.add_edge("e", "y", capacity=3.0) + + H = { + "x": {"a": 2.0, "b": 1.0}, + "a": {"c": 2.0}, + "b": {"c": 0, "d": 1.0}, + "c": {"y": 2.0}, + "d": {"e": 1.0}, + "e": {"y": 1.0}, + "y": {}, + } + + compare_flows_and_cuts(G, "x", "y", H, 3.0) def test_wikipedia_dinitz_example(self): # Nice example from https://en.wikipedia.org/wiki/Dinic's_algorithm G = nx.DiGraph() - G.add_edge('s', 1, capacity=10) - G.add_edge('s', 2, capacity=10) + G.add_edge("s", 1, capacity=10) + G.add_edge("s", 2, capacity=10) G.add_edge(1, 3, capacity=4) G.add_edge(1, 4, capacity=8) G.add_edge(1, 2, capacity=2) G.add_edge(2, 4, capacity=9) - G.add_edge(3, 't', capacity=10) + G.add_edge(3, "t", capacity=10) G.add_edge(4, 3, capacity=6) - G.add_edge(4, 't', capacity=10) + G.add_edge(4, "t", capacity=10) - solnFlows = {1: {2: 0, 3: 4, 4: 6}, - 2: {4: 9}, - 3: {'t': 9}, - 4: {3: 5, 't': 10}, - 's': {1: 10, 2: 9}, - 't': {}} + solnFlows = { + 1: {2: 0, 3: 4, 4: 6}, + 2: {4: 9}, + 3: {"t": 9}, + 4: {3: 5, "t": 10}, + "s": {1: 10, 2: 9}, + "t": {}, + } - compare_flows_and_cuts(G, 's', 't', solnFlows, 19) + compare_flows_and_cuts(G, "s", "t", solnFlows, 19) def test_optional_capacity(self): # Test optional capacity parameter. 
G = nx.DiGraph() - G.add_edge('x', 'a', spam=3.0) - G.add_edge('x', 'b', spam=1.0) - G.add_edge('a', 'c', spam=3.0) - G.add_edge('b', 'c', spam=5.0) - G.add_edge('b', 'd', spam=4.0) - G.add_edge('d', 'e', spam=2.0) - G.add_edge('c', 'y', spam=2.0) - G.add_edge('e', 'y', spam=3.0) - - solnFlows = {'x': {'a': 2.0, 'b': 1.0}, - 'a': {'c': 2.0}, - 'b': {'c': 0, 'd': 1.0}, - 'c': {'y': 2.0}, - 'd': {'e': 1.0}, - 'e': {'y': 1.0}, - 'y': {}} + G.add_edge("x", "a", spam=3.0) + G.add_edge("x", "b", spam=1.0) + G.add_edge("a", "c", spam=3.0) + G.add_edge("b", "c", spam=5.0) + G.add_edge("b", "d", spam=4.0) + G.add_edge("d", "e", spam=2.0) + G.add_edge("c", "y", spam=2.0) + G.add_edge("e", "y", spam=3.0) + + solnFlows = { + "x": {"a": 2.0, "b": 1.0}, + "a": {"c": 2.0}, + "b": {"c": 0, "d": 1.0}, + "c": {"y": 2.0}, + "d": {"e": 1.0}, + "e": {"y": 1.0}, + "y": {}, + } solnValue = 3.0 - s = 'x' - t = 'y' + s = "x" + t = "y" - compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity='spam') + compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity="spam") def test_digraph_infcap_edges(self): # DiGraph with infinite capacity edges G = nx.DiGraph() - G.add_edge('s', 'a') - G.add_edge('s', 'b', capacity=30) - G.add_edge('a', 'c', capacity=25) - G.add_edge('b', 'c', capacity=12) - G.add_edge('a', 't', capacity=60) - G.add_edge('c', 't') - - H = {'s': {'a': 85, 'b': 12}, - 'a': {'c': 25, 't': 60}, - 'b': {'c': 12}, - 'c': {'t': 37}, - 't': {}} - - compare_flows_and_cuts(G, 's', 't', H, 97) + G.add_edge("s", "a") + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c", capacity=25) + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + G.add_edge("c", "t") + + H = { + "s": {"a": 85, "b": 12}, + "a": {"c": 25, "t": 60}, + "b": {"c": 12}, + "c": {"t": 37}, + "t": {}, + } + + compare_flows_and_cuts(G, "s", "t", H, 97) # DiGraph with infinite capacity digon G = nx.DiGraph() - G.add_edge('s', 'a', capacity=85) - G.add_edge('s', 'b', capacity=30) - G.add_edge('a', 'c') - G.add_edge('c', 'a') - G.add_edge('b', 'c', capacity=12) - G.add_edge('a', 't', capacity=60) - G.add_edge('c', 't', capacity=37) - - H = {'s': {'a': 85, 'b': 12}, - 'a': {'c': 25, 't': 60}, - 'c': {'a': 0, 't': 37}, - 'b': {'c': 12}, - 't': {}} - - compare_flows_and_cuts(G, 's', 't', H, 97) + G.add_edge("s", "a", capacity=85) + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c") + G.add_edge("c", "a") + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + G.add_edge("c", "t", capacity=37) + + H = { + "s": {"a": 85, "b": 12}, + "a": {"c": 25, "t": 60}, + "c": {"a": 0, "t": 37}, + "b": {"c": 12}, + "t": {}, + } + + compare_flows_and_cuts(G, "s", "t", H, 97) def test_digraph_infcap_path(self): # Graph with infinite capacity (s, t)-path G = nx.DiGraph() - G.add_edge('s', 'a') - G.add_edge('s', 'b', capacity=30) - G.add_edge('a', 'c') - G.add_edge('b', 'c', capacity=12) - G.add_edge('a', 't', capacity=60) - G.add_edge('c', 't') + G.add_edge("s", "a") + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c") + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + G.add_edge("c", "t") for flow_func in all_funcs: - pytest.raises(nx.NetworkXUnbounded, - flow_func, G, 's', 't') + pytest.raises(nx.NetworkXUnbounded, flow_func, G, "s", "t") def test_graph_infcap_edges(self): # Undirected graph with infinite capacity edges G = nx.Graph() - G.add_edge('s', 'a') - G.add_edge('s', 'b', capacity=30) - G.add_edge('a', 'c', capacity=25) - G.add_edge('b', 'c', capacity=12) - G.add_edge('a', 
't', capacity=60) - G.add_edge('c', 't') - - H = {'s': {'a': 85, 'b': 12}, - 'a': {'c': 25, 's': 85, 't': 60}, - 'b': {'c': 12, 's': 12}, - 'c': {'a': 25, 'b': 12, 't': 37}, - 't': {'a': 60, 'c': 37}} - - compare_flows_and_cuts(G, 's', 't', H, 97) + G.add_edge("s", "a") + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c", capacity=25) + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + G.add_edge("c", "t") + + H = { + "s": {"a": 85, "b": 12}, + "a": {"c": 25, "s": 85, "t": 60}, + "b": {"c": 12, "s": 12}, + "c": {"a": 25, "b": 12, "t": 37}, + "t": {"a": 60, "c": 37}, + } + + compare_flows_and_cuts(G, "s", "t", H, 97) def test_digraph5(self): # From ticket #429 by mfrasca. G = nx.DiGraph() - G.add_edge('s', 'a', capacity=2) - G.add_edge('s', 'b', capacity=2) - G.add_edge('a', 'b', capacity=5) - G.add_edge('a', 't', capacity=1) - G.add_edge('b', 'a', capacity=1) - G.add_edge('b', 't', capacity=3) - flowSoln = {'a': {'b': 1, 't': 1}, - 'b': {'a': 0, 't': 3}, - 's': {'a': 2, 'b': 2}, - 't': {}} - compare_flows_and_cuts(G, 's', 't', flowSoln, 4) + G.add_edge("s", "a", capacity=2) + G.add_edge("s", "b", capacity=2) + G.add_edge("a", "b", capacity=5) + G.add_edge("a", "t", capacity=1) + G.add_edge("b", "a", capacity=1) + G.add_edge("b", "t", capacity=3) + flowSoln = { + "a": {"b": 1, "t": 1}, + "b": {"a": 0, "t": 3}, + "s": {"a": 2, "b": 2}, + "t": {}, + } + compare_flows_and_cuts(G, "s", "t", flowSoln, 4) def test_disconnected(self): G = nx.Graph() - G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity') + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity") G.remove_node(1) assert nx.maximum_flow_value(G, 0, 3) == 0 flowSoln = {0: {}, 2: {3: 0}, 3: {2: 0}} @@ -333,11 +359,11 @@ class TestMaxflowMinCutCommon: def test_source_target_not_in_graph(self): G = nx.Graph() - G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity') + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity") G.remove_node(0) for flow_func in all_funcs: pytest.raises(nx.NetworkXError, flow_func, G, 0, 3) - G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity') + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity") G.remove_node(3) for flow_func in all_funcs: pytest.raises(nx.NetworkXError, flow_func, G, 0, 3) @@ -357,17 +383,16 @@ class TestMaxflowMinCutCommon: class TestMaxFlowMinCutInterface: - def setup(self): G = nx.DiGraph() - G.add_edge('x', 'a', capacity=3.0) - G.add_edge('x', 'b', capacity=1.0) - G.add_edge('a', 'c', capacity=3.0) - G.add_edge('b', 'c', capacity=5.0) - G.add_edge('b', 'd', capacity=4.0) - G.add_edge('d', 'e', capacity=2.0) - G.add_edge('c', 'y', capacity=2.0) - G.add_edge('e', 'y', capacity=3.0) + G.add_edge("x", "a", capacity=3.0) + G.add_edge("x", "b", capacity=1.0) + G.add_edge("a", "c", capacity=3.0) + G.add_edge("b", "c", capacity=5.0) + G.add_edge("b", "d", capacity=4.0) + G.add_edge("d", "e", capacity=2.0) + G.add_edge("c", "y", capacity=2.0) + G.add_edge("e", "y", capacity=3.0) self.G = G H = nx.DiGraph() H.add_edge(0, 1, capacity=1.0) @@ -375,15 +400,13 @@ class TestMaxFlowMinCutInterface: self.H = H def test_flow_func_not_callable(self): - elements = ['this_should_be_callable', 10, {1, 2, 3}] + elements = ["this_should_be_callable", 10, {1, 2, 3}] G = nx.Graph() - G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight='capacity') + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], 
weight="capacity") for flow_func in interface_funcs: for element in elements: - pytest.raises(nx.NetworkXError, - flow_func, G, 0, 1, flow_func=element) - pytest.raises(nx.NetworkXError, - flow_func, G, 0, 1, flow_func=element) + pytest.raises(nx.NetworkXError, flow_func, G, 0, 1, flow_func=element) + pytest.raises(nx.NetworkXError, flow_func, G, 0, 1, flow_func=element) def test_flow_func_parameters(self): G = self.G @@ -394,7 +417,7 @@ class TestMaxFlowMinCutInterface: f"Assertion failed in function: {flow_func.__name__} " f"in interface {interface_func.__name__}" ) - result = interface_func(G, 'x', 'y', flow_func=flow_func) + result = interface_func(G, "x", "y", flow_func=flow_func) if interface_func in max_min_funcs: result = result[0] assert fv == result, errmsg @@ -402,10 +425,24 @@ class TestMaxFlowMinCutInterface: def test_minimum_cut_no_cutoff(self): G = self.G for flow_func in flow_funcs: - pytest.raises(nx.NetworkXError, nx.minimum_cut, G, 'x', 'y', - flow_func=flow_func, cutoff=1.0) - pytest.raises(nx.NetworkXError, nx.minimum_cut_value, G, 'x', 'y', - flow_func=flow_func, cutoff=1.0) + pytest.raises( + nx.NetworkXError, + nx.minimum_cut, + G, + "x", + "y", + flow_func=flow_func, + cutoff=1.0, + ) + pytest.raises( + nx.NetworkXError, + nx.minimum_cut_value, + G, + "x", + "y", + flow_func=flow_func, + cutoff=1.0, + ) def test_kwargs(self): G = self.H @@ -428,14 +465,15 @@ class TestMaxFlowMinCutInterface: def test_kwargs_default_flow_func(self): G = self.H for interface_func in interface_funcs: - pytest.raises(nx.NetworkXError, interface_func, - G, 0, 1, global_relabel_freq=2) + pytest.raises( + nx.NetworkXError, interface_func, G, 0, 1, global_relabel_freq=2 + ) def test_reusing_residual(self): G = self.G fv = 3.0 - s, t = 'x', 'y' - R = build_residual_network(G, 'capacity') + s, t = "x", "y" + R = build_residual_network(G, "capacity") for interface_func in interface_funcs: for flow_func in flow_funcs: errmsg = ( @@ -443,8 +481,9 @@ class TestMaxFlowMinCutInterface: f"in interface {interface_func.__name__}" ) for i in range(3): - result = interface_func(G, 'x', 'y', flow_func=flow_func, - residual=R) + result = interface_func( + G, "x", "y", flow_func=flow_func, residual=R + ) if interface_func in max_min_funcs: result = result[0] assert fv == result, errmsg @@ -455,9 +494,8 @@ def test_preflow_push_global_relabel_freq(): G = nx.DiGraph() G.add_edge(1, 2, capacity=1) R = preflow_push(G, 1, 2, global_relabel_freq=None) - assert R.graph['flow_value'] == 1 - pytest.raises(nx.NetworkXError, preflow_push, G, 1, 2, - global_relabel_freq=-1) + assert R.graph["flow_value"] == 1 + pytest.raises(nx.NetworkXError, preflow_push, G, 1, 2, global_relabel_freq=-1) def test_preflow_push_makes_enough_space(): @@ -466,7 +504,7 @@ def test_preflow_push_makes_enough_space(): nx.add_path(G, [0, 1, 3], capacity=1) nx.add_path(G, [1, 2, 3], capacity=1) R = preflow_push(G, 0, 3, value_only=False) - assert R.graph['flow_value'] == 1 + assert R.graph["flow_value"] == 1 def test_shortest_augmenting_path_two_phase(): @@ -474,38 +512,37 @@ def test_shortest_augmenting_path_two_phase(): p = 1000 G = nx.DiGraph() for i in range(k): - G.add_edge('s', (i, 0), capacity=1) + G.add_edge("s", (i, 0), capacity=1) nx.add_path(G, ((i, j) for j in range(p)), capacity=1) - G.add_edge((i, p - 1), 't', capacity=1) - R = shortest_augmenting_path(G, 's', 't', two_phase=True) - assert R.graph['flow_value'] == k - R = shortest_augmenting_path(G, 's', 't', two_phase=False) - assert R.graph['flow_value'] == k + G.add_edge((i, 
p - 1), "t", capacity=1) + R = shortest_augmenting_path(G, "s", "t", two_phase=True) + assert R.graph["flow_value"] == k + R = shortest_augmenting_path(G, "s", "t", two_phase=False) + assert R.graph["flow_value"] == k class TestCutoff: - def test_cutoff(self): k = 5 p = 1000 G = nx.DiGraph() for i in range(k): - G.add_edge('s', (i, 0), capacity=2) + G.add_edge("s", (i, 0), capacity=2) nx.add_path(G, ((i, j) for j in range(p)), capacity=2) - G.add_edge((i, p - 1), 't', capacity=2) - R = shortest_augmenting_path(G, 's', 't', two_phase=True, cutoff=k) - assert k <= R.graph['flow_value'] <= (2 * k) - R = shortest_augmenting_path(G, 's', 't', two_phase=False, cutoff=k) - assert k <= R.graph['flow_value'] <= (2 * k) - R = edmonds_karp(G, 's', 't', cutoff=k) - assert k <= R.graph['flow_value'] <= (2 * k) + G.add_edge((i, p - 1), "t", capacity=2) + R = shortest_augmenting_path(G, "s", "t", two_phase=True, cutoff=k) + assert k <= R.graph["flow_value"] <= (2 * k) + R = shortest_augmenting_path(G, "s", "t", two_phase=False, cutoff=k) + assert k <= R.graph["flow_value"] <= (2 * k) + R = edmonds_karp(G, "s", "t", cutoff=k) + assert k <= R.graph["flow_value"] <= (2 * k) def test_complete_graph_cutoff(self): G = nx.complete_graph(5) - nx.set_edge_attributes(G, {(u, v): 1 for u, v in G.edges()}, - 'capacity') + nx.set_edge_attributes(G, {(u, v): 1 for u, v in G.edges()}, "capacity") for flow_func in [shortest_augmenting_path, edmonds_karp]: for cutoff in [3, 2, 1]: - result = nx.maximum_flow_value(G, 0, 4, flow_func=flow_func, - cutoff=cutoff) + result = nx.maximum_flow_value( + G, 0, 4, flow_func=flow_func, cutoff=cutoff + ) assert cutoff == result, f"cutoff error in {flow_func.__name__}" diff --git a/networkx/algorithms/flow/tests/test_mincost.py b/networkx/algorithms/flow/tests/test_mincost.py index 7346eae1..fbb839a9 100644 --- a/networkx/algorithms/flow/tests/test_mincost.py +++ b/networkx/algorithms/flow/tests/test_mincost.py @@ -6,17 +6,14 @@ import os class TestMinCostFlow: def test_simple_digraph(self): G = nx.DiGraph() - G.add_node('a', demand=-5) - G.add_node('d', demand=5) - G.add_edge('a', 'b', weight=3, capacity=4) - G.add_edge('a', 'c', weight=6, capacity=10) - G.add_edge('b', 'd', weight=1, capacity=9) - G.add_edge('c', 'd', weight=2, capacity=5) + G.add_node("a", demand=-5) + G.add_node("d", demand=5) + G.add_edge("a", "b", weight=3, capacity=4) + G.add_edge("a", "c", weight=6, capacity=10) + G.add_edge("b", "d", weight=1, capacity=9) + G.add_edge("c", "d", weight=2, capacity=5) flowCost, H = nx.network_simplex(G) - soln = {'a': {'b': 4, 'c': 1}, - 'b': {'d': 4}, - 'c': {'d': 1}, - 'd': {}} + soln = {"a": {"b": 4, "c": 1}, "b": {"d": 4}, "c": {"d": 1}, "d": {}} assert flowCost == 24 assert nx.min_cost_flow_cost(G) == 24 assert H == soln @@ -30,78 +27,80 @@ class TestMinCostFlow: def test_negcycle_infcap(self): G = nx.DiGraph() - G.add_node('s', demand=-5) - G.add_node('t', demand=5) - G.add_edge('s', 'a', weight=1, capacity=3) - G.add_edge('a', 'b', weight=3) - G.add_edge('c', 'a', weight=-6) - G.add_edge('b', 'd', weight=1) - G.add_edge('d', 'c', weight=-2) - G.add_edge('d', 't', weight=1, capacity=3) + G.add_node("s", demand=-5) + G.add_node("t", demand=5) + G.add_edge("s", "a", weight=1, capacity=3) + G.add_edge("a", "b", weight=3) + G.add_edge("c", "a", weight=-6) + G.add_edge("b", "d", weight=1) + G.add_edge("d", "c", weight=-2) + G.add_edge("d", "t", weight=1, capacity=3) pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) pytest.raises(nx.NetworkXUnbounded, 
nx.capacity_scaling, G) def test_sum_demands_not_zero(self): G = nx.DiGraph() - G.add_node('s', demand=-5) - G.add_node('t', demand=4) - G.add_edge('s', 'a', weight=1, capacity=3) - G.add_edge('a', 'b', weight=3) - G.add_edge('a', 'c', weight=-6) - G.add_edge('b', 'd', weight=1) - G.add_edge('c', 'd', weight=-2) - G.add_edge('d', 't', weight=1, capacity=3) + G.add_node("s", demand=-5) + G.add_node("t", demand=4) + G.add_edge("s", "a", weight=1, capacity=3) + G.add_edge("a", "b", weight=3) + G.add_edge("a", "c", weight=-6) + G.add_edge("b", "d", weight=1) + G.add_edge("c", "d", weight=-2) + G.add_edge("d", "t", weight=1, capacity=3) pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) def test_no_flow_satisfying_demands(self): G = nx.DiGraph() - G.add_node('s', demand=-5) - G.add_node('t', demand=5) - G.add_edge('s', 'a', weight=1, capacity=3) - G.add_edge('a', 'b', weight=3) - G.add_edge('a', 'c', weight=-6) - G.add_edge('b', 'd', weight=1) - G.add_edge('c', 'd', weight=-2) - G.add_edge('d', 't', weight=1, capacity=3) + G.add_node("s", demand=-5) + G.add_node("t", demand=5) + G.add_edge("s", "a", weight=1, capacity=3) + G.add_edge("a", "b", weight=3) + G.add_edge("a", "c", weight=-6) + G.add_edge("b", "d", weight=1) + G.add_edge("c", "d", weight=-2) + G.add_edge("d", "t", weight=1, capacity=3) pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) def test_transshipment(self): G = nx.DiGraph() - G.add_node('a', demand=1) - G.add_node('b', demand=-2) - G.add_node('c', demand=-2) - G.add_node('d', demand=3) - G.add_node('e', demand=-4) - G.add_node('f', demand=-4) - G.add_node('g', demand=3) - G.add_node('h', demand=2) - G.add_node('r', demand=3) - G.add_edge('a', 'c', weight=3) - G.add_edge('r', 'a', weight=2) - G.add_edge('b', 'a', weight=9) - G.add_edge('r', 'c', weight=0) - G.add_edge('b', 'r', weight=-6) - G.add_edge('c', 'd', weight=5) - G.add_edge('e', 'r', weight=4) - G.add_edge('e', 'f', weight=3) - G.add_edge('h', 'b', weight=4) - G.add_edge('f', 'd', weight=7) - G.add_edge('f', 'h', weight=12) - G.add_edge('g', 'd', weight=12) - G.add_edge('f', 'g', weight=-1) - G.add_edge('h', 'g', weight=-10) + G.add_node("a", demand=1) + G.add_node("b", demand=-2) + G.add_node("c", demand=-2) + G.add_node("d", demand=3) + G.add_node("e", demand=-4) + G.add_node("f", demand=-4) + G.add_node("g", demand=3) + G.add_node("h", demand=2) + G.add_node("r", demand=3) + G.add_edge("a", "c", weight=3) + G.add_edge("r", "a", weight=2) + G.add_edge("b", "a", weight=9) + G.add_edge("r", "c", weight=0) + G.add_edge("b", "r", weight=-6) + G.add_edge("c", "d", weight=5) + G.add_edge("e", "r", weight=4) + G.add_edge("e", "f", weight=3) + G.add_edge("h", "b", weight=4) + G.add_edge("f", "d", weight=7) + G.add_edge("f", "h", weight=12) + G.add_edge("g", "d", weight=12) + G.add_edge("f", "g", weight=-1) + G.add_edge("h", "g", weight=-10) flowCost, H = nx.network_simplex(G) - soln = {'a': {'c': 0}, - 'b': {'a': 0, 'r': 2}, - 'c': {'d': 3}, - 'd': {}, - 'e': {'r': 3, 'f': 1}, - 'f': {'d': 0, 'g': 3, 'h': 2}, - 'g': {'d': 0}, - 'h': {'b': 0, 'g': 0}, - 'r': {'a': 1, 'c': 1}} + soln = { + "a": {"c": 0}, + "b": {"a": 0, "r": 2}, + "c": {"d": 3}, + "d": {}, + "e": {"r": 3, "f": 1}, + "f": {"d": 0, "g": 3, "h": 2}, + "g": {"d": 0}, + "h": {"b": 0, "g": 0}, + "r": {"a": 1, "c": 1}, + } assert flowCost == 41 assert nx.min_cost_flow_cost(G) == 41 assert H == soln @@ -115,32 +114,32 @@ class 
TestMinCostFlow: def test_max_flow_min_cost(self): G = nx.DiGraph() - G.add_edge('s', 'a', bandwidth=6) - G.add_edge('s', 'c', bandwidth=10, cost=10) - G.add_edge('a', 'b', cost=6) - G.add_edge('b', 'd', bandwidth=8, cost=7) - G.add_edge('c', 'd', cost=10) - G.add_edge('d', 't', bandwidth=5, cost=5) - soln = {'s': {'a': 5, 'c': 0}, - 'a': {'b': 5}, - 'b': {'d': 5}, - 'c': {'d': 0}, - 'd': {'t': 5}, - 't': {}} - flow = nx.max_flow_min_cost(G, 's', 't', capacity='bandwidth', - weight='cost') + G.add_edge("s", "a", bandwidth=6) + G.add_edge("s", "c", bandwidth=10, cost=10) + G.add_edge("a", "b", cost=6) + G.add_edge("b", "d", bandwidth=8, cost=7) + G.add_edge("c", "d", cost=10) + G.add_edge("d", "t", bandwidth=5, cost=5) + soln = { + "s": {"a": 5, "c": 0}, + "a": {"b": 5}, + "b": {"d": 5}, + "c": {"d": 0}, + "d": {"t": 5}, + "t": {}, + } + flow = nx.max_flow_min_cost(G, "s", "t", capacity="bandwidth", weight="cost") assert flow == soln - assert nx.cost_of_flow(G, flow, weight='cost') == 90 + assert nx.cost_of_flow(G, flow, weight="cost") == 90 - G.add_edge('t', 's', cost=-100) - flowCost, flow = nx.capacity_scaling(G, capacity='bandwidth', - weight='cost') - G.remove_edge('t', 's') + G.add_edge("t", "s", cost=-100) + flowCost, flow = nx.capacity_scaling(G, capacity="bandwidth", weight="cost") + G.remove_edge("t", "s") assert flowCost == -410 - assert flow['t']['s'] == 5 - del flow['t']['s'] + assert flow["t"]["s"] == 5 + del flow["t"]["s"] assert flow == soln - assert nx.cost_of_flow(G, flow, weight='cost') == 90 + assert nx.cost_of_flow(G, flow, weight="cost") == 90 def test_digraph1(self): # From Bradley, S. P., Hax, A. C. and Magnanti, T. L. Applied @@ -149,21 +148,27 @@ class TestMinCostFlow: G.add_node(1, demand=-20) G.add_node(4, demand=5) G.add_node(5, demand=15) - G.add_edges_from([(1, 2, {'capacity': 15, 'weight': 4}), - (1, 3, {'capacity': 8, 'weight': 4}), - (2, 3, {'weight': 2}), - (2, 4, {'capacity': 4, 'weight': 2}), - (2, 5, {'capacity': 10, 'weight': 6}), - (3, 4, {'capacity': 15, 'weight': 1}), - (3, 5, {'capacity': 5, 'weight': 3}), - (4, 5, {'weight': 2}), - (5, 3, {'capacity': 4, 'weight': 1})]) + G.add_edges_from( + [ + (1, 2, {"capacity": 15, "weight": 4}), + (1, 3, {"capacity": 8, "weight": 4}), + (2, 3, {"weight": 2}), + (2, 4, {"capacity": 4, "weight": 2}), + (2, 5, {"capacity": 10, "weight": 6}), + (3, 4, {"capacity": 15, "weight": 1}), + (3, 5, {"capacity": 5, "weight": 3}), + (4, 5, {"weight": 2}), + (5, 3, {"capacity": 4, "weight": 1}), + ] + ) flowCost, H = nx.network_simplex(G) - soln = {1: {2: 12, 3: 8}, - 2: {3: 8, 4: 4, 5: 0}, - 3: {4: 11, 5: 5}, - 4: {5: 10}, - 5: {3: 0}} + soln = { + 1: {2: 12, 3: 8}, + 2: {3: 8, 4: 4, 5: 0}, + 3: {4: 11, 5: 5}, + 4: {5: 10}, + 5: {3: 0}, + } assert flowCost == 150 assert nx.min_cost_flow_cost(G) == 150 assert H == soln @@ -179,9 +184,9 @@ class TestMinCostFlow: # Example from ticket #430 from mfrasca. Original source: # http://www.cs.princeton.edu/courses/archive/spr03/cs226/lectures/mincost.4up.pdf, slide 11. 
G = nx.DiGraph() - G.add_edge('s', 1, capacity=12) - G.add_edge('s', 2, capacity=6) - G.add_edge('s', 3, capacity=14) + G.add_edge("s", 1, capacity=12) + G.add_edge("s", 2, capacity=6) + G.add_edge("s", 3, capacity=14) G.add_edge(1, 2, capacity=11, weight=4) G.add_edge(2, 3, capacity=9, weight=6) G.add_edge(1, 4, capacity=5, weight=5) @@ -191,26 +196,28 @@ class TestMinCostFlow: G.add_edge(3, 6, capacity=31, weight=3) G.add_edge(4, 5, capacity=18, weight=4) G.add_edge(5, 6, capacity=9, weight=5) - G.add_edge(4, 't', capacity=3) - G.add_edge(5, 't', capacity=7) - G.add_edge(6, 't', capacity=22) - flow = nx.max_flow_min_cost(G, 's', 't') - soln = {1: {2: 6, 4: 5, 5: 1}, - 2: {3: 6, 5: 4, 6: 2}, - 3: {6: 20}, - 4: {5: 2, 't': 3}, - 5: {6: 0, 't': 7}, - 6: {'t': 22}, - 's': {1: 12, 2: 6, 3: 14}, - 't': {}} + G.add_edge(4, "t", capacity=3) + G.add_edge(5, "t", capacity=7) + G.add_edge(6, "t", capacity=22) + flow = nx.max_flow_min_cost(G, "s", "t") + soln = { + 1: {2: 6, 4: 5, 5: 1}, + 2: {3: 6, 5: 4, 6: 2}, + 3: {6: 20}, + 4: {5: 2, "t": 3}, + 5: {6: 0, "t": 7}, + 6: {"t": 22}, + "s": {1: 12, 2: 6, 3: 14}, + "t": {}, + } assert flow == soln - G.add_edge('t', 's', weight=-100) + G.add_edge("t", "s", weight=-100) flowCost, flow = nx.capacity_scaling(G) - G.remove_edge('t', 's') - assert flow['t']['s'] == 32 + G.remove_edge("t", "s") + assert flow["t"]["s"] == 32 assert flowCost == -3007 - del flow['t']['s'] + del flow["t"]["s"] assert flow == soln assert nx.cost_of_flow(G, flow) == 193 @@ -221,64 +228,64 @@ class TestMinCostFlow: by mfrasca.""" G = nx.DiGraph() - G.add_edge('s', 'a') - G['s']['a'].update({0: 2, 1: 4}) - G.add_edge('s', 'b') - G['s']['b'].update({0: 2, 1: 1}) - G.add_edge('a', 'b') - G['a']['b'].update({0: 5, 1: 2}) - G.add_edge('a', 't') - G['a']['t'].update({0: 1, 1: 5}) - G.add_edge('b', 'a') - G['b']['a'].update({0: 1, 1: 3}) - G.add_edge('b', 't') - G['b']['t'].update({0: 3, 1: 2}) + G.add_edge("s", "a") + G["s"]["a"].update({0: 2, 1: 4}) + G.add_edge("s", "b") + G["s"]["b"].update({0: 2, 1: 1}) + G.add_edge("a", "b") + G["a"]["b"].update({0: 5, 1: 2}) + G.add_edge("a", "t") + G["a"]["t"].update({0: 1, 1: 5}) + G.add_edge("b", "a") + G["b"]["a"].update({0: 1, 1: 3}) + G.add_edge("b", "t") + G["b"]["t"].update({0: 3, 1: 2}) "PS.ex.7.1: testing main function" - sol = nx.max_flow_min_cost(G, 's', 't', capacity=0, weight=1) - flow = sum(v for v in sol['s'].values()) + sol = nx.max_flow_min_cost(G, "s", "t", capacity=0, weight=1) + flow = sum(v for v in sol["s"].values()) assert 4 == flow assert 23 == nx.cost_of_flow(G, sol, weight=1) - assert sol['s'] == {'a': 2, 'b': 2} - assert sol['a'] == {'b': 1, 't': 1} - assert sol['b'] == {'a': 0, 't': 3} - assert sol['t'] == {} + assert sol["s"] == {"a": 2, "b": 2} + assert sol["a"] == {"b": 1, "t": 1} + assert sol["b"] == {"a": 0, "t": 3} + assert sol["t"] == {} - G.add_edge('t', 's') - G['t']['s'].update({1: -100}) + G.add_edge("t", "s") + G["t"]["s"].update({1: -100}) flowCost, sol = nx.capacity_scaling(G, capacity=0, weight=1) - G.remove_edge('t', 's') - flow = sum(v for v in sol['s'].values()) + G.remove_edge("t", "s") + flow = sum(v for v in sol["s"].values()) assert 4 == flow - assert sol['t']['s'] == 4 + assert sol["t"]["s"] == 4 assert flowCost == -377 - del sol['t']['s'] - assert sol['s'] == {'a': 2, 'b': 2} - assert sol['a'] == {'b': 1, 't': 1} - assert sol['b'] == {'a': 0, 't': 3} - assert sol['t'] == {} + del sol["t"]["s"] + assert sol["s"] == {"a": 2, "b": 2} + assert sol["a"] == {"b": 1, "t": 1} + assert sol["b"] == {"a": 
0, "t": 3} + assert sol["t"] == {} assert nx.cost_of_flow(G, sol, weight=1) == 23 def test_zero_capacity_edges(self): """Address issue raised in ticket #617 by arv.""" G = nx.DiGraph() - G.add_edges_from([(1, 2, {'capacity': 1, 'weight': 1}), - (1, 5, {'capacity': 1, 'weight': 1}), - (2, 3, {'capacity': 0, 'weight': 1}), - (2, 5, {'capacity': 1, 'weight': 1}), - (5, 3, {'capacity': 2, 'weight': 1}), - (5, 4, {'capacity': 0, 'weight': 1}), - (3, 4, {'capacity': 2, 'weight': 1})]) - G.nodes[1]['demand'] = -1 - G.nodes[2]['demand'] = -1 - G.nodes[4]['demand'] = 2 + G.add_edges_from( + [ + (1, 2, {"capacity": 1, "weight": 1}), + (1, 5, {"capacity": 1, "weight": 1}), + (2, 3, {"capacity": 0, "weight": 1}), + (2, 5, {"capacity": 1, "weight": 1}), + (5, 3, {"capacity": 2, "weight": 1}), + (5, 4, {"capacity": 0, "weight": 1}), + (3, 4, {"capacity": 2, "weight": 1}), + ] + ) + G.nodes[1]["demand"] = -1 + G.nodes[2]["demand"] = -1 + G.nodes[4]["demand"] = 2 flowCost, H = nx.network_simplex(G) - soln = {1: {2: 0, 5: 1}, - 2: {3: 0, 5: 1}, - 3: {4: 2}, - 4: {}, - 5: {3: 2, 4: 0}} + soln = {1: {2: 0, 5: 1}, 2: {3: 0, 5: 1}, 3: {4: 2}, 4: {}, 5: {3: 2, 4: 0}} assert flowCost == 6 assert nx.min_cost_flow_cost(G) == 6 assert H == soln @@ -293,21 +300,21 @@ class TestMinCostFlow: def test_digon(self): """Check if digons are handled properly. Taken from ticket #618 by arv.""" - nodes = [(1, {}), - (2, {'demand': -4}), - (3, {'demand': 4}), - ] - edges = [(1, 2, {'capacity': 3, 'weight': 600000}), - (2, 1, {'capacity': 2, 'weight': 0}), - (2, 3, {'capacity': 5, 'weight': 714285}), - (3, 2, {'capacity': 2, 'weight': 0}), - ] + nodes = [ + (1, {}), + (2, {"demand": -4}), + (3, {"demand": 4}), + ] + edges = [ + (1, 2, {"capacity": 3, "weight": 600000}), + (2, 1, {"capacity": 2, "weight": 0}), + (2, 3, {"capacity": 5, "weight": 714285}), + (3, 2, {"capacity": 2, "weight": 0}), + ] G = nx.DiGraph(edges) G.add_nodes_from(nodes) flowCost, H = nx.network_simplex(G) - soln = {1: {2: 0}, - 2: {1: 0, 3: 4}, - 3: {2: 0}} + soln = {1: {2: 0}, 2: {1: 0, 3: 4}, 3: {2: 0}} assert flowCost == 2857140 assert nx.min_cost_flow_cost(G) == 2857140 assert H == soln @@ -325,8 +332,8 @@ class TestMinCostFlow: G = nx.DiGraph() G.add_nodes_from(range(5), demand=0) - G.nodes[4]['demand'] = -13 - G.nodes[3]['demand'] = 13 + G.nodes[4]["demand"] = -13 + G.nodes[3]["demand"] = 13 G.add_edges_from([(0, 2), (0, 3), (2, 1)], capacity=20, weight=0.1) pytest.raises(nx.NetworkXUnfeasible, nx.min_cost_flow, G) @@ -334,15 +341,17 @@ class TestMinCostFlow: def test_infinite_capacity_neg_digon(self): """An infinite capacity negative cost digon results in an unbounded instance.""" - nodes = [(1, {}), - (2, {'demand': -4}), - (3, {'demand': 4}), - ] - edges = [(1, 2, {'weight': -600}), - (2, 1, {'weight': 0}), - (2, 3, {'capacity': 5, 'weight': 714285}), - (3, 2, {'capacity': 2, 'weight': 0}), - ] + nodes = [ + (1, {}), + (2, {"demand": -4}), + (3, {"demand": 4}), + ] + edges = [ + (1, 2, {"weight": -600}), + (2, 1, {"weight": 0}), + (2, 3, {"capacity": 5, "weight": 714285}), + (3, 2, {"capacity": 2, "weight": 0}), + ] G = nx.DiGraph(edges) G.add_nodes_from(nodes) pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) @@ -352,20 +361,20 @@ class TestMinCostFlow: """The digon should receive the maximum amount of flow it can handle. 
Taken from ticket #749 by @chuongdo.""" G = nx.DiGraph() - G.add_edge('a', 'b', capacity=1, weight=-1) - G.add_edge('b', 'a', capacity=1, weight=-1) + G.add_edge("a", "b", capacity=1, weight=-1) + G.add_edge("b", "a", capacity=1, weight=-1) min_cost = -2 assert nx.min_cost_flow_cost(G) == min_cost flowCost, H = nx.capacity_scaling(G) assert flowCost == -2 - assert H == {'a': {'b': 1}, 'b': {'a': 1}} + assert H == {"a": {"b": 1}, "b": {"a": 1}} assert nx.cost_of_flow(G, H) == -2 def test_multidigraph(self): """Multidigraphs are acceptable.""" G = nx.MultiDiGraph() - G.add_weighted_edges_from([(1, 2, 1), (2, 3, 2)], weight='capacity') + G.add_weighted_edges_from([(1, 2, 1), (2, 3, 2)], weight="capacity") flowCost, H = nx.network_simplex(G) assert flowCost == 0 assert H == {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}} @@ -382,7 +391,7 @@ class TestMinCostFlow: G.add_edge(1, 1, weight=-1) pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) - G[1][1]['capacity'] = 2 + G[1][1]["capacity"] = 2 flowCost, H = nx.network_simplex(G) assert flowCost == -2 assert H == {1: {1: 2}} @@ -391,17 +400,17 @@ class TestMinCostFlow: assert H == {1: {1: 2}} G = nx.MultiDiGraph() - G.add_edge(1, 1, 'x', weight=-1) - G.add_edge(1, 1, 'y', weight=1) + G.add_edge(1, 1, "x", weight=-1) + G.add_edge(1, 1, "y", weight=1) pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) - G[1][1]['x']['capacity'] = 2 + G[1][1]["x"]["capacity"] = 2 flowCost, H = nx.network_simplex(G) assert flowCost == -2 - assert H == {1: {1: {'x': 2, 'y': 0}}} + assert H == {1: {1: {"x": 2, "y": 0}}} flowCost, H = nx.capacity_scaling(G) assert flowCost == -2 - assert H == {1: {1: {'x': 2, 'y': 0}}} + assert H == {1: {1: {"x": 2, "y": 0}}} def test_bone_shaped(self): # From #1283 @@ -419,12 +428,10 @@ class TestMinCostFlow: G.add_edge(0, 3, capacity=0) flowCost, H = nx.network_simplex(G) assert flowCost == 0 - assert ( - H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}}) + assert H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}} flowCost, H = nx.capacity_scaling(G) assert flowCost == 0 - assert ( - H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}}) + assert H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}} def test_exceptions(self): G = nx.Graph() @@ -436,29 +443,29 @@ class TestMinCostFlow: G = nx.DiGraph() pytest.raises(nx.NetworkXError, nx.network_simplex, G) pytest.raises(nx.NetworkXError, nx.capacity_scaling, G) - G.add_node(0, demand=float('inf')) + G.add_node(0, demand=float("inf")) pytest.raises(nx.NetworkXError, nx.network_simplex, G) pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) - G.nodes[0]['demand'] = 0 + G.nodes[0]["demand"] = 0 G.add_node(1, demand=0) - G.add_edge(0, 1, weight=-float('inf')) + G.add_edge(0, 1, weight=-float("inf")) pytest.raises(nx.NetworkXError, nx.network_simplex, G) pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) - G[0][1]['weight'] = 0 - G.add_edge(0, 0, weight=float('inf')) + G[0][1]["weight"] = 0 + G.add_edge(0, 0, weight=float("inf")) pytest.raises(nx.NetworkXError, nx.network_simplex, G) # pytest.raises(nx.NetworkXError, nx.capacity_scaling, G) - G[0][0]['weight'] = 0 - G[0][1]['capacity'] = -1 + G[0][0]["weight"] = 0 + G[0][1]["capacity"] = -1 pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) # pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) - 
G[0][1]['capacity'] = 0 - G[0][0]['capacity'] = -1 + G[0][1]["capacity"] = 0 + G[0][0]["capacity"] = -1 pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) # pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) def test_large(self): - fname = os.path.join(os.path.dirname(__file__), 'netgen-2.gpickle.bz2') + fname = os.path.join(os.path.dirname(__file__), "netgen-2.gpickle.bz2") G = nx.read_gpickle(fname) flowCost, flowDict = nx.network_simplex(G) assert 6749969302 == flowCost diff --git a/networkx/algorithms/flow/utils.py b/networkx/algorithms/flow/utils.py index 5e000a8c..a0b8309e 100644 --- a/networkx/algorithms/flow/utils.py +++ b/networkx/algorithms/flow/utils.py @@ -5,15 +5,22 @@ Utility classes and functions for network flow algorithms. from collections import deque import networkx as nx -__all__ = ['CurrentEdge', 'Level', 'GlobalRelabelThreshold', - 'build_residual_network', 'detect_unboundedness', 'build_flow_dict'] +__all__ = [ + "CurrentEdge", + "Level", + "GlobalRelabelThreshold", + "build_residual_network", + "detect_unboundedness", + "build_flow_dict", +] class CurrentEdge: """Mechanism for iterating over out-edges incident to a node in a circular manner. StopIteration exception is raised when wraparound occurs. """ - __slots__ = ('_edges', '_it', '_curr') + + __slots__ = ("_edges", "_it", "_curr") def __init__(self, edges): self._edges = edges @@ -38,7 +45,8 @@ class CurrentEdge: class Level: """Active and inactive nodes in a level. """ - __slots__ = ('active', 'inactive') + + __slots__ = ("active", "inactive") def __init__(self): self.active = set() @@ -51,7 +59,7 @@ class GlobalRelabelThreshold: """ def __init__(self, n, m, freq): - self._threshold = (n + m) / freq if freq else float('inf') + self._threshold = (n + m) / freq if freq else float("inf") self._work = 0 def add_work(self, work): @@ -90,16 +98,18 @@ def build_residual_network(G, capacity): """ if G.is_multigraph(): - raise nx.NetworkXError( - 'MultiGraph and MultiDiGraph not supported (yet).') + raise nx.NetworkXError("MultiGraph and MultiDiGraph not supported (yet).") R = nx.DiGraph() R.add_nodes_from(G) - inf = float('inf') + inf = float("inf") # Extract edges with positive capacities. Self loops excluded. - edge_list = [(u, v, attr) for u, v, attr in G.edges(data=True) - if u != v and attr.get(capacity, inf) > 0] + edge_list = [ + (u, v, attr) + for u, v, attr in G.edges(data=True) + if u != v and attr.get(capacity, inf) > 0 + ] # Simulate infinity with three times the sum of the finite edge capacities # or any positive value if the sum is zero. This allows the # infinite-capacity edges to be distinguished for unboundedness detection @@ -110,8 +120,15 @@ def build_residual_network(G, capacity): # finite-capacity edge is at most 1/3 of inf, if an operation moves more # than 1/3 of inf units of flow to t, there must be an infinite-capacity # s-t path in G. - inf = 3 * sum(attr[capacity] for u, v, attr in edge_list - if capacity in attr and attr[capacity] != inf) or 1 + inf = ( + 3 + * sum( + attr[capacity] + for u, v, attr in edge_list + if capacity in attr and attr[capacity] != inf + ) + or 1 + ) if G.is_directed(): for u, v, attr in edge_list: r = min(attr.get(capacity, inf), inf) @@ -122,7 +139,7 @@ def build_residual_network(G, capacity): R.add_edge(v, u, capacity=0) else: # The edge (u, v) was added when (v, u) was visited. - R[u][v]['capacity'] = r + R[u][v]["capacity"] = r else: for u, v, attr in edge_list: # Add a pair of edges with equal residual capacities. 
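(Aside, not part of this commit: a minimal sketch of the infinity convention described in the comment above, assuming build_residual_network is importable from networkx.algorithms.flow as in current releases.)

import networkx as nx
from networkx.algorithms.flow import build_residual_network

G = nx.DiGraph()
G.add_edge("s", "a", capacity=4)  # finite capacity
G.add_edge("a", "t")              # missing attribute -> treated as infinite

R = build_residual_network(G, "capacity")
# Infinity is simulated as 3 * (sum of the finite capacities) = 12 here,
# and recorded on the residual network for unboundedness detection.
assert R.graph["inf"] == 12
assert R["a"]["t"]["capacity"] == 12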
@@ -131,7 +148,7 @@ def build_residual_network(G, capacity): R.add_edge(v, u, capacity=r) # Record the value simulating infinity. - R.graph['inf'] = inf + R.graph["inf"] = inf return R @@ -141,14 +158,15 @@ def detect_unboundedness(R, s, t): """ q = deque([s]) seen = {s} - inf = R.graph['inf'] + inf = R.graph["inf"] while q: u = q.popleft() for v, attr in R[u].items(): - if attr['capacity'] == inf and v not in seen: + if attr["capacity"] == inf and v not in seen: if v == t: raise nx.NetworkXUnbounded( - 'Infinite capacity path, flow unbounded above.') + "Infinite capacity path, flow unbounded above." + ) seen.add(v) q.append(v) @@ -159,6 +177,7 @@ def build_flow_dict(G, R): flow_dict = {} for u in G: flow_dict[u] = {v: 0 for v in G[u]} - flow_dict[u].update((v, attr['flow']) for v, attr in R[u].items() - if attr['flow'] > 0) + flow_dict[u].update( + (v, attr["flow"]) for v, attr in R[u].items() if attr["flow"] > 0 + ) return flow_dict diff --git a/networkx/algorithms/graph_hashing.py b/networkx/algorithms/graph_hashing.py index d0c639a2..ab95fe61 100644 --- a/networkx/algorithms/graph_hashing.py +++ b/networkx/algorithms/graph_hashing.py @@ -13,11 +13,7 @@ __all__ = [ def weisfeiler_lehman_graph_hash( - G, - edge_attr=None, - node_attr=None, - iterations=3, - digest_size=16 + G, edge_attr=None, node_attr=None, iterations=3, digest_size=16 ): """Return Weisfeiler Lehman (WL) graph hash. @@ -106,7 +102,7 @@ def weisfeiler_lehman_graph_hash( for nei in G.neighbors(node): prefix = "" if not edge_attr else G[node][nei][edge_attr] label_list.append(prefix + node_labels[nei]) - return ''.join(sorted(label_list)) + return "".join(sorted(label_list)) def weisfeiler_lehman_step(G, labels, edge_attr=None, node_attr=None): """ @@ -116,8 +112,9 @@ def weisfeiler_lehman_graph_hash( """ new_labels = dict() for node in G.nodes(): - new_labels[node] = neighborhood_aggregate(G, node, labels, - edge_attr=edge_attr) + new_labels[node] = neighborhood_aggregate( + G, node, labels, edge_attr=edge_attr + ) return new_labels items = [] @@ -129,22 +126,21 @@ def weisfeiler_lehman_graph_hash( elif node_attr: node_labels[node] = str(G.nodes[node][node_attr]) else: - node_labels[node] = '' + node_labels[node] = "" for k in range(iterations): - node_labels = weisfeiler_lehman_step(G, node_labels, - edge_attr=edge_attr) + node_labels = weisfeiler_lehman_step(G, node_labels, edge_attr=edge_attr) counter = Counter() # count node labels for node, d in node_labels.items(): h = blake2b(digest_size=digest_size) - h.update(d.encode('ascii')) + h.update(d.encode("ascii")) counter.update([h.hexdigest()]) # sort the counter, extend total counts items.extend(sorted(counter.items(), key=lambda x: x[0])) # hash the final counter h = blake2b(digest_size=digest_size) - h.update(str(tuple(items)).encode('ascii')) + h.update(str(tuple(items)).encode("ascii")) h = h.hexdigest() return h diff --git a/networkx/algorithms/graphical.py b/networkx/algorithms/graphical.py index 309ea58c..1931db5c 100644 --- a/networkx/algorithms/graphical.py +++ b/networkx/algorithms/graphical.py @@ -3,16 +3,17 @@ import heapq import networkx as nx -__all__ = ['is_graphical', - 'is_multigraphical', - 'is_pseudographical', - 'is_digraphical', - 'is_valid_degree_sequence_erdos_gallai', - 'is_valid_degree_sequence_havel_hakimi', - ] +__all__ = [ + "is_graphical", + "is_multigraphical", + "is_pseudographical", + "is_digraphical", + "is_valid_degree_sequence_erdos_gallai", + "is_valid_degree_sequence_havel_hakimi", +] -def is_graphical(sequence, method='eg'): +def 
is_graphical(sequence, method="eg"): """Returns True if sequence is a valid degree sequence. A degree sequence is valid if some graph can realize it. @@ -47,9 +48,9 @@ def is_graphical(sequence, method='eg'): Havel-Hakimi [havel1955]_, [hakimi1962]_, [CL1996]_ """ - if method == 'eg': + if method == "eg": valid = is_valid_degree_sequence_erdos_gallai(list(sequence)) - elif method == 'hh': + elif method == "hh": valid = is_valid_degree_sequence_havel_hakimi(list(sequence)) else: msg = "`method` must be 'eg' or 'hh'" @@ -211,12 +212,12 @@ def is_valid_degree_sequence_erdos_gallai(deg_sequence): # Perform the EG checks using the reformulation of Zverovich and Zverovich k, sum_deg, sum_nj, sum_jnj = 0, 0, 0, 0 for dk in range(dmax, dmin - 1, -1): - if dk < k + 1: # Check if already past Durfee index + if dk < k + 1: # Check if already past Durfee index return True if num_degs[dk] > 0: run_size = num_degs[dk] # Process a run of identical-valued degrees - if dk < k + run_size: # Check if end of run is past Durfee index - run_size = dk - k # Adjust back to Durfee index + if dk < k + run_size: # Check if end of run is past Durfee index + run_size = dk - k # Adjust back to Durfee index sum_deg += run_size * dk for v in range(run_size): sum_nj += num_degs[k + v] diff --git a/networkx/algorithms/hierarchy.py b/networkx/algorithms/hierarchy.py index 71b0c307..fcf25bd0 100644 --- a/networkx/algorithms/hierarchy.py +++ b/networkx/algorithms/hierarchy.py @@ -3,7 +3,7 @@ Flow Hierarchy. """ import networkx as nx -__all__ = ['flow_hierarchy'] +__all__ = ["flow_hierarchy"] def flow_hierarchy(G, weight=None): @@ -44,4 +44,4 @@ def flow_hierarchy(G, weight=None): if not G.is_directed(): raise nx.NetworkXError("G must be a digraph in flow_hierarchy") scc = nx.strongly_connected_components(G) - return 1. - sum(G.subgraph(c).size(weight) for c in scc) / float(G.size(weight)) + return 1.0 - sum(G.subgraph(c).size(weight) for c in scc) / float(G.size(weight)) diff --git a/networkx/algorithms/hybrid.py b/networkx/algorithms/hybrid.py index cca1b01b..58868e89 100644 --- a/networkx/algorithms/hybrid.py +++ b/networkx/algorithms/hybrid.py @@ -6,7 +6,7 @@ graphs. import copy import networkx as nx -__all__ = ['kl_connected_subgraph', 'is_kl_connected'] +__all__ = ["kl_connected_subgraph", "is_kl_connected"] def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): @@ -59,7 +59,7 @@ def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): 2004. 89--104. """ - H = copy.deepcopy(G) # subgraph we construct by removing from G + H = copy.deepcopy(G) # subgraph we construct by removing from G graphOK = True deleted_some = True # hack to start off the while loop @@ -95,7 +95,7 @@ def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): if prev != w: G2.remove_edge(prev, w) prev = w -# path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1? + # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1? try: path = nx.shortest_path(G2, u, v) # ??? should "Cutoff" be k+1? except nx.NetworkXNoPath: @@ -179,7 +179,7 @@ def is_kl_connected(G, k, l, low_memory=False): if w != prev: G2.remove_edge(prev, w) prev = w -# path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1? + # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1? try: path = nx.shortest_path(G2, u, v) # ??? should "Cutoff" be k+1? 
except nx.NetworkXNoPath: diff --git a/networkx/algorithms/isolate.py b/networkx/algorithms/isolate.py index 86c0bc1e..1b7d622f 100644 --- a/networkx/algorithms/isolate.py +++ b/networkx/algorithms/isolate.py @@ -2,7 +2,7 @@ Functions for identifying isolate (degree zero) nodes. """ -__all__ = ['is_isolate', 'isolates', 'number_of_isolates'] +__all__ = ["is_isolate", "isolates", "number_of_isolates"] def is_isolate(G, n): diff --git a/networkx/algorithms/isomorphism/ismags.py b/networkx/algorithms/isomorphism/ismags.py index db4f3a0a..4f674cf0 100644 --- a/networkx/algorithms/isomorphism/ismags.py +++ b/networkx/algorithms/isomorphism/ismags.py @@ -111,7 +111,7 @@ References .. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph """ -__all__ = ['ISMAGS'] +__all__ = ["ISMAGS"] from collections import defaultdict, Counter from functools import reduce, wraps @@ -140,7 +140,7 @@ def are_all_equal(iterable): pass else: if len(shape) > 1: - message = 'The function does not works on multidimension arrays.' + message = "The function does not works on multidimension arrays." raise NotImplementedError(message) from None iterator = iter(iterable) @@ -270,8 +270,8 @@ class ISMAGS: Enumeration", PLoS One 9(5): e97896, 2014. https://doi.org/10.1371/journal.pone.0097896 """ - def __init__(self, graph, subgraph, node_match=None, edge_match=None, - cache=None): + + def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None): """ Parameters ---------- @@ -339,32 +339,40 @@ class ISMAGS: @property def _sgn_partitions(self): if self._sgn_partitions_ is None: + def nodematch(node1, node2): return self.node_equality(self.subgraph, node1, self.subgraph, node2) + self._sgn_partitions_ = make_partitions(self.subgraph.nodes, nodematch) return self._sgn_partitions_ @property def _sge_partitions(self): if self._sge_partitions_ is None: + def edgematch(edge1, edge2): return self.edge_equality(self.subgraph, edge1, self.subgraph, edge2) + self._sge_partitions_ = make_partitions(self.subgraph.edges, edgematch) return self._sge_partitions_ @property def _gn_partitions(self): if self._gn_partitions_ is None: + def nodematch(node1, node2): return self.node_equality(self.graph, node1, self.graph, node2) + self._gn_partitions_ = make_partitions(self.graph.nodes, nodematch) return self._gn_partitions_ @property def _ge_partitions(self): if self._ge_partitions_ is None: + def edgematch(edge1, edge2): return self.edge_equality(self.graph, edge1, self.graph, edge2) + self._ge_partitions_ = make_partitions(self.graph.edges, edgematch) return self._ge_partitions_ @@ -397,8 +405,9 @@ class ISMAGS: if self._node_compat_ is not None: return self._node_compat_ self._node_compat_ = {} - for sgn_part_color, gn_part_color in itertools.product(range(len(self._sgn_partitions)), - range(len(self._gn_partitions))): + for sgn_part_color, gn_part_color in itertools.product( + range(len(self._sgn_partitions)), range(len(self._gn_partitions)) + ): sgn = next(iter(self._sgn_partitions[sgn_part_color])) gn = next(iter(self._gn_partitions[gn_part_color])) if self.node_equality(self.subgraph, sgn, self.graph, gn): @@ -410,8 +419,9 @@ class ISMAGS: if self._edge_compat_ is not None: return self._edge_compat_ self._edge_compat_ = {} - for sge_part_color, ge_part_color in itertools.product(range(len(self._sge_partitions)), - range(len(self._ge_partitions))): + for sge_part_color, ge_part_color in itertools.product( + range(len(self._sge_partitions)), range(len(self._ge_partitions)) + ): sge = 
next(iter(self._sge_partitions[sge_part_color])) ge = next(iter(self._ge_partitions[ge_part_color])) if self.edge_equality(self.subgraph, sge, self.graph, ge): @@ -423,6 +433,7 @@ class ISMAGS: @wraps(cmp) def comparer(graph1, node1, graph2, node2): return cmp(graph1.nodes[node1], graph2.nodes[node2]) + return comparer @staticmethod @@ -430,6 +441,7 @@ class ISMAGS: @wraps(cmp) def comparer(graph1, edge1, graph2, edge2): return cmp(graph1.edges[edge1], graph2.edges[edge2]) + return comparer def find_isomorphisms(self, symmetry=True): @@ -459,9 +471,9 @@ class ISMAGS: return if symmetry: - _, cosets = self.analyze_symmetry(self.subgraph, - self._sgn_partitions, - self._sge_colors) + _, cosets = self.analyze_symmetry( + self.subgraph, self._sgn_partitions, self._sge_colors + ) constraints = self._make_constraints(cosets) else: constraints = [] @@ -505,14 +517,14 @@ class ISMAGS: """ g_counts = {} for gn in self.graph: - g_counts[gn] = self._find_neighbor_color_count(self.graph, gn, - self._gn_colors, - self._ge_colors) + g_counts[gn] = self._find_neighbor_color_count( + self.graph, gn, self._gn_colors, self._ge_colors + ) candidates = defaultdict(set) for sgn in self.subgraph: - sg_count = self._find_neighbor_color_count(self.subgraph, sgn, - self._sgn_colors, - self._sge_colors) + sg_count = self._find_neighbor_color_count( + self.subgraph, sgn, self._sgn_colors, self._sge_colors + ) new_sg_count = Counter() for (sge_color, sgn_color), count in sg_count.items(): try: @@ -554,9 +566,9 @@ class ISMAGS: return if symmetry: - _, cosets = self.analyze_symmetry(self.subgraph, - self._sgn_partitions, - self._sge_colors) + _, cosets = self.analyze_symmetry( + self.subgraph, self._sgn_partitions, self._sge_colors + ) constraints = self._make_constraints(cosets) else: constraints = [] @@ -584,19 +596,24 @@ class ISMAGS: can be interchanged without changing nodes less than `key`. 
""" if self._symmetry_cache is not None: - key = hash((tuple(graph.nodes), tuple(graph.edges), - tuple(map(tuple, node_partitions)), tuple(edge_colors.items()))) + key = hash( + ( + tuple(graph.nodes), + tuple(graph.edges), + tuple(map(tuple, node_partitions)), + tuple(edge_colors.items()), + ) + ) if key in self._symmetry_cache: return self._symmetry_cache[key] - node_partitions = list(self._refine_node_partitions(graph, - node_partitions, - edge_colors)) + node_partitions = list( + self._refine_node_partitions(graph, node_partitions, edge_colors) + ) assert len(node_partitions) == 1 node_partitions = node_partitions[0] - permutations, cosets = self._process_ordered_pair_partitions(graph, - node_partitions, - node_partitions, - edge_colors) + permutations, cosets = self._process_ordered_pair_partitions( + graph, node_partitions, node_partitions, edge_colors + ) if self._symmetry_cache is not None: self._symmetry_cache[key] = permutations, cosets return permutations, cosets @@ -610,7 +627,9 @@ class ISMAGS: ------- bool """ - return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic(symmetry) + return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic( + symmetry + ) def subgraph_is_isomorphic(self, symmetry=False): """ @@ -714,7 +733,9 @@ class ISMAGS: for item in items: by_len[len(item)].append(item) - yield from itertools.product(*(itertools.permutations(by_len[l]) for l in sorted(by_len))) + yield from itertools.product( + *(itertools.permutations(by_len[l]) for l in sorted(by_len)) + ) @classmethod def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False): @@ -723,14 +744,17 @@ class ISMAGS: that all nodes in a partition have 1) the same color, and 2) the same number of edges to specific other partitions. """ + def equal_color(node1, node2): return node_edge_colors[node1] == node_edge_colors[node2] node_partitions = list(node_partitions) node_colors = partition_to_color(node_partitions) node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors) - if all(are_all_equal(node_edge_colors[node] for node in partition) - for partition in node_partitions): + if all( + are_all_equal(node_edge_colors[node] for node in partition) + for partition in node_partitions + ): yield node_partitions return @@ -739,8 +763,11 @@ class ISMAGS: for partition in node_partitions: if not are_all_equal(node_edge_colors[node] for node in partition): refined = make_partitions(partition, equal_color) - if (branch and len(refined) != 1 and - len({len(r) for r in refined}) != len([len(r) for r in refined])): + if ( + branch + and len(refined) != 1 + and len({len(r) for r in refined}) != len([len(r) for r in refined]) + ): # This is where it breaks. There are multiple new cells # in refined with the same length, and their order # matters. @@ -827,7 +854,9 @@ class ISMAGS: # is a dict of frozensets of frozensets of node indices it's # a bit clunky. We can't do .add, and + also doesn't work. We # could do |, but I deem union to be clearer. 
- new_candidates[sgn2] = new_candidates[sgn2].union([frozenset(gn2_options)]) + new_candidates[sgn2] = new_candidates[sgn2].union( + [frozenset(gn2_options)] + ) if (sgn, sgn2) in constraints: gn2_options = {gn2 for gn2 in self.graph if gn2 > gn} @@ -835,24 +864,28 @@ class ISMAGS: gn2_options = {gn2 for gn2 in self.graph if gn2 < gn} else: continue # pragma: no cover - new_candidates[sgn2] = new_candidates[sgn2].union([frozenset(gn2_options)]) + new_candidates[sgn2] = new_candidates[sgn2].union( + [frozenset(gn2_options)] + ) # The next node is the one that is unmapped and has fewest # candidates # Pylint disables because it's a one-shot function. - next_sgn = min(left_to_map, - key=lambda n: min(new_candidates[n], key=len)) # pylint: disable=cell-var-from-loop - yield from self._map_nodes(next_sgn, - new_candidates, - constraints, - mapping=mapping, - to_be_mapped=to_be_mapped) + next_sgn = min( + left_to_map, key=lambda n: min(new_candidates[n], key=len) + ) # pylint: disable=cell-var-from-loop + yield from self._map_nodes( + next_sgn, + new_candidates, + constraints, + mapping=mapping, + to_be_mapped=to_be_mapped, + ) # Unmap sgn-gn. Strictly not necessary since it'd get overwritten # when making a new mapping for sgn. # del mapping[sgn] - def _largest_common_subgraph(self, candidates, constraints, - to_be_mapped=None): + def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None): """ Find all largest common subgraphs honoring constraints. """ @@ -865,7 +898,9 @@ class ISMAGS: # smaller every iteration. # pylint disable becuase it's guarded against by default value - current_size = len(next(iter(to_be_mapped), [])) # pylint: disable=stop-iteration-return + current_size = len( + next(iter(to_be_mapped), []) + ) # pylint: disable=stop-iteration-return found_iso = False if current_size <= len(self.graph): @@ -879,8 +914,9 @@ class ISMAGS: for nodes in sorted(to_be_mapped, key=sorted): # Find the isomorphism between subgraph[to_be_mapped] <= graph next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len)) - isomorphs = self._map_nodes(next_sgn, candidates, constraints, - to_be_mapped=nodes) + isomorphs = self._map_nodes( + next_sgn, candidates, constraints, to_be_mapped=nodes + ) # This is effectively `yield from isomorphs`, except that we look # whether an item was yielded. @@ -915,8 +951,9 @@ class ISMAGS: new_nodes = self._remove_node(sgn, nodes, constraints) left_to_be_mapped.add(new_nodes) # COMBINATION - yield from self._largest_common_subgraph(candidates, constraints, - to_be_mapped=left_to_be_mapped) + yield from self._largest_common_subgraph( + candidates, constraints, to_be_mapped=left_to_be_mapped + ) @staticmethod def _remove_node(node, nodes, constraints): @@ -947,8 +984,10 @@ class ISMAGS: for top, bot in zip(top_partitions, bottom_partitions): # top and bot have only one element if len(top) != 1 or len(bot) != 1: - raise IndexError("Not all nodes are coupled. This is" - f" impossible: {top_partitions}, {bottom_partitions}") + raise IndexError( + "Not all nodes are coupled. 
This is" + f" impossible: {top_partitions}, {bottom_partitions}" + ) if top != bot: permutations.add(frozenset((next(iter(top)), next(iter(bot))))) return permutations @@ -976,8 +1015,16 @@ class ISMAGS: orbits[first].update(orbits[second]) del orbits[second] - def _couple_nodes(self, top_partitions, bottom_partitions, pair_idx, - t_node, b_node, graph, edge_colors): + def _couple_nodes( + self, + top_partitions, + bottom_partitions, + pair_idx, + t_node, + b_node, + graph, + edge_colors, + ): """ Generate new partitions from top and bottom_partitions where t_node is coupled to b_node. pair_idx is the index of the partitions where t_ and @@ -997,21 +1044,27 @@ class ISMAGS: new_top_partitions[pair_idx:pair_idx] = new_t_groups new_bottom_partitions[pair_idx:pair_idx] = new_b_groups - new_top_partitions = self._refine_node_partitions(graph, - new_top_partitions, - edge_colors) - new_bottom_partitions = self._refine_node_partitions(graph, - new_bottom_partitions, - edge_colors, branch=True) + new_top_partitions = self._refine_node_partitions( + graph, new_top_partitions, edge_colors + ) + new_bottom_partitions = self._refine_node_partitions( + graph, new_bottom_partitions, edge_colors, branch=True + ) new_top_partitions = list(new_top_partitions) assert len(new_top_partitions) == 1 new_top_partitions = new_top_partitions[0] for bot in new_bottom_partitions: yield list(new_top_partitions), bot - def _process_ordered_pair_partitions(self, graph, top_partitions, - bottom_partitions, edge_colors, - orbits=None, cosets=None): + def _process_ordered_pair_partitions( + self, + graph, + top_partitions, + bottom_partitions, + edge_colors, + orbits=None, + cosets=None, + ): """ Processes ordered pair partitions as per the reference paper. Finds and returns all permutations and cosets that leave the graph unchanged. 
@@ -1028,7 +1081,9 @@ class ISMAGS: else: cosets = cosets.copy() - assert all(len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions)) + assert all( + len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions) + ) # BASECASE if all(len(top) == 1 for top in top_partitions): @@ -1041,9 +1096,12 @@ class ISMAGS: return [], cosets permutations = [] - unmapped_nodes = {(node, idx) - for idx, t_partition in enumerate(top_partitions) - for node in t_partition if len(t_partition) > 1} + unmapped_nodes = { + (node, idx) + for idx, t_partition in enumerate(top_partitions) + for node in t_partition + if len(t_partition) > 1 + } node, pair_idx = min(unmapped_nodes) b_partition = bottom_partitions[pair_idx] @@ -1051,29 +1109,43 @@ class ISMAGS: if len(b_partition) == 1: # Can never result in symmetry continue - if node != node2 and any(node in orbit and node2 in orbit for orbit in orbits): + if node != node2 and any( + node in orbit and node2 in orbit for orbit in orbits + ): # Orbit prune branch continue # REDUCTION # Couple node to node2 - partitions = self._couple_nodes(top_partitions, bottom_partitions, - pair_idx, node, node2, graph, - edge_colors) + partitions = self._couple_nodes( + top_partitions, + bottom_partitions, + pair_idx, + node, + node2, + graph, + edge_colors, + ) for opp in partitions: new_top_partitions, new_bottom_partitions = opp - new_perms, new_cosets = self._process_ordered_pair_partitions(graph, - new_top_partitions, - new_bottom_partitions, - edge_colors, - orbits, - cosets) + new_perms, new_cosets = self._process_ordered_pair_partitions( + graph, + new_top_partitions, + new_bottom_partitions, + edge_colors, + orbits, + cosets, + ) # COMBINATION permutations += new_perms cosets.update(new_cosets) - mapped = {k for top, bottom in zip(top_partitions, bottom_partitions) - for k in top if len(top) == 1 and top == bottom} + mapped = { + k + for top, bottom in zip(top_partitions, bottom_partitions) + for k in top + if len(top) == 1 and top == bottom + } ks = {k for k in graph.nodes if k < node} # Have all nodes with ID < node been mapped? find_coset = ks <= mapped and node not in cosets diff --git a/networkx/algorithms/isomorphism/isomorph.py b/networkx/algorithms/isomorphism/isomorph.py index 6c6eeea7..bd5ef485 100644 --- a/networkx/algorithms/isomorphism/isomorph.py +++ b/networkx/algorithms/isomorphism/isomorph.py @@ -4,10 +4,12 @@ Graph isomorphism functions. import networkx as nx from networkx.exception import NetworkXError -__all__ = ['could_be_isomorphic', - 'fast_could_be_isomorphic', - 'faster_could_be_isomorphic', - 'is_isomorphic'] +__all__ = [ + "could_be_isomorphic", + "fast_could_be_isomorphic", + "faster_could_be_isomorphic", + "is_isomorphic", +] def could_be_isomorphic(G1, G2): diff --git a/networkx/algorithms/isomorphism/isomorphvf2.py b/networkx/algorithms/isomorphism/isomorphvf2.py index 43251dff..3eb541bb 100644 --- a/networkx/algorithms/isomorphism/isomorphvf2.py +++ b/networkx/algorithms/isomorphism/isomorphvf2.py @@ -141,8 +141,7 @@ polynomial-time algorithm is known to exist). import sys -__all__ = ['GraphMatcher', - 'DiGraphMatcher'] +__all__ = ["GraphMatcher", "DiGraphMatcher"] class GraphMatcher: @@ -182,7 +181,7 @@ class GraphMatcher: sys.setrecursionlimit(int(1.5 * expected_max_recursion_level)) # Declare that we will be searching for a graph-graph isomorphism. 
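(Aside, not part of this commit: typical GraphMatcher usage, for reference.)

import networkx as nx
from networkx.algorithms import isomorphism as iso

G1 = nx.cycle_graph(4)
G2 = nx.relabel_nodes(nx.cycle_graph(4), {0: "a", 1: "b", 2: "c", 3: "d"})
gm = iso.GraphMatcher(G1, G2)
assert gm.is_isomorphic()
print(gm.mapping)  # one concrete correspondence, e.g. {0: 'a', 1: 'b', ...}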
- self.test = 'graph' + self.test = "graph" # Initialize state self.initialize() @@ -224,7 +223,7 @@ class GraphMatcher: # If T1_inout and T2_inout were both empty.... # P(s) = (N_1 - M_1) x {min (N_2 - M_2)} # if not (T1_inout or T2_inout): # as suggested by [2], incorrect - if 1: # as inferred from [1], correct + if 1: # as inferred from [1], correct # First we determine the candidate node for G2 other_node = min(G2_nodes - set(self.core_2), key=min_key) for node in self.G1: @@ -290,7 +289,7 @@ class GraphMatcher: def isomorphisms_iter(self): """Generator over isomorphisms between G1 and G2.""" # Declare that we are looking for a graph-graph isomorphism. - self.test = 'graph' + self.test = "graph" self.initialize() yield from self.match() @@ -375,23 +374,23 @@ class GraphMatcher: except StopIteration: return False -# subgraph_is_isomorphic.__doc__ += "\n" + subgraph.replace('\n','\n'+indent) + # subgraph_is_isomorphic.__doc__ += "\n" + subgraph.replace('\n','\n'+indent) def subgraph_isomorphisms_iter(self): """Generator over isomorphisms between a subgraph of G1 and G2.""" # Declare that we are looking for graph-subgraph isomorphism. - self.test = 'subgraph' + self.test = "subgraph" self.initialize() yield from self.match() def subgraph_monomorphisms_iter(self): """Generator over monomorphisms between a subgraph of G1 and G2.""" # Declare that we are looking for graph-subgraph monomorphism. - self.test = 'mono' + self.test = "mono" self.initialize() yield from self.match() -# subgraph_isomorphisms_iter.__doc__ += "\n" + subgraph.replace('\n','\n'+indent) + # subgraph_isomorphisms_iter.__doc__ += "\n" + subgraph.replace('\n','\n'+indent) def syntactic_feasibility(self, G1_node, G2_node): """Returns True if adding (G1_node, G2_node) is syntactically feasible. @@ -429,11 +428,15 @@ class GraphMatcher: # R_neighbor at the next recursion level. But it is good to prune the # search tree now. - if self.test == 'mono': - if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(G2_node, G2_node): + if self.test == "mono": + if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges( + G2_node, G2_node + ): return False else: - if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(G2_node, G2_node): + if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges( + G2_node, G2_node + ): return False # R_neighbor @@ -441,26 +444,32 @@ class GraphMatcher: # For each neighbor n' of n in the partial mapping, the corresponding # node m' is a neighbor of m, and vice versa. Also, the number of # edges must be equal. 
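(Aside, not part of this commit: the 'mono' test relaxes the edge-count checks from equality to '<=', so a monomorphism only requires the pattern's edges to be present, while induced subgraph isomorphism also forbids extra edges. A sketch:)

import networkx as nx
from networkx.algorithms import isomorphism as iso

G1 = nx.complete_graph(4)  # host graph
P3 = nx.path_graph(3)      # pattern: a triangle minus one edge

gm = iso.GraphMatcher(G1, P3)
assert not gm.subgraph_is_isomorphic()  # every 3-node induced subgraph of K4 is a triangle
assert gm.subgraph_is_monomorphic()     # but P3's edges embed into K4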
- if self.test != 'mono': + if self.test != "mono": for neighbor in self.G1[G1_node]: if neighbor in self.core_1: if not (self.core_1[neighbor] in self.G2[G2_node]): return False - elif self.G1.number_of_edges(neighbor, G1_node) != self.G2.number_of_edges(self.core_1[neighbor], G2_node): + elif self.G1.number_of_edges( + neighbor, G1_node + ) != self.G2.number_of_edges(self.core_1[neighbor], G2_node): return False for neighbor in self.G2[G2_node]: if neighbor in self.core_2: if not (self.core_2[neighbor] in self.G1[G1_node]): return False - elif self.test == 'mono': - if self.G1.number_of_edges(self.core_2[neighbor], G1_node) < self.G2.number_of_edges(neighbor, G2_node): + elif self.test == "mono": + if self.G1.number_of_edges( + self.core_2[neighbor], G1_node + ) < self.G2.number_of_edges(neighbor, G2_node): return False else: - if self.G1.number_of_edges(self.core_2[neighbor], G1_node) != self.G2.number_of_edges(neighbor, G2_node): + if self.G1.number_of_edges( + self.core_2[neighbor], G1_node + ) != self.G2.number_of_edges(neighbor, G2_node): return False - if self.test != 'mono': + if self.test != "mono": # Look ahead 1 # R_terminout @@ -474,7 +483,7 @@ class GraphMatcher: for neighbor in self.G2[G2_node]: if (neighbor in self.inout_2) and (neighbor not in self.core_2): num2 += 1 - if self.test == 'graph': + if self.test == "graph": if not (num1 == num2): return False else: # self.test == 'subgraph' @@ -496,7 +505,7 @@ class GraphMatcher: for neighbor in self.G2[G2_node]: if neighbor not in self.inout_2: num2 += 1 - if self.test == 'graph': + if self.test == "graph": if not (num1 == num2): return False else: # self.test == 'subgraph' @@ -553,7 +562,7 @@ class DiGraphMatcher(GraphMatcher): # We compute the in-terminal sets. # elif not (T1_out or T2_out): # as suggested by [2], incorrect - else: # as suggested by [1], correct + else: # as suggested by [1], correct T1_in = [node for node in self.in_1 if node not in self.core_1] T2_in = [node for node in self.in_2 if node not in self.core_2] @@ -568,7 +577,7 @@ class DiGraphMatcher(GraphMatcher): # P(s) = (N_1 - M_1) x {min (N_2 - M_2)} # elif not (T1_in or T2_in): # as suggested by [2], incorrect - else: # as inferred from [1], correct + else: # as inferred from [1], correct node_2 = min(G2_nodes - set(self.core_2), key=min_key) for node_1 in G1_nodes: if node_1 not in self.core_1: @@ -644,11 +653,15 @@ class DiGraphMatcher(GraphMatcher): # The number of selfloops for G1_node must equal the number of # self-loops for G2_node. Without this check, we would fail on R_pred # at the next recursion level. This should prune the tree even further. - if self.test == 'mono': - if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(G2_node, G2_node): + if self.test == "mono": + if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges( + G2_node, G2_node + ): return False else: - if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(G2_node, G2_node): + if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges( + G2_node, G2_node + ): return False # R_pred @@ -656,23 +669,29 @@ class DiGraphMatcher(GraphMatcher): # For each predecessor n' of n in the partial mapping, the # corresponding node m' is a predecessor of m, and vice versa. 
Also, # the number of edges must be equal - if self.test != 'mono': + if self.test != "mono": for predecessor in self.G1.pred[G1_node]: if predecessor in self.core_1: if not (self.core_1[predecessor] in self.G2.pred[G2_node]): return False - elif self.G1.number_of_edges(predecessor, G1_node) != self.G2.number_of_edges(self.core_1[predecessor], G2_node): + elif self.G1.number_of_edges( + predecessor, G1_node + ) != self.G2.number_of_edges(self.core_1[predecessor], G2_node): return False for predecessor in self.G2.pred[G2_node]: if predecessor in self.core_2: if not (self.core_2[predecessor] in self.G1.pred[G1_node]): return False - elif self.test == 'mono': - if self.G1.number_of_edges(self.core_2[predecessor], G1_node) < self.G2.number_of_edges(predecessor, G2_node): + elif self.test == "mono": + if self.G1.number_of_edges( + self.core_2[predecessor], G1_node + ) < self.G2.number_of_edges(predecessor, G2_node): return False else: - if self.G1.number_of_edges(self.core_2[predecessor], G1_node) != self.G2.number_of_edges(predecessor, G2_node): + if self.G1.number_of_edges( + self.core_2[predecessor], G1_node + ) != self.G2.number_of_edges(predecessor, G2_node): return False # R_succ @@ -680,26 +699,32 @@ class DiGraphMatcher(GraphMatcher): # For each successor n' of n in the partial mapping, the corresponding # node m' is a successor of m, and vice versa. Also, the number of # edges must be equal. - if self.test != 'mono': + if self.test != "mono": for successor in self.G1[G1_node]: if successor in self.core_1: if not (self.core_1[successor] in self.G2[G2_node]): return False - elif self.G1.number_of_edges(G1_node, successor) != self.G2.number_of_edges(G2_node, self.core_1[successor]): + elif self.G1.number_of_edges( + G1_node, successor + ) != self.G2.number_of_edges(G2_node, self.core_1[successor]): return False for successor in self.G2[G2_node]: if successor in self.core_2: if not (self.core_2[successor] in self.G1[G1_node]): return False - elif self.test == 'mono': - if self.G1.number_of_edges(G1_node, self.core_2[successor]) < self.G2.number_of_edges(G2_node, successor): + elif self.test == "mono": + if self.G1.number_of_edges( + G1_node, self.core_2[successor] + ) < self.G2.number_of_edges(G2_node, successor): return False else: - if self.G1.number_of_edges(G1_node, self.core_2[successor]) != self.G2.number_of_edges(G2_node, successor): + if self.G1.number_of_edges( + G1_node, self.core_2[successor] + ) != self.G2.number_of_edges(G2_node, successor): return False - if self.test != 'mono': + if self.test != "mono": # Look ahead 1 @@ -714,7 +739,7 @@ class DiGraphMatcher(GraphMatcher): for predecessor in self.G2.pred[G2_node]: if (predecessor in self.in_2) and (predecessor not in self.core_2): num2 += 1 - if self.test == 'graph': + if self.test == "graph": if not (num1 == num2): return False else: # self.test == 'subgraph' @@ -731,7 +756,7 @@ class DiGraphMatcher(GraphMatcher): for successor in self.G2[G2_node]: if (successor in self.in_2) and (successor not in self.core_2): num2 += 1 - if self.test == 'graph': + if self.test == "graph": if not (num1 == num2): return False else: # self.test == 'subgraph' @@ -750,7 +775,7 @@ class DiGraphMatcher(GraphMatcher): for predecessor in self.G2.pred[G2_node]: if (predecessor in self.out_2) and (predecessor not in self.core_2): num2 += 1 - if self.test == 'graph': + if self.test == "graph": if not (num1 == num2): return False else: # self.test == 'subgraph' @@ -767,7 +792,7 @@ class DiGraphMatcher(GraphMatcher): for successor in self.G2[G2_node]: 
if (successor in self.out_2) and (successor not in self.core_2): num2 += 1 - if self.test == 'graph': + if self.test == "graph": if not (num1 == num2): return False else: # self.test == 'subgraph' @@ -789,7 +814,7 @@ class DiGraphMatcher(GraphMatcher): for predecessor in self.G2.pred[G2_node]: if (predecessor not in self.in_2) and (predecessor not in self.out_2): num2 += 1 - if self.test == 'graph': + if self.test == "graph": if not (num1 == num2): return False else: # self.test == 'subgraph' @@ -807,7 +832,7 @@ class DiGraphMatcher(GraphMatcher): for successor in self.G2[G2_node]: if (successor not in self.in_2) and (successor not in self.out_2): num2 += 1 - if self.test == 'graph': + if self.test == "graph": if not (num1 == num2): return False else: # self.test == 'subgraph' @@ -873,7 +898,9 @@ class GMState: # Updates for T_1^{inout} new_nodes = set() for node in GM.core_1: - new_nodes.update([neighbor for neighbor in GM.G1[node] if neighbor not in GM.core_1]) + new_nodes.update( + [neighbor for neighbor in GM.G1[node] if neighbor not in GM.core_1] + ) for node in new_nodes: if node not in GM.inout_1: GM.inout_1[node] = self.depth @@ -881,7 +908,9 @@ class GMState: # Updates for T_2^{inout} new_nodes = set() for node in GM.core_2: - new_nodes.update([neighbor for neighbor in GM.G2[node] if neighbor not in GM.core_2]) + new_nodes.update( + [neighbor for neighbor in GM.G2[node] if neighbor not in GM.core_2] + ) for node in new_nodes: if node not in GM.inout_2: GM.inout_2[node] = self.depth @@ -962,8 +991,13 @@ class DiGMState: # Updates for T_1^{in} new_nodes = set() for node in GM.core_1: - new_nodes.update([predecessor for predecessor in GM.G1.predecessors(node) - if predecessor not in GM.core_1]) + new_nodes.update( + [ + predecessor + for predecessor in GM.G1.predecessors(node) + if predecessor not in GM.core_1 + ] + ) for node in new_nodes: if node not in GM.in_1: GM.in_1[node] = self.depth @@ -971,8 +1005,13 @@ class DiGMState: # Updates for T_2^{in} new_nodes = set() for node in GM.core_2: - new_nodes.update([predecessor for predecessor in GM.G2.predecessors(node) - if predecessor not in GM.core_2]) + new_nodes.update( + [ + predecessor + for predecessor in GM.G2.predecessors(node) + if predecessor not in GM.core_2 + ] + ) for node in new_nodes: if node not in GM.in_2: GM.in_2[node] = self.depth @@ -980,7 +1019,13 @@ class DiGMState: # Updates for T_1^{out} new_nodes = set() for node in GM.core_1: - new_nodes.update([successor for successor in GM.G1.successors(node) if successor not in GM.core_1]) + new_nodes.update( + [ + successor + for successor in GM.G1.successors(node) + if successor not in GM.core_1 + ] + ) for node in new_nodes: if node not in GM.out_1: GM.out_1[node] = self.depth @@ -988,7 +1033,13 @@ class DiGMState: # Updates for T_2^{out} new_nodes = set() for node in GM.core_2: - new_nodes.update([successor for successor in GM.G2.successors(node) if successor not in GM.core_2]) + new_nodes.update( + [ + successor + for successor in GM.G2.successors(node) + if successor not in GM.core_2 + ] + ) for node in new_nodes: if node not in GM.out_2: GM.out_2[node] = self.depth diff --git a/networkx/algorithms/isomorphism/matchhelpers.py b/networkx/algorithms/isomorphism/matchhelpers.py index c7d11255..d0e99cc3 100644 --- a/networkx/algorithms/isomorphism/matchhelpers.py +++ b/networkx/algorithms/isomorphism/matchhelpers.py @@ -4,23 +4,24 @@ edge_match functions to use during isomorphism checks. 
from itertools import permutations import types -__all__ = ['categorical_node_match', - 'categorical_edge_match', - 'categorical_multiedge_match', - 'numerical_node_match', - 'numerical_edge_match', - 'numerical_multiedge_match', - 'generic_node_match', - 'generic_edge_match', - 'generic_multiedge_match', - ] +__all__ = [ + "categorical_node_match", + "categorical_edge_match", + "categorical_multiedge_match", + "numerical_node_match", + "numerical_edge_match", + "numerical_multiedge_match", + "generic_node_match", + "generic_edge_match", + "generic_multiedge_match", +] def copyfunc(f, name=None): """Returns a deepcopy of a function.""" - return types.FunctionType(f.__code__, f.__globals__, - name or f.__name__, f.__defaults__, - f.__closure__) + return types.FunctionType( + f.__code__, f.__globals__, name or f.__name__, f.__defaults__, f.__closure__ + ) def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08): @@ -88,18 +89,21 @@ Examples def categorical_node_match(attr, default): if isinstance(attr, str): + def match(data1, data2): return data1.get(attr, default) == data2.get(attr, default) + else: attrs = list(zip(attr, default)) # Python 3 def match(data1, data2): return all(data1.get(attr, d) == data2.get(attr, d) for attr, d in attrs) + return match try: - categorical_edge_match = copyfunc(categorical_node_match, 'categorical_edge_match') + categorical_edge_match = copyfunc(categorical_node_match, "categorical_edge_match") except NotImplementedError: # IronPython lacks support for types.FunctionType. # https://github.com/networkx/networkx/issues/949 @@ -110,10 +114,12 @@ except NotImplementedError: def categorical_multiedge_match(attr, default): if isinstance(attr, str): + def match(datasets1, datasets2): values1 = {data.get(attr, default) for data in datasets1.values()} values2 = {data.get(attr, default) for data in datasets2.values()} return values1 == values2 + else: attrs = list(zip(attr, default)) # Python 3 @@ -127,14 +133,15 @@ def categorical_multiedge_match(attr, default): x = tuple(data2.get(attr, d) for attr, d in attrs) values2.add(x) return values1 == values2 + return match # Docstrings for categorical functions. 
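(Aside, not part of this commit: a usage sketch for the categorical helpers defined above.)

import networkx as nx
from networkx.algorithms import isomorphism as iso

nm = iso.categorical_node_match("color", default=None)

G1 = nx.path_graph(2)
G2 = nx.path_graph(2)
G1.nodes[0]["color"] = "red"
assert not nx.is_isomorphic(G1, G2, node_match=nm)  # 'red' vs missing attribute
G2.nodes[1]["color"] = "red"
assert nx.is_isomorphic(G1, G2, node_match=nm)      # the path can be flipped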
categorical_node_match.__doc__ = categorical_doc -categorical_edge_match.__doc__ = categorical_doc.replace('node', 'edge') -tmpdoc = categorical_doc.replace('node', 'edge') -tmpdoc = tmpdoc.replace('categorical_edge_match', 'categorical_multiedge_match') +categorical_edge_match.__doc__ = categorical_doc.replace("node", "edge") +tmpdoc = categorical_doc.replace("node", "edge") +tmpdoc = tmpdoc.replace("categorical_edge_match", "categorical_multiedge_match") categorical_multiedge_match.__doc__ = tmpdoc @@ -174,10 +181,12 @@ Examples def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08): if isinstance(attr, str): + def match(data1, data2): - return close(data1.get(attr, default), - data2.get(attr, default), - rtol=rtol, atol=atol) + return close( + data1.get(attr, default), data2.get(attr, default), rtol=rtol, atol=atol + ) + else: attrs = list(zip(attr, default)) # Python 3 @@ -185,11 +194,12 @@ def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08) values1 = [data1.get(attr, d) for attr, d in attrs] values2 = [data2.get(attr, d) for attr, d in attrs] return allclose(values1, values2, rtol=rtol, atol=atol) + return match try: - numerical_edge_match = copyfunc(numerical_node_match, 'numerical_edge_match') + numerical_edge_match = copyfunc(numerical_node_match, "numerical_edge_match") except NotImplementedError: # IronPython lacks support for types.FunctionType. # https://github.com/networkx/networkx/issues/949 @@ -200,10 +210,12 @@ except NotImplementedError: def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08): if isinstance(attr, str): + def match(datasets1, datasets2): values1 = sorted([data.get(attr, default) for data in datasets1.values()]) values2 = sorted([data.get(attr, default) for data in datasets2.values()]) return allclose(values1, values2, rtol=rtol, atol=atol) + else: attrs = list(zip(attr, default)) # Python 3 @@ -223,14 +235,15 @@ def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1 return False else: return True + return match # Docstrings for numerical functions. numerical_node_match.__doc__ = numerical_doc -numerical_edge_match.__doc__ = numerical_doc.replace('node', 'edge') -tmpdoc = numerical_doc.replace('node', 'edge') -tmpdoc = tmpdoc.replace('numerical_edge_match', 'numerical_multiedge_match') +numerical_edge_match.__doc__ = numerical_doc.replace("node", "edge") +tmpdoc = numerical_doc.replace("node", "edge") +tmpdoc = tmpdoc.replace("numerical_edge_match", "numerical_multiedge_match") numerical_multiedge_match.__doc__ = tmpdoc @@ -272,8 +285,10 @@ Examples def generic_node_match(attr, default, op): if isinstance(attr, str): + def match(data1, data2): return op(data1.get(attr, default), data2.get(attr, default)) + else: attrs = list(zip(attr, default, op)) # Python 3 @@ -283,11 +298,12 @@ def generic_node_match(attr, default, op): return False else: return True + return match try: - generic_edge_match = copyfunc(generic_node_match, 'generic_edge_match') + generic_edge_match = copyfunc(generic_node_match, "generic_edge_match") except NotImplementedError: # IronPython lacks support for types.FunctionType. # https://github.com/networkx/networkx/issues/949 @@ -364,9 +380,10 @@ def generic_multiedge_match(attr, default, op): else: # Then there are no isomorphisms between the multiedges. return False + return match # Docstrings for numerical functions. 
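(Aside, not part of this commit: the numerical helpers compare attributes within rtol/atol tolerances rather than exactly. A sketch:)

import networkx as nx
from networkx.algorithms import isomorphism as iso

em = iso.numerical_edge_match("weight", default=1)

G1 = nx.Graph([(0, 1, {"weight": 2.0})])
G2 = nx.Graph([(0, 1, {"weight": 2.0 + 1e-9})])
assert nx.is_isomorphic(G1, G2, edge_match=em)  # difference is inside atol=1e-08

G2[0][1]["weight"] = 2.5
assert not nx.is_isomorphic(G1, G2, edge_match=em)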
generic_node_match.__doc__ = generic_doc -generic_edge_match.__doc__ = generic_doc.replace('node', 'edge') +generic_edge_match.__doc__ = generic_doc.replace("node", "edge") diff --git a/networkx/algorithms/isomorphism/temporalisomorphvf2.py b/networkx/algorithms/isomorphism/temporalisomorphvf2.py index 6a9ad53b..b46aade8 100644 --- a/networkx/algorithms/isomorphism/temporalisomorphvf2.py +++ b/networkx/algorithms/isomorphism/temporalisomorphvf2.py @@ -68,12 +68,10 @@ Handles directed and undirected graphs and graphs with parallel edges. import networkx as nx from .isomorphvf2 import GraphMatcher, DiGraphMatcher -__all__ = ['TimeRespectingGraphMatcher', - 'TimeRespectingDiGraphMatcher'] +__all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"] class TimeRespectingGraphMatcher(GraphMatcher): - def __init__(self, G1, G2, temporal_attribute_name, delta): """Initialize TimeRespectingGraphMatcher. @@ -106,17 +104,22 @@ class TimeRespectingGraphMatcher(GraphMatcher): if isinstance(Gx, nx.Graph): # Graph G[u][v] returns the data dictionary. dates.append(Gx[Gx_node][n][self.temporal_attribute_name]) else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary. - for edge in Gx[Gx_node][n].values(): # Iterates all edges between node pair. + for edge in Gx[Gx_node][ + n + ].values(): # Iterates all edges between node pair. dates.append(edge[self.temporal_attribute_name]) if any(x is None for x in dates): - raise ValueError('Datetime not supplied for at least one edge.') + raise ValueError("Datetime not supplied for at least one edge.") return not dates or max(dates) - min(dates) <= self.delta def two_hop(self, Gx, core_x, Gx_node, neighbors): """ Paths of length 2 from Gx_node should be time-respecting. """ - return all(self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node]) for v in neighbors) + return all( + self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node]) + for v in neighbors + ) def semantic_feasibility(self, G1_node, G2_node): """Returns True if adding (G1_node, G2_node) is semantically @@ -136,7 +139,6 @@ class TimeRespectingGraphMatcher(GraphMatcher): class TimeRespectingDiGraphMatcher(DiGraphMatcher): - def __init__(self, G1, G2, temporal_attribute_name, delta): """Initialize TimeRespectingDiGraphMatcher. @@ -169,7 +171,9 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher): pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name]) else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary. for n in pred: - for edge in Gx[n][Gx_node].values(): # Iterates all edge data between node pair. + for edge in Gx[n][ + Gx_node + ].values(): # Iterates all edge data between node pair. pred_dates.append(edge[self.temporal_attribute_name]) return pred_dates @@ -183,7 +187,9 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher): succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name]) else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary. for n in succ: - for edge in Gx[Gx_node][n].values(): # Iterates all edge data between node pair. + for edge in Gx[Gx_node][ + n + ].values(): # Iterates all edge data between node pair. 
succ_dates.append(edge[self.temporal_attribute_name]) return succ_dates @@ -193,19 +199,39 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher): """ pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred) succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ) - return self.test_one(pred_dates, succ_dates) and self.test_two(pred_dates, succ_dates) + return self.test_one(pred_dates, succ_dates) and self.test_two( + pred_dates, succ_dates + ) def two_hop_pred(self, Gx, Gx_node, core_x, pred): """ The predeccessors of the ego node. """ - return all(self.one_hop(Gx, p, core_x, self.preds(Gx, core_x, p), self.succs(Gx, core_x, p, Gx_node)) for p in pred) + return all( + self.one_hop( + Gx, + p, + core_x, + self.preds(Gx, core_x, p), + self.succs(Gx, core_x, p, Gx_node), + ) + for p in pred + ) def two_hop_succ(self, Gx, Gx_node, core_x, succ): """ The successors of the ego node. """ - return all(self.one_hop(Gx, s, core_x, self.preds(Gx, core_x, s, Gx_node), self.succs(Gx, core_x, s)) for s in succ) + return all( + self.one_hop( + Gx, + s, + core_x, + self.preds(Gx, core_x, s, Gx_node), + self.succs(Gx, core_x, s), + ) + for s in succ + ) def preds(self, Gx, core_x, v, Gx_node=None): pred = [n for n in Gx.predecessors(v) if n in core_x] @@ -229,7 +255,7 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher): dates = pred_dates + succ_dates if any(x is None for x in dates): - raise ValueError('Date or datetime not supplied for at least one edge.') + raise ValueError("Date or datetime not supplied for at least one edge.") dates.sort() # Small to large. if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta): @@ -245,7 +271,11 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher): pred_dates.sort() succ_dates.sort() # First out before last in; negative of the necessary condition for time-respect. - if 0 < len(succ_dates) and 0 < len(pred_dates) and succ_dates[0] < pred_dates[-1]: + if ( + 0 < len(succ_dates) + and 0 < len(pred_dates) + and succ_dates[0] < pred_dates[-1] + ): time_respecting = False return time_respecting @@ -257,9 +287,13 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher): maintain the self.tests if needed, to keep the match() method functional. Implementations should consider multigraphs. """ - pred, succ = [n for n in self.G1.predecessors(G1_node) if n in self.core_1], [ - n for n in self.G1.successors(G1_node) if n in self.core_1] - if not self.one_hop(self.G1, G1_node, self.core_1, pred, succ): # Fail fast on first node. + pred, succ = ( + [n for n in self.G1.predecessors(G1_node) if n in self.core_1], + [n for n in self.G1.successors(G1_node) if n in self.core_1], + ) + if not self.one_hop( + self.G1, G1_node, self.core_1, pred, succ + ): # Fail fast on first node. 
return False if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred): return False diff --git a/networkx/algorithms/isomorphism/tests/test_ismags.py b/networkx/algorithms/isomorphism/tests/test_ismags.py index ed3d81ea..e4b61f64 100644 --- a/networkx/algorithms/isomorphism/tests/test_ismags.py +++ b/networkx/algorithms/isomorphism/tests/test_ismags.py @@ -18,27 +18,35 @@ def _matches_to_sets(matches): class TestSelfIsomorphism: data = [ ( - [(0, dict(name='a')), - (1, dict(name='a')), - (2, dict(name='b')), - (3, dict(name='b')), - (4, dict(name='a')), - (5, dict(name='a'))], - [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)] - ), - ( - range(1, 5), - [(1, 2), (2, 4), (4, 3), (3, 1)] - ), - ( - [], - [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 0), (0, 6), (6, 7), - (2, 8), (8, 9), (4, 10), (10, 11)] + [ + (0, dict(name="a")), + (1, dict(name="a")), + (2, dict(name="b")), + (3, dict(name="b")), + (4, dict(name="a")), + (5, dict(name="a")), + ], + [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)], ), + (range(1, 5), [(1, 2), (2, 4), (4, 3), (3, 1)]), ( [], - [(0, 1), (1, 2), (1, 4), (2, 3), (3, 5), (3, 6)] + [ + (0, 1), + (1, 2), + (2, 3), + (3, 4), + (4, 5), + (5, 0), + (0, 6), + (6, 7), + (2, 8), + (8, 9), + (4, 10), + (10, 11), + ], ), + ([], [(0, 1), (1, 2), (1, 4), (2, 3), (3, 5), (3, 6)]), ] def test_self_isomorphism(self): @@ -51,11 +59,14 @@ class TestSelfIsomorphism: graph.add_nodes_from(node_data) graph.add_edges_from(edge_data) - ismags = iso.ISMAGS(graph, graph, node_match=iso.categorical_node_match('name', None)) + ismags = iso.ISMAGS( + graph, graph, node_match=iso.categorical_node_match("name", None) + ) assert ismags.is_isomorphic() assert ismags.subgraph_is_isomorphic() - assert (list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == - [{n: n for n in graph.nodes}]) + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in graph.nodes} + ] def test_edgecase_self_isomorphism(self): """ @@ -87,11 +98,14 @@ class TestSelfIsomorphism: graph.add_nodes_from(node_data) graph.add_edges_from(edge_data) - ismags = iso.ISMAGS(graph, graph, node_match=iso.categorical_node_match('name', None)) + ismags = iso.ISMAGS( + graph, graph, node_match=iso.categorical_node_match("name", None) + ) assert ismags.is_isomorphic() assert ismags.subgraph_is_isomorphic() - assert (list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == - [{n: n for n in graph.nodes}]) + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in graph.nodes} + ] class TestSubgraphIsomorphism: @@ -103,8 +117,9 @@ class TestSubgraphIsomorphism: nx.add_cycle(g2, range(4)) g2.add_edges_from([(n, m) for n, m in zip(g2, range(4, 8))]) ismags = iso.ISMAGS(g2, g1) - assert (list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == - [{n: n for n in g1.nodes}]) + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in g1.nodes} + ] def test_isomorphism2(self): g1 = nx.Graph() @@ -115,69 +130,94 @@ class TestSubgraphIsomorphism: ismags = iso.ISMAGS(g2, g1) matches = ismags.subgraph_isomorphisms_iter(symmetry=True) - expected_symmetric = [{0: 0, 1: 1, 2: 2}, - {0: 0, 1: 1, 3: 2}, - {2: 0, 1: 1, 3: 2}] - assert (_matches_to_sets(matches) == - _matches_to_sets(expected_symmetric)) + expected_symmetric = [ + {0: 0, 1: 1, 2: 2}, + {0: 0, 1: 1, 3: 2}, + {2: 0, 1: 1, 3: 2}, + ] + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) matches = ismags.subgraph_isomorphisms_iter(symmetry=False) - expected_asymmetric = [{0: 2, 1: 1, 2: 0}, - {0: 2, 
1: 1, 3: 0}, - {2: 2, 1: 1, 3: 0}] - assert (_matches_to_sets(matches) == - _matches_to_sets(expected_symmetric + expected_asymmetric)) + expected_asymmetric = [ + {0: 2, 1: 1, 2: 0}, + {0: 2, 1: 1, 3: 0}, + {2: 2, 1: 1, 3: 0}, + ] + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) def test_labeled_nodes(self): g1 = nx.Graph() nx.add_cycle(g1, range(3)) - g1.nodes[1]['attr'] = True + g1.nodes[1]["attr"] = True g2 = g1.copy() g2.add_edge(1, 3) ismags = iso.ISMAGS(g2, g1, node_match=lambda x, y: x == y) matches = ismags.subgraph_isomorphisms_iter(symmetry=True) expected_symmetric = [{0: 0, 1: 1, 2: 2}] - assert (_matches_to_sets(matches) == - _matches_to_sets(expected_symmetric)) + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) matches = ismags.subgraph_isomorphisms_iter(symmetry=False) expected_asymmetric = [{0: 2, 1: 1, 2: 0}] - assert (_matches_to_sets(matches) == - _matches_to_sets(expected_symmetric + expected_asymmetric)) + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) def test_labeled_edges(self): g1 = nx.Graph() nx.add_cycle(g1, range(3)) - g1.edges[1, 2]['attr'] = True + g1.edges[1, 2]["attr"] = True g2 = g1.copy() g2.add_edge(1, 3) ismags = iso.ISMAGS(g2, g1, edge_match=lambda x, y: x == y) matches = ismags.subgraph_isomorphisms_iter(symmetry=True) expected_symmetric = [{0: 0, 1: 1, 2: 2}] - assert (_matches_to_sets(matches) == - _matches_to_sets(expected_symmetric)) + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) matches = ismags.subgraph_isomorphisms_iter(symmetry=False) expected_asymmetric = [{1: 2, 0: 0, 2: 1}] - assert (_matches_to_sets(matches) == - _matches_to_sets(expected_symmetric + expected_asymmetric)) + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) class TestWikipediaExample: # Nodes 'a', 'b', 'c' and 'd' form a column. # Nodes 'g', 'h', 'i' and 'j' form a column. - g1edges = [['a', 'g'], ['a', 'h'], ['a', 'i'], - ['b', 'g'], ['b', 'h'], ['b', 'j'], - ['c', 'g'], ['c', 'i'], ['c', 'j'], - ['d', 'h'], ['d', 'i'], ['d', 'j']] + g1edges = [ + ["a", "g"], + ["a", "h"], + ["a", "i"], + ["b", "g"], + ["b", "h"], + ["b", "j"], + ["c", "g"], + ["c", "i"], + ["c", "j"], + ["d", "h"], + ["d", "i"], + ["d", "j"], + ] # Nodes 1,2,3,4 form the clockwise corners of a large square. 
# Nodes 5,6,7,8 form the clockwise corners of a small square - g2edges = [[1, 2], [2, 3], [3, 4], [4, 1], - [5, 6], [6, 7], [7, 8], [8, 5], - [1, 5], [2, 6], [3, 7], [4, 8]] + g2edges = [ + [1, 2], + [2, 3], + [3, 4], + [4, 1], + [5, 6], + [6, 7], + [7, 8], + [8, 5], + [1, 5], + [2, 6], + [3, 7], + [4, 8], + ] def test_graph(self): g1 = nx.Graph() @@ -193,30 +233,37 @@ class TestLargestCommonSubgraph: # Example graphs from DOI: 10.1002/spe.588 graph1 = nx.Graph() graph1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 4), (4, 5)]) - graph1.nodes[1]['color'] = 0 + graph1.nodes[1]["color"] = 0 graph2 = nx.Graph() - graph2.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 4), (3, 5), - (5, 6), (5, 7), (6, 7)]) - graph2.nodes[1]['color'] = 1 - graph2.nodes[6]['color'] = 2 - graph2.nodes[7]['color'] = 2 - - ismags = iso.ISMAGS(graph1, graph2, node_match=iso.categorical_node_match('color', None)) + graph2.add_edges_from( + [(1, 2), (2, 3), (2, 4), (3, 4), (3, 5), (5, 6), (5, 7), (6, 7)] + ) + graph2.nodes[1]["color"] = 1 + graph2.nodes[6]["color"] = 2 + graph2.nodes[7]["color"] = 2 + + ismags = iso.ISMAGS( + graph1, graph2, node_match=iso.categorical_node_match("color", None) + ) assert list(ismags.subgraph_isomorphisms_iter(True)) == [] assert list(ismags.subgraph_isomorphisms_iter(False)) == [] found_mcis = _matches_to_sets(ismags.largest_common_subgraph()) - expected = _matches_to_sets([{2: 2, 3: 4, 4: 3, 5: 5}, - {2: 4, 3: 2, 4: 3, 5: 5}]) + expected = _matches_to_sets( + [{2: 2, 3: 4, 4: 3, 5: 5}, {2: 4, 3: 2, 4: 3, 5: 5}] + ) assert expected == found_mcis - ismags = iso.ISMAGS(graph2, graph1, node_match=iso.categorical_node_match('color', None)) + ismags = iso.ISMAGS( + graph2, graph1, node_match=iso.categorical_node_match("color", None) + ) assert list(ismags.subgraph_isomorphisms_iter(True)) == [] assert list(ismags.subgraph_isomorphisms_iter(False)) == [] found_mcis = _matches_to_sets(ismags.largest_common_subgraph()) # Same answer, but reversed. - expected = _matches_to_sets([{2: 2, 3: 4, 4: 3, 5: 5}, - {4: 2, 2: 3, 3: 4, 5: 5}]) + expected = _matches_to_sets( + [{2: 2, 3: 4, 4: 3, 5: 5}, {4: 2, 2: 3, 3: 4, 5: 5}] + ) assert expected == found_mcis def test_symmetry_mcis(self): @@ -228,23 +275,30 @@ class TestLargestCommonSubgraph: graph2.add_edge(1, 3) # Only the symmetry of graph2 is taken into account here. - ismags1 = iso.ISMAGS(graph1, graph2, node_match=iso.categorical_node_match('color', None)) + ismags1 = iso.ISMAGS( + graph1, graph2, node_match=iso.categorical_node_match("color", None) + ) assert list(ismags1.subgraph_isomorphisms_iter(True)) == [] found_mcis = _matches_to_sets(ismags1.largest_common_subgraph()) - expected = _matches_to_sets([{0: 0, 1: 1, 2: 2}, - {1: 0, 3: 2, 2: 1}]) + expected = _matches_to_sets([{0: 0, 1: 1, 2: 2}, {1: 0, 3: 2, 2: 1}]) assert expected == found_mcis # Only the symmetry of graph1 is taken into account here. 
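# Editorial aside, not part of the patch: the ISMAGS API exercised in these
# tests can be sketched as follows (the graphs here are illustrative only):
#
#     import networkx as nx
#     from networkx.algorithms import isomorphism as iso
#     ismags = iso.ISMAGS(nx.path_graph(4), nx.cycle_graph(3))
#     # largest_common_subgraph() lazily yields the node mappings that
#     # realize the largest common induced subgraph of the two graphs.
#     mappings = list(ismags.largest_common_subgraph())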
- ismags2 = iso.ISMAGS(graph2, graph1, node_match=iso.categorical_node_match('color', None)) + ismags2 = iso.ISMAGS( + graph2, graph1, node_match=iso.categorical_node_match("color", None) + ) assert list(ismags2.subgraph_isomorphisms_iter(True)) == [] found_mcis = _matches_to_sets(ismags2.largest_common_subgraph()) - expected = _matches_to_sets([{3: 2, 0: 0, 1: 1}, - {2: 0, 0: 2, 1: 1}, - {3: 0, 0: 2, 1: 1}, - {3: 0, 1: 1, 2: 2}, - {0: 0, 1: 1, 2: 2}, - {2: 0, 3: 2, 1: 1}]) + expected = _matches_to_sets( + [ + {3: 2, 0: 0, 1: 1}, + {2: 0, 0: 2, 1: 1}, + {3: 0, 0: 2, 1: 1}, + {3: 0, 1: 1, 2: 2}, + {0: 0, 1: 1, 2: 2}, + {2: 0, 3: 2, 1: 1}, + ] + ) assert expected == found_mcis @@ -253,17 +307,21 @@ class TestLargestCommonSubgraph: found_mcis2 = [{v: k for k, v in d.items()} for d in found_mcis2] found_mcis2 = _matches_to_sets(found_mcis2) - expected = _matches_to_sets([{3: 2, 1: 3, 2: 1}, - {2: 0, 0: 2, 1: 1}, - {1: 2, 3: 3, 2: 1}, - {3: 0, 1: 3, 2: 1}, - {0: 2, 2: 3, 1: 1}, - {3: 0, 1: 2, 2: 1}, - {2: 0, 0: 3, 1: 1}, - {0: 0, 2: 3, 1: 1}, - {1: 0, 3: 3, 2: 1}, - {1: 0, 3: 2, 2: 1}, - {0: 3, 1: 1, 2: 2}, - {0: 0, 1: 1, 2: 2}]) + expected = _matches_to_sets( + [ + {3: 2, 1: 3, 2: 1}, + {2: 0, 0: 2, 1: 1}, + {1: 2, 3: 3, 2: 1}, + {3: 0, 1: 3, 2: 1}, + {0: 2, 2: 3, 1: 1}, + {3: 0, 1: 2, 2: 1}, + {2: 0, 0: 3, 1: 1}, + {0: 0, 2: 3, 1: 1}, + {1: 0, 3: 3, 2: 1}, + {1: 0, 3: 2, 2: 1}, + {0: 3, 1: 1, 2: 2}, + {0: 0, 1: 1, 2: 2}, + ] + ) assert expected == found_mcis1 assert expected == found_mcis2 diff --git a/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py b/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py index 74ad11f6..06d041ea 100644 --- a/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py +++ b/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py @@ -15,16 +15,37 @@ class TestWikipediaExample: # Nodes 'a', 'b', 'c' and 'd' form a column. # Nodes 'g', 'h', 'i' and 'j' form a column. - g1edges = [['a', 'g'], ['a', 'h'], ['a', 'i'], - ['b', 'g'], ['b', 'h'], ['b', 'j'], - ['c', 'g'], ['c', 'i'], ['c', 'j'], - ['d', 'h'], ['d', 'i'], ['d', 'j']] + g1edges = [ + ["a", "g"], + ["a", "h"], + ["a", "i"], + ["b", "g"], + ["b", "h"], + ["b", "j"], + ["c", "g"], + ["c", "i"], + ["c", "j"], + ["d", "h"], + ["d", "i"], + ["d", "j"], + ] # Nodes 1,2,3,4 form the clockwise corners of a large square. # Nodes 5,6,7,8 form the clockwise corners of a small square - g2edges = [[1, 2], [2, 3], [3, 4], [4, 1], - [5, 6], [6, 7], [7, 8], [8, 5], - [1, 5], [2, 6], [3, 7], [4, 8]] + g2edges = [ + [1, 2], + [2, 3], + [3, 4], + [4, 1], + [5, 6], + [6, 7], + [7, 8], + [8, 5], + [1, 5], + [2, 6], + [3, 7], + [4, 8], + ] def test_graph(self): g1 = nx.Graph() @@ -37,11 +58,12 @@ class TestWikipediaExample: assert gm.subgraph_is_monomorphic() mapping = sorted(gm.mapping.items()) -# this mapping is only one of the possibilies -# so this test needs to be reconsidered -# isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8), -# ('g', 2), ('h', 5), ('i', 4), ('j', 7)] -# assert_equal(mapping, isomap) + + # this mapping is only one of the possibilies + # so this test needs to be reconsidered + # isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8), + # ('g', 2), ('h', 5), ('i', 4), ('j', 7)] + # assert_equal(mapping, isomap) def test_subgraph(self): g1 = nx.Graph() @@ -77,19 +99,19 @@ class TestVF2GraphDB: # This says, expect the data in little-endian encoding # as an unsigned short int and unpack 2 bytes from the file. 
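# Editorial aside, not part of the patch: a minimal sketch of the struct
# call described above, assuming two bytes of little-endian input:
#
#     import struct
#     struct.unpack("<H", b"\x03\x00")[0]   # -> 3
#
# "<" selects little-endian byte order and "H" an unsigned 2-byte short,
# so each read below consumes exactly fh.read(2).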
- fh = open(filename, mode='rb') + fh = open(filename, mode="rb") # Grab the number of nodes. # Node numeration is 0-based, so the first node has index 0. - nodes = struct.unpack('<H', fh.read(2))[0] + nodes = struct.unpack("<H", fh.read(2))[0] graph = nx.Graph() for from_node in range(nodes): # Get the number of edges. - edges = struct.unpack('<H', fh.read(2))[0] + edges = struct.unpack("<H", fh.read(2))[0] for edge in range(edges): # Get the terminal node. - to_node = struct.unpack('<H', fh.read(2))[0] + to_node = struct.unpack("<H", fh.read(2))[0] graph.add_edge(from_node, to_node) fh.close() @@ -97,8 +119,8 @@ class TestVF2GraphDB: def test_graph(self): head, tail = os.path.split(__file__) - g1 = self.create_graph(os.path.join(head, 'iso_r01_s80.A99')) - g2 = self.create_graph(os.path.join(head, 'iso_r01_s80.B99')) + g1 = self.create_graph(os.path.join(head, "iso_r01_s80.A99")) + g2 = self.create_graph(os.path.join(head, "iso_r01_s80.B99")) gm = iso.GraphMatcher(g1, g2) assert gm.is_isomorphic() @@ -106,8 +128,8 @@ class TestVF2GraphDB: # A is the subgraph # B is the full graph head, tail = os.path.split(__file__) - subgraph = self.create_graph(os.path.join(head, 'si2_b06_m200.A99')) - graph = self.create_graph(os.path.join(head, 'si2_b06_m200.B99')) + subgraph = self.create_graph(os.path.join(head, "si2_b06_m200.A99")) + graph = self.create_graph(os.path.join(head, "si2_b06_m200.B99")) gm = iso.GraphMatcher(graph, subgraph) assert gm.subgraph_is_isomorphic() # Just testing some cases @@ -135,7 +157,7 @@ class TestAtlas: alphabet = list(range(26)) for graph in Atlas: nlist = list(graph) - labels = alphabet[:len(nlist)] + labels = alphabet[: len(nlist)] for s in range(10): random.shuffle(labels) d = dict(zip(nlist, labels)) @@ -147,13 +169,38 @@ class TestAtlas: def test_multiedge(): # Simple test for multigraphs # Need something much more rigorous - edges = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), - (5, 6), (6, 7), (7, 8), (8, 9), (9, 10), - (10, 11), (10, 11), (11, 12), (11, 12), - (12, 13), (12, 13), (13, 14), (13, 14), - (14, 15), (14, 15), (15, 16), (15, 16), - (16, 17), (16, 17), (17, 18), (17, 18), - (18, 19), (18, 19), (19, 0), (19, 0)] + edges = [ + (0, 1), + (1, 2), + (2, 3), + (3, 4), + (4, 5), + (5, 6), + (6, 7), + (7, 8), + (8, 9), + (9, 10), + (10, 11), + (10, 11), + (11, 12), + (11, 12), + (12, 13), + (12, 13), + (13, 14), + (13, 14), + (14, 15), + (14, 15), + (15, 16), + (15, 16), + (16, 17), + (16, 17), + (17, 18), + (17, 18), + (18, 19), + (18, 19), + (19, 0), + (19, 0), + ] nodes = list(range(20)) for g1 in [nx.MultiGraph(), nx.MultiDiGraph()]: @@ -174,8 +221,19 @@ def test_multiedge(): def test_selfloop(): # Simple test for graphs with selfloops - edges = [(0, 1), (0, 2), (1, 2), (1, 3), (2, 2), - (2, 4), (3, 1), (3, 2), (4, 2), (4, 5), (5, 4)] + edges = [ + (0, 1), + (0, 2), + (1, 2), + (1, 3), + (2, 2), + (2, 4), + (3, 1), + (3, 2), + (4, 2), + (4, 5), + (5, 4), + ] nodes = list(range(6)) for g1 in [nx.Graph(), nx.DiGraph()]: @@ -194,8 +252,18 @@ def test_selfloop(): def test_selfloop_mono(): # Simple test for graphs with selfloops - edges0 = [(0, 1), (0, 2), (1, 2), (1, 3), - (2, 4), (3, 1), (3, 2), (4, 2), (4, 5), (5, 4)] + edges0 = [ + (0, 1), + (0, 2), + (1, 2), + (1, 3), + (2, 4), + (3, 1), + (3, 2), + (4, 2), + (4, 5), + (5, 4), + ] edges = edges0 + [(2, 2)] nodes = list(range(6)) @@ -220,18 +288,18 @@ def test_isomorphism_iter1(): g1 = nx.DiGraph() g2 = nx.DiGraph() g3 = nx.DiGraph() - g1.add_edge('A', 'B') - g1.add_edge('B', 'C') - g2.add_edge('Y', 'Z') - 
g3.add_edge('Z', 'Y') + g1.add_edge("A", "B") + g1.add_edge("B", "C") + g2.add_edge("Y", "Z") + g3.add_edge("Z", "Y") gm12 = iso.DiGraphMatcher(g1, g2) gm13 = iso.DiGraphMatcher(g1, g3) x = list(gm12.subgraph_isomorphisms_iter()) y = list(gm13.subgraph_isomorphisms_iter()) - assert {'A': 'Y', 'B': 'Z'} in x - assert {'B': 'Y', 'C': 'Z'} in x - assert {'A': 'Z', 'B': 'Y'} in y - assert {'B': 'Z', 'C': 'Y'} in y + assert {"A": "Y", "B": "Z"} in x + assert {"B": "Y", "C": "Z"} in x + assert {"A": "Z", "B": "Y"} in y + assert {"B": "Z", "C": "Y"} in y assert len(x) == len(y) assert len(x) == 2 @@ -239,16 +307,16 @@ def test_isomorphism_iter1(): def test_monomorphism_iter1(): g1 = nx.DiGraph() g2 = nx.DiGraph() - g1.add_edge('A', 'B') - g1.add_edge('B', 'C') - g1.add_edge('C', 'A') - g2.add_edge('X', 'Y') - g2.add_edge('Y', 'Z') + g1.add_edge("A", "B") + g1.add_edge("B", "C") + g1.add_edge("C", "A") + g2.add_edge("X", "Y") + g2.add_edge("Y", "Z") gm12 = iso.DiGraphMatcher(g1, g2) x = list(gm12.subgraph_monomorphisms_iter()) - assert {'A': 'X', 'B': 'Y', 'C': 'Z'} in x - assert {'A': 'Y', 'B': 'Z', 'C': 'X'} in x - assert {'A': 'Z', 'B': 'X', 'C': 'Y'} in x + assert {"A": "X", "B": "Y", "C": "Z"} in x + assert {"A": "Y", "B": "Z", "C": "X"} in x + assert {"A": "Z", "B": "X", "C": "Y"} in x assert len(x) == 3 gm21 = iso.DiGraphMatcher(g2, g1) # Check if StopIteration exception returns False @@ -272,11 +340,11 @@ def test_isomorphism_iter2(): def test_multiple(): # Verify that we can use the graph matcher multiple times - edges = [('A', 'B'), ('B', 'A'), ('B', 'C')] + edges = [("A", "B"), ("B", "A"), ("B", "C")] for g1, g2 in [(nx.Graph(), nx.Graph()), (nx.DiGraph(), nx.DiGraph())]: g1.add_edges_from(edges) g2.add_edges_from(edges) - g3 = nx.subgraph(g2, ['A', 'B']) + g3 = nx.subgraph(g2, ["A", "B"]) if not g1.is_directed(): gmA = iso.GraphMatcher(g1, g2) gmB = iso.GraphMatcher(g1, g3) @@ -284,7 +352,7 @@ def test_multiple(): gmA = iso.DiGraphMatcher(g1, g2) gmB = iso.DiGraphMatcher(g1, g3) assert gmA.is_isomorphic() - g2.remove_node('C') + g2.remove_node("C") if not g1.is_directed(): gmA = iso.GraphMatcher(g1, g2) else: @@ -293,6 +361,8 @@ def test_multiple(): assert gmB.subgraph_is_isomorphic() assert gmA.subgraph_is_monomorphic() assert gmB.subgraph_is_monomorphic() + + # for m in [gmB.mapping, gmB.mapping]: # assert_true(m['A'] == 'A') # assert_true(m['B'] == 'B') @@ -333,5 +403,5 @@ def test_monomorphism_edge_match(): SG.add_node(6) SG.add_edge(5, 6, label="A") - gm = iso.DiGraphMatcher(G, SG, edge_match=iso.categorical_edge_match('label', None)) + gm = iso.DiGraphMatcher(G, SG, edge_match=iso.categorical_edge_match("label", None)) assert gm.subgraph_is_monomorphic() diff --git a/networkx/algorithms/isomorphism/tests/test_match_helpers.py b/networkx/algorithms/isomorphism/tests/test_match_helpers.py index 1650abeb..4c0f2c30 100644 --- a/networkx/algorithms/isomorphism/tests/test_match_helpers.py +++ b/networkx/algorithms/isomorphism/tests/test_match_helpers.py @@ -4,27 +4,40 @@ from networkx.algorithms import isomorphism as iso def test_categorical_node_match(): - nm = iso.categorical_node_match(['x', 'y', 'z'], [None] * 3) + nm = iso.categorical_node_match(["x", "y", "z"], [None] * 3) assert nm(dict(x=1, y=2, z=3), dict(x=1, y=2, z=3)) assert not nm(dict(x=1, y=2, z=2), dict(x=1, y=2, z=1)) class TestGenericMultiEdgeMatch: - def setup(self): self.G1 = nx.MultiDiGraph() self.G2 = nx.MultiDiGraph() self.G3 = nx.MultiDiGraph() self.G4 = nx.MultiDiGraph() - attr_dict1 = {'id': 'edge1', 
'minFlow': 0, 'maxFlow': 10} - attr_dict2 = {'id': 'edge2', 'minFlow': -3, 'maxFlow': 7} - attr_dict3 = {'id': 'edge3', 'minFlow': 13, 'maxFlow': 117} - attr_dict4 = {'id': 'edge4', 'minFlow': 13, 'maxFlow': 117} - attr_dict5 = {'id': 'edge5', 'minFlow': 8, 'maxFlow': 12} - attr_dict6 = {'id': 'edge6', 'minFlow': 8, 'maxFlow': 12} - for attr_dict in [attr_dict1, attr_dict2, attr_dict3, attr_dict4, attr_dict5, attr_dict6]: + attr_dict1 = {"id": "edge1", "minFlow": 0, "maxFlow": 10} + attr_dict2 = {"id": "edge2", "minFlow": -3, "maxFlow": 7} + attr_dict3 = {"id": "edge3", "minFlow": 13, "maxFlow": 117} + attr_dict4 = {"id": "edge4", "minFlow": 13, "maxFlow": 117} + attr_dict5 = {"id": "edge5", "minFlow": 8, "maxFlow": 12} + attr_dict6 = {"id": "edge6", "minFlow": 8, "maxFlow": 12} + for attr_dict in [ + attr_dict1, + attr_dict2, + attr_dict3, + attr_dict4, + attr_dict5, + attr_dict6, + ]: self.G1.add_edge(1, 2, **attr_dict) - for attr_dict in [attr_dict5, attr_dict3, attr_dict6, attr_dict1, attr_dict4, attr_dict2]: + for attr_dict in [ + attr_dict5, + attr_dict3, + attr_dict6, + attr_dict1, + attr_dict4, + attr_dict2, + ]: self.G2.add_edge(2, 3, **attr_dict) for attr_dict in [attr_dict3, attr_dict5]: self.G3.add_edge(3, 4, **attr_dict) @@ -32,10 +45,14 @@ class TestGenericMultiEdgeMatch: self.G4.add_edge(4, 5, **attr_dict) def test_generic_multiedge_match(self): - full_match = iso.generic_multiedge_match(['id', 'flowMin', 'flowMax'], [None] * 3, [eq] * 3) - flow_match = iso.generic_multiedge_match(['flowMin', 'flowMax'], [None] * 2, [eq] * 2) - min_flow_match = iso.generic_multiedge_match('flowMin', None, eq) - id_match = iso.generic_multiedge_match('id', None, eq) + full_match = iso.generic_multiedge_match( + ["id", "flowMin", "flowMax"], [None] * 3, [eq] * 3 + ) + flow_match = iso.generic_multiedge_match( + ["flowMin", "flowMax"], [None] * 2, [eq] * 2 + ) + min_flow_match = iso.generic_multiedge_match("flowMin", None, eq) + id_match = iso.generic_multiedge_match("id", None, eq) assert flow_match(self.G1[1][2], self.G2[2][3]) assert min_flow_match(self.G1[1][2], self.G2[2][3]) assert id_match(self.G1[1][2], self.G2[2][3]) diff --git a/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py b/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py index 634a39d6..d13301c3 100644 --- a/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py +++ b/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py @@ -80,7 +80,7 @@ class TestTimeRespectingGraphMatcher: def test_timdelta_zero_timeRespecting_returnsTrue(self): G1 = self.provide_g1_topology() - temporal_name = 'date' + temporal_name = "date" G1 = put_same_time(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta() @@ -89,7 +89,7 @@ class TestTimeRespectingGraphMatcher: def test_timdelta_zero_datetime_timeRespecting_returnsTrue(self): G1 = self.provide_g1_topology() - temporal_name = 'date' + temporal_name = "date" G1 = put_same_datetime(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta() @@ -98,7 +98,7 @@ class TestTimeRespectingGraphMatcher: def test_attNameStrange_timdelta_zero_timeRespecting_returnsTrue(self): G1 = self.provide_g1_topology() - temporal_name = 'strange_name' + temporal_name = "strange_name" G1 = put_same_time(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta() @@ -107,7 +107,7 @@ class TestTimeRespectingGraphMatcher: def test_notTimeRespecting_returnsFalse(self): G1 = self.provide_g1_topology() - temporal_name = 'date' + 
temporal_name = "date" G1 = put_sequence_time(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta() @@ -116,7 +116,7 @@ class TestTimeRespectingGraphMatcher: def test_timdelta_one_config0_returns_no_embeddings(self): G1 = self.provide_g1_topology() - temporal_name = 'date' + temporal_name = "date" G1 = put_time_config_0(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta(days=1) @@ -126,7 +126,7 @@ class TestTimeRespectingGraphMatcher: def test_timdelta_one_config1_returns_four_embedding(self): G1 = self.provide_g1_topology() - temporal_name = 'date' + temporal_name = "date" G1 = put_time_config_1(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta(days=1) @@ -136,7 +136,7 @@ class TestTimeRespectingGraphMatcher: def test_timdelta_one_config2_returns_ten_embeddings(self): G1 = self.provide_g1_topology() - temporal_name = 'date' + temporal_name = "date" G1 = put_time_config_2(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta(days=1) @@ -163,7 +163,7 @@ class TestDiTimeRespectingGraphMatcher: def test_timdelta_zero_same_dates_returns_true(self): G1 = self.provide_g1_topology() - temporal_name = 'date' + temporal_name = "date" G1 = put_same_time(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta() @@ -172,7 +172,7 @@ class TestDiTimeRespectingGraphMatcher: def test_attNameStrange_timdelta_zero_same_dates_returns_true(self): G1 = self.provide_g1_topology() - temporal_name = 'strange' + temporal_name = "strange" G1 = put_same_time(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta() @@ -181,7 +181,7 @@ class TestDiTimeRespectingGraphMatcher: def test_timdelta_one_config0_returns_no_embeddings(self): G1 = self.provide_g1_topology() - temporal_name = 'date' + temporal_name = "date" G1 = put_time_config_0(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta(days=1) @@ -191,7 +191,7 @@ class TestDiTimeRespectingGraphMatcher: def test_timdelta_one_config1_returns_one_embedding(self): G1 = self.provide_g1_topology() - temporal_name = 'date' + temporal_name = "date" G1 = put_time_config_1(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta(days=1) @@ -201,7 +201,7 @@ class TestDiTimeRespectingGraphMatcher: def test_timdelta_one_config2_returns_two_embeddings(self): G1 = self.provide_g1_topology() - temporal_name = 'date' + temporal_name = "date" G1 = put_time_config_2(G1, temporal_name) G2 = self.provide_g2_path_3edges() d = timedelta(days=1) diff --git a/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py b/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py index 35aac984..1ae2a56a 100644 --- a/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py +++ b/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py @@ -10,20 +10,21 @@ import networkx.algorithms.isomorphism as iso def test_simple(): # 16 simple tests - w = 'weight' + w = "weight" edges = [(0, 0, 1), (0, 0, 1.5), (0, 1, 2), (1, 0, 3)] - for g1 in [nx.Graph(), - nx.DiGraph(), - nx.MultiGraph(), - nx.MultiDiGraph(), - ]: + for g1 in [ + nx.Graph(), + nx.DiGraph(), + nx.MultiGraph(), + nx.MultiDiGraph(), + ]: g1.add_weighted_edges_from(edges) g2 = g1.subgraph(g1.nodes()) if g1.is_multigraph(): - em = iso.numerical_multiedge_match('weight', 1) + em = iso.numerical_multiedge_match("weight", 1) else: - em = iso.numerical_edge_match('weight', 1) + em = iso.numerical_edge_match("weight", 1) assert nx.is_isomorphic(g1, g2, edge_match=em) for mod1, mod2 in [(False, True), (True, False), (True, True)]: @@ 
-31,14 +32,14 @@ def test_simple(): # mod2 tests a selfloop if g2.is_multigraph(): if mod1: - data1 = {0: {'weight': 10}} + data1 = {0: {"weight": 10}} if mod2: - data2 = {0: {'weight': 1}, 1: {'weight': 2.5}} + data2 = {0: {"weight": 1}, 1: {"weight": 2.5}} else: if mod1: - data1 = {'weight': 10} + data1 = {"weight": 10} if mod2: - data2 = {'weight': 2.5} + data2 = {"weight": 2.5} g2 = g1.subgraph(g1.nodes()).copy() if mod1: @@ -62,17 +63,17 @@ def test_weightkey(): g1 = nx.DiGraph() g2 = nx.DiGraph() - g1.add_edge('A', 'B', weight=1) - g2.add_edge('C', 'D', weight=0) + g1.add_edge("A", "B", weight=1) + g2.add_edge("C", "D", weight=0) assert nx.is_isomorphic(g1, g2) - em = iso.numerical_edge_match('nonexistent attribute', 1) + em = iso.numerical_edge_match("nonexistent attribute", 1) assert nx.is_isomorphic(g1, g2, edge_match=em) - em = iso.numerical_edge_match('weight', 1) + em = iso.numerical_edge_match("weight", 1) assert not nx.is_isomorphic(g1, g2, edge_match=em) g2 = nx.DiGraph() - g2.add_edge('C', 'D') + g2.add_edge("C", "D") assert nx.is_isomorphic(g1, g2, edge_match=em) @@ -83,14 +84,14 @@ class TestNodeMatch_Graph: self.build() def build(self): - self.nm = iso.categorical_node_match('color', '') - self.em = iso.numerical_edge_match('weight', 1) + self.nm = iso.categorical_node_match("color", "") + self.em = iso.numerical_edge_match("weight", 1) - self.g1.add_node('A', color='red') - self.g2.add_node('C', color='blue') + self.g1.add_node("A", color="red") + self.g2.add_node("C", color="blue") - self.g1.add_edge('A', 'B', weight=1) - self.g2.add_edge('C', 'D', weight=1) + self.g1.add_edge("A", "B", weight=1) + self.g2.add_edge("C", "D", weight=1) def test_noweight_nocolor(self): assert nx.is_isomorphic(self.g1, self.g2) @@ -99,32 +100,31 @@ class TestNodeMatch_Graph: assert not nx.is_isomorphic(self.g1, self.g2, node_match=self.nm) def test_color2(self): - self.g1.nodes['A']['color'] = 'blue' + self.g1.nodes["A"]["color"] = "blue" assert nx.is_isomorphic(self.g1, self.g2, node_match=self.nm) def test_weight1(self): assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) def test_weight2(self): - self.g1.add_edge('A', 'B', weight=2) + self.g1.add_edge("A", "B", weight=2) assert not nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) def test_colorsandweights1(self): - iso = nx.is_isomorphic(self.g1, self.g2, - node_match=self.nm, edge_match=self.em) + iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em) assert not iso def test_colorsandweights2(self): - self.g1.nodes['A']['color'] = 'blue' - iso = nx.is_isomorphic(self.g1, self.g2, - node_match=self.nm, edge_match=self.em) + self.g1.nodes["A"]["color"] = "blue" + iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em) assert iso def test_colorsandweights3(self): # make the weights disagree - self.g1.add_edge('A', 'B', weight=2) - assert not nx.is_isomorphic(self.g1, self.g2, - node_match=self.nm, edge_match=self.em) + self.g1.add_edge("A", "B", weight=2) + assert not nx.is_isomorphic( + self.g1, self.g2, node_match=self.nm, edge_match=self.em + ) class TestEdgeMatch_MultiGraph: @@ -139,30 +139,34 @@ class TestEdgeMatch_MultiGraph: g2 = self.g2 # We will assume integer weights only. 
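# Editorial aside, not part of the patch: the edge-match factories used
# throughout this file (here via the test module's `iso` import) return
# plain callables over edge-attribute dicts, e.g.:
#
#     em = iso.numerical_edge_match("weight", 1)   # default value 1
#     em({"weight": 1.0}, {"weight": 1.0})         # -> True (float-tolerant)
#     em({"weight": 1.0}, {"weight": 2.0})         # -> False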
- g1.add_edge('A', 'B', color='green', weight=0, size=.5) - g1.add_edge('A', 'B', color='red', weight=1, size=.35) - g1.add_edge('A', 'B', color='red', weight=2, size=.65) + g1.add_edge("A", "B", color="green", weight=0, size=0.5) + g1.add_edge("A", "B", color="red", weight=1, size=0.35) + g1.add_edge("A", "B", color="red", weight=2, size=0.65) - g2.add_edge('C', 'D', color='green', weight=1, size=.5) - g2.add_edge('C', 'D', color='red', weight=0, size=.45) - g2.add_edge('C', 'D', color='red', weight=2, size=.65) + g2.add_edge("C", "D", color="green", weight=1, size=0.5) + g2.add_edge("C", "D", color="red", weight=0, size=0.45) + g2.add_edge("C", "D", color="red", weight=2, size=0.65) if g1.is_multigraph(): - self.em = iso.numerical_multiedge_match('weight', 1) - self.emc = iso.categorical_multiedge_match('color', '') - self.emcm = iso.categorical_multiedge_match(['color', 'weight'], ['', 1]) - self.emg1 = iso.generic_multiedge_match('color', 'red', eq) + self.em = iso.numerical_multiedge_match("weight", 1) + self.emc = iso.categorical_multiedge_match("color", "") + self.emcm = iso.categorical_multiedge_match(["color", "weight"], ["", 1]) + self.emg1 = iso.generic_multiedge_match("color", "red", eq) self.emg2 = iso.generic_multiedge_match( - ['color', 'weight', 'size'], ['red', 1, .5], - [eq, eq, iso.matchhelpers.close]) + ["color", "weight", "size"], + ["red", 1, 0.5], + [eq, eq, iso.matchhelpers.close], + ) else: - self.em = iso.numerical_edge_match('weight', 1) - self.emc = iso.categorical_edge_match('color', '') - self.emcm = iso.categorical_edge_match(['color', 'weight'], ['', 1]) - self.emg1 = iso.generic_multiedge_match('color', 'red', eq) + self.em = iso.numerical_edge_match("weight", 1) + self.emc = iso.categorical_edge_match("color", "") + self.emcm = iso.categorical_edge_match(["color", "weight"], ["", 1]) + self.emg1 = iso.generic_multiedge_match("color", "red", eq) self.emg2 = iso.generic_edge_match( - ['color', 'weight', 'size'], ['red', 1, .5], - [eq, eq, iso.matchhelpers.close]) + ["color", "weight", "size"], + ["red", 1, 0.5], + [eq, eq, iso.matchhelpers.close], + ) def test_weights_only(self): assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) diff --git a/networkx/algorithms/isomorphism/vf2userfunc.py b/networkx/algorithms/isomorphism/vf2userfunc.py index 57213c08..89e456ad 100644 --- a/networkx/algorithms/isomorphism/vf2userfunc.py +++ b/networkx/algorithms/isomorphism/vf2userfunc.py @@ -33,11 +33,12 @@ from . 
import isomorphvf2 as vf2 -__all__ = ['GraphMatcher', - 'DiGraphMatcher', - 'MultiGraphMatcher', - 'MultiDiGraphMatcher', - ] +__all__ = [ + "GraphMatcher", + "DiGraphMatcher", + "MultiGraphMatcher", + "MultiDiGraphMatcher", +] def _semantic_feasibility(self, G1_node, G2_node): @@ -61,13 +62,15 @@ def _semantic_feasibility(self, G1_node, G2_node): for neighbor in G1nbrs: # G1_node is not in core_1, so we must handle R_self separately if neighbor == G1_node: - if G2_node in G2nbrs and \ - not edge_match(G1nbrs[G1_node], G2nbrs[G2_node]): + if G2_node in G2nbrs and not edge_match( + G1nbrs[G1_node], G2nbrs[G2_node] + ): return False elif neighbor in core_1: G2_nbr = core_1[neighbor] - if G2_nbr in G2nbrs and \ - not edge_match(G1nbrs[neighbor], G2nbrs[G2_nbr]): + if G2_nbr in G2nbrs and not edge_match( + G1nbrs[neighbor], G2nbrs[G2_nbr] + ): return False # syntactic check has already verified that neighbors are symmetric @@ -184,6 +187,7 @@ class DiGraphMatcher(vf2.DiGraphMatcher): return feasible + # The "semantics" of edge_match are different for multi(di)graphs, but # the implementation is the same. So, technically we do not need to # provide "multi" versions, but we do so to match NetworkX's base classes. @@ -191,9 +195,11 @@ class DiGraphMatcher(vf2.DiGraphMatcher): class MultiGraphMatcher(GraphMatcher): """VF2 isomorphism checker for undirected multigraphs. """ + pass class MultiDiGraphMatcher(DiGraphMatcher): """VF2 isomorphism checker for directed multigraphs. """ + pass diff --git a/networkx/algorithms/link_analysis/hits_alg.py b/networkx/algorithms/link_analysis/hits_alg.py index 9e29576e..5b394df8 100644 --- a/networkx/algorithms/link_analysis/hits_alg.py +++ b/networkx/algorithms/link_analysis/hits_alg.py @@ -179,8 +179,7 @@ def hits_numpy(G, normalized=True): try: import numpy as np except ImportError as e: - raise ImportError("hits_numpy() requires NumPy: " - "http://numpy.org/") from e + raise ImportError("hits_numpy() requires NumPy: " "http://numpy.org/") from e if len(G) == 0: return {}, {} H = nx.hub_matrix(G, list(G)) @@ -271,8 +270,10 @@ def hits_scipy(G, max_iter=100, tol=1.0e-6, normalized=True): try: import numpy as np except ImportError as e: - raise ImportError("hits_scipy() requires SciPy and NumPy:" - "http://scipy.org/ http://numpy.org/") from e + raise ImportError( + "hits_scipy() requires SciPy and NumPy:" + "http://scipy.org/ http://numpy.org/" + ) from e if len(G) == 0: return {}, {} M = nx.to_scipy_sparse_matrix(G, nodelist=list(G)) diff --git a/networkx/algorithms/link_analysis/pagerank_alg.py b/networkx/algorithms/link_analysis/pagerank_alg.py index e6815dfc..935b2e43 100644 --- a/networkx/algorithms/link_analysis/pagerank_alg.py +++ b/networkx/algorithms/link_analysis/pagerank_alg.py @@ -2,13 +2,20 @@ import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['pagerank', 'pagerank_numpy', 'pagerank_scipy', 'google_matrix'] - - -@not_implemented_for('multigraph') -def pagerank(G, alpha=0.85, personalization=None, - max_iter=100, tol=1.0e-6, nstart=None, weight='weight', - dangling=None): +__all__ = ["pagerank", "pagerank_numpy", "pagerank_scipy", "google_matrix"] + + +@not_implemented_for("multigraph") +def pagerank( + G, + alpha=0.85, + personalization=None, + max_iter=100, + tol=1.0e-6, + nstart=None, + weight="weight", + dangling=None, +): """Returns the PageRank of the nodes in the graph. 
PageRank computes a ranking of the nodes in the graph G based on @@ -150,8 +157,9 @@ def pagerank(G, alpha=0.85, personalization=None, raise nx.PowerIterationFailedConvergence(max_iter) -def google_matrix(G, alpha=0.85, personalization=None, - nodelist=None, weight='weight', dangling=None): +def google_matrix( + G, alpha=0.85, personalization=None, nodelist=None, weight="weight", dangling=None +): """Returns the Google matrix of the graph. Parameters @@ -230,8 +238,7 @@ def google_matrix(G, alpha=0.85, personalization=None, dangling_weights = p else: # Convert the dangling dictionary into an array in nodelist order - dangling_weights = np.array([dangling.get(n, 0) for n in nodelist], - dtype=float) + dangling_weights = np.array([dangling.get(n, 0) for n in nodelist], dtype=float) dangling_weights /= dangling_weights.sum() dangling_nodes = np.where(M.sum(axis=1) == 0)[0] @@ -244,8 +251,7 @@ def google_matrix(G, alpha=0.85, personalization=None, return alpha * M + (1 - alpha) * p -def pagerank_numpy(G, alpha=0.85, personalization=None, weight='weight', - dangling=None): +def pagerank_numpy(G, alpha=0.85, personalization=None, weight="weight", dangling=None): """Returns the PageRank of the nodes in the graph. PageRank computes a ranking of the nodes in the graph G based on @@ -314,10 +320,12 @@ def pagerank_numpy(G, alpha=0.85, personalization=None, weight='weight', http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf """ import numpy as np + if len(G) == 0: return {} - M = google_matrix(G, alpha, personalization=personalization, - weight=weight, dangling=dangling) + M = google_matrix( + G, alpha, personalization=personalization, weight=weight, dangling=dangling + ) # use numpy LAPACK solver eigenvalues, eigenvectors = np.linalg.eig(M.T) ind = np.argmax(eigenvalues) @@ -327,9 +335,16 @@ def pagerank_numpy(G, alpha=0.85, personalization=None, weight='weight', return dict(zip(G, map(float, largest / norm))) -def pagerank_scipy(G, alpha=0.85, personalization=None, - max_iter=100, tol=1.0e-6, nstart=None, weight='weight', - dangling=None): +def pagerank_scipy( + G, + alpha=0.85, + personalization=None, + max_iter=100, + tol=1.0e-6, + nstart=None, + weight="weight", + dangling=None, +): """Returns the PageRank of the nodes in the graph. 
PageRank computes a ranking of the nodes in the graph G based on @@ -420,11 +435,10 @@ def pagerank_scipy(G, alpha=0.85, personalization=None, return {} nodelist = list(G) - M = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, - dtype=float) + M = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, dtype=float) S = np.array(M.sum(axis=1)).flatten() S[S != 0] = 1.0 / S[S != 0] - Q = scipy.sparse.spdiags(S.T, 0, *M.shape, format='csr') + Q = scipy.sparse.spdiags(S.T, 0, *M.shape, format="csr") M = Q * M # initial vector @@ -446,16 +460,14 @@ def pagerank_scipy(G, alpha=0.85, personalization=None, dangling_weights = p else: # Convert the dangling dictionary into an array in nodelist order - dangling_weights = np.array([dangling.get(n, 0) for n in nodelist], - dtype=float) + dangling_weights = np.array([dangling.get(n, 0) for n in nodelist], dtype=float) dangling_weights /= dangling_weights.sum() is_dangling = np.where(S == 0)[0] # power iteration: make up to max_iter iterations for _ in range(max_iter): xlast = x - x = alpha * (x * M + sum(x[is_dangling]) * dangling_weights) + \ - (1 - alpha) * p + x = alpha * (x * M + sum(x[is_dangling]) * dangling_weights) + (1 - alpha) * p # check convergence, l1 norm err = np.absolute(x - xlast).sum() if err < N * tol: diff --git a/networkx/algorithms/link_analysis/tests/test_hits.py b/networkx/algorithms/link_analysis/tests/test_hits.py index dd50ab66..e9522065 100644 --- a/networkx/algorithms/link_analysis/tests/test_hits.py +++ b/networkx/algorithms/link_analysis/tests/test_hits.py @@ -10,28 +10,25 @@ from networkx.testing import almost_equal class TestHITS: - @classmethod def setup_class(cls): G = networkx.DiGraph() - edges = [(1, 3), (1, 5), - (2, 1), - (3, 5), - (5, 4), (5, 3), - (6, 5)] + edges = [(1, 3), (1, 5), (2, 1), (3, 5), (5, 4), (5, 3), (6, 5)] G.add_edges_from(edges, weight=1) cls.G = G - cls.G.a = dict(zip(sorted(G), [0.000000, 0.000000, 0.366025, - 0.133975, 0.500000, 0.000000])) - cls.G.h = dict(zip(sorted(G), [0.366025, 0.000000, 0.211325, - 0.000000, 0.211325, 0.211325])) + cls.G.a = dict( + zip(sorted(G), [0.000000, 0.000000, 0.366025, 0.133975, 0.500000, 0.000000]) + ) + cls.G.h = dict( + zip(sorted(G), [0.366025, 0.000000, 0.211325, 0.000000, 0.211325, 0.211325]) + ) def test_hits(self): G = self.G - h, a = networkx.hits(G, tol=1.e-08) + h, a = networkx.hits(G, tol=1.0e-08) for n in G: assert almost_equal(h[n], G.h[n], places=4) for n in G: @@ -39,11 +36,11 @@ class TestHITS: def test_hits_nstart(self): G = self.G - nstart = {i: 1. 
/ 2 for i in G} + nstart = {i: 1.0 / 2 for i in G} h, a = networkx.hits(G, nstart=nstart) def test_hits_numpy(self): - numpy = pytest.importorskip('numpy') + numpy = pytest.importorskip("numpy") G = self.G h, a = networkx.hits_numpy(G) for n in G: @@ -52,16 +49,16 @@ class TestHITS: assert almost_equal(a[n], G.a[n], places=4) def test_hits_scipy(self): - sp = pytest.importorskip('scipy') + sp = pytest.importorskip("scipy") G = self.G - h, a = networkx.hits_scipy(G, tol=1.e-08) + h, a = networkx.hits_scipy(G, tol=1.0e-08) for n in G: assert almost_equal(h[n], G.h[n], places=4) for n in G: assert almost_equal(a[n], G.a[n], places=4) def test_empty(self): - numpy = pytest.importorskip('numpy') + numpy = pytest.importorskip("numpy") G = networkx.Graph() assert networkx.hits(G) == ({}, {}) assert networkx.hits_numpy(G) == ({}, {}) @@ -69,7 +66,7 @@ class TestHITS: assert networkx.hub_matrix(G).shape == (0, 0) def test_empty_scipy(self): - scipy = pytest.importorskip('scipy') + scipy = pytest.importorskip("scipy") G = networkx.Graph() assert networkx.hits_scipy(G) == ({}, {}) diff --git a/networkx/algorithms/link_analysis/tests/test_pagerank.py b/networkx/algorithms/link_analysis/tests/test_pagerank.py index b0068464..4b8135f5 100644 --- a/networkx/algorithms/link_analysis/tests/test_pagerank.py +++ b/networkx/algorithms/link_analysis/tests/test_pagerank.py @@ -2,8 +2,9 @@ import random import networkx import pytest -numpy = pytest.importorskip('numpy') -scipy = pytest.importorskip('scipy') + +numpy = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") from networkx.testing import almost_equal @@ -13,36 +14,54 @@ from networkx.testing import almost_equal class TestPageRank: - @classmethod def setup_class(cls): G = networkx.DiGraph() - edges = [(1, 2), (1, 3), - # 2 is a dangling node - (3, 1), (3, 2), (3, 5), - (4, 5), (4, 6), - (5, 4), (5, 6), - (6, 4)] + edges = [ + (1, 2), + (1, 3), + # 2 is a dangling node + (3, 1), + (3, 2), + (3, 5), + (4, 5), + (4, 6), + (5, 4), + (5, 6), + (6, 4), + ] G.add_edges_from(edges) cls.G = G - cls.G.pagerank = dict(zip(sorted(G), - [0.03721197, 0.05395735, 0.04150565, - 0.37508082, 0.20599833, 0.28624589])) + cls.G.pagerank = dict( + zip( + sorted(G), + [ + 0.03721197, + 0.05395735, + 0.04150565, + 0.37508082, + 0.20599833, + 0.28624589, + ], + ) + ) cls.dangling_node_index = 1 - cls.dangling_edges = {1: 2, 2: 3, - 3: 0, 4: 0, 5: 0, 6: 0} - cls.G.dangling_pagerank = dict(zip(sorted(G), - [0.10844518, 0.18618601, 0.0710892, - 0.2683668, 0.15919783, 0.20671497])) + cls.dangling_edges = {1: 2, 2: 3, 3: 0, 4: 0, 5: 0, 6: 0} + cls.G.dangling_pagerank = dict( + zip( + sorted(G), + [0.10844518, 0.18618601, 0.0710892, 0.2683668, 0.15919783, 0.20671497], + ) + ) def test_pagerank(self): G = self.G - p = networkx.pagerank(G, alpha=0.9, tol=1.e-08) + p = networkx.pagerank(G, alpha=0.9, tol=1.0e-08) for n in G: assert almost_equal(p[n], G.pagerank[n], places=4) nstart = {n: random.random() for n in G} - p = networkx.pagerank(G, alpha=0.9, tol=1.e-08, nstart=nstart) + p = networkx.pagerank(G, alpha=0.9, tol=1.0e-08, nstart=nstart) for n in G: assert almost_equal(p[n], G.pagerank[n], places=4) @@ -69,7 +88,12 @@ class TestPageRank: def test_personalization(self): G = networkx.complete_graph(4) personalize = {0: 1, 1: 1, 2: 4, 3: 4} - answer = {0: 0.23246732615667579, 1: 0.23246732615667579, 2: 0.267532673843324, 3: 0.2675326738433241} + answer = { + 0: 0.23246732615667579, + 1: 0.23246732615667579, + 2: 0.267532673843324, + 3: 0.2675326738433241, + } p = 
networkx.pagerank(G, alpha=0.85, personalization=personalize) for n in G: assert almost_equal(p[n], answer[n], places=4) @@ -77,13 +101,19 @@ class TestPageRank: def test_zero_personalization_vector(self): G = networkx.complete_graph(4) personalize = {0: 0, 1: 0, 2: 0, 3: 0} - pytest.raises(ZeroDivisionError, networkx.pagerank, G, - personalization=personalize) + pytest.raises( + ZeroDivisionError, networkx.pagerank, G, personalization=personalize + ) def test_one_nonzero_personalization_value(self): G = networkx.complete_graph(4) personalize = {0: 0, 1: 0, 2: 0, 3: 1} - answer = {0: 0.22077931820379187, 1: 0.22077931820379187, 2: 0.22077931820379187, 3: 0.3376620453886241} + answer = { + 0: 0.22077931820379187, + 1: 0.22077931820379187, + 2: 0.22077931820379187, + 3: 0.3376620453886241, + } p = networkx.pagerank(G, alpha=0.85, personalization=personalize) for n in G: assert almost_equal(p[n], answer[n], places=4) @@ -91,7 +121,12 @@ class TestPageRank: def test_incomplete_personalization(self): G = networkx.complete_graph(4) personalize = {3: 1} - answer = {0: 0.22077931820379187, 1: 0.22077931820379187, 2: 0.22077931820379187, 3: 0.3376620453886241} + answer = { + 0: 0.22077931820379187, + 1: 0.22077931820379187, + 2: 0.22077931820379187, + 3: 0.3376620453886241, + } p = networkx.pagerank(G, alpha=0.85, personalization=personalize) for n in G: assert almost_equal(p[n], answer[n], places=4) @@ -105,14 +140,13 @@ class TestPageRank: dangling = self.dangling_edges dangling_sum = float(sum(dangling.values())) M1 = networkx.google_matrix(G, personalization=dangling) - M2 = networkx.google_matrix(G, personalization=dangling, - dangling=dangling) + M2 = networkx.google_matrix(G, personalization=dangling, dangling=dangling) for i in range(len(G)): for j in range(len(G)): if i == self.dangling_node_index and (j + 1) in dangling: - assert almost_equal(M2[i, j], - dangling[j + 1] / dangling_sum, - places=4) + assert almost_equal( + M2[i, j], dangling[j + 1] / dangling_sum, places=4 + ) else: assert almost_equal(M2[i, j], M1[i, j], places=4) @@ -134,18 +168,18 @@ class TestPageRank: class TestPageRankScipy(TestPageRank): - def test_scipy_pagerank(self): G = self.G - p = networkx.pagerank_scipy(G, alpha=0.9, tol=1.e-08) + p = networkx.pagerank_scipy(G, alpha=0.9, tol=1.0e-08) for n in G: assert almost_equal(p[n], G.pagerank[n], places=4) personalize = {n: random.random() for n in G} - p = networkx.pagerank_scipy(G, alpha=0.9, tol=1.e-08, - personalization=personalize) + p = networkx.pagerank_scipy( + G, alpha=0.9, tol=1.0e-08, personalization=personalize + ) nstart = {n: random.random() for n in G} - p = networkx.pagerank_scipy(G, alpha=0.9, tol=1.e-08, nstart=nstart) + p = networkx.pagerank_scipy(G, alpha=0.9, tol=1.0e-08, nstart=nstart) for n in G: assert almost_equal(p[n], G.pagerank[n], places=4) diff --git a/networkx/algorithms/link_prediction.py b/networkx/algorithms/link_prediction.py index 9fceb02f..77bd86b1 100644 --- a/networkx/algorithms/link_prediction.py +++ b/networkx/algorithms/link_prediction.py @@ -8,14 +8,16 @@ from math import log import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['resource_allocation_index', - 'jaccard_coefficient', - 'adamic_adar_index', - 'preferential_attachment', - 'cn_soundarajan_hopcroft', - 'ra_index_soundarajan_hopcroft', - 'within_inter_cluster', - 'common_neighbor_centrality'] +__all__ = [ + "resource_allocation_index", + "jaccard_coefficient", + "adamic_adar_index", + "preferential_attachment", + "cn_soundarajan_hopcroft", 
+ "ra_index_soundarajan_hopcroft", + "within_inter_cluster", + "common_neighbor_centrality", +] def _apply_prediction(G, func, ebunch=None): @@ -38,8 +40,8 @@ def _apply_prediction(G, func, ebunch=None): return ((u, v, func(u, v)) for u, v in ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def resource_allocation_index(G, ebunch=None): r"""Compute the resource allocation index of all node pairs in ebunch. @@ -86,13 +88,15 @@ def resource_allocation_index(G, ebunch=None): Eur. Phys. J. B 71 (2009) 623. https://arxiv.org/pdf/0901.0553.pdf """ + def predict(u, v): return sum(1 / G.degree(w) for w in nx.common_neighbors(G, u, v)) + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def jaccard_coefficient(G, ebunch=None): r"""Compute the Jaccard coefficient of all node pairs in ebunch. @@ -138,16 +142,18 @@ def jaccard_coefficient(G, ebunch=None): The Link Prediction Problem for Social Networks (2004). http://www.cs.cornell.edu/home/kleinber/link-pred.pdf """ + def predict(u, v): union_size = len(set(G[u]) | set(G[v])) if union_size == 0: return 0 return len(list(nx.common_neighbors(G, u, v))) / union_size + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def adamic_adar_index(G, ebunch=None): r"""Compute the Adamic-Adar index of all node pairs in ebunch. @@ -195,13 +201,16 @@ def adamic_adar_index(G, ebunch=None): The Link Prediction Problem for Social Networks (2004). http://www.cs.cornell.edu/home/kleinber/link-pred.pdf """ + def predict(u, v): return sum(1 / log(G.degree(w)) for w in nx.common_neighbors(G, u, v)) + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') -def common_neighbor_centrality(G, ebunch=None, alpha = 0.8): + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def common_neighbor_centrality(G, ebunch=None, alpha=0.8): r"""Return the CCPA score for each pair of nodes. Compute the Common Neighbor and Centrality based Parameterized Algorithm(CCPA) @@ -271,13 +280,17 @@ def common_neighbor_centrality(G, ebunch=None, alpha = 0.8): https://doi.org/10.1038/s41598-019-57304-y """ shortest_path = nx.shortest_path(G) + def predict(u, v): - return alpha*len(list(nx.common_neighbors(G, u , v))) \ - + (1-alpha)*(G.number_of_nodes()/(len(shortest_path[u][v]) - 1)) + return alpha * len(list(nx.common_neighbors(G, u, v))) + (1 - alpha) * ( + G.number_of_nodes() / (len(shortest_path[u][v]) - 1) + ) + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') + +@not_implemented_for("directed") +@not_implemented_for("multigraph") def preferential_attachment(G, ebunch=None): r"""Compute the preferential attachment score of all node pairs in ebunch. @@ -323,14 +336,16 @@ def preferential_attachment(G, ebunch=None): The Link Prediction Problem for Social Networks (2004). 
http://www.cs.cornell.edu/home/kleinber/link-pred.pdf """ + def predict(u, v): return G.degree(u) * G.degree(v) + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') -def cn_soundarajan_hopcroft(G, ebunch=None, community='community'): +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def cn_soundarajan_hopcroft(G, ebunch=None, community="community"): r"""Count the number of common neighbors of all node pairs in ebunch using community information. @@ -390,19 +405,22 @@ def cn_soundarajan_hopcroft(G, ebunch=None, community='community'): World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608. http://doi.acm.org/10.1145/2187980.2188150 """ + def predict(u, v): Cu = _community(G, u, community) Cv = _community(G, v, community) cnbors = list(nx.common_neighbors(G, u, v)) - neighbors = (sum(_community(G, w, community) == Cu for w in cnbors) - if Cu == Cv else 0) + neighbors = ( + sum(_community(G, w, community) == Cu for w in cnbors) if Cu == Cv else 0 + ) return len(cnbors) + neighbors + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') -def ra_index_soundarajan_hopcroft(G, ebunch=None, community='community'): +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def ra_index_soundarajan_hopcroft(G, ebunch=None, community="community"): r"""Compute the resource allocation index of all node pairs in ebunch using community information. @@ -464,20 +482,21 @@ def ra_index_soundarajan_hopcroft(G, ebunch=None, community='community'): World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608. http://doi.acm.org/10.1145/2187980.2188150 """ + def predict(u, v): Cu = _community(G, u, community) Cv = _community(G, v, community) if Cu != Cv: return 0 cnbors = nx.common_neighbors(G, u, v) - return sum(1 / G.degree(w) for w in cnbors - if _community(G, w, community) == Cu) + return sum(1 / G.degree(w) for w in cnbors if _community(G, w, community) == Cu) + return _apply_prediction(G, predict, ebunch) -@not_implemented_for('directed') -@not_implemented_for('multigraph') -def within_inter_cluster(G, ebunch=None, delta=0.001, community='community'): +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def within_inter_cluster(G, ebunch=None, delta=0.001, community="community"): """Compute the ratio of within- and inter-cluster common neighbors of all node pairs in ebunch. 
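Aside: every predictor touched in this file funnels through _apply_prediction and yields (u, v, score) triples lazily; a minimal usage sketch, with graph and ebunch chosen purely for illustration:

import networkx as nx

G = nx.complete_graph(5)
# ebunch restricts scoring to the given candidate pairs.
for u, v, score in nx.jaccard_coefficient(G, ebunch=[(0, 1), (2, 3)]):
    print(f"({u}, {v}) -> {score:.2f}")   # 0.60 for every pair in K5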
@@ -544,7 +563,7 @@ def within_inter_cluster(G, ebunch=None, delta=0.001, community='community'): https://doi.org/10.1007/978-3-642-34459-6_10 """ if delta <= 0: - raise nx.NetworkXAlgorithmError('Delta must be greater than zero') + raise nx.NetworkXAlgorithmError("Delta must be greater than zero") def predict(u, v): Cu = _community(G, u, community) @@ -552,8 +571,7 @@ def within_inter_cluster(G, ebunch=None, delta=0.001, community='community'): if Cu != Cv: return 0 cnbors = set(nx.common_neighbors(G, u, v)) - within = {w for w in cnbors - if _community(G, w, community) == Cu} + within = {w for w in cnbors if _community(G, w, community) == Cu} inter = cnbors - within return len(within) / (len(inter) + delta) @@ -566,4 +584,4 @@ def _community(G, u, community): try: return node_u[community] except KeyError as e: - raise nx.NetworkXAlgorithmError('No community information') from e + raise nx.NetworkXAlgorithmError("No community information") from e diff --git a/networkx/algorithms/lowest_common_ancestors.py b/networkx/algorithms/lowest_common_ancestors.py index 4b0bfe61..7961cedb 100644 --- a/networkx/algorithms/lowest_common_ancestors.py +++ b/networkx/algorithms/lowest_common_ancestors.py @@ -4,12 +4,18 @@ from collections.abc import Mapping, Set from itertools import chain, count import networkx as nx -from networkx.utils import arbitrary_element, not_implemented_for, \ - UnionFind, generate_unique_node +from networkx.utils import ( + arbitrary_element, + not_implemented_for, + UnionFind, + generate_unique_node, +) -__all__ = ["all_pairs_lowest_common_ancestor", - "tree_all_pairs_lowest_common_ancestor", - "lowest_common_ancestor"] +__all__ = [ + "all_pairs_lowest_common_ancestor", + "tree_all_pairs_lowest_common_ancestor", + "lowest_common_ancestor", +] @not_implemented_for("undirected") @@ -96,7 +102,7 @@ def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None): colors = defaultdict(bool) for node in nx.dfs_postorder_nodes(G, root): colors[node] = True - for v in (pair_dict[node] if pairs is not None else G): + for v in pair_dict[node] if pairs is not None else G: if colors[v]: # If the user requested both directions of a pair, give it. # Otherwise, just give one. @@ -200,7 +206,7 @@ def all_pairs_lowest_common_ancestor(G, pairs=None): # This will always produce correct results and avoid unnecessary # copies in many common cases. # - if (not isinstance(pairs, (Mapping, Set)) and pairs is not None): + if not isinstance(pairs, (Mapping, Set)) and pairs is not None: pairs = set(pairs) # Convert G into a dag with a single root by adding a node with edges to @@ -219,8 +225,11 @@ def all_pairs_lowest_common_ancestor(G, pairs=None): # We will then use the tree lca algorithm on the spanning tree, and use # the DAG to figure out the set of tree queries necessary. spanning_tree = nx.dfs_tree(G, root) - dag = nx.DiGraph((u, v) for u, v in G.edges - if u not in spanning_tree or v not in spanning_tree[u]) + dag = nx.DiGraph( + (u, v) + for u, v in G.edges + if u not in spanning_tree or v not in spanning_tree[u] + ) # Ensure that both the dag and the spanning tree contains all nodes in G, # even nodes that are disconnected in the dag. @@ -285,15 +294,16 @@ def all_pairs_lowest_common_ancestor(G, pairs=None): Index can be 0 or 1 (or None if exhausted). 
""" index1, index2 = indices - if (index1 >= len(ancestors[node1]) and - index2 >= len(ancestors[node2])): + if index1 >= len(ancestors[node1]) and index2 >= len(ancestors[node2]): return None elif index1 >= len(ancestors[node1]): return 1 elif index2 >= len(ancestors[node2]): return 0 - elif (euler_tour_pos[ancestors[node1][index1]] < - euler_tour_pos[ancestors[node2][index2]]): + elif ( + euler_tour_pos[ancestors[node1][index1]] + < euler_tour_pos[ancestors[node2][index2]] + ): return 0 else: return 1 @@ -321,8 +331,9 @@ def all_pairs_lowest_common_ancestor(G, pairs=None): ans = tree_lca[tree_node1, tree_node2] else: ans = tree_lca[tree_node2, tree_node1] - if not dry_run and (best is None or - root_distance[ans] > best_root_distance): + if not dry_run and ( + best is None or root_distance[ans] > best_root_distance + ): best_root_distance = root_distance[ans] best = ans @@ -335,8 +346,7 @@ def all_pairs_lowest_common_ancestor(G, pairs=None): # tree lca. if pairs is None: # We want all pairs so we'll need the entire tree. - tree_lca = dict(tree_all_pairs_lowest_common_ancestor(spanning_tree, - root)) + tree_lca = dict(tree_all_pairs_lowest_common_ancestor(spanning_tree, root)) else: # We only need the merged adjacent pairs by seeing which queries the # algorithm needs then generating them in a single pass. @@ -345,9 +355,9 @@ def all_pairs_lowest_common_ancestor(G, pairs=None): pass # Replace the bogus default tree values with the real ones. - for (pair, lca) in tree_all_pairs_lowest_common_ancestor(spanning_tree, - root, - tree_lca): + for (pair, lca) in tree_all_pairs_lowest_common_ancestor( + spanning_tree, root, tree_lca + ): tree_lca[pair] = lca # All precomputations complete. Now we just need to give the user the pairs diff --git a/networkx/algorithms/matching.py b/networkx/algorithms/matching.py index 18f5b653..c6e5fd1c 100644 --- a/networkx/algorithms/matching.py +++ b/networkx/algorithms/matching.py @@ -3,8 +3,13 @@ from collections import Counter from itertools import combinations from itertools import repeat -__all__ = ['is_matching', 'is_maximal_matching', 'is_perfect_matching', - 'max_weight_matching', 'maximal_matching'] +__all__ = [ + "is_matching", + "is_maximal_matching", + "is_perfect_matching", + "max_weight_matching", + "maximal_matching", +] def maximal_matching(G): @@ -88,8 +93,7 @@ def is_matching(G, matching): if isinstance(matching, dict): matching = matching_dict_to_set(matching) # TODO This is parallelizable. - return all(len(set(e1) & set(e2)) == 0 - for e1, e2 in combinations(matching, 2)) + return all(len(set(e1) & set(e2)) == 0 for e1, e2 in combinations(matching, 2)) def is_maximal_matching(G, matching): @@ -172,7 +176,7 @@ def is_perfect_matching(G, matching): return all(counts[v] == 1 for v in G) -def max_weight_matching(G, maxcardinality=False, weight='weight'): +def max_weight_matching(G, maxcardinality=False, weight="weight"): """Compute a maximum-weighted matching of G. A matching is a subset of edges in which no node occurs more than once. @@ -239,12 +243,13 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): class NoNode: """Dummy value which is different from any node.""" + pass class Blossom: """Representation of a non-trivial blossom or sub-blossom.""" - __slots__ = ['childs', 'edges', 'mybestedges'] + __slots__ = ["childs", "edges", "mybestedges"] # b.childs is an ordered list of b's sub-blossoms, starting with # the base and going round the blossom. 
@@ -278,8 +283,7 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): wt = d.get(weight, 1) if i != j and wt > maxweight: maxweight = wt - allinteger = allinteger and (str(type(wt)).split("'")[1] - in ('int', 'long')) + allinteger = allinteger and (str(type(wt)).split("'")[1] in ("int", "long")) # If v is a matched vertex, mate[v] is its partner vertex. # If v is a single vertex, v does not occur as a key in mate. @@ -436,8 +440,9 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): blossomparent[bv] = b path.append(bv) edgs.append(labeledge[bv]) - assert label[bv] == 2 or (label[bv] == 1 and labeledge[ - bv][0] == mate[blossombase[bv]]) + assert label[bv] == 2 or ( + label[bv] == 1 and labeledge[bv][0] == mate[blossombase[bv]] + ) # Trace one step back. v = labeledge[bv][0] bv = inblossom[v] @@ -451,8 +456,9 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): blossomparent[bw] = b path.append(bw) edgs.append((labeledge[bw][1], labeledge[bw][0])) - assert label[bw] == 2 or (label[bw] == 1 and labeledge[ - bw][0] == mate[blossombase[bw]]) + assert label[bw] == 2 or ( + label[bw] == 1 and labeledge[bw][0] == mate[blossombase[bw]] + ) # Trace one step back. w = labeledge[bw][0] bw = inblossom[w] @@ -481,22 +487,21 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): else: # This subblossom does not have a list of least-slack # edges; get the information from the vertices. - nblist = [(v, w) - for v in bv.leaves() - for w in G.neighbors(v) - if v != w] + nblist = [ + (v, w) for v in bv.leaves() for w in G.neighbors(v) if v != w + ] else: - nblist = [(bv, w) - for w in G.neighbors(bv) - if bv != w] + nblist = [(bv, w) for w in G.neighbors(bv) if bv != w] for k in nblist: (i, j) = k if inblossom[j] == b: i, j = j, i bj = inblossom[j] - if (bj != b and label.get(bj) == 1 and - ((bj not in bestedgeto) or - slack(i, j) < slack(*bestedgeto[bj]))): + if ( + bj != b + and label.get(bj) == 1 + and ((bj not in bestedgeto) or slack(i, j) < slack(*bestedgeto[bj])) + ): bestedgeto[bj] = k # Forget about least-slack edge of the subblossom. bestedge[bv] = None @@ -661,9 +666,9 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): while 1: bs = inblossom[s] assert label[bs] == 1 - assert ( - labeledge[bs] is None and blossombase[bs] not in mate)\ - or (labeledge[bs][0] == mate[blossombase[bs]]) + assert (labeledge[bs] is None and blossombase[bs] not in mate) or ( + labeledge[bs][0] == mate[blossombase[bs]] + ) # Augment through the S-blossom from s to base. if isinstance(bs, Blossom): augmentBlossom(bs, s) @@ -823,15 +828,13 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): elif label.get(bw) == 1: # keep track of the least-slack non-allowable edge to # a different S-blossom. - if bestedge.get(bv) is None or \ - kslack < slack(*bestedge[bv]): + if bestedge.get(bv) is None or kslack < slack(*bestedge[bv]): bestedge[bv] = (v, w) elif label.get(w) is None: # w is a free vertex (or an unreached vertex inside # a T-blossom) but we can not reach it yet; # keep track of the least-slack edge that reaches w. - if bestedge.get(w) is None or \ - kslack < slack(*bestedge[w]): + if bestedge.get(w) is None or kslack < slack(*bestedge[w]): bestedge[w] = (v, w) if augmented: @@ -852,8 +855,7 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): # Compute delta2: the minimum slack on any edge between # an S-vertex and a free vertex. 
for v in G.nodes(): - if label.get(inblossom[v]) is None and \ - bestedge.get(v) is not None: + if label.get(inblossom[v]) is None and bestedge.get(v) is not None: d = slack(*bestedge[v]) if deltatype == -1 or d < delta: delta = d @@ -863,8 +865,11 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): # Compute delta3: half the minimum slack on any edge between # a pair of S-blossoms. for b in blossomparent: - if (blossomparent[b] is None and label.get(b) == 1 and - bestedge.get(b) is not None): + if ( + blossomparent[b] is None + and label.get(b) == 1 + and bestedge.get(b) is not None + ): kslack = slack(*bestedge[b]) if allinteger: assert (kslack % 2) == 0 @@ -878,8 +883,11 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): # Compute delta4: minimum z variable of any T-blossom. for b in blossomdual: - if (blossomparent[b] is None and label.get(b) == 2 and - (deltatype == -1 or blossomdual[b] < delta)): + if ( + blossomparent[b] is None + and label.get(b) == 2 + and (deltatype == -1 or blossomdual[b] < delta) + ): delta = blossomdual[b] deltatype = 4 deltablossom = b @@ -943,8 +951,7 @@ def max_weight_matching(G, maxcardinality=False, weight='weight'): for b in list(blossomdual.keys()): if b not in blossomdual: continue # already expanded - if (blossomparent[b] is None and label.get(b) == 1 and - blossomdual[b] == 0): + if blossomparent[b] is None and label.get(b) == 1 and blossomdual[b] == 0: expandBlossom(b, True) # Verify that we reached the optimum solution (only for integer weights). diff --git a/networkx/algorithms/minors.py b/networkx/algorithms/minors.py index 22c82f7d..74b0da09 100644 --- a/networkx/algorithms/minors.py +++ b/networkx/algorithms/minors.py @@ -9,8 +9,7 @@ from networkx import density from networkx.exception import NetworkXException from networkx.utils import arbitrary_element -__all__ = ['contracted_edge', 'contracted_nodes', - 'identified_nodes', 'quotient_graph'] +__all__ = ["contracted_edge", "contracted_nodes", "identified_nodes", "quotient_graph"] chaini = chain.from_iterable @@ -50,8 +49,15 @@ def equivalence_classes(iterable, relation): return {frozenset(block) for block in blocks} -def quotient_graph(G, partition, edge_relation=None, node_data=None, - edge_data=None, relabel=False, create_using=None): +def quotient_graph( + G, + partition, + edge_relation=None, + node_data=None, + edge_data=None, + relabel=False, + create_using=None, +): """Returns the quotient graph of `G` under the specified equivalence relation on nodes. @@ -209,8 +215,9 @@ def quotient_graph(G, partition, edge_relation=None, node_data=None, if callable(partition): # equivalence_classes always return partition of whole G. partition = equivalence_classes(G, partition) - return _quotient_graph(G, partition, edge_relation, node_data, - edge_data, relabel, create_using) + return _quotient_graph( + G, partition, edge_relation, node_data, edge_data, relabel, create_using + ) # If the user provided partition as a collection of sets. Then we # need to check if partition covers all of G nodes. 
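Since quotient_graph's signature is the largest visual change in this file, a sketch of a call against it, with a toy path graph and partition invented for illustration:

import networkx as nx

G = nx.path_graph(6)
partition = [{0, 1}, {2, 3}, {4, 5}]
Q = nx.quotient_graph(G, partition, relabel=True)
sorted(Q.edges())        # [(0, 1), (1, 2)]: blocks joined by any crossing edge
Q.nodes[0]["nnodes"]     # 2, filled in by the default node_data in the next hunk
Q.edges[0, 1]["weight"]  # 1, the summed weight of crossing edges (edge_data default)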
If the answer @@ -218,15 +225,23 @@ def quotient_graph(G, partition, edge_relation=None, node_data=None, partition_nodes = set().union(*partition) if len(partition_nodes) != len(G): G = G.subgraph(partition_nodes) - return _quotient_graph(G, partition, edge_relation, node_data, - edge_data, relabel, create_using) - - -def _quotient_graph(G, partition, edge_relation=None, node_data=None, - edge_data=None, relabel=False, create_using=None): + return _quotient_graph( + G, partition, edge_relation, node_data, edge_data, relabel, create_using + ) + + +def _quotient_graph( + G, + partition, + edge_relation=None, + node_data=None, + edge_data=None, + relabel=False, + create_using=None, +): # Each node in the graph must be in exactly one block. if any(sum(1 for b in partition if v in b) != 1 for v in G): - raise NetworkXException('each node must be in exactly one block') + raise NetworkXException("each node must be in exactly one block") if create_using is None: H = G.__class__() else: @@ -234,10 +249,13 @@ def _quotient_graph(G, partition, edge_relation=None, node_data=None, # By default set some basic information about the subgraph that each block # represents on the nodes in the quotient graph. if node_data is None: + def node_data(b): S = G.subgraph(b) - return dict(graph=S, nnodes=len(S), nedges=S.number_of_edges(), - density=density(S)) + return dict( + graph=S, nnodes=len(S), nedges=S.number_of_edges(), density=density(S) + ) + # Each block of the partition becomes a node in the quotient graph. partition = [frozenset(b) for b in partition] H.add_nodes_from((b, node_data(b)) for b in partition) @@ -249,28 +267,42 @@ def _quotient_graph(G, partition, edge_relation=None, node_data=None, # there are O(n^2) pairs to check and each check may require O(log n) time # (to check set membership). This can certainly be parallelized. if edge_relation is None: + def edge_relation(b, c): return any(v in G[u] for u, v in product(b, c)) + # By default, sum the weights of the edges joining pairs of nodes across # blocks to get the weight of the edge joining those two blocks. if edge_data is None: + def edge_data(b, c): - edgedata = (d for u, v, d in G.edges(b | c, data=True) - if (u in b and v in c) or (u in c and v in b)) - return {'weight': sum(d.get('weight', 1) for d in edgedata)} + edgedata = ( + d + for u, v, d in G.edges(b | c, data=True) + if (u in b and v in c) or (u in c and v in b) + ) + return {"weight": sum(d.get("weight", 1) for d in edgedata)} + block_pairs = permutations(H, 2) if H.is_directed() else combinations(H, 2) # In a multigraph, add one edge in the quotient graph for each edge # in the original graph. if H.is_multigraph(): - edges = chaini(((b, c, G.get_edge_data(u, v, default={})) - for u, v in product(b, c) if v in G[u]) - for b, c in block_pairs if edge_relation(b, c)) + edges = chaini( + ( + (b, c, G.get_edge_data(u, v, default={})) + for u, v in product(b, c) + if v in G[u] + ) + for b, c in block_pairs + if edge_relation(b, c) + ) # In a simple graph, apply the edge data function to each pair of # blocks to determine the edge data attributes to apply to each edge # in the quotient graph. 
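For contracted_nodes, whose edge-rerouting generators are re-wrapped just below, a small sketch on a 4-cycle invented for illustration:

import networkx as nx

G = nx.cycle_graph(4)                      # edges 0-1, 1-2, 2-3, 3-0
H = nx.contracted_nodes(G, 0, 2, self_loops=False)
sorted(H.edges())          # [(0, 1), (0, 3)]: node 2's edges rerouted onto 0
H.nodes[0]["contraction"]  # {2: {}}, the record written by the code below
# nx.contracted_edge(G, (0, 1)) is the same operation keyed by an existing edge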
else: - edges = ((b, c, edge_data(b, c)) for (b, c) in block_pairs - if edge_relation(b, c)) + edges = ( + (b, c, edge_data(b, c)) for (b, c) in block_pairs if edge_relation(b, c) + ) H.add_edges_from(edges) # If requested by the user, relabel the nodes to be integers, # numbered in increasing order from zero in the same order as the @@ -365,17 +397,23 @@ def contracted_nodes(G, u, v, self_loops=True, copy=True): # edge code uses G.edges(v) instead of G.adj[v] to handle multiedges if H.is_directed(): - in_edges = ((w if w != v else u, u, d) - for w, x, d in G.in_edges(v, data=True) - if self_loops or w != u) - out_edges = ((u, w if w != v else u, d) - for x, w, d in G.out_edges(v, data=True) - if self_loops or w != u) + in_edges = ( + (w if w != v else u, u, d) + for w, x, d in G.in_edges(v, data=True) + if self_loops or w != u + ) + out_edges = ( + (u, w if w != v else u, d) + for x, w, d in G.out_edges(v, data=True) + if self_loops or w != u + ) new_edges = chain(in_edges, out_edges) else: - new_edges = ((u, w if w != v else u, d) - for x, w, d in G.edges(v, data=True) - if self_loops or w != u) + new_edges = ( + (u, w if w != v else u, d) + for x, w, d in G.edges(v, data=True) + if self_loops or w != u + ) # If the H=G, the generators change as H changes # This makes the new_edges independent of H @@ -386,10 +424,10 @@ def contracted_nodes(G, u, v, self_loops=True, copy=True): H.remove_node(v) H.add_edges_from(new_edges) - if 'contraction' in H.nodes[u]: - H.nodes[u]['contraction'][v] = v_data + if "contraction" in H.nodes[u]: + H.nodes[u]["contraction"][v] = v_data else: - H.nodes[u]['contraction'] = {v: v_data} + H.nodes[u]["contraction"] = {v: v_data} return H @@ -458,5 +496,5 @@ def contracted_edge(G, edge, self_loops=True): """ if not G.has_edge(*edge): - raise ValueError(f'Edge {edge} does not exist in graph G; cannot contract it') + raise ValueError(f"Edge {edge} does not exist in graph G; cannot contract it") return contracted_nodes(G, *edge, self_loops=self_loops) diff --git a/networkx/algorithms/mis.py b/networkx/algorithms/mis.py index da590ec7..8a5f1b40 100644 --- a/networkx/algorithms/mis.py +++ b/networkx/algorithms/mis.py @@ -6,11 +6,11 @@ import networkx as nx from networkx.utils import not_implemented_for from networkx.utils import py_random_state -__all__ = ['maximal_independent_set'] +__all__ = ["maximal_independent_set"] @py_random_state(2) -@not_implemented_for('directed') +@not_implemented_for("directed") def maximal_independent_set(G, nodes=None, seed=None): """Returns a random maximal independent set guaranteed to contain a given set of nodes. 
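A one-line sketch of maximal_independent_set, whose decorators were re-quoted above (graph and seed invented for illustration; the result is randomized, but always independent, maximal, and a superset of the given nodes):

import networkx as nx

G = nx.path_graph(5)  # 0-1-2-3-4
S = nx.maximal_independent_set(G, [1], seed=42)
# e.g. [1, 3]: contains node 1, no two members adjacent, no node can be added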
diff --git a/networkx/algorithms/moral.py b/networkx/algorithms/moral.py index 48140680..c81e2cb9 100644 --- a/networkx/algorithms/moral.py +++ b/networkx/algorithms/moral.py @@ -3,10 +3,10 @@ r"""Function for computing the moral graph of a directed graph.""" from networkx.utils import not_implemented_for import itertools -__all__ = ['moral_graph'] +__all__ = ["moral_graph"] -@not_implemented_for('undirected') +@not_implemented_for("undirected") def moral_graph(G): r"""Return the Moral Graph diff --git a/networkx/algorithms/node_classification/hmn.py b/networkx/algorithms/node_classification/hmn.py index a5a6bae5..3a977357 100644 --- a/networkx/algorithms/node_classification/hmn.py +++ b/networkx/algorithms/node_classification/hmn.py @@ -16,11 +16,11 @@ from networkx.algorithms.node_classification.utils import ( _predict, ) -__all__ = ['harmonic_function'] +__all__ = ["harmonic_function"] -@not_implemented_for('directed') -def harmonic_function(G, max_iter=30, label_name='label'): +@not_implemented_for("directed") +def harmonic_function(G, max_iter=30, label_name="label"): """Node classification by Harmonic function Parameters @@ -65,12 +65,14 @@ def harmonic_function(G, max_iter=30, label_name='label'): import numpy as np except ImportError as e: raise ImportError( - "harmonic_function() requires numpy: http://numpy.org/ ") from e + "harmonic_function() requires numpy: http://numpy.org/ " + ) from e try: from scipy import sparse except ImportError as e: raise ImportError( - "harmonic_function() requires scipy: http://scipy.org/ ") from e + "harmonic_function() requires scipy: http://scipy.org/ " + ) from e def _build_propagation_matrix(X, labels): """Build propagation matrix of Harmonic function @@ -122,7 +124,8 @@ def harmonic_function(G, max_iter=30, label_name='label'): if labels.shape[0] == 0: raise nx.NetworkXError( - "No node on the input graph is labeled by '" + label_name + "'.") + "No node on the input graph is labeled by '" + label_name + "'." 
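moral_graph is only re-quoted above, but its semantics fit in one sketch (a collider DAG invented for illustration):

import networkx as nx

G = nx.DiGraph([(1, 2), (3, 2)])  # collider: 1 -> 2 <- 3
M = nx.moral_graph(G)
sorted(M.edges())  # [(1, 2), (1, 3), (2, 3)]: parents 1 and 3 are "married"
# and edge directions are dropped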
+ ) n_samples = X.shape[0] n_classes = label_dict.shape[0] diff --git a/networkx/algorithms/node_classification/lgc.py b/networkx/algorithms/node_classification/lgc.py index e48f2f44..b87cd3a8 100644 --- a/networkx/algorithms/node_classification/lgc.py +++ b/networkx/algorithms/node_classification/lgc.py @@ -16,13 +16,11 @@ from networkx.algorithms.node_classification.utils import ( _predict, ) -__all__ = ['local_and_global_consistency'] +__all__ = ["local_and_global_consistency"] -@not_implemented_for('directed') -def local_and_global_consistency(G, alpha=0.99, - max_iter=30, - label_name='label'): +@not_implemented_for("directed") +def local_and_global_consistency(G, alpha=0.99, max_iter=30, label_name="label"): """Node classification by Local and Global Consistency Parameters @@ -70,14 +68,14 @@ def local_and_global_consistency(G, alpha=0.99, import numpy as np except ImportError as e: raise ImportError( - "local_and_global_consistency() requires numpy: ", - "http://numpy.org/ ") from e + "local_and_global_consistency() requires numpy: ", "http://numpy.org/ " + ) from e try: from scipy import sparse except ImportError as e: raise ImportError( - "local_and_global_consistensy() requires scipy: ", - "http://scipy.org/ ") from e + "local_and_global_consistensy() requires scipy: ", "http://scipy.org/ " + ) from e def _build_propagation_matrix(X, labels, alpha): """Build propagation matrix of Local and global consistency @@ -133,7 +131,8 @@ def local_and_global_consistency(G, alpha=0.99, if labels.shape[0] == 0: raise nx.NetworkXError( - "No node on the input graph is labeled by '" + label_name + "'.") + "No node on the input graph is labeled by '" + label_name + "'." + ) n_samples = X.shape[0] n_classes = label_dict.shape[0] diff --git a/networkx/algorithms/node_classification/tests/test_harmonic_function.py b/networkx/algorithms/node_classification/tests/test_harmonic_function.py index 019c2ce2..c8379262 100644 --- a/networkx/algorithms/node_classification/tests/test_harmonic_function.py +++ b/networkx/algorithms/node_classification/tests/test_harmonic_function.py @@ -1,24 +1,23 @@ import pytest -numpy = pytest.importorskip('numpy') -scipy = pytest.importorskip('scipy') + +numpy = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") import networkx as nx from networkx.algorithms import node_classification class TestHarmonicFunction: - def test_path_graph(self): G = nx.path_graph(4) - label_name = 'label' - G.nodes[0][label_name] = 'A' - G.nodes[3][label_name] = 'B' - predicted = node_classification.harmonic_function( - G, label_name=label_name) - assert predicted[0] == 'A' - assert predicted[1] == 'A' - assert predicted[2] == 'B' - assert predicted[3] == 'B' + label_name = "label" + G.nodes[0][label_name] = "A" + G.nodes[3][label_name] = "B" + predicted = node_classification.harmonic_function(G, label_name=label_name) + assert predicted[0] == "A" + assert predicted[1] == "A" + assert predicted[2] == "B" + assert predicted[3] == "B" def test_no_labels(self): with pytest.raises(nx.NetworkXError): @@ -43,38 +42,35 @@ class TestHarmonicFunction: G.add_edge(0, 1) G.add_edge(1, 2) G.add_edge(2, 3) - label_name = 'label' - G.nodes[0][label_name] = 'A' - G.nodes[3][label_name] = 'B' + label_name = "label" + G.nodes[0][label_name] = "A" + G.nodes[3][label_name] = "B" node_classification.harmonic_function(G) def test_one_labeled_node(self): G = nx.path_graph(4) - label_name = 'label' - G.nodes[0][label_name] = 'A' - predicted = node_classification.harmonic_function( - G, 
label_name=label_name) - assert predicted[0] == 'A' - assert predicted[1] == 'A' - assert predicted[2] == 'A' - assert predicted[3] == 'A' + label_name = "label" + G.nodes[0][label_name] = "A" + predicted = node_classification.harmonic_function(G, label_name=label_name) + assert predicted[0] == "A" + assert predicted[1] == "A" + assert predicted[2] == "A" + assert predicted[3] == "A" def test_nodes_all_labeled(self): G = nx.karate_club_graph() - label_name = 'club' - predicted = node_classification.harmonic_function( - G, label_name=label_name) + label_name = "club" + predicted = node_classification.harmonic_function(G, label_name=label_name) for i in range(len(G)): assert predicted[i] == G.nodes[i][label_name] def test_labeled_nodes_are_not_changed(self): G = nx.karate_club_graph() - label_name = 'club' + label_name = "club" label_removed = {0, 1, 2, 3, 4, 5, 6, 7} for i in label_removed: del G.nodes[i][label_name] - predicted = node_classification.harmonic_function( - G, label_name=label_name) + predicted = node_classification.harmonic_function(G, label_name=label_name) label_not_removed = set(list(range(len(G)))) - label_removed for i in label_not_removed: assert predicted[i] == G.nodes[i][label_name] diff --git a/networkx/algorithms/node_classification/tests/test_local_and_global_consistency.py b/networkx/algorithms/node_classification/tests/test_local_and_global_consistency.py index 11e4f595..163c0218 100644 --- a/networkx/algorithms/node_classification/tests/test_local_and_global_consistency.py +++ b/networkx/algorithms/node_classification/tests/test_local_and_global_consistency.py @@ -1,6 +1,7 @@ import pytest -numpy = pytest.importorskip('numpy') -scipy = pytest.importorskip('scipy') + +numpy = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") import networkx as nx @@ -8,18 +9,18 @@ from networkx.algorithms import node_classification class TestLocalAndGlobalConsistency: - def test_path_graph(self): G = nx.path_graph(4) - label_name = 'label' - G.nodes[0][label_name] = 'A' - G.nodes[3][label_name] = 'B' + label_name = "label" + G.nodes[0][label_name] = "A" + G.nodes[3][label_name] = "B" predicted = node_classification.local_and_global_consistency( - G, label_name=label_name) - assert predicted[0] == 'A' - assert predicted[1] == 'A' - assert predicted[2] == 'B' - assert predicted[3] == 'B' + G, label_name=label_name + ) + assert predicted[0] == "A" + assert predicted[1] == "A" + assert predicted[2] == "B" + assert predicted[3] == "B" def test_no_labels(self): with pytest.raises(nx.NetworkXError): @@ -44,26 +45,28 @@ class TestLocalAndGlobalConsistency: G.add_edge(0, 1) G.add_edge(1, 2) G.add_edge(2, 3) - label_name = 'label' - G.nodes[0][label_name] = 'A' - G.nodes[3][label_name] = 'B' + label_name = "label" + G.nodes[0][label_name] = "A" + G.nodes[3][label_name] = "B" node_classification.harmonic_function(G) def test_one_labeled_node(self): G = nx.path_graph(4) - label_name = 'label' - G.nodes[0][label_name] = 'A' + label_name = "label" + G.nodes[0][label_name] = "A" predicted = node_classification.local_and_global_consistency( - G, label_name=label_name) - assert predicted[0] == 'A' - assert predicted[1] == 'A' - assert predicted[2] == 'A' - assert predicted[3] == 'A' + G, label_name=label_name + ) + assert predicted[0] == "A" + assert predicted[1] == "A" + assert predicted[2] == "A" + assert predicted[3] == "A" def test_nodes_all_labeled(self): G = nx.karate_club_graph() - label_name = 'club' + label_name = "club" predicted = 
node_classification.local_and_global_consistency( - G, alpha=0, label_name=label_name) + G, alpha=0, label_name=label_name + ) for i in range(len(G)): assert predicted[i] == G.nodes[i][label_name] diff --git a/networkx/algorithms/node_classification/utils.py b/networkx/algorithms/node_classification/utils.py index 70e8730d..4f801381 100644 --- a/networkx/algorithms/node_classification/utils.py +++ b/networkx/algorithms/node_classification/utils.py @@ -49,8 +49,9 @@ def _get_label_info(G, label_name): lid += 1 labels.append([i, label_to_id[label]]) labels = np.array(labels) - label_dict = np.array([label for label, _ in sorted( - label_to_id.items(), key=lambda x:x[1])]) + label_dict = np.array( + [label for label, _ in sorted(label_to_id.items(), key=lambda x: x[1])] + ) return (labels, label_dict) diff --git a/networkx/algorithms/non_randomness.py b/networkx/algorithms/non_randomness.py index d35bb77f..3dc77fe4 100644 --- a/networkx/algorithms/non_randomness.py +++ b/networkx/algorithms/non_randomness.py @@ -5,11 +5,11 @@ import math import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['non_randomness'] +__all__ = ["non_randomness"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def non_randomness(G, k=None): """Compute the non-randomness of graph G. @@ -58,7 +58,7 @@ def non_randomness(G, k=None): if not nx.is_connected(G): raise nx.NetworkXException("Non connected graph.") if len(list(nx.selfloop_edges(G))) > 0: - raise nx.NetworkXError('Graph must not contain self-loops') + raise nx.NetworkXError("Graph must not contain self-loops") if k is None: k = len(tuple(nx.community.label_propagation_communities(G))) diff --git a/networkx/algorithms/operators/all.py b/networkx/algorithms/operators/all.py index 7f369991..a08f634d 100644 --- a/networkx/algorithms/operators/all.py +++ b/networkx/algorithms/operators/all.py @@ -3,8 +3,7 @@ from itertools import zip_longest import networkx as nx -__all__ = ['union_all', 'compose_all', 'disjoint_union_all', - 'intersection_all'] +__all__ = ["union_all", "compose_all", "disjoint_union_all", "intersection_all"] def union_all(graphs, rename=(None,)): @@ -46,7 +45,7 @@ def union_all(graphs, rename=(None,)): disjoint_union_all """ if not graphs: - raise ValueError('cannot apply union_all to an empty list') + raise ValueError("cannot apply union_all to an empty list") graphs_names = zip_longest(graphs, rename) U, gname = next(graphs_names) for H, hname in graphs_names: @@ -84,7 +83,7 @@ def disjoint_union_all(graphs): from the last graph in the list with that attribute is used. """ if not graphs: - raise ValueError('cannot apply disjoint_union_all to an empty list') + raise ValueError("cannot apply disjoint_union_all to an empty list") graphs = iter(graphs) U = next(graphs) for H in graphs: @@ -122,7 +121,7 @@ def compose_all(graphs): from the last graph in the list with that attribute is used. """ if not graphs: - raise ValueError('cannot apply compose_all to an empty list') + raise ValueError("cannot apply compose_all to an empty list") graphs = iter(graphs) C = next(graphs) for H in graphs: @@ -156,7 +155,7 @@ def intersection_all(graphs): graph. 
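The *_all operators above now raise double-quoted ValueErrors on empty input but are otherwise untouched. A quick sketch with two tiny path graphs invented for illustration:

import networkx as nx

G1, G2 = nx.path_graph(2), nx.path_graph(2)
sorted(nx.union_all([G1, G2], rename=("a", "b")))  # ["a0", "a1", "b0", "b1"]
sorted(nx.disjoint_union_all([G1, G2]))            # [0, 1, 2, 3]
nx.intersection_all([G1, G2]).number_of_edges()    # 1, the shared edge (0, 1)

Note that union_all requires disjoint node sets, hence the rename prefixes; disjoint_union_all relabels to consecutive integers instead.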
""" if not graphs: - raise ValueError('cannot apply intersection_all to an empty list') + raise ValueError("cannot apply intersection_all to an empty list") graphs = iter(graphs) R = next(graphs) for H in graphs: diff --git a/networkx/algorithms/operators/binary.py b/networkx/algorithms/operators/binary.py index cd6d71ec..d3aacbbd 100644 --- a/networkx/algorithms/operators/binary.py +++ b/networkx/algorithms/operators/binary.py @@ -3,8 +3,15 @@ Operations on graphs including union, intersection, difference. """ import networkx as nx -__all__ = ['union', 'compose', 'disjoint_union', 'intersection', - 'difference', 'symmetric_difference', 'full_join'] +__all__ = [ + "union", + "compose", + "disjoint_union", + "intersection", + "difference", + "symmetric_difference", + "full_join", +] def union(G, H, rename=(None, None), name=None): @@ -43,7 +50,7 @@ def union(G, H, rename=(None, None), name=None): disjoint_union """ if not G.is_multigraph() == H.is_multigraph(): - raise nx.NetworkXError('G and H must both be graphs or multigraphs.') + raise nx.NetworkXError("G and H must both be graphs or multigraphs.") # Union is the same type as G R = G.__class__() # add graph attributes, H attributes take precedent over G attributes @@ -61,13 +68,16 @@ def union(G, H, rename=(None, None), name=None): else: name = prefix + repr(x) return name + return nx.relabel_nodes(graph, label) + G = add_prefix(G, rename[0]) H = add_prefix(H, rename[1]) if set(G) & set(H): - raise nx.NetworkXError('The node sets of G and H are not disjoint.', - 'Use appropriate rename=(Gprefix,Hprefix)' - 'or use disjoint_union(G,H).') + raise nx.NetworkXError( + "The node sets of G and H are not disjoint.", + "Use appropriate rename=(Gprefix,Hprefix)" "or use disjoint_union(G,H).", + ) if G.is_multigraph(): G_edges = G.edges(keys=True, data=True) else: @@ -157,7 +167,7 @@ def intersection(G, H): R = nx.create_empty_copy(G) if not G.is_multigraph() == H.is_multigraph(): - raise nx.NetworkXError('G and H must both be graphs or multigraphs.') + raise nx.NetworkXError("G and H must both be graphs or multigraphs.") if set(G) != set(H): raise nx.NetworkXError("Node sets of graphs are not equal") @@ -208,7 +218,7 @@ def difference(G, H): """ # create new graph if not G.is_multigraph() == H.is_multigraph(): - raise nx.NetworkXError('G and H must both be graphs or multigraphs.') + raise nx.NetworkXError("G and H must both be graphs or multigraphs.") R = nx.create_empty_copy(G) if set(G) != set(H): @@ -245,7 +255,7 @@ def symmetric_difference(G, H): """ # create new graph if not G.is_multigraph() == H.is_multigraph(): - raise nx.NetworkXError('G and H must both be graphs or multigraphs.') + raise nx.NetworkXError("G and H must both be graphs or multigraphs.") R = nx.create_empty_copy(G) if set(G) != set(H): @@ -301,7 +311,7 @@ def compose(G, H): in two graphs) if you use MultiGraph without keeping track of edge keys. 
""" if not G.is_multigraph() == H.is_multigraph(): - raise nx.NetworkXError('G and H must both be graphs or multigraphs.') + raise nx.NetworkXError("G and H must both be graphs or multigraphs.") R = G.__class__() # add graph attributes, H attributes take precedent over G attributes @@ -377,7 +387,9 @@ def full_join(G, H, rename=(None, None)): else: name = prefix + repr(x) return name + return nx.relabel_nodes(graph, label) + G = add_prefix(G, rename[0]) H = add_prefix(H, rename[1]) diff --git a/networkx/algorithms/operators/product.py b/networkx/algorithms/operators/product.py index 9567ebba..2a7c5db3 100644 --- a/networkx/algorithms/operators/product.py +++ b/networkx/algorithms/operators/product.py @@ -6,9 +6,14 @@ from itertools import product import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['tensor_product', 'cartesian_product', - 'lexicographic_product', 'strong_product', 'power', - 'rooted_product'] +__all__ = [ + "tensor_product", + "cartesian_product", + "lexicographic_product", + "strong_product", + "power", + "rooted_product", +] def _dict_product(d1, d2): @@ -329,8 +334,8 @@ def strong_product(G, H): return GH -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def power(G, k): """Returns the specified power of a graph. @@ -394,22 +399,22 @@ def power(G, k): """ if k <= 0: - raise ValueError('k must be a positive integer') + raise ValueError("k must be a positive integer") H = nx.Graph() H.add_nodes_from(G) # update BFS code to ignore self loops. for n in G: - seen = {} # level (number of hops) when seen in BFS - level = 1 # the current level + seen = {} # level (number of hops) when seen in BFS + level = 1 # the current level nextlevel = G[n] while nextlevel: thislevel = nextlevel # advance to next level - nextlevel = {} # and start a new list (fringe) + nextlevel = {} # and start a new list (fringe) for v in thislevel: - if v == n: # avoid self loop + if v == n: # avoid self loop continue if v not in seen: - seen[v] = level # set the level of vertex v + seen[v] = level # set the level of vertex v nextlevel.update(G[v]) # add neighbors of v if k <= level: break @@ -418,7 +423,7 @@ def power(G, k): return H -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def rooted_product(G, H, root): """ Return the rooted product of graphs G and H rooted at root in H. @@ -445,7 +450,7 @@ def rooted_product(G, H, root): The nodes of G and H are not relabeled. 
""" if root not in H: - raise nx.NetworkXError('root must be a vertex in H') + raise nx.NetworkXError("root must be a vertex in H") R = nx.Graph() R.add_nodes_from(product(G, H)) diff --git a/networkx/algorithms/operators/tests/test_all.py b/networkx/algorithms/operators/tests/test_all.py index 3f38b933..8c96b081 100644 --- a/networkx/algorithms/operators/tests/test_all.py +++ b/networkx/algorithms/operators/tests/test_all.py @@ -8,26 +8,26 @@ def test_union_all_attributes(): g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) - g.graph['name'] = 'g' + g.graph["name"] = "g" h = g.copy() - h.graph['name'] = 'h' - h.graph['attr'] = 'attr' - h.nodes[0]['x'] = 7 + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 j = g.copy() - j.graph['name'] = 'j' - j.graph['attr'] = 'attr' - j.nodes[0]['x'] = 7 + j.graph["name"] = "j" + j.graph["attr"] = "attr" + j.nodes[0]["x"] = 7 - ghj = nx.union_all([g, h, j], rename=('g', 'h', 'j')) - assert set(ghj.nodes()) == {'h0', 'h1', 'g0', 'g1', 'j0', 'j1'} + ghj = nx.union_all([g, h, j], rename=("g", "h", "j")) + assert set(ghj.nodes()) == {"h0", "h1", "g0", "g1", "j0", "j1"} for n in ghj: graph, node = n assert ghj.nodes[n] == eval(graph).nodes[int(node)] - assert ghj.graph['attr'] == 'attr' - assert ghj.graph['name'] == 'j' # j graph attributes take precendent + assert ghj.graph["attr"] == "attr" + assert ghj.graph["name"] == "j" # j graph attributes take precendent def test_intersection_all(): @@ -53,12 +53,12 @@ def test_intersection_all_attributes(): g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) - g.graph['name'] = 'g' + g.graph["name"] = "g" h = g.copy() - h.graph['name'] = 'h' - h.graph['attr'] = 'attr' - h.nodes[0]['x'] = 7 + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 gh = nx.intersection_all([g, h]) assert set(gh.nodes()) == set(g.nodes()) @@ -89,40 +89,70 @@ def test_union_all_and_compose_all(): P3 = nx.path_graph(3) G1 = nx.DiGraph() - G1.add_edge('A', 'B') - G1.add_edge('A', 'C') - G1.add_edge('A', 'D') + G1.add_edge("A", "B") + G1.add_edge("A", "C") + G1.add_edge("A", "D") G2 = nx.DiGraph() - G2.add_edge('1', '2') - G2.add_edge('1', '3') - G2.add_edge('1', '4') + G2.add_edge("1", "2") + G2.add_edge("1", "3") + G2.add_edge("1", "4") G = nx.union_all([G1, G2]) H = nx.compose_all([G1, G2]) assert_edges_equal(G.edges(), H.edges()) - assert not G.has_edge('A', '1') + assert not G.has_edge("A", "1") pytest.raises(nx.NetworkXError, nx.union, K3, P3) - H1 = nx.union_all([H, G1], rename=('H', 'G1')) - assert (sorted(H1.nodes()) == - ['G1A', 'G1B', 'G1C', 'G1D', - 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) + H1 = nx.union_all([H, G1], rename=("H", "G1")) + assert sorted(H1.nodes()) == [ + "G1A", + "G1B", + "G1C", + "G1D", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] H2 = nx.union_all([H, G2], rename=("H", "")) - assert (sorted(H2.nodes()) == - ['1', '2', '3', '4', - 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) - - assert not H1.has_edge('NB', 'NA') + assert sorted(H2.nodes()) == [ + "1", + "2", + "3", + "4", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] + + assert not H1.has_edge("NB", "NA") G = nx.compose_all([G, G]) assert_edges_equal(G.edges(), H.edges()) - G2 = nx.union_all([G2, G2], rename=('', 'copy')) - assert (sorted(G2.nodes()) == - ['1', '2', '3', '4', 'copy1', 'copy2', 'copy3', 'copy4']) - - assert sorted(G2.neighbors('copy4')) == [] - assert sorted(G2.neighbors('copy1')) == ['copy2', 'copy3', 'copy4'] + G2 = 
nx.union_all([G2, G2], rename=("", "copy")) + assert sorted(G2.nodes()) == [ + "1", + "2", + "3", + "4", + "copy1", + "copy2", + "copy3", + "copy4", + ] + + assert sorted(G2.neighbors("copy4")) == [] + assert sorted(G2.neighbors("copy1")) == ["copy2", "copy3", "copy4"] assert len(G) == 8 assert nx.number_of_edges(G) == 6 @@ -134,15 +164,13 @@ def test_union_all_and_compose_all(): assert sorted(E.nodes()) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] G1 = nx.DiGraph() - G1.add_edge('A', 'B') + G1.add_edge("A", "B") G2 = nx.DiGraph() G2.add_edge(1, 2) G3 = nx.DiGraph() G3.add_edge(11, 22) G4 = nx.union_all([G1, G2, G3], rename=("G1", "G2", "G3")) - assert (sorted(G4.nodes()) == - ['G1A', 'G1B', 'G21', 'G22', - 'G311', 'G322']) + assert sorted(G4.nodes()) == ["G1A", "G1B", "G21", "G22", "G311", "G322"] def test_union_all_multigraph(): @@ -154,8 +182,7 @@ def test_union_all_multigraph(): H.add_edge(3, 4, key=1) GH = nx.union_all([G, H]) assert set(GH) == set(G) | set(H) - assert (set(GH.edges(keys=True)) == - set(G.edges(keys=True)) | set(H.edges(keys=True))) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) def test_input_output(): diff --git a/networkx/algorithms/operators/tests/test_binary.py b/networkx/algorithms/operators/tests/test_binary.py index 53c77e7b..bf885130 100644 --- a/networkx/algorithms/operators/tests/test_binary.py +++ b/networkx/algorithms/operators/tests/test_binary.py @@ -8,21 +8,21 @@ def test_union_attributes(): g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) - g.graph['name'] = 'g' + g.graph["name"] = "g" h = g.copy() - h.graph['name'] = 'h' - h.graph['attr'] = 'attr' - h.nodes[0]['x'] = 7 + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 - gh = nx.union(g, h, rename=('g', 'h')) - assert set(gh.nodes()) == {'h0', 'h1', 'g0', 'g1'} + gh = nx.union(g, h, rename=("g", "h")) + assert set(gh.nodes()) == {"h0", "h1", "g0", "g1"} for n in gh: graph, node = n assert gh.nodes[n] == eval(graph).nodes[int(node)] - assert gh.graph['attr'] == 'attr' - assert gh.graph['name'] == 'h' # h graph attributes take precendent + assert gh.graph["attr"] == "attr" + assert gh.graph["name"] == "h" # h graph attributes take precendent def test_intersection(): @@ -44,12 +44,12 @@ def test_intersection_attributes(): g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) - g.graph['name'] = 'g' + g.graph["name"] = "g" h = g.copy() - h.graph['name'] = 'h' - h.graph['attr'] = 'attr' - h.nodes[0]['x'] = 7 + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 gh = nx.intersection(g, h) assert set(gh.nodes()) == set(g.nodes()) @@ -120,12 +120,12 @@ def test_difference_attributes(): g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) - g.graph['name'] = 'g' + g.graph["name"] = "g" h = g.copy() - h.graph['name'] = 'h' - h.graph['attr'] = 'attr' - h.nodes[0]['x'] = 7 + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 gh = nx.difference(g, h) assert set(gh.nodes()) == set(g.nodes()) @@ -170,8 +170,11 @@ def test_symmetric_difference_multigraph(): assert set(gh.nodes()) == set(g.nodes()) assert set(gh.nodes()) == set(h.nodes()) assert sorted(gh.edges()) == 3 * [(0, 1)] - assert (sorted(sorted(e) for e in gh.edges(keys=True)) == - [[0, 1, 1], [0, 1, 2], [0, 1, 3]]) + assert sorted(sorted(e) for e in gh.edges(keys=True)) == [ + [0, 1, 1], + [0, 1, 2], + [0, 1, 3], + ] def test_union_and_compose(): @@ -179,40 +182,70 @@ def test_union_and_compose(): P3 = nx.path_graph(3) G1 = 
nx.DiGraph() - G1.add_edge('A', 'B') - G1.add_edge('A', 'C') - G1.add_edge('A', 'D') + G1.add_edge("A", "B") + G1.add_edge("A", "C") + G1.add_edge("A", "D") G2 = nx.DiGraph() - G2.add_edge('1', '2') - G2.add_edge('1', '3') - G2.add_edge('1', '4') + G2.add_edge("1", "2") + G2.add_edge("1", "3") + G2.add_edge("1", "4") G = nx.union(G1, G2) H = nx.compose(G1, G2) assert_edges_equal(G.edges(), H.edges()) - assert not G.has_edge('A', 1) + assert not G.has_edge("A", 1) pytest.raises(nx.NetworkXError, nx.union, K3, P3) - H1 = nx.union(H, G1, rename=('H', 'G1')) - assert (sorted(H1.nodes()) == - ['G1A', 'G1B', 'G1C', 'G1D', - 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) + H1 = nx.union(H, G1, rename=("H", "G1")) + assert sorted(H1.nodes()) == [ + "G1A", + "G1B", + "G1C", + "G1D", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] H2 = nx.union(H, G2, rename=("H", "")) - assert (sorted(H2.nodes()) == - ['1', '2', '3', '4', - 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) - - assert not H1.has_edge('NB', 'NA') + assert sorted(H2.nodes()) == [ + "1", + "2", + "3", + "4", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] + + assert not H1.has_edge("NB", "NA") G = nx.compose(G, G) assert_edges_equal(G.edges(), H.edges()) - G2 = nx.union(G2, G2, rename=('', 'copy')) - assert (sorted(G2.nodes()) == - ['1', '2', '3', '4', 'copy1', 'copy2', 'copy3', 'copy4']) - - assert sorted(G2.neighbors('copy4')) == [] - assert sorted(G2.neighbors('copy1')) == ['copy2', 'copy3', 'copy4'] + G2 = nx.union(G2, G2, rename=("", "copy")) + assert sorted(G2.nodes()) == [ + "1", + "2", + "3", + "4", + "copy1", + "copy2", + "copy3", + "copy4", + ] + + assert sorted(G2.neighbors("copy4")) == [] + assert sorted(G2.neighbors("copy1")) == ["copy2", "copy3", "copy4"] assert len(G) == 8 assert nx.number_of_edges(G) == 6 @@ -225,10 +258,10 @@ def test_union_and_compose(): G = nx.Graph() H = nx.Graph() - G.add_nodes_from([(1, {'a1': 1})]) - H.add_nodes_from([(1, {'b1': 1})]) + G.add_nodes_from([(1, {"a1": 1})]) + H.add_nodes_from([(1, {"b1": 1})]) R = nx.compose(G, H) - assert R.nodes == {1: {'a1': 1, 'b1': 1}} + assert R.nodes == {1: {"a1": 1, "b1": 1}} def test_union_multigraph(): @@ -240,8 +273,7 @@ def test_union_multigraph(): H.add_edge(3, 4, key=1) GH = nx.union(G, H) assert set(GH) == set(G) | set(H) - assert (set(GH.edges(keys=True)) == - set(G.edges(keys=True)) | set(H.edges(keys=True))) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) def test_disjoint_union_multigraph(): @@ -253,8 +285,7 @@ def test_disjoint_union_multigraph(): H.add_edge(2, 3, key=1) GH = nx.disjoint_union(G, H) assert set(GH) == set(G) | set(H) - assert (set(GH.edges(keys=True)) == - set(G.edges(keys=True)) | set(H.edges(keys=True))) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) def test_compose_multigraph(): @@ -266,13 +297,11 @@ def test_compose_multigraph(): H.add_edge(3, 4, key=1) GH = nx.compose(G, H) assert set(GH) == set(G) | set(H) - assert (set(GH.edges(keys=True)) == - set(G.edges(keys=True)) | set(H.edges(keys=True))) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) H.add_edge(1, 2, key=2) GH = nx.compose(G, H) assert set(GH) == set(G) | set(H) - assert (set(GH.edges(keys=True)) == - set(G.edges(keys=True)) | set(H.edges(keys=True))) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) def test_full_join_graph(): @@ -286,15 +315,13 @@ def 
test_full_join_graph(): U = nx.full_join(G, H) assert set(U) == set(G) | set(H) assert len(U) == len(G) + len(H) - assert (len(U.edges()) == - len(G.edges()) + len(H.edges()) + len(G) * len(H)) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) # Rename - U = nx.full_join(G, H, rename=('g', 'h')) - assert set(U) == {'g0', 'g1', 'g2', 'h3', 'h4'} + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} assert len(U) == len(G) + len(H) - assert (len(U.edges()) == - len(G.edges()) + len(H.edges()) + len(G) * len(H)) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) # Rename graphs with string-like nodes G = nx.Graph() @@ -303,11 +330,10 @@ def test_full_join_graph(): H = nx.Graph() H.add_edge("d", "e") - U = nx.full_join(G, H, rename=('g', 'h')) - assert set(U) == {'ga', 'gb', 'gc', 'hd', 'he'} + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"ga", "gb", "gc", "hd", "he"} assert len(U) == len(G) + len(H) - assert (len(U.edges()) == - len(G.edges()) + len(H.edges()) + len(G) * len(H)) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) # DiGraphs G = nx.DiGraph() @@ -319,15 +345,13 @@ def test_full_join_graph(): U = nx.full_join(G, H) assert set(U) == set(G) | set(H) assert len(U) == len(G) + len(H) - assert (len(U.edges()) == - len(G.edges()) + len(H.edges()) + len(G)*len(H) * 2) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2 # DiGraphs Rename - U = nx.full_join(G, H, rename=('g', 'h')) - assert set(U) == {'g0', 'g1', 'g2', 'h3', 'h4'} + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} assert len(U) == len(G) + len(H) - assert (len(U.edges()) == - len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2 def test_full_join_multigraph(): @@ -341,15 +365,13 @@ def test_full_join_multigraph(): U = nx.full_join(G, H) assert set(U) == set(G) | set(H) assert len(U) == len(G) + len(H) - assert (len(U.edges()) == - len(G.edges()) + len(H.edges()) + len(G) * len(H)) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) # MultiGraphs rename - U = nx.full_join(G, H, rename=('g', 'h')) - assert set(U) == {'g0', 'g1', 'g2', 'h3', 'h4'} + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} assert len(U) == len(G) + len(H) - assert (len(U.edges()) == - len(G.edges()) + len(H.edges()) + len(G) * len(H)) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) # MultiDiGraphs G = nx.MultiDiGraph() @@ -361,15 +383,13 @@ def test_full_join_multigraph(): U = nx.full_join(G, H) assert set(U) == set(G) | set(H) assert len(U) == len(G) + len(H) - assert (len(U.edges()) == - len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2 # MultiDiGraphs rename - U = nx.full_join(G, H, rename=('g', 'h')) - assert set(U) == {'g0', 'g1', 'g2', 'h3', 'h4'} + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} assert len(U) == len(G) + len(H) - assert (len(U.edges()) == - len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2 def test_mixed_type_union(): diff --git a/networkx/algorithms/operators/tests/test_product.py b/networkx/algorithms/operators/tests/test_product.py 
index bdb4c6cc..2737233b 100644 --- a/networkx/algorithms/operators/tests/test_product.py +++ b/networkx/algorithms/operators/tests/test_product.py @@ -85,8 +85,8 @@ def test_tensor_product_classic_result(): def test_tensor_product_random(): - G = nx.erdos_renyi_graph(10, 2 / 10.) - H = nx.erdos_renyi_graph(10, 2 / 10.) + G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) GH = nx.tensor_product(G, H) for (u_G, u_H) in GH.nodes(): @@ -106,12 +106,19 @@ def test_cartesian_product_multigraph(): H.add_edge(3, 4, key=1) GH = nx.cartesian_product(G, H) assert set(GH) == {(1, 3), (2, 3), (2, 4), (1, 4)} - assert ({(frozenset([u, v]), k) for u, v, k in GH.edges(keys=True)} == - {(frozenset([u, v]), k) for u, v, k in - [((1, 3), (2, 3), 0), ((1, 3), (2, 3), 1), - ((1, 3), (1, 4), 0), ((1, 3), (1, 4), 1), - ((2, 3), (2, 4), 0), ((2, 3), (2, 4), 1), - ((2, 4), (1, 4), 0), ((2, 4), (1, 4), 1)]}) + assert {(frozenset([u, v]), k) for u, v, k in GH.edges(keys=True)} == { + (frozenset([u, v]), k) + for u, v, k in [ + ((1, 3), (2, 3), 0), + ((1, 3), (2, 3), 1), + ((1, 3), (1, 4), 0), + ((1, 3), (1, 4), 1), + ((2, 3), (2, 4), 0), + ((2, 3), (2, 4), 1), + ((2, 4), (1, 4), 0), + ((2, 4), (1, 4), 1), + ] + } def test_cartesian_product_raises(): @@ -159,14 +166,14 @@ def test_cartesian_product_size(): K3 = nx.complete_graph(3) G = nx.cartesian_product(P5, K3) assert nx.number_of_nodes(G) == 5 * 3 - assert (nx.number_of_edges(G) == - nx.number_of_edges(P5) * nx.number_of_nodes(K3) + - nx.number_of_edges(K3) * nx.number_of_nodes(P5)) + assert nx.number_of_edges(G) == nx.number_of_edges(P5) * nx.number_of_nodes( + K3 + ) + nx.number_of_edges(K3) * nx.number_of_nodes(P5) G = nx.cartesian_product(K3, K5) assert nx.number_of_nodes(G) == 3 * 5 - assert (nx.number_of_edges(G) == - nx.number_of_edges(K5) * nx.number_of_nodes(K3) + - nx.number_of_edges(K3) * nx.number_of_nodes(K5)) + assert nx.number_of_edges(G) == nx.number_of_edges(K5) * nx.number_of_nodes( + K3 + ) + nx.number_of_edges(K3) * nx.number_of_nodes(K5) def test_cartesian_product_classic(): @@ -184,14 +191,15 @@ def test_cartesian_product_classic(): def test_cartesian_product_random(): - G = nx.erdos_renyi_graph(10, 2 / 10.) - H = nx.erdos_renyi_graph(10, 2 / 10.) + G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) GH = nx.cartesian_product(G, H) for (u_G, u_H) in GH.nodes(): for (v_G, v_H) in GH.nodes(): - if (u_G == v_G and H.has_edge(u_H, v_H)) or \ - (u_H == v_H and G.has_edge(u_G, v_G)): + if (u_G == v_G and H.has_edge(u_H, v_H)) or ( + u_H == v_H and G.has_edge(u_G, v_G) + ): assert GH.has_edge((u_G, u_H), (v_G, v_H)) else: assert not GH.has_edge((u_G, u_H), (v_G, v_H)) @@ -261,8 +269,8 @@ def test_lexicographic_product_combinations(): def test_lexicographic_product_random(): - G = nx.erdos_renyi_graph(10, 2 / 10.) - H = nx.erdos_renyi_graph(10, 2 / 10.) + G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) GH = nx.lexicographic_product(G, H) for (u_G, u_H) in GH.nodes(): @@ -337,15 +345,17 @@ def test_strong_product_combinations(): def test_strong_product_random(): - G = nx.erdos_renyi_graph(10, 2 / 10.) - H = nx.erdos_renyi_graph(10, 2 / 10.) 
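The product tests in this file change only quoting and wrapping. For orientation, the two most-used products on the smallest nontrivial inputs (invented for illustration):

import networkx as nx

G, H = nx.path_graph(2), nx.path_graph(2)
C = nx.cartesian_product(G, H)             # the 4-cycle on coordinate pairs
sorted(C.nodes())                          # [(0, 0), (0, 1), (1, 0), (1, 1)]
C.number_of_edges()                        # 4: exactly one coordinate moves per edge
nx.tensor_product(G, H).number_of_edges()  # 2: both coordinates move per edge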
+ G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) GH = nx.strong_product(G, H) for (u_G, u_H) in GH.nodes(): for (v_G, v_H) in GH.nodes(): - if (u_G == v_G and H.has_edge(u_H, v_H)) or \ - (u_H == v_H and G.has_edge(u_G, v_G)) or \ - (G.has_edge(u_G, v_G) and H.has_edge(u_H, v_H)): + if ( + (u_G == v_G and H.has_edge(u_H, v_H)) + or (u_H == v_H and G.has_edge(u_G, v_G)) + or (G.has_edge(u_G, v_G) and H.has_edge(u_H, v_H)) + ): assert GH.has_edge((u_G, u_H), (v_G, v_H)) else: assert not GH.has_edge((u_G, u_H), (v_G, v_H)) @@ -365,12 +375,36 @@ def test_graph_power(): G.add_edge(9, 2) H = nx.power(G, 2) - assert_edges_equal(list(H.edges()), - [(0, 1), (0, 2), (0, 5), (0, 6), (0, 7), (1, 9), - (1, 2), (1, 3), (1, 6), (2, 3), (2, 4), (2, 8), - (2, 9), (3, 4), (3, 5), (3, 9), (4, 5), (4, 6), - (5, 6), (5, 7), (6, 7), (6, 8), (7, 8), (7, 9), - (8, 9)]) + assert_edges_equal( + list(H.edges()), + [ + (0, 1), + (0, 2), + (0, 5), + (0, 6), + (0, 7), + (1, 9), + (1, 2), + (1, 3), + (1, 6), + (2, 3), + (2, 4), + (2, 8), + (2, 9), + (3, 4), + (3, 5), + (3, 9), + (4, 5), + (4, 6), + (5, 6), + (5, 7), + (6, 7), + (6, 8), + (7, 8), + (7, 9), + (8, 9), + ], + ) def test_graph_power_negative(): @@ -386,7 +420,7 @@ def test_rooted_product_raises(): def test_rooted_product(): G = nx.cycle_graph(5) H = nx.Graph() - H.add_edges_from([('a', 'b'), ('b', 'c'), ('b', 'd')]) - R = nx.rooted_product(G, H, 'a') + H.add_edges_from([("a", "b"), ("b", "c"), ("b", "d")]) + R = nx.rooted_product(G, H, "a") assert len(R) == len(G) * len(H) assert R.size() == G.size() + len(G) * H.size() diff --git a/networkx/algorithms/operators/tests/test_unary.py b/networkx/algorithms/operators/tests/test_unary.py index 75c74f71..a04a349b 100644 --- a/networkx/algorithms/operators/tests/test_unary.py +++ b/networkx/algorithms/operators/tests/test_unary.py @@ -31,14 +31,21 @@ def test_complement(): def test_complement_2(): G1 = nx.DiGraph() - G1.add_edge('A', 'B') - G1.add_edge('A', 'C') - G1.add_edge('A', 'D') + G1.add_edge("A", "B") + G1.add_edge("A", "C") + G1.add_edge("A", "D") G1C = nx.complement(G1) - assert (sorted(G1C.edges()) == - [('B', 'A'), ('B', 'C'), - ('B', 'D'), ('C', 'A'), ('C', 'B'), - ('C', 'D'), ('D', 'A'), ('D', 'B'), ('D', 'C')]) + assert sorted(G1C.edges()) == [ + ("B", "A"), + ("B", "C"), + ("B", "D"), + ("C", "A"), + ("C", "B"), + ("C", "D"), + ("D", "A"), + ("D", "B"), + ("D", "C"), + ] def test_reverse1(): diff --git a/networkx/algorithms/operators/unary.py b/networkx/algorithms/operators/unary.py index 4465bc2c..a24bbc60 100644 --- a/networkx/algorithms/operators/unary.py +++ b/networkx/algorithms/operators/unary.py @@ -1,7 +1,7 @@ """Unary operations on graphs""" import networkx as nx -__all__ = ['complement', 'reverse'] +__all__ = ["complement", "reverse"] def complement(G): @@ -25,10 +25,9 @@ def complement(G): """ R = G.__class__() R.add_nodes_from(G) - R.add_edges_from(((n, n2) - for n, nbrs in G.adjacency() - for n2 in G if n2 not in nbrs - if n != n2)) + R.add_edges_from( + ((n, n2) for n, nbrs in G.adjacency() for n2 in G if n2 not in nbrs if n != n2) + ) return R diff --git a/networkx/algorithms/planar_drawing.py b/networkx/algorithms/planar_drawing.py index bd3b8221..a129e107 100644 --- a/networkx/algorithms/planar_drawing.py +++ b/networkx/algorithms/planar_drawing.py @@ -92,9 +92,8 @@ def combinatorial_embedding_to_pos(embedding, fully_triangulate=False): delta_x_wp_wq = sum(delta_x[x] for x in contour_neighbors[1:]) # Adjust offsets - delta_x[vk] = (-y_coordinate[wp] 
+ delta_x_wp_wq + y_coordinate[wq])//2 - y_coordinate[vk] = (y_coordinate[wp] + delta_x_wp_wq + - y_coordinate[wq]) // 2 + delta_x[vk] = (-y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2 + y_coordinate[vk] = (y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2 delta_x[wq] = delta_x_wp_wq - delta_x[vk] if adds_mult_tri: delta_x[wp1] -= delta_x[vk] @@ -116,11 +115,13 @@ def combinatorial_embedding_to_pos(embedding, fully_triangulate=False): parent_node = remaining_nodes.pop() # Calculate position for left child - set_position(parent_node, left_t_child, - remaining_nodes, delta_x, y_coordinate, pos) + set_position( + parent_node, left_t_child, remaining_nodes, delta_x, y_coordinate, pos + ) # Calculate position for right child - set_position(parent_node, right_t_child, - remaining_nodes, delta_x, y_coordinate, pos) + set_position( + parent_node, right_t_child, remaining_nodes, delta_x, y_coordinate, pos + ) return pos @@ -200,7 +201,7 @@ def get_canonical_ordering(embedding, outer_face): # Initialize outer_face_cw_nbr (do not include v2 -> v1) outer_face_cw_nbr = {} prev_nbr = v1 - for idx in range(len(outer_face)-1, 0, -1): + for idx in range(len(outer_face) - 1, 0, -1): outer_face_cw_nbr[prev_nbr] = outer_face[idx] prev_nbr = outer_face[idx] @@ -212,8 +213,7 @@ def get_canonical_ordering(embedding, outer_face): return outer_face_ccw_nbr[x] == y or outer_face_cw_nbr[x] == y def is_on_outer_face(x): - return x not in marked_nodes and (x in outer_face_ccw_nbr.keys() or - x == v1) + return x not in marked_nodes and (x in outer_face_ccw_nbr.keys() or x == v1) # Initialize number of chords for v in outer_face: @@ -229,7 +229,7 @@ def get_canonical_ordering(embedding, outer_face): ready_to_pick.discard(v1) ready_to_pick.discard(v2) - for k in range(len(embedding.nodes())-1, 1, -1): + for k in range(len(embedding.nodes()) - 1, 1, -1): # 1. Pick v from ready_to_pick v = ready_to_pick.pop() marked_nodes.add(v) @@ -266,7 +266,7 @@ def get_canonical_ordering(embedding, outer_face): nbr = wp while nbr != wq: # Get next next neighbor (clockwise on the outer face) - next_nbr = embedding[v][nbr]['ccw'] + next_nbr = embedding[v][nbr]["ccw"] wp_wq.append(next_nbr) # Update outer face outer_face_cw_nbr[nbr] = next_nbr @@ -365,13 +365,12 @@ def triangulate_embedding(embedding, fully_triangulate=True): embedding = nx.PlanarEmbedding(embedding) # Get a list with a node for each connected component - component_nodes = [next(iter(x)) for x in - nx.connected_components(embedding)] + component_nodes = [next(iter(x)) for x in nx.connected_components(embedding)] # 1. Make graph a single component (add edge between components) - for i in range(len(component_nodes)-1): + for i in range(len(component_nodes) - 1): v1 = component_nodes[i] - v2 = component_nodes[i+1] + v2 = component_nodes[i + 1] embedding.connect_components(v1, v2) # 2. 
Calculate faces, ensure 2-connectedness and determine outer face @@ -397,7 +396,7 @@ def triangulate_embedding(embedding, fully_triangulate=True): if fully_triangulate: v1 = outer_face[0] v2 = outer_face[1] - v3 = embedding[v2][v1]['ccw'] + v3 = embedding[v2][v1]["ccw"] outer_face = [v1, v2, v3] return embedding, outer_face diff --git a/networkx/algorithms/planarity.py b/networkx/algorithms/planarity.py index d81d13d1..ed87b55f 100644 --- a/networkx/algorithms/planarity.py +++ b/networkx/algorithms/planarity.py @@ -160,8 +160,10 @@ class Interval: def conflicting(self, b, planarity_state): """Returns True if interval I conflicts with edge b""" - return (not self.empty() and - planarity_state.lowpt[self.high] > planarity_state.lowpt[b]) + return ( + not self.empty() + and planarity_state.lowpt[self.high] > planarity_state.lowpt[b] + ) class ConflictPair: @@ -187,8 +189,9 @@ class ConflictPair: return planarity_state.lowpt[self.right.low] if self.right.empty(): return planarity_state.lowpt[self.left.low] - return min(planarity_state.lowpt[self.left.low], - planarity_state.lowpt[self.right.low]) + return min( + planarity_state.lowpt[self.left.low], planarity_state.lowpt[self.right.low] + ) def top_of_stack(l): @@ -200,10 +203,26 @@ def top_of_stack(l): class LRPlanarity: """A class to maintain the state during planarity check.""" + __slots__ = [ - 'G', 'roots', 'height', 'lowpt', 'lowpt2', 'nesting_depth', - 'parent_edge', 'DG', 'adjs', 'ordered_adjs', 'ref', 'side', 'S', - 'stack_bottom', 'lowpt_edge', 'left_ref', 'right_ref', 'embedding' + "G", + "roots", + "height", + "lowpt", + "lowpt2", + "nesting_depth", + "parent_edge", + "DG", + "adjs", + "ordered_adjs", + "ref", + "side", + "S", + "stack_bottom", + "lowpt_edge", + "left_ref", + "right_ref", + "embedding", ] def __init__(self, G): @@ -278,7 +297,8 @@ class LRPlanarity: for v in self.DG: # sort the adjacency lists by nesting depth # note: this sorting leads to non linear time self.ordered_adjs[v] = sorted( - self.DG[v], key=lambda x: self.nesting_depth[(v, x)]) + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) for v in self.roots: if not self.dfs_testing(v): return None @@ -297,7 +317,8 @@ class LRPlanarity: for v in self.DG: # sort the adjacency lists again self.ordered_adjs[v] = sorted( - self.DG[v], key=lambda x: self.nesting_depth[(v, x)]) + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) # initialize the embedding previous_node = None for w in self.ordered_adjs[v]: @@ -343,20 +364,21 @@ class LRPlanarity: for v in self.DG: # sort the adjacency lists by nesting depth # note: this sorting leads to non linear time self.ordered_adjs[v] = sorted( - self.DG[v], key=lambda x: self.nesting_depth[(v, x)]) + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) for v in self.roots: if not self.dfs_testing_recursive(v): return None for e in self.DG.edges: - self.nesting_depth[e] = (self.sign_recursive(e) * - self.nesting_depth[e]) + self.nesting_depth[e] = self.sign_recursive(e) * self.nesting_depth[e] self.embedding.add_nodes_from(self.DG.nodes) for v in self.DG: # sort the adjacency lists again self.ordered_adjs[v] = sorted( - self.DG[v], key=lambda x: self.nesting_depth[(v, x)]) + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) # initialize the embedding previous_node = None for w in self.ordered_adjs[v]: @@ -383,7 +405,7 @@ class LRPlanarity: v = dfs_stack.pop() e = self.parent_edge[v] - for w in self.adjs[v][ind[v]:]: + for w in self.adjs[v][ind[v] :]: vw = (v, w) if not skip_init[vw]: @@ -471,7 +493,7 @@ 
class LRPlanarity: # to indicate whether to skip the final block after the for loop skip_final = False - for w in self.ordered_adjs[v][ind[v]:]: + for w in self.ordered_adjs[v][ind[v] :]: ei = (v, w) if not skip_init[ei]: @@ -553,8 +575,9 @@ class LRPlanarity: if top_of_stack(self.S) == self.stack_bottom[ei]: break # merge conflicting return edges of e_1,...,e_i-1 into P.L - while (top_of_stack(self.S).left.conflicting(ei, self) or - top_of_stack(self.S).right.conflicting(ei, self)): + while top_of_stack(self.S).left.conflicting(ei, self) or top_of_stack( + self.S + ).right.conflicting(ei, self): Q = self.S.pop() if Q.right.conflicting(ei, self): Q.swap() @@ -609,8 +632,7 @@ class LRPlanarity: hl = top_of_stack(self.S).left.high hr = top_of_stack(self.S).right.high - if hl is not None and ( - hr is None or self.lowpt[hl] > self.lowpt[hr]): + if hl is not None and (hr is None or self.lowpt[hl] > self.lowpt[hr]): self.ref[e] = hl else: self.ref[e] = hr @@ -625,7 +647,7 @@ class LRPlanarity: while dfs_stack: v = dfs_stack.pop() - for w in self.ordered_adjs[v][ind[v]:]: + for w in self.ordered_adjs[v][ind[v] :]: ind[v] += 1 ei = (v, w) @@ -639,11 +661,9 @@ class LRPlanarity: break # handle next node in dfs_stack (i.e. w) else: # back edge if self.side[ei] == 1: - self.embedding.add_half_edge_cw(w, v, - self.right_ref[w]) + self.embedding.add_half_edge_cw(w, v, self.right_ref[w]) else: - self.embedding.add_half_edge_ccw(w, v, - self.left_ref[w]) + self.embedding.add_half_edge_ccw(w, v, self.left_ref[w]) self.left_ref[w] = v def dfs_embedding_recursive(self, v): @@ -828,12 +848,12 @@ class PlanarEmbedding(nx.DiGraph): if len(self[v]) == 0: # v has no neighbors return - start_node = self.nodes[v]['first_nbr'] + start_node = self.nodes[v]["first_nbr"] yield start_node - current_node = self[v][start_node]['cw'] + current_node = self[v][start_node]["cw"] while start_node != current_node: yield current_node - current_node = self[v][current_node]['cw'] + current_node = self[v][current_node]["cw"] def check_structure(self): """Runs without exceptions if this object is valid. @@ -924,17 +944,16 @@ class PlanarEmbedding(nx.DiGraph): if reference_neighbor is None: # The start node has no neighbors self.add_edge(start_node, end_node) # Add edge to graph - self[start_node][end_node]['cw'] = end_node - self[start_node][end_node]['ccw'] = end_node - self.nodes[start_node]['first_nbr'] = end_node + self[start_node][end_node]["cw"] = end_node + self[start_node][end_node]["ccw"] = end_node + self.nodes[start_node]["first_nbr"] = end_node else: - ccw_reference = self[start_node][reference_neighbor]['ccw'] + ccw_reference = self[start_node][reference_neighbor]["ccw"] self.add_half_edge_cw(start_node, end_node, ccw_reference) - if reference_neighbor == self.nodes[start_node].get('first_nbr', - None): + if reference_neighbor == self.nodes[start_node].get("first_nbr", None): # Update first neighbor - self.nodes[start_node]['first_nbr'] = end_node + self.nodes[start_node]["first_nbr"] = end_node def add_half_edge_cw(self, start_node, end_node, reference_neighbor): """Adds a half-edge from start_node to end_node. 
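These PlanarEmbedding hunks re-quote the half-edge keys ("cw", "ccw", "first_nbr") without touching the LR-planarity logic. The intended round trip, sketched on K4 (an input invented for illustration; check_planarity is defined elsewhere in this module):

import networkx as nx

G = nx.complete_graph(4)             # planar; K5 would yield (False, None)
is_planar, embedding = nx.check_planarity(G)
is_planar                            # True, with a PlanarEmbedding certificate
pos = nx.combinatorial_embedding_to_pos(embedding)
len(pos)                             # 4: one integer (x, y) grid point per node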
@@ -966,22 +985,23 @@ class PlanarEmbedding(nx.DiGraph): if reference_neighbor is None: # The start node has no neighbors - self[start_node][end_node]['cw'] = end_node - self[start_node][end_node]['ccw'] = end_node - self.nodes[start_node]['first_nbr'] = end_node + self[start_node][end_node]["cw"] = end_node + self[start_node][end_node]["ccw"] = end_node + self.nodes[start_node]["first_nbr"] = end_node return if reference_neighbor not in self[start_node]: raise nx.NetworkXException( - "Cannot add edge. Reference neighbor does not exist") + "Cannot add edge. Reference neighbor does not exist" + ) # Get half-edge at the other side - cw_reference = self[start_node][reference_neighbor]['cw'] + cw_reference = self[start_node][reference_neighbor]["cw"] # Alter half-edge data structures - self[start_node][reference_neighbor]['cw'] = end_node - self[start_node][end_node]['cw'] = cw_reference - self[start_node][cw_reference]['ccw'] = end_node - self[start_node][end_node]['ccw'] = reference_neighbor + self[start_node][reference_neighbor]["cw"] = end_node + self[start_node][end_node]["cw"] = cw_reference + self[start_node][cw_reference]["ccw"] = end_node + self[start_node][end_node]["ccw"] = reference_neighbor def connect_components(self, v, w): """Adds half-edges for (v, w) and (w, v) at some position. @@ -1021,8 +1041,8 @@ class PlanarEmbedding(nx.DiGraph): add_half_edge_cw connect_components """ - if start_node in self and 'first_nbr' in self.nodes[start_node]: - reference = self.nodes[start_node]['first_nbr'] + if start_node in self and "first_nbr" in self.nodes[start_node]: + reference = self.nodes[start_node]["first_nbr"] else: reference = None self.add_half_edge_ccw(start_node, end_node, reference) @@ -1039,7 +1059,7 @@ class PlanarEmbedding(nx.DiGraph): ------- half-edge : tuple """ - new_node = self[w][v]['ccw'] + new_node = self[w][v]["ccw"] return w, new_node def traverse_face(self, v, w, mark_half_edges=None): @@ -1074,14 +1094,13 @@ class PlanarEmbedding(nx.DiGraph): prev_node = v cur_node = w # Last half-edge is (incoming_node, v) - incoming_node = self[v][w]['cw'] + incoming_node = self[v][w]["cw"] while cur_node != v or prev_node != incoming_node: face_nodes.append(cur_node) prev_node, cur_node = self.next_face_half_edge(prev_node, cur_node) if (prev_node, cur_node) in mark_half_edges: - raise nx.NetworkXException( - "Bad planar embedding. Impossible face.") + raise nx.NetworkXException("Bad planar embedding. Impossible face.") mark_half_edges.add((prev_node, cur_node)) return face_nodes diff --git a/networkx/algorithms/reciprocity.py b/networkx/algorithms/reciprocity.py index c34a1131..89df1d37 100644 --- a/networkx/algorithms/reciprocity.py +++ b/networkx/algorithms/reciprocity.py @@ -2,10 +2,10 @@ from networkx import NetworkXError from ..utils import not_implemented_for -__all__ = ['reciprocity', 'overall_reciprocity'] +__all__ = ["reciprocity", "overall_reciprocity"] -@not_implemented_for('undirected', 'multigraph') +@not_implemented_for("undirected", "multigraph") def reciprocity(G, nodes=None): r"""Compute the reciprocity in a directed graph. 
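reciprocity(G, nodes) and overall_reciprocity(G) report the fraction of edges that are reciprocated. A minimal usage sketch, assuming an illustrative three-edge digraph (the values follow from the _reciprocity_iter formula in the hunk below):

import networkx as nx

# One mutual pair (1 <-> 2) plus a single one-way edge (2 -> 3).
G = nx.DiGraph([(1, 2), (2, 1), (2, 3)])

print(nx.overall_reciprocity(G))  # 2 of 3 edges reciprocated -> 0.666...
print(nx.reciprocity(G, 2))       # per-node variant for node 2 -> 0.666...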
@@ -45,7 +45,7 @@ def reciprocity(G, nodes=None): if nodes in G: reciprocity = next(_reciprocity_iter(G, nodes))[1] if reciprocity is None: - raise NetworkXError('Not defined for isolated nodes.') + raise NetworkXError("Not defined for isolated nodes.") else: return reciprocity @@ -73,7 +73,7 @@ def _reciprocity_iter(G, nodes): yield (node, reciprocity) -@not_implemented_for('undirected', 'multigraph') +@not_implemented_for("undirected", "multigraph") def overall_reciprocity(G): """Compute the reciprocity for the whole graph. diff --git a/networkx/algorithms/regular.py b/networkx/algorithms/regular.py index b5c78225..5b302e2a 100644 --- a/networkx/algorithms/regular.py +++ b/networkx/algorithms/regular.py @@ -2,7 +2,7 @@ import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['is_regular', 'is_k_regular', 'k_factor'] +__all__ = ["is_regular", "is_k_regular", "k_factor"] def is_regular(G): @@ -34,7 +34,7 @@ def is_regular(G): return in_regular and out_regular -@not_implemented_for('directed') +@not_implemented_for("directed") def is_k_regular(G, k): """Determines whether the graph ``G`` is a k-regular graph. @@ -53,9 +53,9 @@ def is_k_regular(G, k): return all(d == k for n, d in G.degree) -@not_implemented_for('directed') -@not_implemented_for('multigraph') -def k_factor(G, k, matching_weight='weight'): +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def k_factor(G, k, matching_weight="weight"): """Compute a k-factor of G A k-factor of a graph is a spanning k-regular subgraph. @@ -96,15 +96,15 @@ def k_factor(G, k, matching_weight='weight'): self.degree = degree self.outer_vertices = [(node, x) for x in range(degree)] - self.core_vertices = [(node, x + degree) - for x in range(degree - k)] + self.core_vertices = [(node, x + degree) for x in range(degree - k)] def replace_node(self): adj_view = self.g[self.original] neighbors = list(adj_view.keys()) edge_attrs = list(adj_view.values()) - for (outer, neighbor, edge_attrs) in \ - zip(self.outer_vertices, neighbors, edge_attrs): + for (outer, neighbor, edge_attrs) in zip( + self.outer_vertices, neighbors, edge_attrs + ): self.g.add_edge(outer, neighbor, **edge_attrs) for core in self.core_vertices: for outer in self.outer_vertices: @@ -135,11 +135,9 @@ def k_factor(G, k, matching_weight='weight'): def replace_node(self): adj_view = self.g[self.original] - for (outer, inner, (neighbor, edge_attrs)) in \ - zip( - self.outer_vertices, - self.inner_vertices, - list(adj_view.items())): + for (outer, inner, (neighbor, edge_attrs)) in zip( + self.outer_vertices, self.inner_vertices, list(adj_view.items()) + ): self.g.add_edge(outer, inner) self.g.add_edge(outer, neighbor, **edge_attrs) for core in self.core_vertices: @@ -161,14 +159,13 @@ def k_factor(G, k, matching_weight='weight'): # Step 1 if any(d < k for _, d in G.degree): - raise nx.NetworkXUnfeasible( - "Graph contains a vertex with degree less than k") + raise nx.NetworkXUnfeasible("Graph contains a vertex with degree less than k") g = G.copy() # Step 2 gadgets = [] for node, degree in list(g.degree): - if k < degree / 2.: + if k < degree / 2.0: gadget = SmallKGadget(k, degree, node, g) else: gadget = LargeKGadget(k, degree, node, g) @@ -176,13 +173,13 @@ def k_factor(G, k, matching_weight='weight'): gadgets.append(gadget) # Step 3 - matching = max_weight_matching( - g, maxcardinality=True, weight=matching_weight) + matching = max_weight_matching(g, maxcardinality=True, weight=matching_weight) # Step 4 if not is_perfect_matching(g, matching): 
raise nx.NetworkXUnfeasible( - "Cannot find k-factor because no perfect matching exists") + "Cannot find k-factor because no perfect matching exists" + ) for edge in g.edges(): if edge not in matching and (edge[1], edge[0]) not in matching: diff --git a/networkx/algorithms/richclub.py b/networkx/algorithms/richclub.py index a951125b..b9e48cbc 100644 --- a/networkx/algorithms/richclub.py +++ b/networkx/algorithms/richclub.py @@ -4,11 +4,11 @@ import networkx as nx from itertools import accumulate from networkx.utils import not_implemented_for -__all__ = ['rich_club_coefficient'] +__all__ = ["rich_club_coefficient"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def rich_club_coefficient(G, normalized=True, Q=100, seed=None): r"""Returns the rich-club coefficient of the graph `G`. @@ -69,8 +69,9 @@ def rich_club_coefficient(G, normalized=True, Q=100, seed=None): sequences", 2006. https://arxiv.org/abs/cond-mat/0312028 """ if nx.number_of_selfloops(G) > 0: - raise Exception('rich_club_coefficient is not implemented for ' - 'graphs with self loops.') + raise Exception( + "rich_club_coefficient is not implemented for " "graphs with self loops." + ) rc = _compute_rc(G) if normalized: # make R a copy of G, randomize with Q*|E| double edge swaps @@ -103,8 +104,7 @@ def _compute_rc(G): # The list is sorted in reverse order so that we can pop from the # right side of the list later, instead of popping from the left # side of the list, which would have a linear time cost. - edge_degrees = sorted((sorted(map(G.degree, e)) for e in G.edges()), - reverse=True) + edge_degrees = sorted((sorted(map(G.degree, e)) for e in G.edges()), reverse=True) ek = G.number_of_edges() k1, k2 = edge_degrees.pop() rc = {} diff --git a/networkx/algorithms/shortest_paths/astar.py b/networkx/algorithms/shortest_paths/astar.py index 13ad24c0..b345e5b6 100644 --- a/networkx/algorithms/shortest_paths/astar.py +++ b/networkx/algorithms/shortest_paths/astar.py @@ -6,10 +6,10 @@ from itertools import count import networkx as nx from networkx.algorithms.shortest_paths.weighted import _weight_function -__all__ = ['astar_path', 'astar_path_length'] +__all__ = ["astar_path", "astar_path_length"] -def astar_path(G, source, target, heuristic=None, weight='weight'): +def astar_path(G, source, target, heuristic=None, weight="weight"): """Returns a list of nodes in a shortest path between source and target using the A* ("A-star") algorithm. @@ -138,7 +138,7 @@ def astar_path(G, source, target, heuristic=None, weight='weight'): raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}") -def astar_path_length(G, source, target, heuristic=None, weight='weight'): +def astar_path_length(G, source, target, heuristic=None, weight="weight"): """Returns the length of the shortest path between source and target using the A* ("A-star") algorithm. 
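astar_path and astar_path_length, reformatted above, accept an optional heuristic(u, v) callable that estimates the remaining distance from u to the target. A minimal sketch, assuming an illustrative unit-weight 4x4 grid and a Manhattan heuristic (both chosen for the example, not taken from this commit):

import networkx as nx

G = nx.grid_2d_graph(4, 4)  # nodes are (row, col) tuples; absent weights count as 1

def manhattan(u, v):
    # Admissible here: never overestimates the true remaining distance on a unit-weight grid.
    return abs(u[0] - v[0]) + abs(u[1] - v[1])

print(nx.astar_path(G, (0, 0), (3, 3), heuristic=manhattan))
print(nx.astar_path_length(G, (0, 0), (3, 3), heuristic=manhattan))  # 6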
diff --git a/networkx/algorithms/shortest_paths/dense.py b/networkx/algorithms/shortest_paths/dense.py index ceda4fbe..79ccba4b 100644 --- a/networkx/algorithms/shortest_paths/dense.py +++ b/networkx/algorithms/shortest_paths/dense.py @@ -2,13 +2,15 @@ """ import networkx as nx -__all__ = ['floyd_warshall', - 'floyd_warshall_predecessor_and_distance', - 'reconstruct_path', - 'floyd_warshall_numpy'] +__all__ = [ + "floyd_warshall", + "floyd_warshall_predecessor_and_distance", + "reconstruct_path", + "floyd_warshall_numpy", +] -def floyd_warshall_numpy(G, nodelist=None, weight='weight'): +def floyd_warshall_numpy(G, nodelist=None, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. Parameters @@ -39,13 +41,13 @@ def floyd_warshall_numpy(G, nodelist=None, weight='weight'): try: import numpy as np except ImportError as e: - raise ImportError( - "to_numpy_matrix() requires numpy: http://numpy.org/ ") from e + raise ImportError("to_numpy_matrix() requires numpy: http://numpy.org/ ") from e # To handle cases when an edge has weight=0, we must make sure that # nonedges are not given the value 0 as well. - A = nx.to_numpy_matrix(G, nodelist=nodelist, multigraph_weight=min, - weight=weight, nonedge=np.inf) + A = nx.to_numpy_matrix( + G, nodelist=nodelist, multigraph_weight=min, weight=weight, nonedge=np.inf + ) n, m = A.shape A[np.identity(n) == 1] = 0 # diagonal elements should be zero for i in range(n): @@ -53,7 +55,7 @@ def floyd_warshall_numpy(G, nodelist=None, weight='weight'): return A -def floyd_warshall_predecessor_and_distance(G, weight='weight'): +def floyd_warshall_predecessor_and_distance(G, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. Parameters @@ -94,10 +96,11 @@ def floyd_warshall_predecessor_and_distance(G, weight='weight'): all_pairs_shortest_path_length """ from collections import defaultdict + # dictionary-of-dictionaries representation for dist and pred # use some defaultdict magick here # for dist the default is the floating point inf value - dist = defaultdict(lambda: defaultdict(lambda: float('inf'))) + dist = defaultdict(lambda: defaultdict(lambda: float("inf"))) for u in G: dist[u][u] = 0 pred = defaultdict(dict) @@ -166,7 +169,7 @@ def reconstruct_path(source, target, predecessors): return list(reversed(path)) -def floyd_warshall(G, weight='weight'): +def floyd_warshall(G, weight="weight"): """Find all-pairs shortest path lengths using Floyd's algorithm. Parameters diff --git a/networkx/algorithms/shortest_paths/generic.py b/networkx/algorithms/shortest_paths/generic.py index e821779c..29c65d14 100644 --- a/networkx/algorithms/shortest_paths/generic.py +++ b/networkx/algorithms/shortest_paths/generic.py @@ -7,9 +7,13 @@ These algorithms work with undirected and directed graphs. import networkx as nx -__all__ = ['shortest_path', 'all_shortest_paths', - 'shortest_path_length', 'average_shortest_path_length', - 'has_path'] +__all__ = [ + "shortest_path", + "all_shortest_paths", + "shortest_path_length", + "average_shortest_path_length", + "has_path", +] def has_path(G, source, target): @@ -32,7 +36,7 @@ def has_path(G, source, target): return True -def shortest_path(G, source=None, target=None, weight=None, method='dijkstra'): +def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): """Compute shortest paths in the graph. 
Parameters @@ -115,16 +119,16 @@ def shortest_path(G, source=None, target=None, weight=None, method='dijkstra'): single_source_dijkstra_path() single_source_bellman_ford_path() """ - if method not in ('dijkstra', 'bellman-ford'): + if method not in ("dijkstra", "bellman-ford"): # so we don't need to check in each branch later - raise ValueError(f'method not supported: {method}') - method = 'unweighted' if weight is None else method + raise ValueError(f"method not supported: {method}") + method = "unweighted" if weight is None else method if source is None: if target is None: # Find paths between all pairs. - if method == 'unweighted': + if method == "unweighted": paths = dict(nx.all_pairs_shortest_path(G)) - elif method == 'dijkstra': + elif method == "dijkstra": paths = dict(nx.all_pairs_dijkstra_path(G, weight=weight)) else: # method == 'bellman-ford': paths = dict(nx.all_pairs_bellman_ford_path(G, weight=weight)) @@ -132,44 +136,36 @@ def shortest_path(G, source=None, target=None, weight=None, method='dijkstra'): # Find paths from all nodes co-accessible to the target. if G.is_directed(): G = G.reverse(copy=False) - if method == 'unweighted': + if method == "unweighted": paths = nx.single_source_shortest_path(G, target) - elif method == 'dijkstra': - paths = nx.single_source_dijkstra_path(G, target, - weight=weight) + elif method == "dijkstra": + paths = nx.single_source_dijkstra_path(G, target, weight=weight) else: # method == 'bellman-ford': - paths = nx.single_source_bellman_ford_path(G, target, - weight=weight) + paths = nx.single_source_bellman_ford_path(G, target, weight=weight) # Now flip the paths so they go from a source to the target. for target in paths: paths[target] = list(reversed(paths[target])) else: if target is None: # Find paths to all nodes accessible from the source. - if method == 'unweighted': + if method == "unweighted": paths = nx.single_source_shortest_path(G, source) - elif method == 'dijkstra': - paths = nx.single_source_dijkstra_path(G, source, - weight=weight) + elif method == "dijkstra": + paths = nx.single_source_dijkstra_path(G, source, weight=weight) else: # method == 'bellman-ford': - paths = nx.single_source_bellman_ford_path(G, source, - weight=weight) + paths = nx.single_source_bellman_ford_path(G, source, weight=weight) else: # Find shortest source-target path. - if method == 'unweighted': + if method == "unweighted": paths = nx.bidirectional_shortest_path(G, source, target) - elif method == 'dijkstra': + elif method == "dijkstra": paths = nx.dijkstra_path(G, source, target, weight) else: # method == 'bellman-ford': paths = nx.bellman_ford_path(G, source, target, weight) return paths -def shortest_path_length(G, - source=None, - target=None, - weight=None, - method='dijkstra'): +def shortest_path_length(G, source=None, target=None, weight=None, method="dijkstra"): """Compute shortest path lengths in the graph. Parameters @@ -258,16 +254,16 @@ def shortest_path_length(G, single_source_dijkstra_path_length() single_source_bellman_ford_path_length() """ - if method not in ('dijkstra', 'bellman-ford'): + if method not in ("dijkstra", "bellman-ford"): # so we don't need to check in each branch later - raise ValueError(f'method not supported: {method}') - method = 'unweighted' if weight is None else method + raise ValueError(f"method not supported: {method}") + method = "unweighted" if weight is None else method if source is None: if target is None: # Find paths between all pairs. 
- if method == 'unweighted': + if method == "unweighted": paths = nx.all_pairs_shortest_path_length(G) - elif method == 'dijkstra': + elif method == "dijkstra": paths = nx.all_pairs_dijkstra_path_length(G, weight=weight) else: # method == 'bellman-ford': paths = nx.all_pairs_bellman_ford_path_length(G, weight=weight) @@ -275,10 +271,10 @@ def shortest_path_length(G, # Find paths from all nodes co-accessible to the target. if G.is_directed(): G = G.reverse(copy=False) - if method == 'unweighted': + if method == "unweighted": path_length = nx.single_source_shortest_path_length paths = path_length(G, target) - elif method == 'dijkstra': + elif method == "dijkstra": path_length = nx.single_source_dijkstra_path_length paths = path_length(G, target, weight=weight) else: # method == 'bellman-ford': @@ -287,9 +283,9 @@ def shortest_path_length(G, else: if target is None: # Find paths to all nodes accessible from the source. - if method == 'unweighted': + if method == "unweighted": paths = nx.single_source_shortest_path_length(G, source) - elif method == 'dijkstra': + elif method == "dijkstra": path_length = nx.single_source_dijkstra_path_length paths = path_length(G, source, weight=weight) else: # method == 'bellman-ford': @@ -297,10 +293,10 @@ def shortest_path_length(G, paths = path_length(G, source, weight=weight) else: # Find shortest source-target path. - if method == 'unweighted': + if method == "unweighted": p = nx.bidirectional_shortest_path(G, source, target) paths = len(p) - 1 - elif method == 'dijkstra': + elif method == "dijkstra": paths = nx.dijkstra_path_length(G, source, target, weight) else: # method == 'bellman-ford': paths = nx.bellman_ford_path_length(G, source, target, weight) @@ -365,24 +361,23 @@ def average_shortest_path_length(G, weight=None, method=None): 1.0 """ - single_source_methods = ['unweighted', - 'dijkstra', - 'bellman-ford'] - all_pairs_methods = ['floyd-warshall', - 'floyd-warshall-numpy'] + single_source_methods = ["unweighted", "dijkstra", "bellman-ford"] + all_pairs_methods = ["floyd-warshall", "floyd-warshall-numpy"] supported_methods = single_source_methods + all_pairs_methods if method is None: - method = 'unweighted' if weight is None else 'dijkstra' + method = "unweighted" if weight is None else "dijkstra" if method not in supported_methods: - raise ValueError(f'method not supported: {method}') + raise ValueError(f"method not supported: {method}") n = len(G) # For the special case of the null graph, raise an exception, since # there are no paths in the null graph. if n == 0: - msg = ('the null graph has no paths, thus there is no average' - 'shortest path length') + msg = ( + "the null graph has no paths, thus there is no average" + "shortest path length" + ) raise nx.NetworkXPointlessConcept(msg) # For the special case of the trivial graph, return zero immediately. if n == 1: @@ -395,27 +390,26 @@ def average_shortest_path_length(G, weight=None, method=None): # Compute all-pairs shortest paths. 
def path_length(v): - if method == 'unweighted': + if method == "unweighted": return nx.single_source_shortest_path_length(G, v) - elif method == 'dijkstra': + elif method == "dijkstra": return nx.single_source_dijkstra_path_length(G, v, weight=weight) - elif method == 'bellman-ford': - return nx.single_source_bellman_ford_path_length(G, v, - weight=weight) + elif method == "bellman-ford": + return nx.single_source_bellman_ford_path_length(G, v, weight=weight) if method in single_source_methods: # Sum the distances for each (ordered) pair of source and target node. s = sum(l for u in G for l in path_length(u).values()) else: - if method == 'floyd-warshall': + if method == "floyd-warshall": all_pairs = nx.floyd_warshall(G, weight=weight) s = sum([sum(t.values()) for t in all_pairs.values()]) - elif method == 'floyd-warshall-numpy': + elif method == "floyd-warshall-numpy": s = nx.floyd_warshall_numpy(G, weight=weight).sum() return s / (n * (n - 1)) -def all_shortest_paths(G, source, target, weight=None, method='dijkstra'): +def all_shortest_paths(G, source, target, weight=None, method="dijkstra"): """Compute all shortest simple paths in the graph. Parameters @@ -474,17 +468,15 @@ def all_shortest_paths(G, source, target, weight=None, method='dijkstra'): single_source_shortest_path() all_pairs_shortest_path() """ - method = 'unweighted' if weight is None else method - if method == 'unweighted': + method = "unweighted" if weight is None else method + if method == "unweighted": pred = nx.predecessor(G, source) - elif method == 'dijkstra': - pred, dist = nx.dijkstra_predecessor_and_distance(G, source, - weight=weight) - elif method == 'bellman-ford': - pred, dist = nx.bellman_ford_predecessor_and_distance(G, source, - weight=weight) + elif method == "dijkstra": + pred, dist = nx.dijkstra_predecessor_and_distance(G, source, weight=weight) + elif method == "bellman-ford": + pred, dist = nx.bellman_ford_predecessor_and_distance(G, source, weight=weight) else: - raise ValueError(f'method not supported: {method}') + raise ValueError(f"method not supported: {method}") return _build_paths_from_predecessors({source}, target, pred) @@ -530,8 +522,9 @@ def _build_paths_from_predecessors(sources, target, pred): bellman_ford_path() """ if target not in pred: - raise nx.NetworkXNoPath(f'Target {target} cannot be reached' - f'from given sources') + raise nx.NetworkXNoPath( + f"Target {target} cannot be reached" f"from given sources" + ) seen = {target} stack = [[target, 0]] @@ -539,7 +532,7 @@ def _build_paths_from_predecessors(sources, target, pred): while top >= 0: node, i = stack[top] if node in sources: - yield [p for p, n in reversed(stack[:top + 1])] + yield [p for p, n in reversed(stack[: top + 1])] if len(pred[node]) > i: stack[top][1] = i + 1 next = pred[node][i] diff --git a/networkx/algorithms/shortest_paths/tests/test_astar.py b/networkx/algorithms/shortest_paths/tests/test_astar.py index ad8cde30..9c5d2d27 100644 --- a/networkx/algorithms/shortest_paths/tests/test_astar.py +++ b/networkx/algorithms/shortest_paths/tests/test_astar.py @@ -5,12 +5,20 @@ from networkx.utils import pairwise class TestAStar: - @classmethod def setup_class(cls): - edges = [('s', 'u', 10), ('s', 'x', 5), ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)] + edges = [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] cls.XG = 
nx.DiGraph() cls.XG.add_weighted_edges_from(edges) @@ -23,8 +31,7 @@ class TestAStar: graph = nx.Graph() points = ["a", "b", "c", "d"] - edges = [("a", "b", 0.18), ("a", "c", 0.68), - ("b", "c", 0.50), ("c", "d", 0.67)] + edges = [("a", "b", 0.18), ("a", "c", 0.68), ("b", "c", 0.50), ("c", "d", 0.67)] graph.add_nodes_from(points) graph.add_weighted_edges_from(edges) @@ -34,43 +41,57 @@ class TestAStar: assert nx.astar_path(graph, "a", "d", h) in (path1, path2) def test_astar_directed(self): - assert nx.astar_path(self.XG, 's', 'v') == ['s', 'x', 'u', 'v'] - assert nx.astar_path_length(self.XG, 's', 'v') == 9 + assert nx.astar_path(self.XG, "s", "v") == ["s", "x", "u", "v"] + assert nx.astar_path_length(self.XG, "s", "v") == 9 def test_astar_multigraph(self): G = nx.MultiDiGraph(self.XG) G.add_weighted_edges_from((u, v, 1000) for (u, v) in list(G.edges())) - assert nx.astar_path(G, 's', 'v') == ['s', 'x', 'u', 'v'] - assert nx.astar_path_length(G, 's', 'v') == 9 + assert nx.astar_path(G, "s", "v") == ["s", "x", "u", "v"] + assert nx.astar_path_length(G, "s", "v") == 9 def test_astar_undirected(self): GG = self.XG.to_undirected() # make sure we get lower weight # to_undirected might choose either edge with weight 2 or weight 3 - GG['u']['x']['weight'] = 2 - GG['y']['v']['weight'] = 2 - assert nx.astar_path(GG, 's', 'v') == ['s', 'x', 'u', 'v'] - assert nx.astar_path_length(GG, 's', 'v') == 8 + GG["u"]["x"]["weight"] = 2 + GG["y"]["v"]["weight"] = 2 + assert nx.astar_path(GG, "s", "v") == ["s", "x", "u", "v"] + assert nx.astar_path_length(GG, "s", "v") == 8 def test_astar_directed2(self): XG2 = nx.DiGraph() - edges = [(1, 4, 1), (4, 5, 1), (5, 6, 1), (6, 3, 1), (1, 3, 50), - (1, 2, 100), (2, 3, 100)] + edges = [ + (1, 4, 1), + (4, 5, 1), + (5, 6, 1), + (6, 3, 1), + (1, 3, 50), + (1, 2, 100), + (2, 3, 100), + ] XG2.add_weighted_edges_from(edges) assert nx.astar_path(XG2, 1, 3) == [1, 4, 5, 6, 3] def test_astar_undirected2(self): XG3 = nx.Graph() - edges = [(0, 1, 2), (1, 2, 12), (2, 3, 1), (3, 4, 5), (4, 5, 1), - (5, 0, 10)] + edges = [(0, 1, 2), (1, 2, 12), (2, 3, 1), (3, 4, 5), (4, 5, 1), (5, 0, 10)] XG3.add_weighted_edges_from(edges) assert nx.astar_path(XG3, 0, 3) == [0, 1, 2, 3] assert nx.astar_path_length(XG3, 0, 3) == 15 def test_astar_undirected3(self): XG4 = nx.Graph() - edges = [(0, 1, 2), (1, 2, 2), (2, 3, 1), (3, 4, 1), (4, 5, 1), - (5, 6, 1), (6, 7, 1), (7, 0, 1)] + edges = [ + (0, 1, 2), + (1, 2, 2), + (2, 3, 1), + (3, 4, 1), + (4, 5, 1), + (5, 6, 1), + (6, 7, 1), + (7, 0, 1), + ] XG4.add_weighted_edges_from(edges) assert nx.astar_path(XG4, 0, 2) == [0, 1, 2] assert nx.astar_path_length(XG4, 0, 2) == 4 @@ -78,14 +99,14 @@ class TestAStar: """ Tests that A* finds correct path when multiple paths exist and the best one is not expanded first (GH issue #3464) """ + def test_astar_directed3(self): heuristic_values = {"n5": 36, "n2": 4, "n1": 0, "n0": 0} def h(u, v): return heuristic_values[u] - edges = [("n5", "n1", 11), ("n5", "n2", 9), - ("n2", "n1", 1), ("n1", "n0", 32)] + edges = [("n5", "n1", 11), ("n5", "n2", 9), ("n2", "n1", 1), ("n1", "n0", 32)] graph = nx.DiGraph() graph.add_weighted_edges_from(edges) answer = ["n5", "n2", "n1", "n0"] @@ -94,29 +115,47 @@ class TestAStar: """ Tests that that parent is not wrongly overridden when a node is re-explored multiple times. 
""" + def test_astar_directed4(self): - edges = [("a", "b", 1), ("a", "c", 1), ("b", "d", 2), - ("c", "d", 1), ("d", "e", 1)] + edges = [ + ("a", "b", 1), + ("a", "c", 1), + ("b", "d", 2), + ("c", "d", 1), + ("d", "e", 1), + ] graph = nx.DiGraph() graph.add_weighted_edges_from(edges) assert nx.astar_path(graph, "a", "e") == ["a", "c", "d", "e"] -# >>> MXG4=NX.MultiGraph(XG4) -# >>> MXG4.add_edge(0,1,3) -# >>> NX.dijkstra_path(MXG4,0,2) -# [0, 1, 2] + # >>> MXG4=NX.MultiGraph(XG4) + # >>> MXG4.add_edge(0,1,3) + # >>> NX.dijkstra_path(MXG4,0,2) + # [0, 1, 2] def test_astar_w1(self): G = nx.DiGraph() - G.add_edges_from([('s', 'u'), ('s', 'x'), ('u', 'v'), ('u', 'x'), - ('v', 'y'), ('x', 'u'), ('x', 'w'), ('w', 'v'), - ('x', 'y'), ('y', 's'), ('y', 'v')]) - assert nx.astar_path(G, 's', 'v') == ['s', 'u', 'v'] - assert nx.astar_path_length(G, 's', 'v') == 2 + G.add_edges_from( + [ + ("s", "u"), + ("s", "x"), + ("u", "v"), + ("u", "x"), + ("v", "y"), + ("x", "u"), + ("x", "w"), + ("w", "v"), + ("x", "y"), + ("y", "s"), + ("y", "v"), + ] + ) + assert nx.astar_path(G, "s", "v") == ["s", "u", "v"] + assert nx.astar_path_length(G, "s", "v") == 2 def test_astar_nopath(self): with pytest.raises(nx.NodeNotFound): - nx.astar_path(self.XG, 's', 'moon') + nx.astar_path(self.XG, "s", "moon") def test_cycle(self): C = nx.cycle_graph(7) diff --git a/networkx/algorithms/shortest_paths/tests/test_dense.py b/networkx/algorithms/shortest_paths/tests/test_dense.py index 16b64945..1a0c5bcd 100644 --- a/networkx/algorithms/shortest_paths/tests/test_dense.py +++ b/networkx/algorithms/shortest_paths/tests/test_dense.py @@ -9,124 +9,180 @@ class TestFloyd: def test_floyd_warshall_predecessor_and_distance(self): XG = nx.DiGraph() - XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)]) + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) path, dist = nx.floyd_warshall_predecessor_and_distance(XG) - assert dist['s']['v'] == 9 - assert path['s']['v'] == 'u' - assert (dist == - {'y': {'y': 0, 'x': 12, 's': 7, 'u': 15, 'v': 6}, - 'x': {'y': 2, 'x': 0, 's': 9, 'u': 3, 'v': 4}, - 's': {'y': 7, 'x': 5, 's': 0, 'u': 8, 'v': 9}, - 'u': {'y': 2, 'x': 2, 's': 9, 'u': 0, 'v': 1}, - 'v': {'y': 1, 'x': 13, 's': 8, 'u': 16, 'v': 0}}) + assert dist["s"]["v"] == 9 + assert path["s"]["v"] == "u" + assert dist == { + "y": {"y": 0, "x": 12, "s": 7, "u": 15, "v": 6}, + "x": {"y": 2, "x": 0, "s": 9, "u": 3, "v": 4}, + "s": {"y": 7, "x": 5, "s": 0, "u": 8, "v": 9}, + "u": {"y": 2, "x": 2, "s": 9, "u": 0, "v": 1}, + "v": {"y": 1, "x": 13, "s": 8, "u": 16, "v": 0}, + } GG = XG.to_undirected() # make sure we get lower weight # to_undirected might choose either edge with weight 2 or weight 3 - GG['u']['x']['weight'] = 2 + GG["u"]["x"]["weight"] = 2 path, dist = nx.floyd_warshall_predecessor_and_distance(GG) - assert dist['s']['v'] == 8 + assert dist["s"]["v"] == 8 # skip this test, could be alternate path s-u-v -# assert_equal(path['s']['v'],'y') + # assert_equal(path['s']['v'],'y') G = nx.DiGraph() # no weights - G.add_edges_from([('s', 'u'), ('s', 'x'), - ('u', 'v'), ('u', 'x'), - ('v', 'y'), ('x', 'u'), - ('x', 'v'), ('x', 'y'), - ('y', 's'), ('y', 'v')]) + G.add_edges_from( + [ + ("s", "u"), + ("s", "x"), + ("u", "v"), + ("u", "x"), + ("v", "y"), + ("x", 
"u"), + ("x", "v"), + ("x", "y"), + ("y", "s"), + ("y", "v"), + ] + ) path, dist = nx.floyd_warshall_predecessor_and_distance(G) - assert dist['s']['v'] == 2 + assert dist["s"]["v"] == 2 # skip this test, could be alternate path s-u-v # assert_equal(path['s']['v'],'x') # alternate interface dist = nx.floyd_warshall(G) - assert dist['s']['v'] == 2 + assert dist["s"]["v"] == 2 # floyd_warshall_predecessor_and_distance returns # dicts-of-defautdicts # make sure we don't get empty dictionary XG = nx.DiGraph() - XG.add_weighted_edges_from([('v', 'x', 5.0), ('y', 'x', 5.0), - ('v', 'y', 6.0), ('x', 'u', 2.0)]) + XG.add_weighted_edges_from( + [("v", "x", 5.0), ("y", "x", 5.0), ("v", "y", 6.0), ("x", "u", 2.0)] + ) path, dist = nx.floyd_warshall_predecessor_and_distance(XG) inf = float("inf") - assert (dist == - {'v': {'v': 0, 'x': 5.0, 'y': 6.0, 'u': 7.0}, - 'x': {'x': 0, 'u': 2.0, 'v': inf, 'y': inf}, - 'y': {'y': 0, 'x': 5.0, 'v': inf, 'u': 7.0}, - 'u': {'u': 0, 'v': inf, 'x': inf, 'y': inf}}) - assert (path == - {'v': {'x': 'v', 'y': 'v', 'u': 'x'}, - 'x': {'u': 'x'}, - 'y': {'x': 'y', 'u': 'x'}}) + assert dist == { + "v": {"v": 0, "x": 5.0, "y": 6.0, "u": 7.0}, + "x": {"x": 0, "u": 2.0, "v": inf, "y": inf}, + "y": {"y": 0, "x": 5.0, "v": inf, "u": 7.0}, + "u": {"u": 0, "v": inf, "x": inf, "y": inf}, + } + assert path == { + "v": {"x": "v", "y": "v", "u": "x"}, + "x": {"u": "x"}, + "y": {"x": "y", "u": "x"}, + } def test_reconstruct_path(self): with pytest.raises(KeyError): XG = nx.DiGraph() - XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)]) + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) predecessors, _ = nx.floyd_warshall_predecessor_and_distance(XG) - path = nx.reconstruct_path('s', 'v', predecessors) - assert path == ['s', 'x', 'u', 'v'] + path = nx.reconstruct_path("s", "v", predecessors) + assert path == ["s", "x", "u", "v"] - path = nx.reconstruct_path('s', 's', predecessors) + path = nx.reconstruct_path("s", "s", predecessors) assert path == [] # this part raises the keyError - nx.reconstruct_path('1', '2', predecessors) + nx.reconstruct_path("1", "2", predecessors) def test_cycle(self): - path, dist = nx.floyd_warshall_predecessor_and_distance( - nx.cycle_graph(7)) + path, dist = nx.floyd_warshall_predecessor_and_distance(nx.cycle_graph(7)) assert dist[0][3] == 3 assert path[0][3] == 2 assert dist[0][4] == 3 def test_weighted(self): XG3 = nx.Graph() - XG3.add_weighted_edges_from([[0, 1, 2], [1, 2, 12], [2, 3, 1], - [3, 4, 5], [4, 5, 1], [5, 0, 10]]) + XG3.add_weighted_edges_from( + [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]] + ) path, dist = nx.floyd_warshall_predecessor_and_distance(XG3) assert dist[0][3] == 15 assert path[0][3] == 2 def test_weighted2(self): XG4 = nx.Graph() - XG4.add_weighted_edges_from([[0, 1, 2], [1, 2, 2], [2, 3, 1], - [3, 4, 1], [4, 5, 1], [5, 6, 1], - [6, 7, 1], [7, 0, 1]]) + XG4.add_weighted_edges_from( + [ + [0, 1, 2], + [1, 2, 2], + [2, 3, 1], + [3, 4, 1], + [4, 5, 1], + [5, 6, 1], + [6, 7, 1], + [7, 0, 1], + ] + ) path, dist = nx.floyd_warshall_predecessor_and_distance(XG4) assert dist[0][2] == 4 assert path[0][2] == 1 def test_weight_parameter(self): XG4 = nx.Graph() - XG4.add_edges_from([(0, 1, {'heavy': 2}), (1, 2, {'heavy': 2}), 
- (2, 3, {'heavy': 1}), (3, 4, {'heavy': 1}), - (4, 5, {'heavy': 1}), (5, 6, {'heavy': 1}), - (6, 7, {'heavy': 1}), (7, 0, {'heavy': 1})]) - path, dist = nx.floyd_warshall_predecessor_and_distance(XG4, - weight='heavy') + XG4.add_edges_from( + [ + (0, 1, {"heavy": 2}), + (1, 2, {"heavy": 2}), + (2, 3, {"heavy": 1}), + (3, 4, {"heavy": 1}), + (4, 5, {"heavy": 1}), + (5, 6, {"heavy": 1}), + (6, 7, {"heavy": 1}), + (7, 0, {"heavy": 1}), + ] + ) + path, dist = nx.floyd_warshall_predecessor_and_distance(XG4, weight="heavy") assert dist[0][2] == 4 assert path[0][2] == 1 def test_zero_distance(self): XG = nx.DiGraph() - XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)]) + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) path, dist = nx.floyd_warshall_predecessor_and_distance(XG) for u in XG: @@ -135,7 +191,7 @@ class TestFloyd: GG = XG.to_undirected() # make sure we get lower weight # to_undirected might choose either edge with weight 2 or weight 3 - GG['u']['x']['weight'] = 2 + GG["u"]["x"]["weight"] = 2 path, dist = nx.floyd_warshall_predecessor_and_distance(GG) for u in GG: @@ -143,8 +199,7 @@ class TestFloyd: def test_zero_weight(self): G = nx.DiGraph() - edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), - (5, 4, 0), (4, 3, -5), (2, 5, -7)] + edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), (5, 4, 0), (4, 3, -5), (2, 5, -7)] G.add_weighted_edges_from(edges) dist = nx.floyd_warshall(G) assert dist[1][3] == -14 diff --git a/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py b/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py index 5f65fa7e..bee06968 100644 --- a/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py +++ b/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py @@ -1,13 +1,13 @@ import pytest -numpy = pytest.importorskip('numpy') -npt = pytest.importorskip('numpy.testing') + +numpy = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") import networkx as nx class TestFloydNumpy: - def test_cycle_numpy(self): dist = nx.floyd_warshall_numpy(nx.cycle_graph(7)) assert dist[0, 3] == 3 @@ -15,26 +15,44 @@ class TestFloydNumpy: def test_weighted_numpy_three_edges(self): XG3 = nx.Graph() - XG3.add_weighted_edges_from([[0, 1, 2], [1, 2, 12], [2, 3, 1], - [3, 4, 5], [4, 5, 1], [5, 0, 10]]) + XG3.add_weighted_edges_from( + [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]] + ) dist = nx.floyd_warshall_numpy(XG3) assert dist[0, 3] == 15 def test_weighted_numpy_two_edges(self): XG4 = nx.Graph() - XG4.add_weighted_edges_from([[0, 1, 2], [1, 2, 2], [2, 3, 1], - [3, 4, 1], [4, 5, 1], [5, 6, 1], - [6, 7, 1], [7, 0, 1]]) + XG4.add_weighted_edges_from( + [ + [0, 1, 2], + [1, 2, 2], + [2, 3, 1], + [3, 4, 1], + [4, 5, 1], + [5, 6, 1], + [6, 7, 1], + [7, 0, 1], + ] + ) dist = nx.floyd_warshall_numpy(XG4) assert dist[0, 2] == 4 def test_weight_parameter_numpy(self): XG4 = nx.Graph() - XG4.add_edges_from([(0, 1, {'heavy': 2}), (1, 2, {'heavy': 2}), - (2, 3, {'heavy': 1}), (3, 4, {'heavy': 1}), - (4, 5, {'heavy': 1}), (5, 6, {'heavy': 1}), - (6, 7, {'heavy': 1}), (7, 0, {'heavy': 1})]) - dist = nx.floyd_warshall_numpy(XG4, weight='heavy') + XG4.add_edges_from( + [ + (0, 1, {"heavy": 2}), + (1, 2, {"heavy": 2}), + (2, 3, {"heavy": 1}), + (3, 4, 
{"heavy": 1}), + (4, 5, {"heavy": 1}), + (5, 6, {"heavy": 1}), + (6, 7, {"heavy": 1}), + (7, 0, {"heavy": 1}), + ] + ) + dist = nx.floyd_warshall_numpy(XG4, weight="heavy") assert dist[0, 2] == 4 def test_directed_cycle_numpy(self): diff --git a/networkx/algorithms/shortest_paths/tests/test_generic.py b/networkx/algorithms/shortest_paths/tests/test_generic.py index 408c77e6..251db317 100644 --- a/networkx/algorithms/shortest_paths/tests/test_generic.py +++ b/networkx/algorithms/shortest_paths/tests/test_generic.py @@ -21,12 +21,11 @@ def validate_grid_path(r, c, s, t, p): class TestGenericPath: - @classmethod def setup_class(cls): from networkx import convert_node_labels_to_integers as cnlti - cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, - ordering="sorted") + + cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") cls.cycle = nx.cycle_graph(7) cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) cls.neg_weights = nx.DiGraph() @@ -41,29 +40,31 @@ class TestGenericPath: validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12)) assert nx.shortest_path(self.directed_cycle, 0, 3) == [0, 1, 2, 3] # now with weights - assert (nx.shortest_path(self.cycle, 0, 3, weight='weight') == - [0, 1, 2, 3]) - assert (nx.shortest_path(self.cycle, 0, 4, weight='weight') == - [0, 6, 5, 4]) - validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12, - weight='weight')) - assert (nx.shortest_path(self.directed_cycle, 0, 3, - weight='weight') == - [0, 1, 2, 3]) + assert nx.shortest_path(self.cycle, 0, 3, weight="weight") == [0, 1, 2, 3] + assert nx.shortest_path(self.cycle, 0, 4, weight="weight") == [0, 6, 5, 4] + validate_grid_path( + 4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12, weight="weight") + ) + assert nx.shortest_path(self.directed_cycle, 0, 3, weight="weight") == [ + 0, + 1, + 2, + 3, + ] # weights and method specified - assert (nx.shortest_path(self.directed_cycle, 0, 3, - weight='weight', method='dijkstra') == - [0, 1, 2, 3]) - assert (nx.shortest_path(self.directed_cycle, 0, 3, - weight='weight', method='bellman-ford') == - [0, 1, 2, 3]) + assert nx.shortest_path( + self.directed_cycle, 0, 3, weight="weight", method="dijkstra" + ) == [0, 1, 2, 3] + assert nx.shortest_path( + self.directed_cycle, 0, 3, weight="weight", method="bellman-ford" + ) == [0, 1, 2, 3] # when Dijkstra's will probably (depending on precise implementation) # incorrectly return [0, 1, 3] instead - assert (nx.shortest_path(self.neg_weights, 0, 3, weight='weight', - method='bellman-ford') == - [0, 2, 3]) + assert nx.shortest_path( + self.neg_weights, 0, 3, weight="weight", method="bellman-ford" + ) == [0, 2, 3] # confirm bad method rejection - pytest.raises(ValueError, nx.shortest_path, self.cycle, method='SPAM') + pytest.raises(ValueError, nx.shortest_path, self.cycle, method="SPAM") # confirm absent source rejection pytest.raises(nx.NodeNotFound, nx.shortest_path, self.cycle, 8) @@ -72,14 +73,16 @@ class TestGenericPath: sp = nx.shortest_path(nx.path_graph(3), target=1) assert sp == answer # with weights - sp = nx.shortest_path(nx.path_graph(3), target=1, weight='weight') + sp = nx.shortest_path(nx.path_graph(3), target=1, weight="weight") assert sp == answer # weights and method specified - sp = nx.shortest_path(nx.path_graph(3), target=1, weight='weight', - method='dijkstra') + sp = nx.shortest_path( + nx.path_graph(3), target=1, weight="weight", method="dijkstra" + ) assert sp == answer - sp = nx.shortest_path(nx.path_graph(3), target=1, weight='weight', - 
method='bellman-ford') + sp = nx.shortest_path( + nx.path_graph(3), target=1, weight="weight", method="bellman-ford" + ) assert sp == answer def test_shortest_path_length(self): @@ -87,27 +90,24 @@ class TestGenericPath: assert nx.shortest_path_length(self.grid, 1, 12) == 5 assert nx.shortest_path_length(self.directed_cycle, 0, 4) == 4 # now with weights - assert (nx.shortest_path_length(self.cycle, 0, 3, - weight='weight') == - 3) - assert (nx.shortest_path_length(self.grid, 1, 12, - weight='weight') == - 5) - assert (nx.shortest_path_length(self.directed_cycle, 0, 4, - weight='weight') == - 4) + assert nx.shortest_path_length(self.cycle, 0, 3, weight="weight") == 3 + assert nx.shortest_path_length(self.grid, 1, 12, weight="weight") == 5 + assert nx.shortest_path_length(self.directed_cycle, 0, 4, weight="weight") == 4 # weights and method specified - assert (nx.shortest_path_length(self.cycle, 0, 3, weight='weight', - method='dijkstra') == - 3) - assert (nx.shortest_path_length(self.cycle, 0, 3, weight='weight', - method='bellman-ford') == - 3) + assert ( + nx.shortest_path_length( + self.cycle, 0, 3, weight="weight", method="dijkstra" + ) + == 3 + ) + assert ( + nx.shortest_path_length( + self.cycle, 0, 3, weight="weight", method="bellman-ford" + ) + == 3 + ) # confirm bad method rejection - pytest.raises(ValueError, - nx.shortest_path_length, - self.cycle, - method='SPAM') + pytest.raises(ValueError, nx.shortest_path_length, self.cycle, method="SPAM") # confirm absent source rejection pytest.raises(nx.NodeNotFound, nx.shortest_path_length, self.cycle, 8) @@ -116,15 +116,16 @@ class TestGenericPath: sp = dict(nx.shortest_path_length(nx.path_graph(3), target=1)) assert sp == answer # with weights - sp = nx.shortest_path_length(nx.path_graph(3), target=1, - weight='weight') + sp = nx.shortest_path_length(nx.path_graph(3), target=1, weight="weight") assert sp == answer # weights and method specified - sp = nx.shortest_path_length(nx.path_graph(3), target=1, - weight='weight', method='dijkstra') + sp = nx.shortest_path_length( + nx.path_graph(3), target=1, weight="weight", method="dijkstra" + ) assert sp == answer - sp = nx.shortest_path_length(nx.path_graph(3), target=1, - weight='weight', method='bellman-ford') + sp = nx.shortest_path_length( + nx.path_graph(3), target=1, weight="weight", method="bellman-ford" + ) assert sp == answer def test_single_source_shortest_path(self): @@ -134,46 +135,44 @@ class TestGenericPath: p = nx.shortest_path(self.grid, 1) validate_grid_path(4, 4, 1, 12, p[12]) # now with weights - p = nx.shortest_path(self.cycle, 0, weight='weight') + p = nx.shortest_path(self.cycle, 0, weight="weight") assert p[3] == [0, 1, 2, 3] assert p == nx.single_source_dijkstra_path(self.cycle, 0) - p = nx.shortest_path(self.grid, 1, weight='weight') + p = nx.shortest_path(self.grid, 1, weight="weight") validate_grid_path(4, 4, 1, 12, p[12]) # weights and method specified - p = nx.shortest_path(self.cycle, 0, method='dijkstra', weight='weight') + p = nx.shortest_path(self.cycle, 0, method="dijkstra", weight="weight") assert p[3] == [0, 1, 2, 3] assert p == nx.single_source_shortest_path(self.cycle, 0) - p = nx.shortest_path(self.cycle, 0, method='bellman-ford', - weight='weight') + p = nx.shortest_path(self.cycle, 0, method="bellman-ford", weight="weight") assert p[3] == [0, 1, 2, 3] assert p == nx.single_source_shortest_path(self.cycle, 0) def test_single_source_shortest_path_length(self): ans = dict(nx.shortest_path_length(self.cycle, 0)) assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 
3, 5: 2, 6: 1} - assert (ans == - dict(nx.single_source_shortest_path_length(self.cycle, - 0))) + assert ans == dict(nx.single_source_shortest_path_length(self.cycle, 0)) ans = dict(nx.shortest_path_length(self.grid, 1)) assert ans[16] == 6 # now with weights - ans = dict(nx.shortest_path_length(self.cycle, 0, weight='weight')) + ans = dict(nx.shortest_path_length(self.cycle, 0, weight="weight")) assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} - assert ans == dict(nx.single_source_dijkstra_path_length( - self.cycle, 0)) - ans = dict(nx.shortest_path_length(self.grid, 1, weight='weight')) + assert ans == dict(nx.single_source_dijkstra_path_length(self.cycle, 0)) + ans = dict(nx.shortest_path_length(self.grid, 1, weight="weight")) assert ans[16] == 6 # weights and method specified - ans = dict(nx.shortest_path_length(self.cycle, 0, weight='weight', - method='dijkstra')) + ans = dict( + nx.shortest_path_length(self.cycle, 0, weight="weight", method="dijkstra") + ) assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} - assert ans == dict(nx.single_source_dijkstra_path_length( - self.cycle, 0)) - ans = dict(nx.shortest_path_length(self.cycle, 0, weight='weight', - method='bellman-ford')) + assert ans == dict(nx.single_source_dijkstra_path_length(self.cycle, 0)) + ans = dict( + nx.shortest_path_length( + self.cycle, 0, weight="weight", method="bellman-ford" + ) + ) assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} - assert ans == dict(nx.single_source_bellman_ford_path_length( - self.cycle, 0)) + assert ans == dict(nx.single_source_bellman_ford_path_length(self.cycle, 0)) def test_all_pairs_shortest_path(self): p = nx.shortest_path(self.cycle) @@ -182,17 +181,16 @@ class TestGenericPath: p = nx.shortest_path(self.grid) validate_grid_path(4, 4, 1, 12, p[1][12]) # now with weights - p = nx.shortest_path(self.cycle, weight='weight') + p = nx.shortest_path(self.cycle, weight="weight") assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_dijkstra_path(self.cycle)) - p = nx.shortest_path(self.grid, weight='weight') + p = nx.shortest_path(self.grid, weight="weight") validate_grid_path(4, 4, 1, 12, p[1][12]) # weights and method specified - p = nx.shortest_path(self.cycle, weight='weight', method='dijkstra') + p = nx.shortest_path(self.cycle, weight="weight", method="dijkstra") assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_dijkstra_path(self.cycle)) - p = nx.shortest_path(self.cycle, weight='weight', - method='bellman-ford') + p = nx.shortest_path(self.cycle, weight="weight", method="bellman-ford") assert p[0][3] == [0, 1, 2, 3] assert p == dict(nx.all_pairs_bellman_ford_path(self.cycle)) @@ -203,21 +201,22 @@ class TestGenericPath: ans = dict(nx.shortest_path_length(self.grid)) assert ans[1][16] == 6 # now with weights - ans = dict(nx.shortest_path_length(self.cycle, weight='weight')) + ans = dict(nx.shortest_path_length(self.cycle, weight="weight")) assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} assert ans == dict(nx.all_pairs_dijkstra_path_length(self.cycle)) - ans = dict(nx.shortest_path_length(self.grid, weight='weight')) + ans = dict(nx.shortest_path_length(self.grid, weight="weight")) assert ans[1][16] == 6 # weights and method specified - ans = dict(nx.shortest_path_length(self.cycle, weight='weight', - method='dijkstra')) + ans = dict( + nx.shortest_path_length(self.cycle, weight="weight", method="dijkstra") + ) assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} assert ans == dict(nx.all_pairs_dijkstra_path_length(self.cycle)) - 
ans = dict(nx.shortest_path_length(self.cycle, weight='weight', - method='bellman-ford')) + ans = dict( + nx.shortest_path_length(self.cycle, weight="weight", method="bellman-ford") + ) assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} - assert (ans == - dict(nx.all_pairs_bellman_ford_path_length(self.cycle))) + assert ans == dict(nx.all_pairs_bellman_ford_path_length(self.cycle)) def test_has_path(self): G = nx.Graph() @@ -230,27 +229,27 @@ class TestGenericPath: G = nx.Graph() nx.add_path(G, [0, 1, 2, 3]) nx.add_path(G, [0, 10, 20, 3]) - assert ([[0, 1, 2, 3], [0, 10, 20, 3]] == - sorted(nx.all_shortest_paths(G, 0, 3))) + assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted(nx.all_shortest_paths(G, 0, 3)) # with weights G = nx.Graph() nx.add_path(G, [0, 1, 2, 3]) nx.add_path(G, [0, 10, 20, 3]) - assert ([[0, 1, 2, 3], [0, 10, 20, 3]] == - sorted(nx.all_shortest_paths(G, 0, 3, weight='weight'))) + assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted( + nx.all_shortest_paths(G, 0, 3, weight="weight") + ) # weights and method specified G = nx.Graph() nx.add_path(G, [0, 1, 2, 3]) nx.add_path(G, [0, 10, 20, 3]) - assert ([[0, 1, 2, 3], [0, 10, 20, 3]] == - sorted(nx.all_shortest_paths(G, 0, 3, weight='weight', - method='dijkstra'))) + assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted( + nx.all_shortest_paths(G, 0, 3, weight="weight", method="dijkstra") + ) G = nx.Graph() nx.add_path(G, [0, 1, 2, 3]) nx.add_path(G, [0, 10, 20, 3]) - assert ([[0, 1, 2, 3], [0, 10, 20, 3]] == - sorted(nx.all_shortest_paths(G, 0, 3, weight='weight', - method='bellman-ford'))) + assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted( + nx.all_shortest_paths(G, 0, 3, weight="weight", method="bellman-ford") + ) def test_all_shortest_paths_raise(self): with pytest.raises(nx.NetworkXNoPath): @@ -261,28 +260,31 @@ class TestGenericPath: def test_bad_method(self): with pytest.raises(ValueError): G = nx.path_graph(2) - list(nx.all_shortest_paths(G, 0, 1, weight='weight', method='SPAM')) + list(nx.all_shortest_paths(G, 0, 1, weight="weight", method="SPAM")) def test_all_shortest_paths_zero_weight_edge(self): g = nx.Graph() nx.add_path(g, [0, 1, 3]) nx.add_path(g, [0, 1, 2, 3]) - g.edges[1, 2]['weight'] = 0 - paths30d = list(nx.all_shortest_paths(g, 3, 0, weight='weight', - method='dijkstra')) - paths03d = list(nx.all_shortest_paths(g, 0, 3, weight='weight', - method='dijkstra')) - paths30b = list(nx.all_shortest_paths(g, 3, 0, weight='weight', - method='bellman-ford')) - paths03b = list(nx.all_shortest_paths(g, 0, 3, weight='weight', - method='bellman-ford')) + g.edges[1, 2]["weight"] = 0 + paths30d = list( + nx.all_shortest_paths(g, 3, 0, weight="weight", method="dijkstra") + ) + paths03d = list( + nx.all_shortest_paths(g, 0, 3, weight="weight", method="dijkstra") + ) + paths30b = list( + nx.all_shortest_paths(g, 3, 0, weight="weight", method="bellman-ford") + ) + paths03b = list( + nx.all_shortest_paths(g, 0, 3, weight="weight", method="bellman-ford") + ) assert sorted(paths03d) == sorted(p[::-1] for p in paths30d) assert sorted(paths03d) == sorted(p[::-1] for p in paths30b) assert sorted(paths03b) == sorted(p[::-1] for p in paths30b) class TestAverageShortestPathLength: - def test_cycle_graph(self): ans = nx.average_shortest_path_length(nx.cycle_graph(7)) assert almost_equal(ans, 2) @@ -294,42 +296,34 @@ class TestAverageShortestPathLength: def test_weighted(self): G = nx.Graph() nx.add_cycle(G, range(7), weight=2) - ans = nx.average_shortest_path_length(G, weight='weight') + ans = nx.average_shortest_path_length(G, 
weight="weight") assert almost_equal(ans, 4) G = nx.Graph() nx.add_path(G, range(5), weight=2) - ans = nx.average_shortest_path_length(G, weight='weight') + ans = nx.average_shortest_path_length(G, weight="weight") assert almost_equal(ans, 4) def test_specified_methods(self): G = nx.Graph() nx.add_cycle(G, range(7), weight=2) - ans = nx.average_shortest_path_length(G, - weight='weight', - method='dijkstra') + ans = nx.average_shortest_path_length(G, weight="weight", method="dijkstra") assert almost_equal(ans, 4) - ans = nx.average_shortest_path_length(G, - weight='weight', - method='bellman-ford') + ans = nx.average_shortest_path_length(G, weight="weight", method="bellman-ford") assert almost_equal(ans, 4) - ans = nx.average_shortest_path_length(G, - weight='weight', - method='floyd-warshall') + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall" + ) assert almost_equal(ans, 4) G = nx.Graph() nx.add_path(G, range(5), weight=2) - ans = nx.average_shortest_path_length(G, - weight='weight', - method='dijkstra') + ans = nx.average_shortest_path_length(G, weight="weight", method="dijkstra") assert almost_equal(ans, 4) - ans = nx.average_shortest_path_length(G, - weight='weight', - method='bellman-ford') + ans = nx.average_shortest_path_length(G, weight="weight", method="bellman-ford") assert almost_equal(ans, 4) - ans = nx.average_shortest_path_length(G, - weight='weight', - method='floyd-warshall') + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall" + ) assert almost_equal(ans, 4) def test_disconnected(self): @@ -358,30 +352,30 @@ class TestAverageShortestPathLength: def test_bad_method(self): with pytest.raises(ValueError): G = nx.path_graph(2) - nx.average_shortest_path_length(G, weight='weight', method='SPAM') + nx.average_shortest_path_length(G, weight="weight", method="SPAM") class TestAverageShortestPathLengthNumpy: - @classmethod def setup_class(cls): global numpy global npt import pytest - numpy = pytest.importorskip('numpy') - npt = pytest.importorskip('numpy.testing') + + numpy = pytest.importorskip("numpy") + npt = pytest.importorskip("numpy.testing") def test_specified_methods_numpy(self): G = nx.Graph() nx.add_cycle(G, range(7), weight=2) - ans = nx.average_shortest_path_length(G, - weight='weight', - method='floyd-warshall-numpy') + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall-numpy" + ) npt.assert_almost_equal(ans, 4) G = nx.Graph() nx.add_path(G, range(5), weight=2) - ans = nx.average_shortest_path_length(G, - weight='weight', - method='floyd-warshall-numpy') + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall-numpy" + ) npt.assert_almost_equal(ans, 4) diff --git a/networkx/algorithms/shortest_paths/tests/test_unweighted.py b/networkx/algorithms/shortest_paths/tests/test_unweighted.py index a1cb2d91..96708f06 100644 --- a/networkx/algorithms/shortest_paths/tests/test_unweighted.py +++ b/networkx/algorithms/shortest_paths/tests/test_unweighted.py @@ -17,22 +17,21 @@ def validate_grid_path(r, c, s, t, p): class TestUnweightedPath: - @classmethod def setup_class(cls): from networkx import convert_node_labels_to_integers as cnlti + cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") cls.cycle = nx.cycle_graph(7) cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) def test_bidirectional_shortest_path(self): - assert (nx.bidirectional_shortest_path(self.cycle, 0, 3) == - [0, 1, 2, 3]) - assert 
(nx.bidirectional_shortest_path(self.cycle, 0, 4) == - [0, 6, 5, 4]) - validate_grid_path(4, 4, 1, 12, nx.bidirectional_shortest_path(self.grid, 1, 12)) - assert (nx.bidirectional_shortest_path(self.directed_cycle, 0, 3) == - [0, 1, 2, 3]) + assert nx.bidirectional_shortest_path(self.cycle, 0, 3) == [0, 1, 2, 3] + assert nx.bidirectional_shortest_path(self.cycle, 0, 4) == [0, 6, 5, 4] + validate_grid_path( + 4, 4, 1, 12, nx.bidirectional_shortest_path(self.grid, 1, 12) + ) + assert nx.bidirectional_shortest_path(self.directed_cycle, 0, 3) == [0, 1, 2, 3] def test_shortest_path_length(self): assert nx.shortest_path_length(self.cycle, 0, 3) == 3 diff --git a/networkx/algorithms/shortest_paths/tests/test_weighted.py b/networkx/algorithms/shortest_paths/tests/test_weighted.py index 3df0ffd6..b234618c 100644 --- a/networkx/algorithms/shortest_paths/tests/test_weighted.py +++ b/networkx/algorithms/shortest_paths/tests/test_weighted.py @@ -4,7 +4,7 @@ import networkx as nx from networkx.utils import pairwise -def validate_path(G, s, t, soln_len, path, weight='weight'): +def validate_path(G, s, t, soln_len, path, weight="weight"): assert path[0] == s assert path[-1] == t @@ -12,9 +12,12 @@ def validate_path(G, s, t, soln_len, path, weight='weight'): weight_f = weight else: if G.is_multigraph(): + def weight_f(u, v, d): return min(e.get(weight, 1) for e in d.values()) + else: + def weight_f(u, v, d): return d.get(weight, 1) @@ -22,7 +25,7 @@ def validate_path(G, s, t, soln_len, path, weight='weight'): assert soln_len == computed -def validate_length_path(G, s, t, soln_len, length, path, weight='weight'): +def validate_length_path(G, s, t, soln_len, length, path, weight="weight"): assert soln_len == length validate_path(G, s, t, length, path, weight=weight) @@ -36,70 +39,103 @@ class WeightedTestBase: def setup(self): """Creates some graphs for use in the unit tests.""" cnlti = nx.convert_node_labels_to_integers - self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, - ordering="sorted") + self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") self.cycle = nx.cycle_graph(7) self.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) self.XG = nx.DiGraph() - self.XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)]) + self.XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) self.MXG = nx.MultiDiGraph(self.XG) - self.MXG.add_edge('s', 'u', weight=15) + self.MXG.add_edge("s", "u", weight=15) self.XG2 = nx.DiGraph() - self.XG2.add_weighted_edges_from([[1, 4, 1], [4, 5, 1], - [5, 6, 1], [6, 3, 1], - [1, 3, 50], [1, 2, 100], - [2, 3, 100]]) + self.XG2.add_weighted_edges_from( + [ + [1, 4, 1], + [4, 5, 1], + [5, 6, 1], + [6, 3, 1], + [1, 3, 50], + [1, 2, 100], + [2, 3, 100], + ] + ) self.XG3 = nx.Graph() - self.XG3.add_weighted_edges_from([[0, 1, 2], [1, 2, 12], - [2, 3, 1], [3, 4, 5], - [4, 5, 1], [5, 0, 10]]) + self.XG3.add_weighted_edges_from( + [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]] + ) self.XG4 = nx.Graph() - self.XG4.add_weighted_edges_from([[0, 1, 2], [1, 2, 2], - [2, 3, 1], [3, 4, 1], - [4, 5, 1], [5, 6, 1], - [6, 7, 1], [7, 0, 1]]) + self.XG4.add_weighted_edges_from( + [ + [0, 1, 2], + [1, 2, 2], + [2, 3, 1], + [3, 4, 1], + [4, 5, 1], + [5, 6, 1], + 
[6, 7, 1], + [7, 0, 1], + ] + ) self.MXG4 = nx.MultiGraph(self.XG4) self.MXG4.add_edge(0, 1, weight=3) self.G = nx.DiGraph() # no weights - self.G.add_edges_from([('s', 'u'), ('s', 'x'), - ('u', 'v'), ('u', 'x'), - ('v', 'y'), ('x', 'u'), - ('x', 'v'), ('x', 'y'), - ('y', 's'), ('y', 'v')]) + self.G.add_edges_from( + [ + ("s", "u"), + ("s", "x"), + ("u", "v"), + ("u", "x"), + ("v", "y"), + ("x", "u"), + ("x", "v"), + ("x", "y"), + ("y", "s"), + ("y", "v"), + ] + ) class TestWeightedPath(WeightedTestBase): - def test_dijkstra(self): - (D, P) = nx.single_source_dijkstra(self.XG, 's') - validate_path(self.XG, 's', 'v', 9, P['v']) - assert D['v'] == 9 + (D, P) = nx.single_source_dijkstra(self.XG, "s") + validate_path(self.XG, "s", "v", 9, P["v"]) + assert D["v"] == 9 validate_path( - self.XG, 's', 'v', 9, nx.single_source_dijkstra_path(self.XG, 's')['v']) - assert dict( - nx.single_source_dijkstra_path_length(self.XG, 's'))['v'] == 9 + self.XG, "s", "v", 9, nx.single_source_dijkstra_path(self.XG, "s")["v"] + ) + assert dict(nx.single_source_dijkstra_path_length(self.XG, "s"))["v"] == 9 validate_path( - self.XG, 's', 'v', 9, nx.single_source_dijkstra(self.XG, 's')[1]['v']) + self.XG, "s", "v", 9, nx.single_source_dijkstra(self.XG, "s")[1]["v"] + ) validate_path( - self.MXG, 's', 'v', 9, nx.single_source_dijkstra_path(self.MXG, 's')['v']) + self.MXG, "s", "v", 9, nx.single_source_dijkstra_path(self.MXG, "s")["v"] + ) GG = self.XG.to_undirected() # make sure we get lower weight # to_undirected might choose either edge with weight 2 or weight 3 - GG['u']['x']['weight'] = 2 - (D, P) = nx.single_source_dijkstra(GG, 's') - validate_path(GG, 's', 'v', 8, P['v']) - assert D['v'] == 8 # uses lower weight of 2 on u<->x edge - validate_path(GG, 's', 'v', 8, nx.dijkstra_path(GG, 's', 'v')) - assert nx.dijkstra_path_length(GG, 's', 'v') == 8 + GG["u"]["x"]["weight"] = 2 + (D, P) = nx.single_source_dijkstra(GG, "s") + validate_path(GG, "s", "v", 8, P["v"]) + assert D["v"] == 8 # uses lower weight of 2 on u<->x edge + validate_path(GG, "s", "v", 8, nx.dijkstra_path(GG, "s", "v")) + assert nx.dijkstra_path_length(GG, "s", "v") == 8 validate_path(self.XG2, 1, 3, 4, nx.dijkstra_path(self.XG2, 1, 3)) validate_path(self.XG3, 0, 3, 15, nx.dijkstra_path(self.XG3, 0, 3)) @@ -108,17 +144,18 @@ class TestWeightedPath(WeightedTestBase): assert nx.dijkstra_path_length(self.XG4, 0, 2) == 4 validate_path(self.MXG4, 0, 2, 4, nx.dijkstra_path(self.MXG4, 0, 2)) validate_path( - self.G, 's', 'v', 2, nx.single_source_dijkstra(self.G, 's', 'v')[1]) + self.G, "s", "v", 2, nx.single_source_dijkstra(self.G, "s", "v")[1] + ) validate_path( - self.G, 's', 'v', 2, nx.single_source_dijkstra(self.G, 's')[1]['v']) + self.G, "s", "v", 2, nx.single_source_dijkstra(self.G, "s")[1]["v"] + ) - validate_path(self.G, 's', 'v', 2, nx.dijkstra_path(self.G, 's', 'v')) - assert nx.dijkstra_path_length(self.G, 's', 'v') == 2 + validate_path(self.G, "s", "v", 2, nx.dijkstra_path(self.G, "s", "v")) + assert nx.dijkstra_path_length(self.G, "s", "v") == 2 # NetworkXError: node s not reachable from moon - pytest.raises(nx.NetworkXNoPath, nx.dijkstra_path, self.G, 's', 'moon') - pytest.raises( - nx.NetworkXNoPath, nx.dijkstra_path_length, self.G, 's', 'moon') + pytest.raises(nx.NetworkXNoPath, nx.dijkstra_path, self.G, "s", "moon") + pytest.raises(nx.NetworkXNoPath, nx.dijkstra_path_length, self.G, "s", "moon") validate_path(self.cycle, 0, 3, 3, nx.dijkstra_path(self.cycle, 0, 3)) validate_path(self.cycle, 0, 4, 3, nx.dijkstra_path(self.cycle, 0, 4)) @@ 
-127,22 +164,33 @@ class TestWeightedPath(WeightedTestBase): def test_bidirectional_dijkstra(self): validate_length_path( - self.XG, 's', 'v', 9, *nx.bidirectional_dijkstra(self.XG, 's', 'v')) + self.XG, "s", "v", 9, *nx.bidirectional_dijkstra(self.XG, "s", "v") + ) validate_length_path( - self.G, 's', 'v', 2, *nx.bidirectional_dijkstra(self.G, 's', 'v')) + self.G, "s", "v", 2, *nx.bidirectional_dijkstra(self.G, "s", "v") + ) validate_length_path( - self.cycle, 0, 3, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 3)) + self.cycle, 0, 3, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 3) + ) validate_length_path( - self.cycle, 0, 4, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 4)) + self.cycle, 0, 4, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 4) + ) validate_length_path( - self.XG3, 0, 3, 15, *nx.bidirectional_dijkstra(self.XG3, 0, 3)) + self.XG3, 0, 3, 15, *nx.bidirectional_dijkstra(self.XG3, 0, 3) + ) validate_length_path( - self.XG4, 0, 2, 4, *nx.bidirectional_dijkstra(self.XG4, 0, 2)) + self.XG4, 0, 2, 4, *nx.bidirectional_dijkstra(self.XG4, 0, 2) + ) # need more tests here - P = nx.single_source_dijkstra_path(self.XG, 's')['v'] - validate_path(self.XG, 's', 'v', sum(self.XG[u][v]['weight'] for u, v in zip( - P[:-1], P[1:])), nx.dijkstra_path(self.XG, 's', 'v')) + P = nx.single_source_dijkstra_path(self.XG, "s")["v"] + validate_path( + self.XG, + "s", + "v", + sum(self.XG[u][v]["weight"] for u, v in zip(P[:-1], P[1:])), + nx.dijkstra_path(self.XG, "s", "v"), + ) # check absent source G = nx.path_graph(2) @@ -155,12 +203,12 @@ class TestWeightedPath(WeightedTestBase): def getpath(pred, v, s): return [v] if v == s else getpath(pred, pred[v], s) + [v] - def goldberg_radzik(g, s, t, weight='weight'): + def goldberg_radzik(g, s, t, weight="weight"): pred, dist = nx.goldberg_radzik(g, s, weight=weight) dist = dist[t] return dist, getpath(pred, t, s) - def astar(g, s, t, weight='weight'): + def astar(g, s, t, weight="weight"): path = nx.astar_path(g, s, t, heuristic, weight=weight) dist = nx.astar_path_length(g, s, t, heuristic, weight=weight) return dist, path @@ -175,7 +223,7 @@ class TestWeightedPath(WeightedTestBase): path = [6] + list(range(t + 1)) def weight(u, v, _): - return 1 + v**2 + return 1 + v ** 2 length = sum(weight(u, v, None) for u, v in pairwise(path)) vlp(G, s, t, length, nx.bidirectional_dijkstra, weight) @@ -185,7 +233,7 @@ class TestWeightedPath(WeightedTestBase): vlp(G, s, t, length, astar, weight) def weight(u, v, _): - return 2**(u * v) + return 2 ** (u * v) length = sum(weight(u, v, None) for u, v in pairwise(path)) vlp(G, s, t, length, nx.bidirectional_dijkstra, weight) @@ -206,18 +254,22 @@ class TestWeightedPath(WeightedTestBase): # regression testing against later changes to any of the "client" # Dijkstra or Bellman-Ford functions G = nx.path_graph(2) - for fn in (nx.dijkstra_path, - nx.dijkstra_path_length, - nx.single_source_dijkstra_path, - nx.single_source_dijkstra_path_length, - nx.single_source_dijkstra, - nx.dijkstra_predecessor_and_distance,): + for fn in ( + nx.dijkstra_path, + nx.dijkstra_path_length, + nx.single_source_dijkstra_path, + nx.single_source_dijkstra_path_length, + nx.single_source_dijkstra, + nx.dijkstra_predecessor_and_distance, + ): pytest.raises(nx.NodeNotFound, fn, G, 3, 0) def test_dijkstra_predecessor1(self): G = nx.path_graph(4) - assert (nx.dijkstra_predecessor_and_distance(G, 0) == - ({0: [], 1: [0], 2: [1], 3: [2]}, {0: 0, 1: 1, 2: 2, 3: 3})) + assert nx.dijkstra_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [1], 3: [2]}, + 
{0: 0, 1: 1, 2: 2, 3: 3}, + ) def test_dijkstra_predecessor2(self): # 4-cycle @@ -231,16 +283,25 @@ class TestWeightedPath(WeightedTestBase): def test_dijkstra_predecessor3(self): XG = nx.DiGraph() - XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)]) - (P, D) = nx.dijkstra_predecessor_and_distance(XG, 's') - assert P['v'] == ['u'] - assert D['v'] == 9 - (P, D) = nx.dijkstra_predecessor_and_distance(XG, 's', cutoff=8) - assert 'v' not in D + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) + (P, D) = nx.dijkstra_predecessor_and_distance(XG, "s") + assert P["v"] == ["u"] + assert D["v"] == 9 + (P, D) = nx.dijkstra_predecessor_and_distance(XG, "s", cutoff=8) + assert "v" not in D def test_single_source_dijkstra_path_length(self): pl = nx.single_source_dijkstra_path_length @@ -250,18 +311,18 @@ class TestWeightedPath(WeightedTestBase): def test_bidirectional_dijkstra_multigraph(self): G = nx.MultiGraph() - G.add_edge('a', 'b', weight=10) - G.add_edge('a', 'b', weight=100) - dp = nx.bidirectional_dijkstra(G, 'a', 'b') - assert dp == (10, ['a', 'b']) + G.add_edge("a", "b", weight=10) + G.add_edge("a", "b", weight=100) + dp = nx.bidirectional_dijkstra(G, "a", "b") + assert dp == (10, ["a", "b"]) def test_dijkstra_pred_distance_multigraph(self): G = nx.MultiGraph() - G.add_edge('a', 'b', key='short', foo=5, weight=100) - G.add_edge('a', 'b', key='long', bar=1, weight=110) - p, d = nx.dijkstra_predecessor_and_distance(G, 'a') - assert p == {'a': [], 'b': ['a']} - assert d == {'a': 0, 'b': 100} + G.add_edge("a", "b", key="short", foo=5, weight=100) + G.add_edge("a", "b", key="long", bar=1, weight=110) + p, d = nx.dijkstra_predecessor_and_distance(G, "a") + assert p == {"a": [], "b": ["a"]} + assert d == {"a": 0, "b": 100} def test_negative_edge_cycle(self): G = nx.cycle_graph(5, create_using=nx.DiGraph()) @@ -285,14 +346,16 @@ class TestWeightedPath(WeightedTestBase): # Create a triangle in which the edge from node 0 to node 2 has # a large weight and the other two edges have a small weight. G = nx.complete_graph(3) - G.adj[0][2]['weight'] = 10 - G.adj[0][1]['weight'] = 1 - G.adj[1][2]['weight'] = 1 + G.adj[0][2]["weight"] = 10 + G.adj[0][1]["weight"] = 1 + G.adj[1][2]["weight"] = 1 # The weight function will take the multiplicative inverse of # the weights on the edges. This way, weights that were large # before now become small and vice versa. - def weight(u, v, d): return 1 / d['weight'] + def weight(u, v, d): + return 1 / d["weight"] + # The shortest path from 0 to 2 using the actual weights on the # edges should be [0, 1, 2]. 
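Aside (not part of the patch): a minimal sketch of the callable-weight behavior this test pins down, reusing the same triangle. Dijkstra accepts either an edge-attribute name or a function of (u, v, edge_data):

import networkx as nx

G = nx.complete_graph(3)
G.adj[0][2]["weight"] = 10
G.adj[0][1]["weight"] = 1
G.adj[1][2]["weight"] = 1

# With the stored weights, the detour 0 -> 1 -> 2 (cost 2) beats the
# direct edge 0 -> 2 (cost 10).
assert nx.dijkstra_path(G, 0, 2) == [0, 1, 2]

# A callable weight inverts the costs, so the direct edge (1/10) now
# beats the detour (1/1 + 1/1 = 2) and the returned path flips.
distance, path = nx.single_source_dijkstra(
    G, 0, 2, weight=lambda u, v, d: 1 / d["weight"]
)
assert path == [0, 2]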
distance, path = nx.single_source_dijkstra(G, 0, 2) @@ -309,7 +372,7 @@ class TestWeightedPath(WeightedTestBase): p = dict(nx.all_pairs_dijkstra_path(cycle)) assert p[0][3] == [0, 1, 2, 3] - cycle[1][2]['weight'] = 10 + cycle[1][2]["weight"] = 10 p = dict(nx.all_pairs_dijkstra_path(cycle)) assert p[0][3] == [0, 6, 5, 4, 3] @@ -318,7 +381,7 @@ class TestWeightedPath(WeightedTestBase): pl = dict(nx.all_pairs_dijkstra_path_length(cycle)) assert pl[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} - cycle[1][2]['weight'] = 10 + cycle[1][2]["weight"] = 10 pl = dict(nx.all_pairs_dijkstra_path_length(cycle)) assert pl[0] == {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1} @@ -328,7 +391,7 @@ class TestWeightedPath(WeightedTestBase): assert out[0][0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} assert out[0][1][3] == [0, 1, 2, 3] - cycle[1][2]['weight'] = 10 + cycle[1][2]["weight"] = 10 out = dict(nx.all_pairs_dijkstra(cycle)) assert out[0][0] == {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1} assert out[0][1][3] == [0, 6, 5, 4, 3] @@ -348,14 +411,16 @@ class TestDijkstraPathLength: # Create a triangle in which the edge from node 0 to node 2 has # a large weight and the other two edges have a small weight. G = nx.complete_graph(3) - G.adj[0][2]['weight'] = 10 - G.adj[0][1]['weight'] = 1 - G.adj[1][2]['weight'] = 1 + G.adj[0][2]["weight"] = 10 + G.adj[0][1]["weight"] = 1 + G.adj[1][2]["weight"] = 1 # The weight function will take the multiplicative inverse of # the weights on the edges. This way, weights that were large # before now become small and vice versa. - def weight(u, v, d): return 1 / d['weight'] + def weight(u, v, d): + return 1 / d["weight"] + # The shortest path from 0 to 2 using the actual weights on the # edges should be [0, 1, 2]. However, with the above weight # function, the shortest path should be [0, 2], since that has a @@ -384,9 +449,11 @@ class TestMultiSourceDijkstra: def test_absent_source(self): G = nx.path_graph(2) - for fn in (nx.multi_source_dijkstra_path, - nx.multi_source_dijkstra_path_length, - nx.multi_source_dijkstra,): + for fn in ( + nx.multi_source_dijkstra_path, + nx.multi_source_dijkstra_path_length, + nx.multi_source_dijkstra, + ): pytest.raises(nx.NodeNotFound, fn, G, [3], 0) def test_two_sources(self): @@ -409,7 +476,6 @@ class TestMultiSourceDijkstra: class TestBellmanFordAndGoldbergRadzik(WeightedTestBase): - def test_single_node_graph(self): G = nx.DiGraph() G.add_node(0) @@ -423,12 +489,14 @@ class TestBellmanFordAndGoldbergRadzik(WeightedTestBase): # the check is in _bellman_ford; this provides regression testing # against later changes to "client" Bellman-Ford functions G = nx.path_graph(2) - for fn in (nx.bellman_ford_predecessor_and_distance, - nx.bellman_ford_path, - nx.bellman_ford_path_length, - nx.single_source_bellman_ford_path, - nx.single_source_bellman_ford_path_length, - nx.single_source_bellman_ford,): + for fn in ( + nx.bellman_ford_predecessor_and_distance, + nx.bellman_ford_path, + nx.bellman_ford_path_length, + nx.single_source_bellman_ford_path, + nx.single_source_bellman_ford_path_length, + nx.single_source_bellman_ford, + ): pytest.raises(nx.NodeNotFound, fn, G, 3, 0) def test_absent_source_goldberg_radzik(self): @@ -445,15 +513,16 @@ class TestBellmanFordAndGoldbergRadzik(WeightedTestBase): assert not nx.negative_edge_cycle(G, heuristic=True) G.add_edge(2, 0, weight=1.999) assert nx.negative_edge_cycle(G, heuristic=True) - G.edges[2, 0]['weight'] = 2 + G.edges[2, 0]["weight"] = 2 assert not nx.negative_edge_cycle(G, heuristic=True) def 
test_negative_weight_cycle_consistency(self): import random + unif = random.uniform for random_seed in range(2): # range(20): random.seed(random_seed) - for density in [.1, .9]: # .3, .7, .9]: + for density in [0.1, 0.9]: # .3, .7, .9]: for N in [1, 10, 20]: # range(1, 60 - int(30 * density)): for max_cost in [1, 90]: # [1, 10, 40, 90]: G = nx.binomial_graph(N, density, seed=4, directed=True) @@ -468,94 +537,162 @@ class TestBellmanFordAndGoldbergRadzik(WeightedTestBase): G = nx.cycle_graph(5, create_using=nx.DiGraph()) G.add_edge(1, 2, weight=-7) for i in range(5): - pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i) - pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i + ) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i + ) pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, i) - pytest.raises(nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i) + pytest.raises( + nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i + ) pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i) G = nx.cycle_graph(5) # undirected Graph G.add_edge(1, 2, weight=-3) for i in range(5): - pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i) - pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i + ) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i + ) pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, i) - pytest.raises(nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i) + pytest.raises( + nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i + ) pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i) - G = nx.DiGraph([(1, 1, {'weight': -1})]) + G = nx.DiGraph([(1, 1, {"weight": -1})]) pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, 1) - pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, 1) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, 1 + ) pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, 1) - pytest.raises(nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, 1) + pytest.raises( + nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, 1 + ) pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1) # no negative cycle but negative weight G = nx.cycle_graph(5, create_using=nx.DiGraph()) G.add_edge(1, 2, weight=-3) - assert (nx.single_source_bellman_ford_path(G, 0) == - {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3], 4: [0, 1, 2, 3, 4]}) - assert (nx.single_source_bellman_ford_path_length(G, 0) == - {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}) - assert (nx.single_source_bellman_ford(G, 0) == - ({0: 0, 1: 1, 2: -2, 3: -1, 4: 0}, - {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3], 4: [0, 1, 2, 3, 4]})) - assert (nx.bellman_ford_predecessor_and_distance(G, 0) == - ({0: [], 1: [0], 2: [1], 3: [2], 4: [3]}, - {0: 0, 1: 1, 2: -2, 3: -1, 4: 0})) - assert (nx.goldberg_radzik(G, 0) == - ({0: None, 1: 0, 2: 1, 3: 2, 4: 3}, - {0: 0, 1: 1, 2: -2, 3: -1, 4: 0})) + assert nx.single_source_bellman_ford_path(G, 0) == { + 0: [0], + 1: [0, 1], + 2: [0, 1, 2], + 3: [0, 1, 2, 3], + 4: [0, 1, 2, 3, 4], + } + assert 
nx.single_source_bellman_ford_path_length(G, 0) == { + 0: 0, + 1: 1, + 2: -2, + 3: -1, + 4: 0, + } + assert nx.single_source_bellman_ford(G, 0) == ( + {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}, + {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3], 4: [0, 1, 2, 3, 4]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [1], 3: [2], 4: [3]}, + {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}, + ) + assert nx.goldberg_radzik(G, 0) == ( + {0: None, 1: 0, 2: 1, 3: 2, 4: 3}, + {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}, + ) def test_not_connected(self): G = nx.complete_graph(6) G.add_edge(10, 11) G.add_edge(10, 12) - assert (nx.single_source_bellman_ford_path(G, 0) == - {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]}) - assert (nx.single_source_bellman_ford_path_length(G, 0) == - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}) - assert (nx.single_source_bellman_ford(G, 0) == - ({0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, - {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]})) - assert (nx.bellman_ford_predecessor_and_distance(G, 0) == - ({0: [], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]}, - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1})) - assert (nx.goldberg_radzik(G, 0) == - ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1})) + assert nx.single_source_bellman_ford_path(G, 0) == { + 0: [0], + 1: [0, 1], + 2: [0, 2], + 3: [0, 3], + 4: [0, 4], + 5: [0, 5], + } + assert nx.single_source_bellman_ford_path_length(G, 0) == { + 0: 0, + 1: 1, + 2: 1, + 3: 1, + 4: 1, + 5: 1, + } + assert nx.single_source_bellman_ford(G, 0) == ( + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) + assert nx.goldberg_radzik(G, 0) == ( + {0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) # not connected, with a component not containing the source that # contains a negative cost cycle. 
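Condensed from the assertions above and from test_path_graph further down, a sketch of the differing return shapes across the Bellman-Ford entry points:

import networkx as nx

G = nx.path_graph(4)
# Paths keyed by reachable node.
assert nx.single_source_bellman_ford_path(G, 0)[3] == [0, 1, 2, 3]
# Distances keyed by reachable node (edges without a weight count as 1).
assert nx.single_source_bellman_ford_path_length(G, 0) == {0: 0, 1: 1, 2: 2, 3: 3}
# Predecessor *lists* (every shortest-path predecessor) plus distances.
pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0)
assert pred == {0: [], 1: [0], 2: [1], 3: [2]}
# goldberg_radzik returns a single predecessor per node instead.
pred, dist = nx.goldberg_radzik(G, 0)
assert pred == {0: None, 1: 0, 2: 1, 3: 2}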
G = nx.complete_graph(6) - G.add_edges_from([('A', 'B', {'load': 3}), - ('B', 'C', {'load': -10}), - ('C', 'A', {'load': 2})]) - assert (nx.single_source_bellman_ford_path(G, 0, weight='load') == - {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]}) - assert (nx.single_source_bellman_ford_path_length(G, 0, weight='load') == - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}) - assert (nx.single_source_bellman_ford(G, 0, weight='load') == - ({0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, - {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]})) - assert (nx.bellman_ford_predecessor_and_distance(G, 0, weight='load') == - ({0: [], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]}, - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1})) - assert (nx.goldberg_radzik(G, 0, weight='load') == - ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, - {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1})) + G.add_edges_from( + [ + ("A", "B", {"load": 3}), + ("B", "C", {"load": -10}), + ("C", "A", {"load": 2}), + ] + ) + assert nx.single_source_bellman_ford_path(G, 0, weight="load") == { + 0: [0], + 1: [0, 1], + 2: [0, 2], + 3: [0, 3], + 4: [0, 4], + 5: [0, 5], + } + assert nx.single_source_bellman_ford_path_length(G, 0, weight="load") == { + 0: 0, + 1: 1, + 2: 1, + 3: 1, + 4: 1, + 5: 1, + } + assert nx.single_source_bellman_ford(G, 0, weight="load") == ( + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 0, weight="load") == ( + {0: [], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) + assert nx.goldberg_radzik(G, 0, weight="load") == ( + {0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) def test_multigraph(self): - assert nx.bellman_ford_path(self.MXG, 's', 'v') == ['s', 'x', 'u', 'v'] - assert nx.bellman_ford_path_length(self.MXG, 's', 'v') == 9 - assert nx.single_source_bellman_ford_path(self.MXG, 's')['v'] == ['s', 'x', 'u', 'v'] - assert nx.single_source_bellman_ford_path_length(self.MXG, 's')['v'] == 9 - D, P = nx.single_source_bellman_ford(self.MXG, 's', target='v') + assert nx.bellman_ford_path(self.MXG, "s", "v") == ["s", "x", "u", "v"] + assert nx.bellman_ford_path_length(self.MXG, "s", "v") == 9 + assert nx.single_source_bellman_ford_path(self.MXG, "s")["v"] == [ + "s", + "x", + "u", + "v", + ] + assert nx.single_source_bellman_ford_path_length(self.MXG, "s")["v"] == 9 + D, P = nx.single_source_bellman_ford(self.MXG, "s", target="v") assert D == 9 - assert P == ['s', 'x', 'u', 'v'] - P, D = nx.bellman_ford_predecessor_and_distance(self.MXG, 's') - assert P['v'] == ['u'] - assert D['v'] == 9 - P, D = nx.goldberg_radzik(self.MXG, 's') - assert P['v'] == 'u' - assert D['v'] == 9 + assert P == ["s", "x", "u", "v"] + P, D = nx.bellman_ford_predecessor_and_distance(self.MXG, "s") + assert P["v"] == ["u"] + assert D["v"] == 9 + P, D = nx.goldberg_radzik(self.MXG, "s") + assert P["v"] == "u" + assert D["v"] == 9 assert nx.bellman_ford_path(self.MXG4, 0, 2) == [0, 1, 2] assert nx.bellman_ford_path_length(self.MXG4, 0, 2) == 4 assert nx.single_source_bellman_ford_path(self.MXG4, 0)[2] == [0, 1, 2] @@ -571,42 +708,75 @@ class TestBellmanFordAndGoldbergRadzik(WeightedTestBase): assert D[2] == 4 def test_others(self): - assert nx.bellman_ford_path(self.XG, 's', 'v') == ['s', 'x', 'u', 'v'] - assert nx.bellman_ford_path_length(self.XG, 's', 'v') == 9 - assert nx.single_source_bellman_ford_path(self.XG, 's')['v'] == ['s', 'x', 'u', 'v'] - assert 
nx.single_source_bellman_ford_path_length(self.XG, 's')['v'] == 9 - D, P = nx.single_source_bellman_ford(self.XG, 's', target='v') + assert nx.bellman_ford_path(self.XG, "s", "v") == ["s", "x", "u", "v"] + assert nx.bellman_ford_path_length(self.XG, "s", "v") == 9 + assert nx.single_source_bellman_ford_path(self.XG, "s")["v"] == [ + "s", + "x", + "u", + "v", + ] + assert nx.single_source_bellman_ford_path_length(self.XG, "s")["v"] == 9 + D, P = nx.single_source_bellman_ford(self.XG, "s", target="v") assert D == 9 - assert P == ['s', 'x', 'u', 'v'] - (P, D) = nx.bellman_ford_predecessor_and_distance(self.XG, 's') - assert P['v'] == ['u'] - assert D['v'] == 9 - (P, D) = nx.goldberg_radzik(self.XG, 's') - assert P['v'] == 'u' - assert D['v'] == 9 + assert P == ["s", "x", "u", "v"] + (P, D) = nx.bellman_ford_predecessor_and_distance(self.XG, "s") + assert P["v"] == ["u"] + assert D["v"] == 9 + (P, D) = nx.goldberg_radzik(self.XG, "s") + assert P["v"] == "u" + assert D["v"] == 9 def test_path_graph(self): G = nx.path_graph(4) - assert (nx.single_source_bellman_ford_path(G, 0) == - {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3]}) - assert (nx.single_source_bellman_ford_path_length(G, 0) == - {0: 0, 1: 1, 2: 2, 3: 3}) - assert (nx.single_source_bellman_ford(G, 0) == - ({0: 0, 1: 1, 2: 2, 3: 3}, {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3]})) - assert (nx.bellman_ford_predecessor_and_distance(G, 0) == - ({0: [], 1: [0], 2: [1], 3: [2]}, {0: 0, 1: 1, 2: 2, 3: 3})) - assert (nx.goldberg_radzik(G, 0) == - ({0: None, 1: 0, 2: 1, 3: 2}, {0: 0, 1: 1, 2: 2, 3: 3})) - assert (nx.single_source_bellman_ford_path(G, 3) == - {0: [3, 2, 1, 0], 1: [3, 2, 1], 2: [3, 2], 3: [3]}) - assert (nx.single_source_bellman_ford_path_length(G, 3) == - {0: 3, 1: 2, 2: 1, 3: 0}) - assert (nx.single_source_bellman_ford(G, 3) == - ({0: 3, 1: 2, 2: 1, 3: 0}, {0: [3, 2, 1, 0], 1: [3, 2, 1], 2: [3, 2], 3: [3]})) - assert (nx.bellman_ford_predecessor_and_distance(G, 3) == - ({0: [1], 1: [2], 2: [3], 3: []}, {0: 3, 1: 2, 2: 1, 3: 0})) - assert (nx.goldberg_radzik(G, 3) == - ({0: 1, 1: 2, 2: 3, 3: None}, {0: 3, 1: 2, 2: 1, 3: 0})) + assert nx.single_source_bellman_ford_path(G, 0) == { + 0: [0], + 1: [0, 1], + 2: [0, 1, 2], + 3: [0, 1, 2, 3], + } + assert nx.single_source_bellman_ford_path_length(G, 0) == { + 0: 0, + 1: 1, + 2: 2, + 3: 3, + } + assert nx.single_source_bellman_ford(G, 0) == ( + {0: 0, 1: 1, 2: 2, 3: 3}, + {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [1], 3: [2]}, + {0: 0, 1: 1, 2: 2, 3: 3}, + ) + assert nx.goldberg_radzik(G, 0) == ( + {0: None, 1: 0, 2: 1, 3: 2}, + {0: 0, 1: 1, 2: 2, 3: 3}, + ) + assert nx.single_source_bellman_ford_path(G, 3) == { + 0: [3, 2, 1, 0], + 1: [3, 2, 1], + 2: [3, 2], + 3: [3], + } + assert nx.single_source_bellman_ford_path_length(G, 3) == { + 0: 3, + 1: 2, + 2: 1, + 3: 0, + } + assert nx.single_source_bellman_ford(G, 3) == ( + {0: 3, 1: 2, 2: 1, 3: 0}, + {0: [3, 2, 1, 0], 1: [3, 2, 1], 2: [3, 2], 3: [3]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 3) == ( + {0: [1], 1: [2], 2: [3], 3: []}, + {0: 3, 1: 2, 2: 1, 3: 0}, + ) + assert nx.goldberg_radzik(G, 3) == ( + {0: 1, 1: 2, 2: 3, 3: None}, + {0: 3, 1: 2, 2: 1, 3: 0}, + ) def test_4_cycle(self): # 4-cycle @@ -634,14 +804,14 @@ class TestBellmanFordAndGoldbergRadzik(WeightedTestBase): def test_negative_weight(self): G = nx.DiGraph() - G.add_nodes_from('abcd') - G.add_edge('a', 'd', weight=0) - G.add_edge('a', 'b', weight=1) - 
G.add_edge('b', 'c', weight=-3) - G.add_edge('c', 'd', weight=1) + G.add_nodes_from("abcd") + G.add_edge("a", "d", weight=0) + G.add_edge("a", "b", weight=1) + G.add_edge("b", "c", weight=-3) + G.add_edge("c", "d", weight=1) - assert nx.bellman_ford_path(G, 'a', 'd') == ['a', 'b', 'c', 'd'] - assert nx.bellman_ford_path_length(G, 'a', 'd') == -1 + assert nx.bellman_ford_path(G, "a", "d") == ["a", "b", "c", "d"] + assert nx.bellman_ford_path_length(G, "a", "d") == -1 def test_zero_cycle_smoke(self): D = nx.DiGraph() @@ -654,7 +824,6 @@ class TestBellmanFordAndGoldbergRadzik(WeightedTestBase): class TestJohnsonAlgorithm(WeightedTestBase): - def test_single_node_graph(self): with pytest.raises(nx.NetworkXError): G = nx.DiGraph() @@ -663,27 +832,47 @@ class TestJohnsonAlgorithm(WeightedTestBase): def test_negative_cycle(self): G = nx.DiGraph() - G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), ('1', '0', -5), - ('0', '2', 2), ('1', '2', 4), - ('2', '3', 1)]) + G.add_weighted_edges_from( + [ + ("0", "3", 3), + ("0", "1", -5), + ("1", "0", -5), + ("0", "2", 2), + ("1", "2", 4), + ("2", "3", 1), + ] + ) pytest.raises(nx.NetworkXUnbounded, nx.johnson, G) G = nx.Graph() - G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), ('1', '0', -5), - ('0', '2', 2), ('1', '2', 4), - ('2', '3', 1)]) + G.add_weighted_edges_from( + [ + ("0", "3", 3), + ("0", "1", -5), + ("1", "0", -5), + ("0", "2", 2), + ("1", "2", 4), + ("2", "3", 1), + ] + ) pytest.raises(nx.NetworkXUnbounded, nx.johnson, G) def test_negative_weights(self): G = nx.DiGraph() - G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), - ('0', '2', 2), ('1', '2', 4), - ('2', '3', 1)]) + G.add_weighted_edges_from( + [("0", "3", 3), ("0", "1", -5), ("0", "2", 2), ("1", "2", 4), ("2", "3", 1)] + ) paths = nx.johnson(G) - assert paths == {'1': {'1': ['1'], '3': ['1', '2', '3'], - '2': ['1', '2']}, '0': {'1': ['0', '1'], - '0': ['0'], '3': ['0', '1', '2', '3'], - '2': ['0', '1', '2']}, '3': {'3': ['3']}, - '2': {'3': ['2', '3'], '2': ['2']}} + assert paths == { + "1": {"1": ["1"], "3": ["1", "2", "3"], "2": ["1", "2"]}, + "0": { + "1": ["0", "1"], + "0": ["0"], + "3": ["0", "1", "2", "3"], + "2": ["0", "1", "2"], + }, + "3": {"3": ["3"]}, + "2": {"3": ["2", "3"], "2": ["2"]}, + } def test_unweighted_graph(self): with pytest.raises(nx.NetworkXError): @@ -691,8 +880,8 @@ class TestJohnsonAlgorithm(WeightedTestBase): nx.johnson(G) def test_graphs(self): - validate_path(self.XG, 's', 'v', 9, nx.johnson(self.XG)['s']['v']) - validate_path(self.MXG, 's', 'v', 9, nx.johnson(self.MXG)['s']['v']) + validate_path(self.XG, "s", "v", 9, nx.johnson(self.XG)["s"]["v"]) + validate_path(self.MXG, "s", "v", 9, nx.johnson(self.MXG)["s"]["v"]) validate_path(self.XG2, 1, 3, 4, nx.johnson(self.XG2)[1][3]) validate_path(self.XG3, 0, 3, 15, nx.johnson(self.XG3)[0][3]) validate_path(self.XG4, 0, 2, 4, nx.johnson(self.XG4)[0][2]) diff --git a/networkx/algorithms/shortest_paths/unweighted.py b/networkx/algorithms/shortest_paths/unweighted.py index 483848fe..a6e45faa 100644 --- a/networkx/algorithms/shortest_paths/unweighted.py +++ b/networkx/algorithms/shortest_paths/unweighted.py @@ -3,14 +3,16 @@ Shortest path algorithms for unweighted graphs. 
""" import networkx as nx -__all__ = ['bidirectional_shortest_path', - 'single_source_shortest_path', - 'single_source_shortest_path_length', - 'single_target_shortest_path', - 'single_target_shortest_path_length', - 'all_pairs_shortest_path', - 'all_pairs_shortest_path_length', - 'predecessor'] +__all__ = [ + "bidirectional_shortest_path", + "single_source_shortest_path", + "single_source_shortest_path_length", + "single_target_shortest_path", + "single_target_shortest_path_length", + "all_pairs_shortest_path", + "all_pairs_shortest_path_length", + "predecessor", +] def single_source_shortest_path_length(G, source, cutoff=None): @@ -50,9 +52,9 @@ def single_source_shortest_path_length(G, source, cutoff=None): shortest_path_length """ if source not in G: - raise nx.NodeNotFound(f'Source {source} is not in G') + raise nx.NodeNotFound(f"Source {source} is not in G") if cutoff is None: - cutoff = float('inf') + cutoff = float("inf") nextlevel = {source: 1} return dict(_single_shortest_path_length(G.adj, nextlevel, cutoff)) @@ -128,10 +130,10 @@ def single_target_shortest_path_length(G, target, cutoff=None): single_source_shortest_path_length, shortest_path_length """ if target not in G: - raise nx.NodeNotFound(f'Target {target} is not in G') + raise nx.NodeNotFound(f"Target {target} is not in G") if cutoff is None: - cutoff = float('inf') + cutoff = float("inf") # handle either directed or undirected adj = G.pred if G.is_directed() else G.adj nextlevel = {target: 1} @@ -332,9 +334,10 @@ def single_source_shortest_path(G, source, cutoff=None): def join(p1, p2): return p1 + p2 + if cutoff is None: - cutoff = float('inf') - nextlevel = {source: 1} # list of nodes to check at next level + cutoff = float("inf") + nextlevel = {source: 1} # list of nodes to check at next level paths = {source: [source]} # paths dictionary (paths to key from source) return dict(_single_shortest_path(G.adj, nextlevel, paths, cutoff, join)) @@ -358,7 +361,7 @@ def _single_shortest_path(adj, firstlevel, paths, cutoff, join): list inputs `p1` and `p2`, and returns a list. 
Usually returns `p1 + p2` (forward from source) or `p2 + p1` (backward from target) """ - level = 0 # the current level + level = 0 # the current level nextlevel = firstlevel while nextlevel and cutoff > level: thislevel = nextlevel @@ -413,11 +416,12 @@ def single_target_shortest_path(G, target, cutoff=None): def join(p1, p2): return p2 + p1 + # handle undirected graphs adj = G.pred if G.is_directed() else G.adj if cutoff is None: - cutoff = float('inf') - nextlevel = {target: 1} # list of nodes to check at next level + cutoff = float("inf") + nextlevel = {target: 1} # list of nodes to check at next level paths = {target: [target]} # paths dictionary (paths to key from source) return dict(_single_shortest_path(adj, nextlevel, paths, cutoff, join)) @@ -491,10 +495,10 @@ def predecessor(G, source, target=None, cutoff=None, return_seen=None): if source not in G: raise nx.NodeNotFound(f"Source {source} not in G") - level = 0 # the current level - nextlevel = [source] # list of nodes to check at next level - seen = {source: level} # level (number of hops) when seen in BFS - pred = {source: []} # predecessor dictionary + level = 0 # the current level + nextlevel = [source] # list of nodes to check at next level + seen = {source: level} # level (number of hops) when seen in BFS + pred = {source: []} # predecessor dictionary while nextlevel: level = level + 1 thislevel = nextlevel @@ -505,9 +509,9 @@ def predecessor(G, source, target=None, cutoff=None, return_seen=None): pred[w] = [v] seen[w] = level nextlevel.append(w) - elif (seen[w] == level): # add v to predecessor list if it - pred[w].append(v) # is at the correct level - if (cutoff and cutoff <= level): + elif seen[w] == level: # add v to predecessor list if it + pred[w].append(v) # is at the correct level + if cutoff and cutoff <= level: break if target is not None: diff --git a/networkx/algorithms/shortest_paths/weighted.py b/networkx/algorithms/shortest_paths/weighted.py index 6f9aee86..972b8a9d 100644 --- a/networkx/algorithms/shortest_paths/weighted.py +++ b/networkx/algorithms/shortest_paths/weighted.py @@ -7,34 +7,35 @@ from heapq import heappush, heappop from itertools import count import networkx as nx from networkx.utils import generate_unique_node -from networkx.algorithms.shortest_paths.generic import ( - _build_paths_from_predecessors) - - -__all__ = ['dijkstra_path', - 'dijkstra_path_length', - 'bidirectional_dijkstra', - 'single_source_dijkstra', - 'single_source_dijkstra_path', - 'single_source_dijkstra_path_length', - 'multi_source_dijkstra', - 'multi_source_dijkstra_path', - 'multi_source_dijkstra_path_length', - 'all_pairs_dijkstra', - 'all_pairs_dijkstra_path', - 'all_pairs_dijkstra_path_length', - 'dijkstra_predecessor_and_distance', - 'bellman_ford_path', - 'bellman_ford_path_length', - 'single_source_bellman_ford', - 'single_source_bellman_ford_path', - 'single_source_bellman_ford_path_length', - 'all_pairs_bellman_ford_path', - 'all_pairs_bellman_ford_path_length', - 'bellman_ford_predecessor_and_distance', - 'negative_edge_cycle', - 'goldberg_radzik', - 'johnson'] +from networkx.algorithms.shortest_paths.generic import _build_paths_from_predecessors + + +__all__ = [ + "dijkstra_path", + "dijkstra_path_length", + "bidirectional_dijkstra", + "single_source_dijkstra", + "single_source_dijkstra_path", + "single_source_dijkstra_path_length", + "multi_source_dijkstra", + "multi_source_dijkstra_path", + "multi_source_dijkstra_path_length", + "all_pairs_dijkstra", + "all_pairs_dijkstra_path", + 
"all_pairs_dijkstra_path_length", + "dijkstra_predecessor_and_distance", + "bellman_ford_path", + "bellman_ford_path_length", + "single_source_bellman_ford", + "single_source_bellman_ford_path", + "single_source_bellman_ford_path_length", + "all_pairs_bellman_ford_path", + "all_pairs_bellman_ford_path_length", + "bellman_ford_predecessor_and_distance", + "negative_edge_cycle", + "goldberg_radzik", + "johnson", +] def _weight_function(G, weight): @@ -77,7 +78,7 @@ def _weight_function(G, weight): return lambda u, v, data: data.get(weight, 1) -def dijkstra_path(G, source, target, weight='weight'): +def dijkstra_path(G, source, target, weight="weight"): """Returns the shortest weighted path from source to target in G. Uses Dijkstra's Method to compute the shortest weighted path @@ -153,12 +154,11 @@ def dijkstra_path(G, source, target, weight='weight'): bidirectional_dijkstra(), bellman_ford_path() single_source_dijkstra() """ - (length, path) = single_source_dijkstra(G, source, target=target, - weight=weight) + (length, path) = single_source_dijkstra(G, source, target=target, weight=weight) return path -def dijkstra_path_length(G, source, target, weight='weight'): +def dijkstra_path_length(G, source, target, weight="weight"): """Returns the shortest weighted path length in G from source to target. Uses Dijkstra's Method to compute the shortest weighted path length @@ -234,7 +234,7 @@ def dijkstra_path_length(G, source, target, weight='weight'): raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}") from e -def single_source_dijkstra_path(G, source, cutoff=None, weight='weight'): +def single_source_dijkstra_path(G, source, cutoff=None, weight="weight"): """Find shortest weighted paths in G from a source node. Compute shortest path between source and all other reachable @@ -294,12 +294,10 @@ def single_source_dijkstra_path(G, source, cutoff=None, weight='weight'): single_source_dijkstra(), single_source_bellman_ford() """ - return multi_source_dijkstra_path(G, {source}, cutoff=cutoff, - weight=weight) + return multi_source_dijkstra_path(G, {source}, cutoff=cutoff, weight=weight) -def single_source_dijkstra_path_length(G, source, cutoff=None, - weight='weight'): +def single_source_dijkstra_path_length(G, source, cutoff=None, weight="weight"): """Find shortest weighted path lengths in G from a source node. Compute the shortest path length between source and all other @@ -366,12 +364,10 @@ def single_source_dijkstra_path_length(G, source, cutoff=None, single_source_dijkstra(), single_source_bellman_ford_path_length() """ - return multi_source_dijkstra_path_length(G, {source}, cutoff=cutoff, - weight=weight) + return multi_source_dijkstra_path_length(G, {source}, cutoff=cutoff, weight=weight) -def single_source_dijkstra(G, source, target=None, cutoff=None, - weight='weight'): +def single_source_dijkstra(G, source, target=None, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths from a source node. 
Compute the shortest path length between source and all other @@ -465,11 +461,12 @@ def single_source_dijkstra(G, source, target=None, cutoff=None, single_source_dijkstra_path_length() single_source_bellman_ford() """ - return multi_source_dijkstra(G, {source}, cutoff=cutoff, target=target, - weight=weight) + return multi_source_dijkstra( + G, {source}, cutoff=cutoff, target=target, weight=weight + ) -def multi_source_dijkstra_path(G, sources, cutoff=None, weight='weight'): +def multi_source_dijkstra_path(G, sources, cutoff=None, weight="weight"): """Find shortest weighted paths in G from a given set of source nodes. @@ -537,13 +534,11 @@ def multi_source_dijkstra_path(G, sources, cutoff=None, weight='weight'): multi_source_dijkstra(), multi_source_bellman_ford() """ - length, path = multi_source_dijkstra(G, sources, cutoff=cutoff, - weight=weight) + length, path = multi_source_dijkstra(G, sources, cutoff=cutoff, weight=weight) return path -def multi_source_dijkstra_path_length(G, sources, cutoff=None, - weight='weight'): +def multi_source_dijkstra_path_length(G, sources, cutoff=None, weight="weight"): """Find shortest weighted path lengths in G from a given set of source nodes. @@ -615,13 +610,12 @@ def multi_source_dijkstra_path_length(G, sources, cutoff=None, """ if not sources: - raise ValueError('sources must not be empty') + raise ValueError("sources must not be empty") weight = _weight_function(G, weight) return _dijkstra_multisource(G, sources, weight, cutoff=cutoff) -def multi_source_dijkstra(G, sources, target=None, cutoff=None, - weight='weight'): +def multi_source_dijkstra(G, sources, target=None, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths from a given set of source nodes. @@ -720,13 +714,14 @@ def multi_source_dijkstra(G, sources, target=None, cutoff=None, """ if not sources: - raise ValueError('sources must not be empty') + raise ValueError("sources must not be empty") if target in sources: return (0, [target]) weight = _weight_function(G, weight) paths = {source: [source] for source in sources} # dictionary of paths - dist = _dijkstra_multisource(G, sources, weight, paths=paths, - cutoff=cutoff, target=target) + dist = _dijkstra_multisource( + G, sources, weight, paths=paths, cutoff=cutoff, target=target + ) if target is None: return (dist, paths) try: @@ -735,8 +730,7 @@ def multi_source_dijkstra(G, sources, target=None, cutoff=None, raise nx.NetworkXNoPath(f"No path to {target}.") from e -def _dijkstra(G, source, weight, pred=None, paths=None, cutoff=None, - target=None): +def _dijkstra(G, source, weight, pred=None, paths=None, cutoff=None, target=None): """Uses Dijkstra's algorithm to find shortest weighted paths from a single source. @@ -745,12 +739,14 @@ def _dijkstra(G, source, weight, pred=None, paths=None, cutoff=None, `sources` set to ``[source]``. 
""" - return _dijkstra_multisource(G, [source], weight, pred=pred, paths=paths, - cutoff=cutoff, target=target) + return _dijkstra_multisource( + G, [source], weight, pred=pred, paths=paths, cutoff=cutoff, target=target + ) -def _dijkstra_multisource(G, sources, weight, pred=None, paths=None, - cutoff=None, target=None): +def _dijkstra_multisource( + G, sources, weight, pred=None, paths=None, cutoff=None, target=None +): """Uses Dijkstra's algorithm to find shortest weighted paths Parameters @@ -832,8 +828,7 @@ def _dijkstra_multisource(G, sources, weight, pred=None, paths=None, if u in dist: u_dist = dist[u] if vu_dist < u_dist: - raise ValueError('Contradictory paths found:', - 'negative weights?') + raise ValueError("Contradictory paths found:", "negative weights?") elif pred is not None and vu_dist == u_dist: pred[u].append(v) elif u not in seen or vu_dist < seen[u]: @@ -852,7 +847,7 @@ def _dijkstra_multisource(G, sources, weight, pred=None, paths=None, return dist -def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight='weight'): +def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight="weight"): """Compute weighted shortest path length and predecessors. Uses Dijkstra's Method to obtain the shortest weighted paths @@ -925,7 +920,7 @@ def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight='weight'): return (pred, _dijkstra(G, source, weight, pred=pred, cutoff=cutoff)) -def all_pairs_dijkstra(G, cutoff=None, weight='weight'): +def all_pairs_dijkstra(G, cutoff=None, weight="weight"): """Find shortest weighted paths and lengths between all nodes. Parameters @@ -992,7 +987,7 @@ def all_pairs_dijkstra(G, cutoff=None, weight='weight'): yield (n, (dist, path)) -def all_pairs_dijkstra_path_length(G, cutoff=None, weight='weight'): +def all_pairs_dijkstra_path_length(G, cutoff=None, weight="weight"): """Compute shortest path lengths between all nodes in a weighted graph. Parameters @@ -1049,7 +1044,7 @@ def all_pairs_dijkstra_path_length(G, cutoff=None, weight='weight'): yield (n, length(G, n, cutoff=cutoff, weight=weight)) -def all_pairs_dijkstra_path(G, cutoff=None, weight='weight'): +def all_pairs_dijkstra_path(G, cutoff=None, weight="weight"): """Compute shortest paths between all nodes in a weighted graph. Parameters @@ -1100,9 +1095,9 @@ def all_pairs_dijkstra_path(G, cutoff=None, weight='weight'): yield (n, path(G, n, cutoff=cutoff, weight=weight)) -def bellman_ford_predecessor_and_distance(G, source, target=None, - weight='weight', - heuristic=False): +def bellman_ford_predecessor_and_distance( + G, source, target=None, weight="weight", heuristic=False +): """Compute shortest path lengths and predecessors on shortest paths in weighted graphs. @@ -1205,14 +1200,15 @@ def bellman_ford_predecessor_and_distance(G, source, target=None, weight = _weight_function(G, weight) - dist = _bellman_ford(G, [source], weight, pred=pred, dist=dist, - target=target, - heuristic=heuristic) + dist = _bellman_ford( + G, [source], weight, pred=pred, dist=dist, target=target, heuristic=heuristic + ) return (pred, dist) -def _bellman_ford(G, source, weight, pred=None, paths=None, dist=None, - target=None, heuristic=True): +def _bellman_ford( + G, source, weight, pred=None, paths=None, dist=None, target=None, heuristic=True +): """Relaxation loop for Bellman–Ford algorithm. This is an implementation of the SPFA variant. 
@@ -1285,7 +1281,7 @@ def _bellman_ford(G, source, weight, pred=None, paths=None, dist=None, recent_update = {v: nonexistent_edge for v in source} G_succ = G.succ if G.is_directed() else G.adj - inf = float('inf') + inf = float("inf") n = len(G) count = {} @@ -1325,8 +1321,7 @@ def _bellman_ford(G, source, weight, pred=None, paths=None, dist=None, in_q.add(v) count_v = count.get(v, 0) + 1 if count_v == n: - raise nx.NetworkXUnbounded( - "Negative cost cycle detected.") + raise nx.NetworkXUnbounded("Negative cost cycle detected.") count[v] = count_v dist[v] = dist_v pred[v] = [u] @@ -1345,7 +1340,7 @@ def _bellman_ford(G, source, weight, pred=None, paths=None, dist=None, return dist -def bellman_ford_path(G, source, target, weight='weight'): +def bellman_ford_path(G, source, target, weight="weight"): """Returns the shortest path from source to target in a weighted graph G. Parameters @@ -1389,12 +1384,11 @@ def bellman_ford_path(G, source, target, weight='weight'): -------- dijkstra_path(), bellman_ford_path_length() """ - length, path = single_source_bellman_ford(G, source, - target=target, weight=weight) + length, path = single_source_bellman_ford(G, source, target=target, weight=weight) return path -def bellman_ford_path_length(G, source, target, weight='weight'): +def bellman_ford_path_length(G, source, target, weight="weight"): """Returns the shortest path length from source to target in a weighted graph. @@ -1452,7 +1446,7 @@ def bellman_ford_path_length(G, source, target, weight='weight'): raise nx.NetworkXNoPath(f"node {target} not reachable from {source}") from e -def single_source_bellman_ford_path(G, source, weight='weight'): +def single_source_bellman_ford_path(G, source, weight="weight"): """Compute shortest path between source and all other reachable nodes for a weighted graph. @@ -1493,12 +1487,11 @@ def single_source_bellman_ford_path(G, source, weight='weight'): single_source_dijkstra(), single_source_bellman_ford() """ - (length, path) = single_source_bellman_ford( - G, source, weight=weight) + (length, path) = single_source_bellman_ford(G, source, weight=weight) return path -def single_source_bellman_ford_path_length(G, source, weight='weight'): +def single_source_bellman_ford_path_length(G, source, weight="weight"): """Compute the shortest path length between source and all other reachable nodes for a weighted graph. @@ -1550,7 +1543,7 @@ def single_source_bellman_ford_path_length(G, source, weight='weight'): return _bellman_ford(G, [source], weight) -def single_source_bellman_ford(G, source, target=None, weight='weight'): +def single_source_bellman_ford(G, source, target=None, weight="weight"): """Compute shortest paths and lengths in a weighted graph G. Uses Bellman-Ford algorithm for shortest paths. @@ -1628,7 +1621,7 @@ def single_source_bellman_ford(G, source, target=None, weight='weight'): raise nx.NetworkXNoPath(msg) from e -def all_pairs_bellman_ford_path_length(G, weight='weight'): +def all_pairs_bellman_ford_path_length(G, weight="weight"): """ Compute shortest path lengths between all nodes in a weighted graph. Parameters @@ -1672,7 +1665,7 @@ def all_pairs_bellman_ford_path_length(G, weight='weight'): yield (n, dict(length(G, n, weight=weight))) -def all_pairs_bellman_ford_path(G, weight='weight'): +def all_pairs_bellman_ford_path(G, weight="weight"): """ Compute shortest paths between all nodes in a weighted graph. 
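As the yield statements in these two all-pairs functions show, both are generators of (node, result) pairs; a quick sketch:

import networkx as nx

G = nx.path_graph(4)
paths = dict(nx.all_pairs_bellman_ford_path(G))
assert paths[0][3] == [0, 1, 2, 3]
lengths = dict(nx.all_pairs_bellman_ford_path_length(G))
assert lengths[0] == {0: 0, 1: 1, 2: 2, 3: 3}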
Parameters @@ -1710,7 +1703,7 @@ def all_pairs_bellman_ford_path(G, weight='weight'): yield (n, path(G, n, weight=weight)) -def goldberg_radzik(G, source, weight='weight'): +def goldberg_radzik(G, source, weight="weight"): """Compute shortest path lengths and predecessors on shortest paths in weighted graphs. @@ -1801,7 +1794,7 @@ def goldberg_radzik(G, source, weight='weight'): else: G_succ = G.adj - inf = float('inf') + inf = float("inf") d = {u: inf for u in G} d[source] = 0 pred = {source: None} @@ -1826,8 +1819,7 @@ def goldberg_radzik(G, source, weight='weight'): continue d_u = d[u] # Skip nodes without out-edges of negative reduced costs. - if all(d_u + weight(u, v, e) >= d[v] - for v, e in G_succ[u].items()): + if all(d_u + weight(u, v, e) >= d[v] for v, e in G_succ[u].items()): continue # Nonrecursive DFS that inserts nodes reachable from u via edges of # nonpositive reduced costs into to_scan in (reverse) topological @@ -1854,14 +1846,12 @@ def goldberg_radzik(G, source, weight='weight'): neg_count[v] = neg_count[u] + int(is_neg) stack.append((v, iter(G_succ[v].items()))) in_stack.add(v) - elif (v in in_stack and - neg_count[u] + int(is_neg) > neg_count[v]): + elif v in in_stack and neg_count[u] + int(is_neg) > neg_count[v]: # (u, v) is a back edge, and the cycle formed by the # path v to u and (u, v) contains at least one edge of # negative reduced cost. The cycle must be of negative # cost. - raise nx.NetworkXUnbounded( - 'Negative cost cycle detected.') + raise nx.NetworkXUnbounded("Negative cost cycle detected.") to_scan.reverse() return to_scan @@ -1893,7 +1883,7 @@ def goldberg_radzik(G, source, weight='weight'): return pred, d -def negative_edge_cycle(G, weight='weight', heuristic=True): +def negative_edge_cycle(G, weight="weight", heuristic=True): """Returns True if there exists a negative edge cycle anywhere in G. Parameters @@ -1955,7 +1945,7 @@ def negative_edge_cycle(G, weight='weight', heuristic=True): return False -def bidirectional_dijkstra(G, source, target, weight='weight'): +def bidirectional_dijkstra(G, source, target, weight="weight"): r"""Dijkstra's algorithm for shortest paths using bidirectional search. Parameters @@ -2039,7 +2029,7 @@ def bidirectional_dijkstra(G, source, target, weight='weight'): push = heappush pop = heappop # Init: [Forward, Backward] - dists = [{}, {}] # dictionary of final distances + dists = [{}, {}] # dictionary of final distances paths = [{source: [source]}, {target: [target]}] # dictionary of paths fringe = [[], []] # heap of (distance, node) for choosing node to expand seen = [{source: 0}, {target: 0}] # dict of distances to seen nodes @@ -2073,14 +2063,13 @@ def bidirectional_dijkstra(G, source, target, weight='weight'): return (finaldist, finalpath) for w, d in neighs[dir][v].items(): - if(dir == 0): # forward + if dir == 0: # forward vwLength = dists[dir][v] + weight(v, w, d) else: # back, must remember to change v,w->w,v vwLength = dists[dir][v] + weight(w, v, d) if w in dists[dir]: if vwLength < dists[dir][w]: - raise ValueError( - "Contradictory paths found: negative weights?") + raise ValueError("Contradictory paths found: negative weights?") elif w not in seen[dir] or vwLength < seen[dir][w]: # relaxing seen[dir][w] = vwLength @@ -2098,7 +2087,7 @@ def bidirectional_dijkstra(G, source, target, weight='weight'): raise nx.NetworkXNoPath(f"No path between {source} and {target}.") -def johnson(G, weight='weight'): +def johnson(G, weight="weight"): r"""Uses Johnson's Algorithm to compute shortest paths. 
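For reference, the (length, path) contract of bidirectional_dijkstra, reusing the multigraph case asserted in test_bidirectional_dijkstra_multigraph above:

import networkx as nx

G = nx.MultiGraph()
G.add_edge("a", "b", weight=10)
G.add_edge("a", "b", weight=100)
# Parallel edges collapse to the cheapest one; a (length, path) pair comes back.
assert nx.bidirectional_dijkstra(G, "a", "b") == (10, ["a", "b"])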
Johnson's Algorithm finds a shortest path between each pair of @@ -2166,7 +2155,7 @@ def johnson(G, weight='weight'): """ if not nx.is_weighted(G, weight=weight): - raise nx.NetworkXError('Graph is not weighted.') + raise nx.NetworkXError("Graph is not weighted.") dist = {v: 0 for v in G} pred = {v: [] for v in G} diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py index e9c1e7e8..597ccb4d 100644 --- a/networkx/algorithms/similarity.py +++ b/networkx/algorithms/similarity.py @@ -43,7 +43,7 @@ def graph_edit_distance( edge_ins_cost=None, roots=None, upper_bound=None, - timeout=None + timeout=None, ): """Returns GED (graph edit distance) between graphs G1 and G2. @@ -194,7 +194,7 @@ def graph_edit_distance( upper_bound, True, roots, - timeout + timeout, ): # assert bestcost is None or cost < bestcost bestcost = cost @@ -527,7 +527,7 @@ def optimize_edit_paths( upper_bound=None, strictly_decreasing=True, roots=None, - timeout=None + timeout=None, ): """GED (graph edit distance) calculation: advanced interface. @@ -1042,7 +1042,7 @@ def optimize_edit_paths( initial_cost = 0 if roots: root_u, root_v = roots - if (root_u not in pending_u or root_v not in pending_v): + if root_u not in pending_u or root_v not in pending_v: raise nx.NodeNotFound("Root node not in graph.") # remove roots from pending diff --git a/networkx/algorithms/simple_paths.py b/networkx/algorithms/simple_paths.py index f524cf09..f8cbe7ee 100644 --- a/networkx/algorithms/simple_paths.py +++ b/networkx/algorithms/simple_paths.py @@ -9,10 +9,10 @@ from networkx.utils import empty_generator from networkx.algorithms.shortest_paths.weighted import _weight_function __all__ = [ - 'all_simple_paths', - 'is_simple_path', - 'shortest_simple_paths', - 'all_simple_edge_paths' + "all_simple_paths", + "is_simple_path", + "shortest_simple_paths", + "all_simple_edge_paths", ] @@ -78,8 +78,7 @@ def is_simple_path(G, nodes): return nodes[0] in G # Test that no node appears more than once, and that each # adjacent pair of nodes is adjacent. - return (len(set(nodes)) == len(nodes) and - all(v in G[u] for u, v in pairwise(nodes))) + return len(set(nodes)) == len(nodes) and all(v in G[u] for u, v in pairwise(nodes)) def all_simple_paths(G, source, target, cutoff=None): @@ -365,14 +364,14 @@ def all_simple_edge_paths(G, source, target, cutoff=None): """ if source not in G: - raise nx.NodeNotFound('source node %s not in graph' % source) + raise nx.NodeNotFound("source node %s not in graph" % source) if target in G: targets = {target} else: try: targets = set(target) except TypeError: - raise nx.NodeNotFound('target node %s not in graph' % target) + raise nx.NodeNotFound("target node %s not in graph" % target) if source in targets: return [] if cutoff is None: @@ -380,8 +379,7 @@ def all_simple_edge_paths(G, source, target, cutoff=None): if cutoff < 1: return [] if G.is_multigraph(): - for simp_path in _all_simple_edge_paths_multigraph(G, source, targets, - cutoff): + for simp_path in _all_simple_edge_paths_multigraph(G, source, targets, cutoff): yield simp_path else: for simp_path in _all_simple_paths_graph(G, source, targets, cutoff): @@ -414,7 +412,7 @@ def _all_simple_edge_paths_multigraph(G, source, targets, cutoff): visited.pop() -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def shortest_simple_paths(G, source, target, weight=None): """Generate all simple paths in the graph G from source to target, starting from shortest ones. 
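A small sketch (mine, under the default unweighted behavior) of the generator shortest_simple_paths implements, where paths come out in nondecreasing length order:

import networkx as nx

G = nx.cycle_graph(5)  # two simple routes from 0 to 2
paths = list(nx.shortest_simple_paths(G, 0, 2))
# The two-hop route is yielded before the three-hop way around the cycle.
assert paths == [[0, 1, 2], [0, 4, 3, 2]]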
@@ -511,8 +509,12 @@ def shortest_simple_paths(G, source, target, weight=None): shortest_path_func = _bidirectional_shortest_path else: wt = _weight_function(G, weight) + def length_func(path): - return sum(wt(u, v, G.get_edge_data(u, v)) for (u, v) in zip(path, path[1:])) + return sum( + wt(u, v, G.get_edge_data(u, v)) for (u, v) in zip(path, path[1:]) + ) + shortest_path_func = _bidirectional_dijkstra listA = list() @@ -532,10 +534,14 @@ def shortest_simple_paths(G, source, target, weight=None): if path[:i] == root: ignore_edges.add((path[i - 1], path[i])) try: - length, spur = shortest_path_func(G, root[-1], target, - ignore_nodes=ignore_nodes, - ignore_edges=ignore_edges, - weight=weight) + length, spur = shortest_path_func( + G, + root[-1], + target, + ignore_nodes=ignore_nodes, + ignore_edges=ignore_edges, + weight=weight, + ) path = root[:-1] + spur listB.push(root_length + length, path) except nx.NetworkXNoPath: @@ -552,7 +558,6 @@ def shortest_simple_paths(G, source, target, weight=None): class PathBuffer: - def __init__(self): self.paths = set() self.sortedpaths = list() @@ -574,10 +579,9 @@ class PathBuffer: return path -def _bidirectional_shortest_path(G, source, target, - ignore_nodes=None, - ignore_edges=None, - weight=None): +def _bidirectional_shortest_path( + G, source, target, ignore_nodes=None, ignore_edges=None, weight=None +): """Returns the shortest path between source and target ignoring nodes and edges in the containers ignore_nodes and ignore_edges. @@ -660,11 +664,13 @@ def _bidirectional_pred_succ(G, source, target, ignore_nodes=None, ignore_edges= # support optional nodes filter if ignore_nodes: + def filter_iter(nodes): def iterate(v): for w in nodes(v): if w not in ignore_nodes: yield w + return iterate Gpred = filter_iter(Gpred) @@ -673,11 +679,13 @@ def _bidirectional_pred_succ(G, source, target, ignore_nodes=None, ignore_edges= # support optional edges filter if ignore_edges: if G.is_directed(): + def filter_pred_iter(pred_iter): def iterate(v): for w in pred_iter(v): if (w, v) not in ignore_edges: yield w + return iterate def filter_succ_iter(succ_iter): @@ -685,18 +693,20 @@ def _bidirectional_pred_succ(G, source, target, ignore_nodes=None, ignore_edges= for w in succ_iter(v): if (v, w) not in ignore_edges: yield w + return iterate Gpred = filter_pred_iter(Gpred) Gsucc = filter_succ_iter(Gsucc) else: + def filter_iter(nodes): def iterate(v): for w in nodes(v): - if (v, w) not in ignore_edges \ - and (w, v) not in ignore_edges: + if (v, w) not in ignore_edges and (w, v) not in ignore_edges: yield w + return iterate Gpred = filter_iter(Gpred) @@ -737,8 +747,9 @@ def _bidirectional_pred_succ(G, source, target, ignore_nodes=None, ignore_edges= raise nx.NetworkXNoPath(f"No path between {source} and {target}.") -def _bidirectional_dijkstra(G, source, target, weight='weight', - ignore_nodes=None, ignore_edges=None): +def _bidirectional_dijkstra( + G, source, target, weight="weight", ignore_nodes=None, ignore_edges=None +): """Dijkstra's algorithm for shortest paths using bidirectional search. 
This function returns the shortest path between source and target @@ -820,11 +831,13 @@ def _bidirectional_dijkstra(G, source, target, weight='weight', # support optional nodes filter if ignore_nodes: + def filter_iter(nodes): def iterate(v): for w in nodes(v): if w not in ignore_nodes: yield w + return iterate Gpred = filter_iter(Gpred) @@ -833,11 +846,13 @@ def _bidirectional_dijkstra(G, source, target, weight='weight', # support optional edges filter if ignore_edges: if G.is_directed(): + def filter_pred_iter(pred_iter): def iterate(v): for w in pred_iter(v): if (w, v) not in ignore_edges: yield w + return iterate def filter_succ_iter(succ_iter): @@ -845,18 +860,20 @@ def _bidirectional_dijkstra(G, source, target, weight='weight', for w in succ_iter(v): if (v, w) not in ignore_edges: yield w + return iterate Gpred = filter_pred_iter(Gpred) Gsucc = filter_succ_iter(Gsucc) else: + def filter_iter(nodes): def iterate(v): for w in nodes(v): - if (v, w) not in ignore_edges \ - and (w, v) not in ignore_edges: + if (v, w) not in ignore_edges and (w, v) not in ignore_edges: yield w + return iterate Gpred = filter_iter(Gpred) @@ -865,11 +882,11 @@ def _bidirectional_dijkstra(G, source, target, weight='weight', push = heappush pop = heappop # Init: Forward Backward - dists = [{}, {}] # dictionary of final distances + dists = [{}, {}] # dictionary of final distances paths = [{source: [source]}, {target: [target]}] # dictionary of paths - fringe = [[], []] # heap of (distance, node) tuples for + fringe = [[], []] # heap of (distance, node) tuples for # extracting next node to expand - seen = [{source: 0}, {target: 0}] # dictionary of distances to + seen = [{source: 0}, {target: 0}] # dictionary of distances to # nodes seen c = count() # initialize fringe heap @@ -899,7 +916,7 @@ def _bidirectional_dijkstra(G, source, target, weight='weight', wt = _weight_function(G, weight) for w in neighs[dir](v): - if(dir == 0): # forward + if dir == 0: # forward minweight = wt(v, w, G.get_edge_data(v, w)) vwLength = dists[dir][v] + minweight else: # back, must remember to change v,w->w,v @@ -908,8 +925,7 @@ def _bidirectional_dijkstra(G, source, target, weight='weight', if w in dists[dir]: if vwLength < dists[dir][w]: - raise ValueError( - "Contradictory paths found: negative weights?") + raise ValueError("Contradictory paths found: negative weights?") elif w not in seen[dir] or vwLength < seen[dir][w]: # relaxing seen[dir][w] = vwLength diff --git a/networkx/algorithms/smallworld.py b/networkx/algorithms/smallworld.py index d9f02d9a..eb164177 100644 --- a/networkx/algorithms/smallworld.py +++ b/networkx/algorithms/smallworld.py @@ -18,12 +18,12 @@ import networkx as nx from networkx.utils import not_implemented_for from networkx.utils import py_random_state -__all__ = ['random_reference', 'lattice_reference', 'sigma', 'omega'] +__all__ = ["random_reference", "lattice_reference", "sigma", "omega"] @py_random_state(3) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def random_reference(G, niter=1, connectivity=True, seed=None): """Compute a random graph by swapping edges of a given graph. 
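`random_reference` randomizes a graph by repeated double-edge swaps, which preserve every node's degree; with `connectivity=True` a swap is kept only if the graph stays connected. A quick sketch, with an arbitrary generator and seed chosen for illustration:

import networkx as nx

G = nx.connected_watts_strogatz_graph(20, 4, 0.1, seed=42)
R = nx.random_reference(G, niter=1, seed=42)
# The degree sequence is preserved even though the wiring is randomized
assert sorted(d for _, d in G.degree()) == sorted(d for _, d in R.degree())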
@@ -65,6 +65,7 @@ def random_reference(G, niter=1, connectivity=True, seed=None): raise nx.NetworkXError("Graph has less than four nodes.") from networkx.utils import cumulative_distribution, discrete_sequence + local_conn = nx.connectivity.local_edge_connectivity G = G.copy() @@ -72,8 +73,8 @@ def random_reference(G, niter=1, connectivity=True, seed=None): cdf = cumulative_distribution(degrees) # cdf of degree nnodes = len(G) nedges = nx.number_of_edges(G) - niter = niter*nedges - ntries = int(nnodes*nedges/(nnodes*(nnodes-1)/2)) + niter = niter * nedges + ntries = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2)) swapcount = 0 for i in range(niter): @@ -116,8 +117,8 @@ def random_reference(G, niter=1, connectivity=True, seed=None): @py_random_state(4) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def lattice_reference(G, niter=1, D=None, connectivity=True, seed=None): """Latticize the given graph by swapping edges. @@ -160,6 +161,7 @@ def lattice_reference(G, niter=1, D=None, connectivity=True, seed=None): """ import numpy as np from networkx.utils import cumulative_distribution, discrete_sequence + local_conn = nx.connectivity.local_edge_connectivity if G.is_directed(): @@ -183,10 +185,10 @@ def lattice_reference(G, niter=1, D=None, connectivity=True, seed=None): u = np.append((0,), np.where(un < um, un, um)) for v in range(int(np.ceil(nnodes / 2))): - D[nnodes - v - 1, :] = np.append(u[v + 1:], u[:v + 1]) + D[nnodes - v - 1, :] = np.append(u[v + 1 :], u[: v + 1]) D[v, :] = D[nnodes - v - 1, :][::-1] - niter = niter*nedges + niter = niter * nedges ntries = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2)) swapcount = 0 @@ -234,8 +236,8 @@ def lattice_reference(G, niter=1, D=None, connectivity=True, seed=None): @py_random_state(3) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def sigma(G, niter=100, nrand=10, seed=None): """Returns the small-world coefficient (sigma) of the given graph. 
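`sigma` compares the graph's average clustering C and average shortest path length L against `nrand` random references built with `random_reference` above: sigma = (C / C_rand) / (L / L_rand), and sigma > 1 is commonly read as small-world. The computation is expensive, so this sketch keeps `niter` and `nrand` deliberately small (values here are illustrative):

import networkx as nx

G = nx.connected_watts_strogatz_graph(30, 4, 0.1, seed=1)
s = nx.sigma(G, niter=2, nrand=3, seed=1)
print(s)  # > 1 suggests small-world structure; the exact value depends on the seed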
@@ -302,8 +304,8 @@ def sigma(G, niter=100, nrand=10, seed=None): @py_random_state(3) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def omega(G, niter=100, nrand=10, seed=None): """Returns the small-world coefficient (omega) of a graph diff --git a/networkx/algorithms/smetric.py b/networkx/algorithms/smetric.py index ba6e08bb..5ea73036 100644 --- a/networkx/algorithms/smetric.py +++ b/networkx/algorithms/smetric.py @@ -30,7 +30,7 @@ def s_metric(G, normalized=True): """ if normalized: raise nx.NetworkXError("Normalization not implemented") -# Gmax = li_smax_graph(list(G.degree().values())) -# return s_metric(G,normalized=False)/s_metric(Gmax,normalized=False) -# else: + # Gmax = li_smax_graph(list(G.degree().values())) + # return s_metric(G,normalized=False)/s_metric(Gmax,normalized=False) + # else: return float(sum([G.degree(u) * G.degree(v) for (u, v) in G.edges()])) diff --git a/networkx/algorithms/sparsifiers.py b/networkx/algorithms/sparsifiers.py index d23ebc66..75a5508c 100644 --- a/networkx/algorithms/sparsifiers.py +++ b/networkx/algorithms/sparsifiers.py @@ -3,12 +3,12 @@ import math import networkx as nx from networkx.utils import not_implemented_for, py_random_state -__all__ = ['spanner'] +__all__ = ["spanner"] @py_random_state(3) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def spanner(G, stretch, weight=None, seed=None): """Returns a spanner of the given graph with the given stretch. @@ -62,7 +62,7 @@ def spanner(G, stretch, weight=None, seed=None): Random Struct. Algorithms 30(4): 532-563 (2007). """ if stretch < 1: - raise ValueError('stretch must be at least 1') + raise ValueError("stretch must be at least 1") k = (stretch + 1) // 2 @@ -77,7 +77,7 @@ def spanner(G, stretch, weight=None, seed=None): # clustering is a dictionary that maps nodes in a cluster to the # cluster center clustering = {v: v for v in G.nodes} - sample_prob = math.pow(G.number_of_nodes(), - 1 / k) + sample_prob = math.pow(G.number_of_nodes(), -1 / k) size_limit = 2 * math.pow(G.number_of_nodes(), 1 + 1 / k) i = 0 @@ -98,10 +98,12 @@ def spanner(G, stretch, weight=None, seed=None): # step 2: find neighboring (sampled) clusters and # lightest edges to them - lightest_edge_neighbor, lightest_edge_weight =\ - _lightest_edge_dicts(residual_graph, clustering, v) - neighboring_sampled_centers =\ + lightest_edge_neighbor, lightest_edge_weight = _lightest_edge_dicts( + residual_graph, clustering, v + ) + neighboring_sampled_centers = ( set(lightest_edge_weight.keys()) & sampled_centers + ) # step 3: add edges to spanner if not neighboring_sampled_centers: @@ -113,11 +115,11 @@ def spanner(G, stretch, weight=None, seed=None): edges_to_remove.add((v, neighbor)) else: # there is a neighboring sampled center - closest_center = min(neighboring_sampled_centers, - key=lightest_edge_weight.get) + closest_center = min( + neighboring_sampled_centers, key=lightest_edge_weight.get + ) closest_center_weight = lightest_edge_weight[closest_center] - closest_center_neighbor =\ - lightest_edge_neighbor[closest_center] + closest_center_neighbor = lightest_edge_neighbor[closest_center] edges_to_add.add((v, closest_center_neighbor)) new_clustering[v] = closest_center @@ -134,7 +136,10 @@ def spanner(G, stretch, weight=None, seed=None): for neighbor in residual_graph.adj[v]: neighbor_cluster = clustering[neighbor] neighbor_weight = 
lightest_edge_weight[neighbor_cluster] - if neighbor_cluster == closest_center or neighbor_weight < closest_center_weight: + if ( + neighbor_cluster == closest_center + or neighbor_weight < closest_center_weight + ): edges_to_remove.add((v, neighbor)) # check whether iteration added too many edges to spanner, @@ -172,8 +177,7 @@ def spanner(G, stretch, weight=None, seed=None): # phase 2: vertex-cluster joining for v in residual_graph.nodes: - lightest_edge_neighbor, _ =\ - _lightest_edge_dicts(residual_graph, clustering, v) + lightest_edge_neighbor, _ = _lightest_edge_dicts(residual_graph, clustering, v) for neighbor in lightest_edge_neighbor.values(): _add_edge_to_spanner(H, residual_graph, v, neighbor, weight) @@ -209,9 +213,9 @@ def _setup_residual_graph(G, weight): # establish unique edge weights, even for unweighted graphs for u, v in G.edges(): if not weight: - residual_graph[u][v]['weight'] = (id(u), id(v)) + residual_graph[u][v]["weight"] = (id(u), id(v)) else: - residual_graph[u][v]['weight'] = (G[u][v][weight], id(u), id(v)) + residual_graph[u][v]["weight"] = (G[u][v][weight], id(u), id(v)) return residual_graph @@ -252,9 +256,11 @@ def _lightest_edge_dicts(residual_graph, clustering, node): lightest_edge_weight = {} for neighbor in residual_graph.adj[node]: neighbor_center = clustering[neighbor] - weight = residual_graph[node][neighbor]['weight'] - if neighbor_center not in lightest_edge_weight or\ - weight < lightest_edge_weight[neighbor_center]: + weight = residual_graph[node][neighbor]["weight"] + if ( + neighbor_center not in lightest_edge_weight + or weight < lightest_edge_weight[neighbor_center] + ): lightest_edge_neighbor[neighbor_center] = neighbor lightest_edge_weight[neighbor_center] = weight return lightest_edge_neighbor, lightest_edge_weight @@ -284,4 +290,4 @@ def _add_edge_to_spanner(H, residual_graph, u, v, weight): """ H.add_edge(u, v) if weight: - H[u][v][weight] = residual_graph[u][v]['weight'][0] + H[u][v][weight] = residual_graph[u][v]["weight"][0] diff --git a/networkx/algorithms/structuralholes.py b/networkx/algorithms/structuralholes.py index 3938faa2..fd4e200e 100644 --- a/networkx/algorithms/structuralholes.py +++ b/networkx/algorithms/structuralholes.py @@ -2,7 +2,7 @@ import networkx as nx -__all__ = ['constraint', 'local_constraint', 'effective_size'] +__all__ = ["constraint", "local_constraint", "effective_size"] def mutual_weight(G, u, v, weight=None): @@ -43,8 +43,7 @@ def normalized_mutual_weight(G, u, v, norm=sum, weight=None): attribute used as weight. 
""" - scale = norm(mutual_weight(G, u, w, weight) - for w in set(nx.all_neighbors(G, u))) + scale = norm(mutual_weight(G, u, w, weight) for w in set(nx.all_neighbors(G, u))) return 0 if scale == 0 else mutual_weight(G, u, v, weight) / scale @@ -127,11 +126,15 @@ def effective_size(G, nodes=None, weight=None): http://www.analytictech.com/connections/v20(1)/holes.htm """ + def redundancy(G, u, v, weight=None): nmw = normalized_mutual_weight - r = sum(nmw(G, u, w, weight=weight) * nmw(G, v, w, norm=max, weight=weight) - for w in set(nx.all_neighbors(G, u))) + r = sum( + nmw(G, u, w, weight=weight) * nmw(G, v, w, norm=max, weight=weight) + for w in set(nx.all_neighbors(G, u)) + ) return 1 - r + effective_size = {} if nodes is None: nodes = G @@ -140,7 +143,7 @@ def effective_size(G, nodes=None, weight=None): for v in nodes: # Effective size is not defined for isolated nodes if len(G[v]) == 0: - effective_size[v] = float('nan') + effective_size[v] = float("nan") continue E = nx.ego_graph(G, v, center=False, undirected=True) effective_size[v] = len(E) - (2 * E.size()) / len(E) @@ -148,10 +151,11 @@ def effective_size(G, nodes=None, weight=None): for v in nodes: # Effective size is not defined for isolated nodes if len(G[v]) == 0: - effective_size[v] = float('nan') + effective_size[v] = float("nan") continue - effective_size[v] = sum(redundancy(G, v, u, weight) - for u in set(nx.all_neighbors(G, v))) + effective_size[v] = sum( + redundancy(G, v, u, weight) for u in set(nx.all_neighbors(G, v)) + ) return effective_size @@ -207,10 +211,11 @@ def constraint(G, nodes=None, weight=None): for v in nodes: # Constraint is not defined for isolated nodes if len(G[v]) == 0: - constraint[v] = float('nan') + constraint[v] = float("nan") continue - constraint[v] = sum(local_constraint(G, v, n, weight) - for n in set(nx.all_neighbors(G, v))) + constraint[v] = sum( + local_constraint(G, v, n, weight) for n in set(nx.all_neighbors(G, v)) + ) return constraint @@ -266,6 +271,8 @@ def local_constraint(G, u, v, weight=None): """ nmw = normalized_mutual_weight direct = nmw(G, u, v, weight=weight) - indirect = sum(nmw(G, u, w, weight=weight) * nmw(G, w, v, weight=weight) - for w in set(nx.all_neighbors(G, u))) + indirect = sum( + nmw(G, u, w, weight=weight) * nmw(G, w, v, weight=weight) + for w in set(nx.all_neighbors(G, u)) + ) return (direct + indirect) ** 2 diff --git a/networkx/algorithms/swap.py b/networkx/algorithms/swap.py index 488a314c..8a06a5f3 100644 --- a/networkx/algorithms/swap.py +++ b/networkx/algorithms/swap.py @@ -6,8 +6,7 @@ from networkx.utils import py_random_state import networkx as nx -__all__ = ['double_edge_swap', - 'connected_double_edge_swap'] +__all__ = ["double_edge_swap", "connected_double_edge_swap"] @py_random_state(3) @@ -51,8 +50,7 @@ def double_edge_swap(G, nswap=1, max_tries=100, seed=None): The graph G is modified in place. """ if G.is_directed(): - raise nx.NetworkXError( - "double_edge_swap() not defined for directed graphs.") + raise nx.NetworkXError("double_edge_swap() not defined for directed graphs.") if nswap > max_tries: raise nx.NetworkXError("Number of swaps > number of tries allowed.") if len(G) < 4: @@ -86,8 +84,10 @@ def double_edge_swap(G, nswap=1, max_tries=100, seed=None): G.remove_edge(x, y) swapcount += 1 if n >= max_tries: - e = (f'Maximum number of swap attempts ({n}) exceeded ' - f'before desired swaps achieved ({nswap}).') + e = ( + f"Maximum number of swap attempts ({n}) exceeded " + f"before desired swaps achieved ({nswap})." 
+ ) raise nx.NetworkXAlgorithmError(e) n += 1 return G diff --git a/networkx/algorithms/tests/test_boundary.py b/networkx/algorithms/tests/test_boundary.py index 7ecb5c5e..46dd0c52 100644 --- a/networkx/algorithms/tests/test_boundary.py +++ b/networkx/algorithms/tests/test_boundary.py @@ -48,8 +48,7 @@ class TestNodeBoundary: """ def cheeger(G, k): - return min(len(nx.node_boundary(G, nn)) / k - for nn in combinations(G, k)) + return min(len(nx.node_boundary(G, nn)) / k for nn in combinations(G, k)) P = nx.petersen_graph() assert almost_equal(cheeger(P, 1), 3.00, places=2) @@ -100,30 +99,32 @@ class TestEdgeBoundary: assert list(nx.edge_boundary(P10, [])) == [] assert list(nx.edge_boundary(P10, [], [])) == [] assert list(nx.edge_boundary(P10, [1, 2, 3])) == [(3, 4)] - assert (sorted(nx.edge_boundary(P10, [4, 5, 6])) == - [(4, 3), (6, 7)]) - assert (sorted(nx.edge_boundary(P10, [3, 4, 5, 6, 7])) == - [(3, 2), (7, 8)]) + assert sorted(nx.edge_boundary(P10, [4, 5, 6])) == [(4, 3), (6, 7)] + assert sorted(nx.edge_boundary(P10, [3, 4, 5, 6, 7])) == [(3, 2), (7, 8)] assert list(nx.edge_boundary(P10, [8, 9, 10])) == [(8, 7)] assert sorted(nx.edge_boundary(P10, [4, 5, 6], [9, 10])) == [] - assert (list(nx.edge_boundary(P10, [1, 2, 3], [3, 4, 5])) == - [(2, 3), (3, 4)]) + assert list(nx.edge_boundary(P10, [1, 2, 3], [3, 4, 5])) == [(2, 3), (3, 4)] def test_complete_graph(self): K10 = cnlti(nx.complete_graph(10), first_label=1) - def ilen(iterable): return sum(1 for i in iterable) + def ilen(iterable): + return sum(1 for i in iterable) + assert list(nx.edge_boundary(K10, [])) == [] assert list(nx.edge_boundary(K10, [], [])) == [] assert ilen(nx.edge_boundary(K10, [1, 2, 3])) == 21 assert ilen(nx.edge_boundary(K10, [4, 5, 6, 7])) == 24 assert ilen(nx.edge_boundary(K10, [3, 4, 5, 6, 7])) == 25 assert ilen(nx.edge_boundary(K10, [8, 9, 10])) == 21 - assert_edges_equal(nx.edge_boundary(K10, [4, 5, 6], [9, 10]), - [(4, 9), (4, 10), (5, 9), (5, 10), (6, 9), (6, 10)]) - assert_edges_equal(nx.edge_boundary(K10, [1, 2, 3], [3, 4, 5]), - [(1, 3), (1, 4), (1, 5), (2, 3), (2, 4), - (2, 5), (3, 4), (3, 5)]) + assert_edges_equal( + nx.edge_boundary(K10, [4, 5, 6], [9, 10]), + [(4, 9), (4, 10), (5, 9), (5, 10), (6, 9), (6, 10)], + ) + assert_edges_equal( + nx.edge_boundary(K10, [1, 2, 3], [3, 4, 5]), + [(1, 3), (1, 4), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (3, 5)], + ) def test_directed(self): """Tests the edge boundary of a directed graph.""" diff --git a/networkx/algorithms/tests/test_bridges.py b/networkx/algorithms/tests/test_bridges.py index 21422133..8f6b0a87 100644 --- a/networkx/algorithms/tests/test_bridges.py +++ b/networkx/algorithms/tests/test_bridges.py @@ -9,10 +9,21 @@ class TestBridges: def test_single_bridge(self): edges = [ # DFS tree edges. - (1, 2), (2, 3), (3, 4), (3, 5), (5, 6), (6, 7), (7, 8), (5, 9), + (1, 2), + (2, 3), + (3, 4), + (3, 5), + (5, 6), + (6, 7), + (7, 8), + (5, 9), (9, 10), # Nontree edges. 
- (1, 3), (1, 4), (2, 5), (5, 10), (6, 8) + (1, 3), + (1, 4), + (2, 5), + (5, 10), + (6, 8), ] G = nx.Graph(edges) source = 1 @@ -43,7 +54,7 @@ class TestLocalBridges: assert list(nx.local_bridges(self.tri, with_span=False)) == [] def test_no_weight(self): - inf = float('inf') + inf = float("inf") expected = {(3, 4, inf), (4, 3, inf)} assert next(nx.local_bridges(self.BB)) in expected expected = {(u, v, 3) for u, v, in self.square.edges} @@ -51,12 +62,12 @@ class TestLocalBridges: assert list(nx.local_bridges(self.tri)) == [] def test_weight(self): - inf = float('inf') + inf = float("inf") G = self.square.copy() - G.edges[1, 2]['weight'] = 2 - expected = {(u, v, 5 - wt) for u, v, wt in G.edges(data='weight', default=1)} - assert set(nx.local_bridges(G, weight='weight')) == expected + G.edges[1, 2]["weight"] = 2 + expected = {(u, v, 5 - wt) for u, v, wt in G.edges(data="weight", default=1)} + assert set(nx.local_bridges(G, weight="weight")) == expected expected = {(u, v, 6) for u, v in G.edges} lb = nx.local_bridges(G, weight=lambda u, v, d: 2) diff --git a/networkx/algorithms/tests/test_chains.py b/networkx/algorithms/tests/test_chains.py index 27381b38..1ee60ceb 100644 --- a/networkx/algorithms/tests/test_chains.py +++ b/networkx/algorithms/tests/test_chains.py @@ -51,15 +51,26 @@ class TestChainDecomposition: if cyclic_equals(reversed_chain, candidate): break else: - self.fail('chain not found') + self.fail("chain not found") def test_decomposition(self): edges = [ # DFS tree edges. - (1, 2), (2, 3), (3, 4), (3, 5), (5, 6), (6, 7), (7, 8), (5, 9), + (1, 2), + (2, 3), + (3, 4), + (3, 5), + (5, 6), + (6, 7), + (7, 8), + (5, 9), (9, 10), # Nontree edges. - (1, 3), (1, 4), (2, 5), (5, 10), (6, 8) + (1, 3), + (1, 4), + (2, 5), + (5, 10), + (6, 8), ] G = nx.Graph(edges) expected = [ @@ -71,10 +82,11 @@ class TestChainDecomposition: ] chains = list(nx.chain_decomposition(G, root=1)) assert len(chains) == len(expected) -# This chain decomposition isn't unique -# for chain in chains: -# print(chain) -# self.assertContainsChain(chain, expected) + + # This chain decomposition isn't unique + # for chain in chains: + # print(chain) + # self.assertContainsChain(chain, expected) def test_barbell_graph(self): # The (3, 0) barbell graph has two triangles joined by a single edge. 
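The chain decomposition tests above exercise `nx.chain_decomposition`, which implements Schmidt's algorithm: each chain is a cycle or path formed from one nontree edge plus tree edges, and an edge is a bridge exactly when it appears in no chain (this is how the bridge tests above find them). The barbell case mirrors `test_barbell_graph`; the rotation of each printed cycle may vary:

import networkx as nx

G = nx.barbell_graph(3, 0)  # two triangles joined by the bridge (2, 3)
chains = [list(c) for c in nx.chain_decomposition(G)]
print(chains)  # [[(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)]]; (2, 3) is in no chain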
@@ -92,15 +104,15 @@ class TestChainDecomposition: """Test for a graph with multiple connected components.""" G = nx.barbell_graph(3, 0) H = nx.barbell_graph(3, 0) - mapping = dict(zip(range(6), 'abcdef')) + mapping = dict(zip(range(6), "abcdef")) nx.relabel_nodes(H, mapping, copy=False) G = nx.union(G, H) chains = list(nx.chain_decomposition(G)) expected = [ [(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)], - [('a', 'b'), ('b', 'c'), ('c', 'a')], - [('d', 'e'), ('e', 'f'), ('f', 'd')], + [("a", "b"), ("b", "c"), ("c", "a")], + [("d", "e"), ("e", "f"), ("f", "d")], ] assert len(chains) == len(expected) for chain in chains: @@ -110,13 +122,13 @@ class TestChainDecomposition: """Test for a single component of a disconnected graph.""" G = nx.barbell_graph(3, 0) H = nx.barbell_graph(3, 0) - mapping = dict(zip(range(6), 'abcdef')) + mapping = dict(zip(range(6), "abcdef")) nx.relabel_nodes(H, mapping, copy=False) G = nx.union(G, H) - chains = list(nx.chain_decomposition(G, root='a')) + chains = list(nx.chain_decomposition(G, root="a")) expected = [ - [('a', 'b'), ('b', 'c'), ('c', 'a')], - [('d', 'e'), ('e', 'f'), ('f', 'd')], + [("a", "b"), ("b", "c"), ("c", "a")], + [("d", "e"), ("e", "f"), ("f", "d")], ] assert len(chains) == len(expected) for chain in chains: diff --git a/networkx/algorithms/tests/test_chordal.py b/networkx/algorithms/tests/test_chordal.py index 36867044..5ca2caee 100644 --- a/networkx/algorithms/tests/test_chordal.py +++ b/networkx/algorithms/tests/test_chordal.py @@ -3,26 +3,47 @@ import networkx as nx class TestMCS: - @classmethod def setup_class(cls): # simple graph connected_chordal_G = nx.Graph() - connected_chordal_G.add_edges_from([(1, 2), (1, 3), (2, 3), (2, 4), - (3, 4), (3, 5), (3, 6), (4, 5), - (4, 6), (5, 6)]) + connected_chordal_G.add_edges_from( + [ + (1, 2), + (1, 3), + (2, 3), + (2, 4), + (3, 4), + (3, 5), + (3, 6), + (4, 5), + (4, 6), + (5, 6), + ] + ) cls.connected_chordal_G = connected_chordal_G chordal_G = nx.Graph() - chordal_G.add_edges_from([(1, 2), (1, 3), (2, 3), (2, 4), (3, 4), - (3, 5), (3, 6), (4, 5), (4, 6), (5, 6), - (7, 8)]) + chordal_G.add_edges_from( + [ + (1, 2), + (1, 3), + (2, 3), + (2, 4), + (3, 4), + (3, 5), + (3, 6), + (4, 5), + (4, 6), + (5, 6), + (7, 8), + ] + ) chordal_G.add_node(9) cls.chordal_G = chordal_G non_chordal_G = nx.Graph() - non_chordal_G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5), - (3, 4), (3, 5)]) + non_chordal_G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5), (3, 4), (3, 5)]) cls.non_chordal_G = non_chordal_G def test_is_chordal(self): @@ -37,37 +58,46 @@ class TestMCS: G = nx.generators.classic.path_graph(10) Induced_nodes = nx.find_induced_nodes(G, 1, 9, 2) assert Induced_nodes == {1, 2, 3, 4, 5, 6, 7, 8, 9} - pytest.raises(nx.NetworkXTreewidthBoundExceeded, - nx.find_induced_nodes, G, 1, 9, 1) + pytest.raises( + nx.NetworkXTreewidthBoundExceeded, nx.find_induced_nodes, G, 1, 9, 1 + ) Induced_nodes = nx.find_induced_nodes(self.chordal_G, 1, 6) assert Induced_nodes == {1, 2, 4, 6} - pytest.raises(nx.NetworkXError, - nx.find_induced_nodes, self.non_chordal_G, 1, 5) + pytest.raises(nx.NetworkXError, nx.find_induced_nodes, self.non_chordal_G, 1, 5) def test_chordal_find_cliques(self): - cliques = {frozenset([9]), frozenset([7, 8]), frozenset([1, 2, 3]), - frozenset([2, 3, 4]), frozenset([3, 4, 5, 6])} + cliques = { + frozenset([9]), + frozenset([7, 8]), + frozenset([1, 2, 3]), + frozenset([2, 3, 4]), + frozenset([3, 4, 5, 6]), + } assert nx.chordal_graph_cliques(self.chordal_G) == cliques def 
test_chordal_find_cliques_path(self): G = nx.path_graph(10) cliqueset = nx.chordal_graph_cliques(G) for (u, v) in G.edges(): - assert (frozenset([u, v]) in cliqueset - or frozenset([v, u]) in cliqueset) + assert frozenset([u, v]) in cliqueset or frozenset([v, u]) in cliqueset def test_chordal_find_cliquesCC(self): - cliques = {frozenset([1, 2, 3]), frozenset([2, 3, 4]), - frozenset([3, 4, 5, 6])} + cliques = {frozenset([1, 2, 3]), frozenset([2, 3, 4]), frozenset([3, 4, 5, 6])} cgc = nx.chordal_graph_cliques assert cgc(self.connected_chordal_G) == cliques def test_complete_to_chordal_graph(self): fgrg = nx.fast_gnp_random_graph - test_graphs = [nx.barbell_graph(6, 2), nx.cycle_graph(15), - nx.wheel_graph(20), nx.grid_graph([10, 4]), - nx.ladder_graph(15), nx.star_graph(5), - nx.bull_graph(), fgrg(20, 0.3, seed=1)] + test_graphs = [ + nx.barbell_graph(6, 2), + nx.cycle_graph(15), + nx.wheel_graph(20), + nx.grid_graph([10, 4]), + nx.ladder_graph(15), + nx.star_graph(5), + nx.bull_graph(), + fgrg(20, 0.3, seed=1), + ] for G in test_graphs: H, a = nx.complete_to_chordal_graph(G) assert nx.is_chordal(H) diff --git a/networkx/algorithms/tests/test_clique.py b/networkx/algorithms/tests/test_clique.py index ed1925a3..ae230ee8 100644 --- a/networkx/algorithms/tests/test_clique.py +++ b/networkx/algorithms/tests/test_clique.py @@ -4,7 +4,6 @@ from networkx import convert_node_labels_to_integers as cnlti class TestCliques: - def setup_method(self): z = [3, 4, 3, 4, 2, 4, 2, 1, 1, 1, 1] self.G = cnlti(nx.generators.havel_hakimi_graph(z), first_label=1) @@ -32,8 +31,7 @@ class TestCliques: def test_find_cliques2(self): hcl = list(nx.find_cliques(self.H)) - assert (sorted(map(sorted, hcl)) == - [[1, 2], [1, 4, 5, 6], [2, 3], [3, 4, 6]]) + assert sorted(map(sorted, hcl)) == [[1, 2], [1, 4, 5, 6], [2, 3], [3, 4, 6]] def test_clique_number(self): G = self.G @@ -58,20 +56,59 @@ class TestCliques: assert list(nx.number_of_cliques(G, [1, 2]).values()) == [1, 2] assert nx.number_of_cliques(G, [1, 2]) == {1: 1, 2: 2} assert nx.number_of_cliques(G, 2) == 2 - assert (nx.number_of_cliques(G) == - {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, - 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) - assert (nx.number_of_cliques(G, nodes=list(G)) == - {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, - 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) - assert (nx.number_of_cliques(G, nodes=[2, 3, 4]) == - {2: 2, 3: 1, 4: 2}) - assert (nx.number_of_cliques(G, cliques=self.cl) == - {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, - 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) - assert (nx.number_of_cliques(G, list(G), cliques=self.cl) == - {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, - 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) + assert nx.number_of_cliques(G) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, + } + assert nx.number_of_cliques(G, nodes=list(G)) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, + } + assert nx.number_of_cliques(G, nodes=[2, 3, 4]) == {2: 2, 3: 1, 4: 2} + assert nx.number_of_cliques(G, cliques=self.cl) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, + } + assert nx.number_of_cliques(G, list(G), cliques=self.cl) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, + } def test_node_clique_number(self): G = self.G @@ -80,28 +117,45 @@ class TestCliques: assert list(nx.node_clique_number(G, [1, 2]).values()) == [4, 4] assert nx.node_clique_number(G, [1, 2]) == {1: 4, 2: 4} assert nx.node_clique_number(G, 1) == 4 - 
assert (nx.node_clique_number(G) == - {1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 4, - 7: 3, 8: 2, 9: 2, 10: 2, 11: 2}) - assert (nx.node_clique_number(G, cliques=self.cl) == - {1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 4, - 7: 3, 8: 2, 9: 2, 10: 2, 11: 2}) + assert nx.node_clique_number(G) == { + 1: 4, + 2: 4, + 3: 4, + 4: 3, + 5: 3, + 6: 4, + 7: 3, + 8: 2, + 9: 2, + 10: 2, + 11: 2, + } + assert nx.node_clique_number(G, cliques=self.cl) == { + 1: 4, + 2: 4, + 3: 4, + 4: 3, + 5: 3, + 6: 4, + 7: 3, + 8: 2, + 9: 2, + 10: 2, + 11: 2, + } def test_cliques_containing_node(self): G = self.G - assert (nx.cliques_containing_node(G, 1) == - [[2, 6, 1, 3]]) - assert (list(nx.cliques_containing_node(G, [1]).values()) == - [[[2, 6, 1, 3]]]) - assert ([sorted(c) for c in list(nx.cliques_containing_node(G, [1, 2]).values())] == - [[[2, 6, 1, 3]], [[2, 6, 1, 3], [2, 6, 4]]]) + assert nx.cliques_containing_node(G, 1) == [[2, 6, 1, 3]] + assert list(nx.cliques_containing_node(G, [1]).values()) == [[[2, 6, 1, 3]]] + assert [ + sorted(c) for c in list(nx.cliques_containing_node(G, [1, 2]).values()) + ] == [[[2, 6, 1, 3]], [[2, 6, 1, 3], [2, 6, 4]]] result = nx.cliques_containing_node(G, [1, 2]) for k, v in result.items(): result[k] = sorted(v) - assert (result == - {1: [[2, 6, 1, 3]], 2: [[2, 6, 1, 3], [2, 6, 4]]}) - assert (nx.cliques_containing_node(G, 1) == - [[2, 6, 1, 3]]) + assert result == {1: [[2, 6, 1, 3]], 2: [[2, 6, 1, 3], [2, 6, 4]]} + assert nx.cliques_containing_node(G, 1) == [[2, 6, 1, 3]] expected = [{2, 6, 1, 3}, {2, 6, 4}] answer = [set(c) for c in nx.cliques_containing_node(G, 2)] assert answer in (expected, list(reversed(expected))) @@ -113,8 +167,7 @@ class TestCliques: def test_make_clique_bipartite(self): G = self.G B = nx.make_clique_bipartite(G) - assert (sorted(B) == - [-5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) + assert sorted(B) == [-5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] # Project onto the nodes of the original graph. H = nx.project(B, range(1, 12)) assert H.adj == G.adj @@ -146,69 +199,82 @@ class TestCliques: class TestEnumerateAllCliques: - def test_paper_figure_4(self): # Same graph as given in Fig. 4 of paper enumerate_all_cliques is # based on. 
# http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=1559964&isnumber=33129 G = nx.Graph() - edges_fig_4 = [('a', 'b'), ('a', 'c'), ('a', 'd'), ('a', 'e'), - ('b', 'c'), ('b', 'd'), ('b', 'e'), - ('c', 'd'), ('c', 'e'), - ('d', 'e'), - ('f', 'b'), ('f', 'c'), ('f', 'g'), - ('g', 'f'), ('g', 'c'), ('g', 'd'), ('g', 'e')] + edges_fig_4 = [ + ("a", "b"), + ("a", "c"), + ("a", "d"), + ("a", "e"), + ("b", "c"), + ("b", "d"), + ("b", "e"), + ("c", "d"), + ("c", "e"), + ("d", "e"), + ("f", "b"), + ("f", "c"), + ("f", "g"), + ("g", "f"), + ("g", "c"), + ("g", "d"), + ("g", "e"), + ] G.add_edges_from(edges_fig_4) cliques = list(nx.enumerate_all_cliques(G)) clique_sizes = list(map(len, cliques)) assert sorted(clique_sizes) == clique_sizes - expected_cliques = [['a'], - ['b'], - ['c'], - ['d'], - ['e'], - ['f'], - ['g'], - ['a', 'b'], - ['a', 'b', 'd'], - ['a', 'b', 'd', 'e'], - ['a', 'b', 'e'], - ['a', 'c'], - ['a', 'c', 'd'], - ['a', 'c', 'd', 'e'], - ['a', 'c', 'e'], - ['a', 'd'], - ['a', 'd', 'e'], - ['a', 'e'], - ['b', 'c'], - ['b', 'c', 'd'], - ['b', 'c', 'd', 'e'], - ['b', 'c', 'e'], - ['b', 'c', 'f'], - ['b', 'd'], - ['b', 'd', 'e'], - ['b', 'e'], - ['b', 'f'], - ['c', 'd'], - ['c', 'd', 'e'], - ['c', 'd', 'e', 'g'], - ['c', 'd', 'g'], - ['c', 'e'], - ['c', 'e', 'g'], - ['c', 'f'], - ['c', 'f', 'g'], - ['c', 'g'], - ['d', 'e'], - ['d', 'e', 'g'], - ['d', 'g'], - ['e', 'g'], - ['f', 'g'], - ['a', 'b', 'c'], - ['a', 'b', 'c', 'd'], - ['a', 'b', 'c', 'd', 'e'], - ['a', 'b', 'c', 'e']] - - assert (sorted(map(sorted, cliques)) == - sorted(map(sorted, expected_cliques))) + expected_cliques = [ + ["a"], + ["b"], + ["c"], + ["d"], + ["e"], + ["f"], + ["g"], + ["a", "b"], + ["a", "b", "d"], + ["a", "b", "d", "e"], + ["a", "b", "e"], + ["a", "c"], + ["a", "c", "d"], + ["a", "c", "d", "e"], + ["a", "c", "e"], + ["a", "d"], + ["a", "d", "e"], + ["a", "e"], + ["b", "c"], + ["b", "c", "d"], + ["b", "c", "d", "e"], + ["b", "c", "e"], + ["b", "c", "f"], + ["b", "d"], + ["b", "d", "e"], + ["b", "e"], + ["b", "f"], + ["c", "d"], + ["c", "d", "e"], + ["c", "d", "e", "g"], + ["c", "d", "g"], + ["c", "e"], + ["c", "e", "g"], + ["c", "f"], + ["c", "f", "g"], + ["c", "g"], + ["d", "e"], + ["d", "e", "g"], + ["d", "g"], + ["e", "g"], + ["f", "g"], + ["a", "b", "c"], + ["a", "b", "c", "d"], + ["a", "b", "c", "d", "e"], + ["a", "b", "c", "e"], + ] + + assert sorted(map(sorted, cliques)) == sorted(map(sorted, expected_cliques)) diff --git a/networkx/algorithms/tests/test_cluster.py b/networkx/algorithms/tests/test_cluster.py index dc1933b0..c3e9aac6 100644 --- a/networkx/algorithms/tests/test_cluster.py +++ b/networkx/algorithms/tests/test_cluster.py @@ -2,23 +2,29 @@ import networkx as nx class TestTriangles: - def test_empty(self): G = nx.Graph() assert list(nx.triangles(G).values()) == [] def test_path(self): G = nx.path_graph(10) - assert (list(nx.triangles(G).values()) == - [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) - assert (nx.triangles(G) == - {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, - 5: 0, 6: 0, 7: 0, 8: 0, 9: 0}) + assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + assert nx.triangles(G) == { + 0: 0, + 1: 0, + 2: 0, + 3: 0, + 4: 0, + 5: 0, + 6: 0, + 7: 0, + 8: 0, + 9: 0, + } def test_cubical(self): G = nx.cubical_graph() - assert (list(nx.triangles(G).values()) == - [0, 0, 0, 0, 0, 0, 0, 0]) + assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0] assert nx.triangles(G, 1) == 0 assert list(nx.triangles(G, [1, 2]).values()) == [0, 0] assert nx.triangles(G, 1) == 0 @@ -35,7 +41,6 @@ class 
TestTriangles: class TestDirectedClustering: - def test_clustering(self): G = nx.DiGraph() assert list(nx.clustering(G).values()) == [] @@ -43,23 +48,52 @@ class TestDirectedClustering: def test_path(self): G = nx.path_graph(10, create_using=nx.DiGraph()) - assert (list(nx.clustering(G).values()) == - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) - assert (nx.clustering(G) == - {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, - 5: 0.0, 6: 0.0, 7: 0.0, 8: 0.0, 9: 0.0}) + assert list(nx.clustering(G).values()) == [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ] + assert nx.clustering(G) == { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.0, + 6: 0.0, + 7: 0.0, + 8: 0.0, + 9: 0.0, + } def test_k5(self): G = nx.complete_graph(5, create_using=nx.DiGraph()) assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1] assert nx.average_clustering(G) == 1 G.remove_edge(1, 2) - assert (list(nx.clustering(G).values()) == - [11. / 12., 1.0, 1.0, 11. / 12., 11. / 12.]) - assert nx.clustering(G, [1, 4]) == {1: 1.0, 4: 11. / 12.} + assert list(nx.clustering(G).values()) == [ + 11.0 / 12.0, + 1.0, + 1.0, + 11.0 / 12.0, + 11.0 / 12.0, + ] + assert nx.clustering(G, [1, 4]) == {1: 1.0, 4: 11.0 / 12.0} G.remove_edge(2, 1) - assert (list(nx.clustering(G).values()) == - [5. / 6., 1.0, 1.0, 5. / 6., 5. / 6.]) + assert list(nx.clustering(G).values()) == [ + 5.0 / 6.0, + 1.0, + 1.0, + 5.0 / 6.0, + 5.0 / 6.0, + ] assert nx.clustering(G, [1, 4]) == {1: 1.0, 4: 0.83333333333333337} def test_triangle_and_edge(self): @@ -69,82 +103,146 @@ class TestDirectedClustering: class TestDirectedWeightedClustering: - def test_clustering(self): G = nx.DiGraph() - assert list(nx.clustering(G, weight='weight').values()) == [] + assert list(nx.clustering(G, weight="weight").values()) == [] assert nx.clustering(G) == {} def test_path(self): G = nx.path_graph(10, create_using=nx.DiGraph()) - assert (list(nx.clustering(G, weight='weight').values()) == - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) - assert (nx.clustering(G, weight='weight') == - {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, - 5: 0.0, 6: 0.0, 7: 0.0, 8: 0.0, 9: 0.0}) + assert list(nx.clustering(G, weight="weight").values()) == [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ] + assert nx.clustering(G, weight="weight") == { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.0, + 6: 0.0, + 7: 0.0, + 8: 0.0, + 9: 0.0, + } def test_k5(self): G = nx.complete_graph(5, create_using=nx.DiGraph()) - assert list(nx.clustering(G, weight='weight').values()) == [1, 1, 1, 1, 1] - assert nx.average_clustering(G, weight='weight') == 1 + assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1] + assert nx.average_clustering(G, weight="weight") == 1 G.remove_edge(1, 2) - assert (list(nx.clustering(G, weight='weight').values()) == - [11. / 12., 1.0, 1.0, 11. / 12., 11. / 12.]) - assert nx.clustering(G, [1, 4], weight='weight') == {1: 1.0, 4: 11. / 12.} + assert list(nx.clustering(G, weight="weight").values()) == [ + 11.0 / 12.0, + 1.0, + 1.0, + 11.0 / 12.0, + 11.0 / 12.0, + ] + assert nx.clustering(G, [1, 4], weight="weight") == {1: 1.0, 4: 11.0 / 12.0} G.remove_edge(2, 1) - assert (list(nx.clustering(G, weight='weight').values()) == - [5. / 6., 1.0, 1.0, 5. / 6., 5. 
/ 6.]) - assert nx.clustering(G, [1, 4], weight='weight') == {1: 1.0, 4: 0.83333333333333337} + assert list(nx.clustering(G, weight="weight").values()) == [ + 5.0 / 6.0, + 1.0, + 1.0, + 5.0 / 6.0, + 5.0 / 6.0, + ] + assert nx.clustering(G, [1, 4], weight="weight") == { + 1: 1.0, + 4: 0.83333333333333337, + } def test_triangle_and_edge(self): G = nx.cycle_graph(3, create_using=nx.DiGraph()) G.add_edge(0, 4, weight=2) assert nx.clustering(G)[0] == 1.0 / 6.0 - assert nx.clustering(G, weight='weight')[0] == 1.0 / 12.0 + assert nx.clustering(G, weight="weight")[0] == 1.0 / 12.0 class TestWeightedClustering: - def test_clustering(self): G = nx.Graph() - assert list(nx.clustering(G, weight='weight').values()) == [] + assert list(nx.clustering(G, weight="weight").values()) == [] assert nx.clustering(G) == {} def test_path(self): G = nx.path_graph(10) - assert (list(nx.clustering(G, weight='weight').values()) == - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) - assert (nx.clustering(G, weight='weight') == - {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, - 5: 0.0, 6: 0.0, 7: 0.0, 8: 0.0, 9: 0.0}) + assert list(nx.clustering(G, weight="weight").values()) == [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ] + assert nx.clustering(G, weight="weight") == { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.0, + 6: 0.0, + 7: 0.0, + 8: 0.0, + 9: 0.0, + } def test_cubical(self): G = nx.cubical_graph() - assert (list(nx.clustering(G, weight='weight').values()) == - [0, 0, 0, 0, 0, 0, 0, 0]) + assert list(nx.clustering(G, weight="weight").values()) == [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + ] assert nx.clustering(G, 1) == 0 - assert list(nx.clustering(G, [1, 2], weight='weight').values()) == [0, 0] - assert nx.clustering(G, 1, weight='weight') == 0 - assert nx.clustering(G, [1, 2], weight='weight') == {1: 0, 2: 0} + assert list(nx.clustering(G, [1, 2], weight="weight").values()) == [0, 0] + assert nx.clustering(G, 1, weight="weight") == 0 + assert nx.clustering(G, [1, 2], weight="weight") == {1: 0, 2: 0} def test_k5(self): G = nx.complete_graph(5) - assert list(nx.clustering(G, weight='weight').values()) == [1, 1, 1, 1, 1] - assert nx.average_clustering(G, weight='weight') == 1 + assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1] + assert nx.average_clustering(G, weight="weight") == 1 G.remove_edge(1, 2) - assert (list(nx.clustering(G, weight='weight').values()) == - [5. / 6., 1.0, 1.0, 5. / 6., 5. 
/ 6.]) - assert nx.clustering(G, [1, 4], weight='weight') == {1: 1.0, 4: 0.83333333333333337} + assert list(nx.clustering(G, weight="weight").values()) == [ + 5.0 / 6.0, + 1.0, + 1.0, + 5.0 / 6.0, + 5.0 / 6.0, + ] + assert nx.clustering(G, [1, 4], weight="weight") == { + 1: 1.0, + 4: 0.83333333333333337, + } def test_triangle_and_edge(self): G = nx.cycle_graph(3) G.add_edge(0, 4, weight=2) assert nx.clustering(G)[0] == 1.0 / 3.0 - assert nx.clustering(G, weight='weight')[0] == 1.0 / 6.0 + assert nx.clustering(G, weight="weight")[0] == 1.0 / 6.0 class TestClustering: - def test_clustering(self): G = nx.Graph() assert list(nx.clustering(G).values()) == [] @@ -152,16 +250,34 @@ class TestClustering: def test_path(self): G = nx.path_graph(10) - assert (list(nx.clustering(G).values()) == - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) - assert (nx.clustering(G) == - {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, - 5: 0.0, 6: 0.0, 7: 0.0, 8: 0.0, 9: 0.0}) + assert list(nx.clustering(G).values()) == [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ] + assert nx.clustering(G) == { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.0, + 6: 0.0, + 7: 0.0, + 8: 0.0, + 9: 0.0, + } def test_cubical(self): G = nx.cubical_graph() - assert (list(nx.clustering(G).values()) == - [0, 0, 0, 0, 0, 0, 0, 0]) + assert list(nx.clustering(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0] assert nx.clustering(G, 1) == 0 assert list(nx.clustering(G, [1, 2]).values()) == [0, 0] assert nx.clustering(G, 1) == 0 @@ -172,13 +288,17 @@ class TestClustering: assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1] assert nx.average_clustering(G) == 1 G.remove_edge(1, 2) - assert (list(nx.clustering(G).values()) == - [5. / 6., 1.0, 1.0, 5. / 6., 5. / 6.]) + assert list(nx.clustering(G).values()) == [ + 5.0 / 6.0, + 1.0, + 1.0, + 5.0 / 6.0, + 5.0 / 6.0, + ] assert nx.clustering(G, [1, 4]) == {1: 1.0, 4: 0.83333333333333337} class TestTransitivity: - def test_transitivity(self): G = nx.Graph() assert nx.transitivity(G) == 0.0 @@ -199,7 +319,6 @@ class TestTransitivity: class TestSquareClustering: - def test_clustering(self): G = nx.Graph() assert list(nx.square_clustering(G).values()) == [] @@ -207,16 +326,43 @@ class TestSquareClustering: def test_path(self): G = nx.path_graph(10) - assert (list(nx.square_clustering(G).values()) == - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) - assert (nx.square_clustering(G) == - {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, - 5: 0.0, 6: 0.0, 7: 0.0, 8: 0.0, 9: 0.0}) + assert list(nx.square_clustering(G).values()) == [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ] + assert nx.square_clustering(G) == { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.0, + 6: 0.0, + 7: 0.0, + 8: 0.0, + 9: 0.0, + } def test_cubical(self): G = nx.cubical_graph() - assert (list(nx.square_clustering(G).values()) == - [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]) + assert list(nx.square_clustering(G).values()) == [ + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + ] assert list(nx.square_clustering(G, [1, 2]).values()) == [0.5, 0.5] assert nx.square_clustering(G, [1])[1] == 0.5 assert nx.square_clustering(G, [1, 2]) == {1: 0.5, 2: 0.5} @@ -227,14 +373,33 @@ class TestSquareClustering: def test_bipartite_k5(self): G = nx.complete_bipartite_graph(5, 5) - assert (list(nx.square_clustering(G).values()) == - [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]) + assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1] def 
test_lind_square_clustering(self): """Test C4 for figure 1 Lind et al (2005)""" - G = nx.Graph([(1, 2), (1, 3), (1, 6), (1, 7), (2, 4), (2, 5), - (3, 4), (3, 5), (6, 7), (7, 8), (6, 8), (7, 9), - (7, 10), (6, 11), (6, 12), (2, 13), (2, 14), (3, 15), (3, 16)]) + G = nx.Graph( + [ + (1, 2), + (1, 3), + (1, 6), + (1, 7), + (2, 4), + (2, 5), + (3, 4), + (3, 5), + (6, 7), + (7, 8), + (6, 8), + (7, 9), + (7, 10), + (6, 11), + (6, 12), + (2, 13), + (2, 14), + (3, 15), + (3, 16), + ] + ) G1 = G.subgraph([1, 2, 3, 4, 5, 13, 14, 15, 16]) G2 = G.subgraph([1, 6, 7, 8, 9, 10, 11, 12]) assert nx.square_clustering(G, [1])[1] == 3 / 75.0 @@ -251,7 +416,6 @@ def test_average_clustering(): class TestGeneralizedDegree: - def test_generalized_degree(self): G = nx.Graph() assert nx.generalized_degree(G) == {} diff --git a/networkx/algorithms/tests/test_communicability.py b/networkx/algorithms/tests/test_communicability.py index 30b8f7d9..c35cd33d 100644 --- a/networkx/algorithms/tests/test_communicability.py +++ b/networkx/algorithms/tests/test_communicability.py @@ -1,31 +1,25 @@ from collections import defaultdict import pytest -numpy = pytest.importorskip('numpy') -scipy = pytest.importorskip('scipy') + +numpy = pytest.importorskip("numpy") +scipy = pytest.importorskip("scipy") import networkx as nx from networkx.testing import almost_equal -from networkx.algorithms.communicability_alg import ( - communicability, - communicability_exp -) +from networkx.algorithms.communicability_alg import communicability, communicability_exp class TestCommunicability: - def test_communicability(self): - answer = {0: {0: 1.5430806348152435, - 1: 1.1752011936438012 - }, - 1: {0: 1.1752011936438012, - 1: 1.5430806348152435 - } - } -# answer={(0, 0): 1.5430806348152435, -# (0, 1): 1.1752011936438012, -# (1, 0): 1.1752011936438012, -# (1, 1): 1.5430806348152435} + answer = { + 0: {0: 1.5430806348152435, 1: 1.1752011936438012}, + 1: {0: 1.1752011936438012, 1: 1.5430806348152435}, + } + # answer={(0, 0): 1.5430806348152435, + # (0, 1): 1.1752011936438012, + # (1, 0): 1.1752011936438012, + # (1, 1): 1.5430806348152435} result = communicability(nx.path_graph(2)) for k1, val in result.items(): @@ -34,38 +28,48 @@ class TestCommunicability: def test_communicability2(self): - answer_orig = {('1', '1'): 1.6445956054135658, - ('1', 'Albert'): 0.7430186221096251, - ('1', 'Aric'): 0.7430186221096251, - ('1', 'Dan'): 1.6208126320442937, - ('1', 'Franck'): 0.42639707170035257, - ('Albert', '1'): 0.7430186221096251, - ('Albert', 'Albert'): 2.4368257358712189, - ('Albert', 'Aric'): 1.4368257358712191, - ('Albert', 'Dan'): 2.0472097037446453, - ('Albert', 'Franck'): 1.8340111678944691, - ('Aric', '1'): 0.7430186221096251, - ('Aric', 'Albert'): 1.4368257358712191, - ('Aric', 'Aric'): 2.4368257358712193, - ('Aric', 'Dan'): 2.0472097037446457, - ('Aric', 'Franck'): 1.8340111678944691, - ('Dan', '1'): 1.6208126320442937, - ('Dan', 'Albert'): 2.0472097037446453, - ('Dan', 'Aric'): 2.0472097037446457, - ('Dan', 'Dan'): 3.1306328496328168, - ('Dan', 'Franck'): 1.4860372442192515, - ('Franck', '1'): 0.42639707170035257, - ('Franck', 'Albert'): 1.8340111678944691, - ('Franck', 'Aric'): 1.8340111678944691, - ('Franck', 'Dan'): 1.4860372442192515, - ('Franck', 'Franck'): 2.3876142275231915} + answer_orig = { + ("1", "1"): 1.6445956054135658, + ("1", "Albert"): 0.7430186221096251, + ("1", "Aric"): 0.7430186221096251, + ("1", "Dan"): 1.6208126320442937, + ("1", "Franck"): 0.42639707170035257, + ("Albert", "1"): 0.7430186221096251, + ("Albert", "Albert"): 
2.4368257358712189, + ("Albert", "Aric"): 1.4368257358712191, + ("Albert", "Dan"): 2.0472097037446453, + ("Albert", "Franck"): 1.8340111678944691, + ("Aric", "1"): 0.7430186221096251, + ("Aric", "Albert"): 1.4368257358712191, + ("Aric", "Aric"): 2.4368257358712193, + ("Aric", "Dan"): 2.0472097037446457, + ("Aric", "Franck"): 1.8340111678944691, + ("Dan", "1"): 1.6208126320442937, + ("Dan", "Albert"): 2.0472097037446453, + ("Dan", "Aric"): 2.0472097037446457, + ("Dan", "Dan"): 3.1306328496328168, + ("Dan", "Franck"): 1.4860372442192515, + ("Franck", "1"): 0.42639707170035257, + ("Franck", "Albert"): 1.8340111678944691, + ("Franck", "Aric"): 1.8340111678944691, + ("Franck", "Dan"): 1.4860372442192515, + ("Franck", "Franck"): 2.3876142275231915, + } answer = defaultdict(dict) for (k1, k2), v in answer_orig.items(): answer[k1][k2] = v - G1 = nx.Graph([('Franck', 'Aric'), ('Aric', 'Dan'), ('Dan', 'Albert'), - ('Albert', 'Franck'), ('Dan', '1'), ('Franck', 'Albert')]) + G1 = nx.Graph( + [ + ("Franck", "Aric"), + ("Aric", "Dan"), + ("Dan", "Albert"), + ("Albert", "Franck"), + ("Dan", "1"), + ("Franck", "Albert"), + ] + ) result = communicability(G1) for k1, val in result.items(): diff --git a/networkx/algorithms/tests/test_core.py b/networkx/algorithms/tests/test_core.py index 1df4e928..a5392774 100644 --- a/networkx/algorithms/tests/test_core.py +++ b/networkx/algorithms/tests/test_core.py @@ -15,10 +15,27 @@ class TestCore: t1 = nx.convert_node_labels_to_integers(nx.tetrahedral_graph(), 1) t2 = nx.convert_node_labels_to_integers(t1, 5) G = nx.union(t1, t2) - G.add_edges_from([(3, 7), (2, 11), (11, 5), (11, 12), (5, 12), - (12, 19), (12, 18), (3, 9), (7, 9), (7, 10), - (9, 10), (9, 20), (17, 13), (13, 14), (14, 15), - (15, 16), (16, 13)]) + G.add_edges_from( + [ + (3, 7), + (2, 11), + (11, 5), + (11, 12), + (5, 12), + (12, 19), + (12, 18), + (3, 9), + (7, 9), + (7, 10), + (9, 10), + (9, 20), + (17, 13), + (13, 14), + (14, 15), + (15, 16), + (16, 13), + ] + ) G.add_node(21) cls.G = G @@ -37,8 +54,9 @@ class TestCore: def test_find_cores(self): core = nx.find_cores(self.G) - nodes_by_core = [sorted([n for n in core if core[n] == val]) - for val in range(4)] + nodes_by_core = [ + sorted([n for n in core if core[n] == val]) for val in range(4) + ] assert_nodes_equal(nodes_by_core[0], [21]) assert_nodes_equal(nodes_by_core[1], [17, 18, 19, 20]) assert_nodes_equal(nodes_by_core[2], [9, 10, 11, 12, 13, 14, 15, 16]) @@ -50,14 +68,15 @@ class TestCore: def test_find_cores2(self): core = nx.find_cores(self.H) - nodes_by_core = [sorted([n for n in core if core[n] == val]) - for val in range(3)] + nodes_by_core = [ + sorted([n for n in core if core[n] == val]) for val in range(3) + ] assert_nodes_equal(nodes_by_core[0], [0]) assert_nodes_equal(nodes_by_core[1], [1, 3]) assert_nodes_equal(nodes_by_core[2], [2, 4, 5, 6]) def test_directed_find_cores(self): - '''core number had a bug for directed graphs found in issue #1959''' + """core number had a bug for directed graphs found in issue #1959""" # small example where too timid edge removal can make cn[2] = 3 G = nx.DiGraph() edges = [(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)] @@ -149,8 +168,9 @@ class TestCore: def test_onion_layers(self): layers = nx.onion_layers(self.G) - nodes_by_layer = [sorted([n for n in layers if layers[n] == val]) - for val in range(1, 7)] + nodes_by_layer = [ + sorted([n for n in layers if layers[n] == val]) for val in range(1, 7) + ] assert_nodes_equal(nodes_by_layer[0], [21]) assert_nodes_equal(nodes_by_layer[1], [17, 18, 19, 
20]) assert_nodes_equal(nodes_by_layer[2], [10, 12, 13, 14, 15, 16]) diff --git a/networkx/algorithms/tests/test_covering.py b/networkx/algorithms/tests/test_covering.py index 459266a6..78487b73 100644 --- a/networkx/algorithms/tests/test_covering.py +++ b/networkx/algorithms/tests/test_covering.py @@ -21,11 +21,11 @@ class TestMinEdgeCover: def test_bipartite_explicit(self): G = nx.Graph() G.add_nodes_from([1, 2, 3, 4], bipartite=0) - G.add_nodes_from(['a', 'b', 'c'], bipartite=1) - G.add_edges_from([(1, 'a'), (1, 'b'), (2, 'b'), - (2, 'c'), (3, 'c'), (4, 'a')]) - min_cover = nx.min_edge_cover(G, nx.algorithms.bipartite.matching. - eppstein_matching) + G.add_nodes_from(["a", "b", "c"], bipartite=1) + G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")]) + min_cover = nx.min_edge_cover( + G, nx.algorithms.bipartite.matching.eppstein_matching + ) min_cover2 = nx.min_edge_cover(G) assert nx.is_edge_cover(G, min_cover) assert len(min_cover) == 8 diff --git a/networkx/algorithms/tests/test_cuts.py b/networkx/algorithms/tests/test_cuts.py index b5d7a5a8..8eea2939 100644 --- a/networkx/algorithms/tests/test_cuts.py +++ b/networkx/algorithms/tests/test_cuts.py @@ -41,8 +41,8 @@ class TestCutSize: def test_multigraph(self): """Tests that parallel edges are each counted for a cut.""" - G = nx.MultiGraph(['ab', 'ab']) - assert nx.cut_size(G, {'a'}, {'b'}) == 2 + G = nx.MultiGraph(["ab", "ab"]) + assert nx.cut_size(G, {"a"}, {"b"}) == 2 class TestVolume: diff --git a/networkx/algorithms/tests/test_cycles.py b/networkx/algorithms/tests/test_cycles.py index be8d392f..d509e41c 100644 --- a/networkx/algorithms/tests/test_cycles.py +++ b/networkx/algorithms/tests/test_cycles.py @@ -24,7 +24,7 @@ class TestCycles: if len(b) != n: return False l = a + a - return any(l[i:i + n] == b for i in range(n)) + return any(l[i : i + n] == b for i in range(n)) def test_cycle_basis(self): G = self.G @@ -41,8 +41,7 @@ class TestCycles: nx.add_cycle(G, "ABC") cy = networkx.cycle_basis(G, 9) sort_cy = sorted(sorted(c) for c in cy[:-1]) + [sorted(cy[-1])] - assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5], - ['A', 'B', 'C']] + assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5], ["A", "B", "C"]] def test_cycle_basis2(self): with pytest.raises(nx.NetworkXNotImplemented): @@ -71,7 +70,7 @@ class TestCycles: def test_unsortable(self): # TODO What does this test do? das 6/2013 G = nx.DiGraph() - nx.add_cycle(G, ['a', 1]) + nx.add_cycle(G, ["a", 1]) c = list(nx.simple_cycles(G)) def test_simple_cycles_small(self): @@ -136,9 +135,23 @@ class TestCycles: def test_simple_graph_with_reported_bug(self): G = nx.DiGraph() - edges = [(0, 2), (0, 3), (1, 0), (1, 3), (2, 1), (2, 4), - (3, 2), (3, 4), (4, 0), (4, 1), (4, 5), (5, 0), - (5, 1), (5, 2), (5, 3)] + edges = [ + (0, 2), + (0, 3), + (1, 0), + (1, 3), + (2, 1), + (2, 4), + (3, 2), + (3, 4), + (4, 0), + (4, 1), + (4, 5), + (5, 0), + (5, 1), + (5, 2), + (5, 3), + ] G.add_edges_from(edges) cc = sorted(nx.simple_cycles(G)) assert len(cc) == 26 @@ -149,6 +162,7 @@ class TestCycles: for rc in rcc: assert any(self.is_cyclic_permutation(rc, c) for c in cc) + # These tests might fail with hash randomization since they depend on # edge_dfs. 
For more information, see the comments in: # networkx/algorithms/traversal/tests/test_edgedfs.py @@ -181,7 +195,7 @@ class TestFindCycle: def test_graph_orientation_original(self): G = nx.Graph(self.edges) G.add_edge(2, 0) - x = list(find_cycle(G, self.nodes, orientation='original')) + x = list(find_cycle(G, self.nodes, orientation="original")) x_ = [(0, 1, FORWARD), (1, 2, FORWARD), (2, 0, FORWARD)] assert x == x_ @@ -199,7 +213,7 @@ class TestFindCycle: def test_digraph_orientation_original(self): G = nx.DiGraph(self.edges) - x = list(find_cycle(G, self.nodes, orientation='original')) + x = list(find_cycle(G, self.nodes, orientation="original")) x_ = [(0, 1, FORWARD), (1, 0, FORWARD)] assert x == x_ @@ -220,19 +234,19 @@ class TestFindCycle: def test_digraph_ignore(self): G = nx.DiGraph(self.edges) - x = list(find_cycle(G, self.nodes, orientation='ignore')) + x = list(find_cycle(G, self.nodes, orientation="ignore")) x_ = [(0, 1, FORWARD), (1, 0, FORWARD)] assert x == x_ def test_digraph_reverse(self): G = nx.DiGraph(self.edges) - x = list(find_cycle(G, self.nodes, orientation='reverse')) + x = list(find_cycle(G, self.nodes, orientation="reverse")) x_ = [(1, 0, REVERSE), (0, 1, REVERSE)] assert x == x_ def test_multidigraph_ignore(self): G = nx.MultiDiGraph(self.edges) - x = list(find_cycle(G, self.nodes, orientation='ignore')) + x = list(find_cycle(G, self.nodes, orientation="ignore")) x_ = [(0, 1, 0, FORWARD), (1, 0, 0, FORWARD)] # or (1, 0, 1, 1) assert x[0] == x_[0] assert x[1][:2] == x_[1][:2] @@ -241,7 +255,7 @@ class TestFindCycle: def test_multidigraph_ignore2(self): # Loop traversed an edge while ignoring its orientation. G = nx.MultiDiGraph([(0, 1), (1, 2), (1, 2)]) - x = list(find_cycle(G, [0, 1, 2], orientation='ignore')) + x = list(find_cycle(G, [0, 1, 2], orientation="ignore")) x_ = [(1, 2, 0, FORWARD), (1, 2, 1, REVERSE)] assert x == x_ @@ -251,14 +265,20 @@ class TestFindCycle: # when 4 is visited from the first time (so we must make sure that 4 # is not visited from 2, and hence, we respect the edge orientation). 
G = nx.MultiDiGraph([(0, 1), (1, 2), (2, 3), (4, 2)]) - pytest.raises(nx.exception.NetworkXNoCycle, - find_cycle, G, [0, 1, 2, 3, 4], orientation='original') + pytest.raises( + nx.exception.NetworkXNoCycle, + find_cycle, + G, + [0, 1, 2, 3, 4], + orientation="original", + ) def test_dag(self): G = nx.DiGraph([(0, 1), (0, 2), (1, 2)]) - pytest.raises(nx.exception.NetworkXNoCycle, - find_cycle, G, orientation='original') - x = list(find_cycle(G, orientation='ignore')) + pytest.raises( + nx.exception.NetworkXNoCycle, find_cycle, G, orientation="original" + ) + x = list(find_cycle(G, orientation="ignore")) assert x == [(0, 1, FORWARD), (1, 2, FORWARD), (0, 2, REVERSE)] def test_prev_explored(self): @@ -305,7 +325,7 @@ class TestMinimumCycles: assert_basis_equal([sorted(c) for c in mcb], [[1, 2, 4], [2, 3, 4]]) def test_weighted_diamond(self): - mcb = minimum_cycle_basis(self.diamond_graph, weight='weight') + mcb = minimum_cycle_basis(self.diamond_graph, weight="weight") assert_basis_equal([sorted(c) for c in mcb], [[1, 2, 4], [1, 2, 3, 4]]) def test_dimensionality(self): diff --git a/networkx/algorithms/tests/test_d_separation.py b/networkx/algorithms/tests/test_d_separation.py index 5204101a..6314f11f 100644 --- a/networkx/algorithms/tests/test_d_separation.py +++ b/networkx/algorithms/tests/test_d_separation.py @@ -38,52 +38,56 @@ def naive_bayes_graph(): def asia_graph(): """Return the 'Asia' PGM graph.""" G = nx.DiGraph(name="asia") - G.add_edges_from([('asia', 'tuberculosis'), ('smoking', 'cancer'), - ('smoking', 'bronchitis'), ('tuberculosis', 'either'), - ('cancer', 'either'), ('either', 'xray'), - ('either', 'dyspnea'), ('bronchitis', 'dyspnea')]) + G.add_edges_from( + [ + ("asia", "tuberculosis"), + ("smoking", "cancer"), + ("smoking", "bronchitis"), + ("tuberculosis", "either"), + ("cancer", "either"), + ("either", "xray"), + ("either", "dyspnea"), + ("bronchitis", "dyspnea"), + ] + ) nx.freeze(G) return G -@pytest.fixture(name='path_graph') +@pytest.fixture(name="path_graph") def path_graph_fixture(): return path_graph() -@pytest.fixture(name='fork_graph') +@pytest.fixture(name="fork_graph") def fork_graph_fixture(): return fork_graph() -@pytest.fixture(name='collider_graph') +@pytest.fixture(name="collider_graph") def collider_graph_fixture(): return collider_graph() -@pytest.fixture(name='naive_bayes_graph') +@pytest.fixture(name="naive_bayes_graph") def naive_bayes_graph_fixture(): return naive_bayes_graph() -@pytest.fixture(name='asia_graph') +@pytest.fixture(name="asia_graph") def asia_graph_fixture(): return asia_graph() -@pytest.mark.parametrize("graph", [ - path_graph(), - fork_graph(), - collider_graph(), - naive_bayes_graph(), - asia_graph(), -]) +@pytest.mark.parametrize( + "graph", + [path_graph(), fork_graph(), collider_graph(), naive_bayes_graph(), asia_graph(),], +) def test_markov_condition(graph): """Test that the Markov condition holds for each PGM graph.""" for node in graph.nodes: parents = set(graph.predecessors(node)) - non_descendants = graph.nodes - nx.descendants(graph, - node) - {node} - parents + non_descendants = graph.nodes - nx.descendants(graph, node) - {node} - parents assert nx.d_separated(graph, {node}, non_descendants, parents) @@ -114,10 +118,12 @@ def test_naive_bayes_dsep(naive_bayes_graph): def test_asia_graph_dsep(asia_graph): """Example-based test of d-separation for asia_graph.""" - assert nx.d_separated(asia_graph, {'asia', 'smoking'}, {'dyspnea', 'xray'}, - {'bronchitis', 'either'}) - assert nx.d_separated(asia_graph, {'tuberculosis', 
'cancer'}, - {'bronchitis'}, {'smoking', 'xray'}) + assert nx.d_separated( + asia_graph, {"asia", "smoking"}, {"dyspnea", "xray"}, {"bronchitis", "either"} + ) + assert nx.d_separated( + asia_graph, {"tuberculosis", "cancer"}, {"bronchitis"}, {"smoking", "xray"} + ) def test_undirected_graphs_are_not_supported(): diff --git a/networkx/algorithms/tests/test_dag.py b/networkx/algorithms/tests/test_dag.py index d28f70d6..ee89f19d 100644 --- a/networkx/algorithms/tests/test_dag.py +++ b/networkx/algorithms/tests/test_dag.py @@ -27,8 +27,7 @@ class TestDagLongestPath: def test_weighted(self): G = nx.DiGraph() - edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), - (1, 6, 2)] + edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), (1, 6, 2)] G.add_weighted_edges_from(edges) assert nx.dag_longest_path(G) == [2, 3, 5] @@ -81,15 +80,13 @@ class TestDagLongestPathLength: pytest.raises(nx.NetworkXNotImplemented, nx.dag_longest_path_length, G) def test_weighted(self): - edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), - (1, 6, 2)] + edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), (1, 6, 2)] G = nx.DiGraph() G.add_weighted_edges_from(edges) assert nx.dag_longest_path_length(G) == 5 class TestDAG: - @classmethod def setup_class(cls): pass @@ -97,20 +94,17 @@ class TestDAG: def test_topological_sort1(self): DG = nx.DiGraph([(1, 2), (1, 3), (2, 3)]) - for algorithm in [nx.topological_sort, - nx.lexicographical_topological_sort]: + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: assert tuple(algorithm(DG)) == (1, 2, 3) DG.add_edge(3, 2) - for algorithm in [nx.topological_sort, - nx.lexicographical_topological_sort]: + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: pytest.raises(nx.NetworkXUnfeasible, consume, algorithm(DG)) DG.remove_edge(2, 3) - for algorithm in [nx.topological_sort, - nx.lexicographical_topological_sort]: + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: assert tuple(algorithm(DG)) == (1, 3, 2) DG.remove_edge(3, 2) @@ -126,9 +120,19 @@ class TestDAG: assert nx.is_directed_acyclic_graph(nx.DiGraph([(3, 4), (4, 5)])) def test_topological_sort2(self): - DG = nx.DiGraph({1: [2], 2: [3], 3: [4], - 4: [5], 5: [1], 11: [12], - 12: [13], 13: [14], 14: [15]}) + DG = nx.DiGraph( + { + 1: [2], + 2: [3], + 3: [4], + 4: [5], + 5: [1], + 11: [12], + 12: [13], + 13: [14], + 14: [15], + } + ) pytest.raises(nx.NetworkXUnfeasible, consume, nx.topological_sort(DG)) assert not nx.is_directed_acyclic_graph(DG) @@ -149,6 +153,7 @@ class TestDAG: assert set(order) == set(DG) for u, v in combinations(order, 2): assert not nx.has_path(DG, v, u) + validate(list(nx.topological_sort(DG))) DG.add_edge(14, 1) @@ -166,8 +171,8 @@ class TestDAG: assert list(nx.topological_sort(G)) == [0, 1] def test_topological_sort6(self): - for algorithm in [nx.topological_sort, - nx.lexicographical_topological_sort]: + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: + def runtime_error(): DG = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) first = True @@ -202,12 +207,13 @@ class TestDAG: def test_all_topological_sorts_2(self): DG = nx.DiGraph([(1, 3), (2, 1), (2, 4), (4, 3), (4, 5)]) - assert (sorted(nx.all_topological_sorts(DG)) == - [[2, 1, 4, 3, 5], - [2, 1, 4, 5, 3], - [2, 4, 1, 3, 5], - [2, 4, 1, 5, 3], - [2, 4, 5, 1, 3]]) + assert sorted(nx.all_topological_sorts(DG)) == [ + [2, 1, 4, 3, 5], + [2, 1, 4, 5, 3], + [2, 4, 1, 3, 5], + [2, 4, 1, 5, 3], + [2, 4, 5, 1, 3], 
+ ] def test_all_topological_sorts_3(self): def unfeasible(): @@ -223,6 +229,7 @@ class TestDAG: def not_implemted_2(): G = nx.MultiGraph([(1, 2), (1, 2), (2, 3)]) list(nx.all_topological_sorts(G)) + pytest.raises(nx.NetworkXUnfeasible, unfeasible) pytest.raises(nx.NetworkXNotImplemented, not_implemented) pytest.raises(nx.NetworkXNotImplemented, not_implemted_2) @@ -231,30 +238,28 @@ class TestDAG: DG = nx.DiGraph() for i in range(7): DG.add_node(i) - assert (sorted(map(list, permutations(DG.nodes))) == - sorted(nx.all_topological_sorts(DG))) + assert sorted(map(list, permutations(DG.nodes))) == sorted( + nx.all_topological_sorts(DG) + ) def test_all_topological_sorts_multigraph_1(self): - DG = nx.MultiDiGraph([(1, 2), (1, 2), (2, 3), - (3, 4), (3, 5), (3, 5), (3, 5)]) - assert (sorted(nx.all_topological_sorts(DG)) == - sorted([[1, 2, 3, 4, 5], - [1, 2, 3, 5, 4]])) + DG = nx.MultiDiGraph([(1, 2), (1, 2), (2, 3), (3, 4), (3, 5), (3, 5), (3, 5)]) + assert sorted(nx.all_topological_sorts(DG)) == sorted( + [[1, 2, 3, 4, 5], [1, 2, 3, 5, 4]] + ) def test_all_topological_sorts_multigraph_2(self): N = 9 edges = [] for i in range(1, N): - edges.extend([(i, i+1)] * i) + edges.extend([(i, i + 1)] * i) DG = nx.MultiDiGraph(edges) - assert (list(nx.all_topological_sorts(DG)) == - [list(range(1, N+1))]) + assert list(nx.all_topological_sorts(DG)) == [list(range(1, N + 1))] def test_ancestors(self): G = nx.DiGraph() ancestors = nx.algorithms.dag.ancestors - G.add_edges_from([ - (1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) + G.add_edges_from([(1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) assert ancestors(G, 6) == {1, 2, 4, 5} assert ancestors(G, 3) == {1, 4} assert ancestors(G, 1) == set() @@ -263,8 +268,7 @@ class TestDAG: def test_descendants(self): G = nx.DiGraph() descendants = nx.algorithms.dag.descendants - G.add_edges_from([ - (1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) + G.add_edges_from([(1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) assert descendants(G, 1) == {2, 3, 6} assert descendants(G, 4) == {2, 3, 5, 6} assert descendants(G, 3) == set() @@ -367,12 +371,41 @@ class TestDAG: solution = [[], [4], [3], [2], [1]] self._check_antichains(list(antichains(G)), solution) G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (5, 7)]) - solution = [[], [4], [7], [7, 4], [6], [6, 4], [6, 7], [6, 7, 4], - [5], [5, 4], [3], [3, 4], [2], [1]] + solution = [ + [], + [4], + [7], + [7, 4], + [6], + [6, 4], + [6, 7], + [6, 7, 4], + [5], + [5, 4], + [3], + [3, 4], + [2], + [1], + ] self._check_antichains(list(antichains(G)), solution) G = nx.DiGraph([(1, 2), (1, 3), (3, 4), (3, 5), (5, 6)]) - solution = [[], [6], [5], [4], [4, 6], [4, 5], [3], [2], [2, 6], - [2, 5], [2, 4], [2, 4, 6], [2, 4, 5], [2, 3], [1]] + solution = [ + [], + [6], + [5], + [4], + [4, 6], + [4, 5], + [3], + [2], + [2, 6], + [2, 5], + [2, 4], + [2, 4, 6], + [2, 4, 5], + [2, 3], + [1], + ] self._check_antichains(list(antichains(G)), solution) G = nx.DiGraph({0: [1, 2], 1: [4], 2: [3], 3: [4]}) solution = [[], [4], [3], [2], [1], [1, 3], [1, 2], [0]] @@ -384,7 +417,9 @@ class TestDAG: solution = [[], [0], [1], [1, 0], [2], [2, 0], [2, 1], [2, 1, 0]] self._check_antichains(list(antichains(G)), solution) - def f(x): return list(antichains(x)) + def f(x): + return list(antichains(x)) + G = nx.Graph([(1, 2), (2, 3), (3, 4)]) pytest.raises(nx.NetworkXNotImplemented, f, G) G = nx.DiGraph([(1, 2), (2, 3), (3, 1)]) @@ -392,28 +427,38 @@ class TestDAG: def test_lexicographical_topological_sort(self): G 
= nx.DiGraph([(1, 2), (2, 3), (1, 4), (1, 5), (2, 6)]) - assert (list(nx.lexicographical_topological_sort(G)) == - [1, 2, 3, 4, 5, 6]) - assert (list(nx.lexicographical_topological_sort( - G, key=lambda x: x)) == - [1, 2, 3, 4, 5, 6]) - assert (list(nx.lexicographical_topological_sort( - G, key=lambda x: -x)) == - [1, 5, 4, 2, 6, 3]) + assert list(nx.lexicographical_topological_sort(G)) == [1, 2, 3, 4, 5, 6] + assert list(nx.lexicographical_topological_sort(G, key=lambda x: x)) == [ + 1, + 2, + 3, + 4, + 5, + 6, + ] + assert list(nx.lexicographical_topological_sort(G, key=lambda x: -x)) == [ + 1, + 5, + 4, + 2, + 6, + 3, + ] def test_lexicographical_topological_sort2(self): - ''' + """ Check the case of two or more nodes with same key value. Want to avoid exception raised due to comparing nodes directly. See Issue #3493 - ''' + """ + class Test_Node: def __init__(self, n): self.label = n self.priority = 1 def __repr__(self): - return f'Node({self.label})' + return f"Node({self.label})" def sorting_key(node): return node.priority @@ -463,9 +508,7 @@ def test_is_aperiodic_selfloop(): def test_is_aperiodic_raise(): G = nx.Graph() - pytest.raises(nx.NetworkXError, - nx.is_aperiodic, - G) + pytest.raises(nx.NetworkXError, nx.is_aperiodic, G) def test_is_aperiodic_bipartite(): @@ -574,7 +617,7 @@ class TestDagToBranching: def test_not_acyclic(self): """Tests that a non-acyclic graph causes an exception.""" with pytest.raises(nx.HasACycle): - G = nx.DiGraph(pairwise('abc', cyclic=True)) + G = nx.DiGraph(pairwise("abc", cyclic=True)) nx.dag_to_branching(G) def test_undirected(self): diff --git a/networkx/algorithms/tests/test_distance_measures.py b/networkx/algorithms/tests/test_distance_measures.py index d4db6987..142deaad 100644 --- a/networkx/algorithms/tests/test_distance_measures.py +++ b/networkx/algorithms/tests/test_distance_measures.py @@ -96,9 +96,9 @@ class TestResistanceDistance: def setup_class(cls): global np global sp_sparse - np = pytest.importorskip('numpy') - scipy = pytest.importorskip('scipy') - sp_sparse = pytest.importorskip('scipy.sparse') + np = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") + sp_sparse = pytest.importorskip("scipy.sparse") def setup_method(self): G = nx.Graph() @@ -110,11 +110,9 @@ class TestResistanceDistance: def test_laplacian_submatrix(self): from networkx.algorithms.distance_measures import _laplacian_submatrix - M = sp_sparse.csr_matrix([[1, 2, 3], - [4, 5, 6], - [7, 8, 9]], dtype=np.float32) - N = sp_sparse.csr_matrix([[5, 6], - [8, 9]], dtype=np.float32) + + M = sp_sparse.csr_matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float32) + N = sp_sparse.csr_matrix([[5, 6], [8, 9]], dtype=np.float32) Mn, Mn_nodelist = _laplacian_submatrix(1, M, [1, 2, 3]) assert Mn_nodelist == [2, 3] assert np.allclose(Mn.toarray(), N.toarray()) @@ -122,27 +120,27 @@ class TestResistanceDistance: def test_laplacian_submatrix_square(self): with pytest.raises(nx.NetworkXError): from networkx.algorithms.distance_measures import _laplacian_submatrix - M = sp_sparse.csr_matrix([[1, 2], - [4, 5], - [7, 8]], dtype=np.float32) + + M = sp_sparse.csr_matrix([[1, 2], [4, 5], [7, 8]], dtype=np.float32) _laplacian_submatrix(1, M, [1, 2, 3]) def test_laplacian_submatrix_matrix_node_dim(self): with pytest.raises(nx.NetworkXError): from networkx.algorithms.distance_measures import _laplacian_submatrix - M = sp_sparse.csr_matrix([[1, 2, 3], - [4, 5, 6], - [7, 8, 9]], dtype=np.float32) + + M = sp_sparse.csr_matrix( + [[1, 2, 3], [4, 5, 6], [7, 8, 9]], 
dtype=np.float32 + ) _laplacian_submatrix(1, M, [1, 2, 3, 4]) def test_resistance_distance(self): - rd = nx.resistance_distance(self.G, 1, 3, 'weight', True) - test_data = 1/(1/(2+4) + 1/(1+3)) + rd = nx.resistance_distance(self.G, 1, 3, "weight", True) + test_data = 1 / (1 / (2 + 4) + 1 / (1 + 3)) assert round(rd, 5) == round(test_data, 5) def test_resistance_distance_noinv(self): - rd = nx.resistance_distance(self.G, 1, 3, 'weight', False) - test_data = 1/(1/(1/2+1/4) + 1/(1/1+1/3)) + rd = nx.resistance_distance(self.G, 1, 3, "weight", False) + test_data = 1 / (1 / (1 / 2 + 1 / 4) + 1 / (1 / 1 + 1 / 3)) assert round(rd, 5) == round(test_data, 5) def test_resistance_distance_no_weight(self): @@ -150,9 +148,9 @@ class TestResistanceDistance: assert round(rd, 5) == 1 def test_resistance_distance_neg_weight(self): - self.G[2][3]['weight'] = -4 - rd = nx.resistance_distance(self.G, 1, 3, 'weight', True) - test_data = 1/(1/(2+-4) + 1/(1+3)) + self.G[2][3]["weight"] = -4 + rd = nx.resistance_distance(self.G, 1, 3, "weight", True) + test_data = 1 / (1 / (2 + -4) + 1 / (1 + 3)) assert round(rd, 5) == round(test_data, 5) def test_multigraph(self): @@ -161,13 +159,13 @@ class TestResistanceDistance: G.add_edge(2, 3, weight=4) G.add_edge(3, 4, weight=1) G.add_edge(1, 4, weight=3) - rd = nx.resistance_distance(G, 1, 3, 'weight', True) - assert np.isclose(rd, 1/(1/(2+4) + 1/(1+3))) + rd = nx.resistance_distance(G, 1, 3, "weight", True) + assert np.isclose(rd, 1 / (1 / (2 + 4) + 1 / (1 + 3))) def test_resistance_distance_div0(self): with pytest.raises(ZeroDivisionError): - self.G[1][2]['weight'] = 0 - nx.resistance_distance(self.G, 1, 3, 'weight') + self.G[1][2]["weight"] = 0 + nx.resistance_distance(self.G, 1, 3, "weight") def test_resistance_distance_not_connected(self): with pytest.raises(nx.NetworkXError): @@ -189,6 +187,7 @@ class TestResistanceDistance: class TestBarycenter: """Test :func:`networkx.algorithms.distance_measures.barycenter`.""" + def barycenter_as_subgraph(self, g, **kwargs): """Return the subgraph induced on the barycenter of g""" b = nx.barycenter(g, **kwargs) @@ -207,8 +206,8 @@ class TestBarycenter: # ...but not with the weight argument for u, v, data in K_5.edges.data(): - data['weight'] = 1 - pytest.raises(ValueError, nx.barycenter, K_5, sp=sp, weight='weight') + data["weight"] = 1 + pytest.raises(ValueError, nx.barycenter, K_5, sp=sp, weight="weight") # ...and a corrupted sp can make it seem like K_5 is disconnected del sp[0][1] @@ -219,7 +218,7 @@ class TestBarycenter: See [West01]_, p. 78. """ - prng = Random(0xdeadbeef) + prng = Random(0xDEADBEEF) for i in range(50): RT = nx.random_tree(prng.randint(1, 75), prng) b = self.barycenter_as_subgraph(RT) @@ -231,28 +230,43 @@ class TestBarycenter: def test_this_one_specific_tree(self): """Test the tree pictured at the bottom of [West01]_, p. 
78.""" - g = nx.Graph({ - 'a': ['b'], - 'b': ['a', 'x'], - 'x': ['b', 'y'], - 'y': ['x', 'z'], - 'z': ['y', 0, 1, 2, 3, 4], - 0: ['z'], 1: ['z'], 2: ['z'], 3: ['z'], 4: ['z']}) - b = self.barycenter_as_subgraph(g, attr='barycentricity') - assert list(b) == ['z'] + g = nx.Graph( + { + "a": ["b"], + "b": ["a", "x"], + "x": ["b", "y"], + "y": ["x", "z"], + "z": ["y", 0, 1, 2, 3, 4], + 0: ["z"], + 1: ["z"], + 2: ["z"], + 3: ["z"], + 4: ["z"], + } + ) + b = self.barycenter_as_subgraph(g, attr="barycentricity") + assert list(b) == ["z"] assert not b.edges - expected_barycentricity = {0: 23, 1: 23, 2: 23, 3: 23, 4: 23, - 'a': 35, 'b': 27, 'x': 21, 'y': 17, 'z': 15 - } + expected_barycentricity = { + 0: 23, + 1: 23, + 2: 23, + 3: 23, + 4: 23, + "a": 35, + "b": 27, + "x": 21, + "y": 17, + "z": 15, + } for node, barycentricity in expected_barycentricity.items(): - assert g.nodes[node]['barycentricity'] == barycentricity + assert g.nodes[node]["barycentricity"] == barycentricity # Doubling weights should do nothing but double the barycentricities for edge in g.edges: - g.edges[edge]['weight'] = 2 - b = self.barycenter_as_subgraph(g, weight='weight', - attr='barycentricity2') - assert list(b) == ['z'] + g.edges[edge]["weight"] = 2 + b = self.barycenter_as_subgraph(g, weight="weight", attr="barycentricity2") + assert list(b) == ["z"] assert not b.edges for node, barycentricity in expected_barycentricity.items(): - assert g.nodes[node]['barycentricity2'] == barycentricity*2 + assert g.nodes[node]["barycentricity2"] == barycentricity * 2 diff --git a/networkx/algorithms/tests/test_distance_regular.py b/networkx/algorithms/tests/test_distance_regular.py index f7b8cfb9..d336b188 100644 --- a/networkx/algorithms/tests/test_distance_regular.py +++ b/networkx/algorithms/tests/test_distance_regular.py @@ -3,7 +3,6 @@ from networkx import is_strongly_regular class TestDistanceRegular: - def test_is_distance_regular(self): assert nx.is_distance_regular(nx.icosahedral_graph()) assert nx.is_distance_regular(nx.petersen_graph()) diff --git a/networkx/algorithms/tests/test_dominance.py b/networkx/algorithms/tests/test_dominance.py index 5acd643a..3246d5cc 100644 --- a/networkx/algorithms/tests/test_dominance.py +++ b/networkx/algorithms/tests/test_dominance.py @@ -3,7 +3,6 @@ import pytest class TestImmediateDominators: - def test_exceptions(self): G = nx.Graph() G.add_node(0) @@ -23,21 +22,20 @@ class TestImmediateDominators: def test_path(self): n = 5 G = nx.path_graph(n, create_using=nx.DiGraph()) - assert (nx.immediate_dominators(G, 0) == - {i: max(i - 1, 0) for i in range(n)}) + assert nx.immediate_dominators(G, 0) == {i: max(i - 1, 0) for i in range(n)} def test_cycle(self): n = 5 G = nx.cycle_graph(n, create_using=nx.DiGraph()) - assert (nx.immediate_dominators(G, 0) == - {i: max(i - 1, 0) for i in range(n)}) + assert nx.immediate_dominators(G, 0) == {i: max(i - 1, 0) for i in range(n)} def test_unreachable(self): n = 5 assert n > 1 G = nx.path_graph(n, create_using=nx.DiGraph()) - assert (nx.immediate_dominators(G, n // 2) == - {i: max(i - 1, n // 2) for i in range(n // 2, n)}) + assert nx.immediate_dominators(G, n // 2) == { + i: max(i - 1, n // 2) for i in range(n // 2, n) + } def test_irreducible1(self): # Graph taken from Figure 2 of @@ -46,45 +44,41 @@ class TestImmediateDominators: # Software Practice & Experience, 4:110, 2001. 
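# A minimal sketch of nx.barycenter, which the TestBarycenter hunks above
# exercise: it returns the node(s) minimizing the sum of shortest-path
# lengths to all other nodes, and attr= stores that sum on every node.
# The numbers below are hand-computed for a 5-node path, not taken from
# the test suite.
import networkx as nx

G = nx.path_graph(5)                    # 0 - 1 - 2 - 3 - 4
nx.barycenter(G)                        # [2]: the middle node
nx.barycenter(G, attr="barycentricity")
G.nodes[2]["barycentricity"]            # 2 + 1 + 1 + 2 == 6, the minimum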
edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)] G = nx.DiGraph(edges) - assert (nx.immediate_dominators(G, 5) == - {i: 5 for i in range(1, 6)}) + assert nx.immediate_dominators(G, 5) == {i: 5 for i in range(1, 6)} def test_irreducible2(self): # Graph taken from Figure 4 of # K. D. Cooper, T. J. Harvey, and K. Kennedy. # A simple, fast dominance algorithm. # Software Practice & Experience, 4:110, 2001. - edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), - (6, 4), (6, 5)] + edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), (6, 4), (6, 5)] G = nx.DiGraph(edges) result = nx.immediate_dominators(G, 6) - assert (result == {i: 6 for i in range(1, 7)}) + assert result == {i: 6 for i in range(1, 7)} def test_domrel_png(self): # Graph taken from https://commons.wikipedia.org/wiki/File:Domrel.png edges = [(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)] G = nx.DiGraph(edges) result = nx.immediate_dominators(G, 1) - assert (result == {1: 1, 2: 1, 3: 2, 4: 2, 5: 2, 6: 2}) + assert result == {1: 1, 2: 1, 3: 2, 4: 2, 5: 2, 6: 2} # Test postdominance. result = nx.immediate_dominators(G.reverse(copy=False), 6) - assert (result == {1: 2, 2: 6, 3: 5, 4: 5, 5: 2, 6: 6}) + assert result == {1: 2, 2: 6, 3: 5, 4: 5, 5: 2, 6: 6} def test_boost_example(self): # Graph taken from Figure 1 of # http://www.boost.org/doc/libs/1_56_0/libs/graph/doc/lengauer_tarjan_dominator.htm - edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), - (5, 7), (6, 4)] + edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), (5, 7), (6, 4)] G = nx.DiGraph(edges) result = nx.immediate_dominators(G, 0) - assert (result == {0: 0, 1: 0, 2: 1, 3: 1, 4: 3, 5: 4, 6: 4, 7: 1}) + assert result == {0: 0, 1: 0, 2: 1, 3: 1, 4: 3, 5: 4, 6: 4, 7: 1} # Test postdominance. result = nx.immediate_dominators(G.reverse(copy=False), 7) - assert (result == {0: 1, 1: 7, 2: 7, 3: 4, 4: 5, 5: 7, 6: 4, 7: 7}) + assert result == {0: 1, 1: 7, 2: 7, 3: 4, 4: 5, 5: 7, 6: 4, 7: 7} class TestDominanceFrontiers: - def test_exceptions(self): G = nx.Graph() G.add_node(0) @@ -104,21 +98,18 @@ class TestDominanceFrontiers: def test_path(self): n = 5 G = nx.path_graph(n, create_using=nx.DiGraph()) - assert (nx.dominance_frontiers(G, 0) == - {i: set() for i in range(n)}) + assert nx.dominance_frontiers(G, 0) == {i: set() for i in range(n)} def test_cycle(self): n = 5 G = nx.cycle_graph(n, create_using=nx.DiGraph()) - assert (nx.dominance_frontiers(G, 0) == - {i: set() for i in range(n)}) + assert nx.dominance_frontiers(G, 0) == {i: set() for i in range(n)} def test_unreachable(self): n = 5 assert n > 1 G = nx.path_graph(n, create_using=nx.DiGraph()) - assert (nx.dominance_frontiers(G, n // 2) == - {i: set() for i in range(n // 2, n)}) + assert nx.dominance_frontiers(G, n // 2) == {i: set() for i in range(n // 2, n)} def test_irreducible1(self): # Graph taken from Figure 2 of @@ -127,85 +118,121 @@ class TestDominanceFrontiers: # Software Practice & Experience, 4:110, 2001. edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)] G = nx.DiGraph(edges) - assert ({u: df - for u, df in nx.dominance_frontiers(G, 5).items()} == - {1: {2}, 2: {1}, 3: {2}, - 4: {1}, 5: set()}) + assert {u: df for u, df in nx.dominance_frontiers(G, 5).items()} == { + 1: {2}, + 2: {1}, + 3: {2}, + 4: {1}, + 5: set(), + } def test_irreducible2(self): # Graph taken from Figure 4 of # K. D. Cooper, T. J. Harvey, and K. Kennedy. # A simple, fast dominance algorithm. # Software Practice & Experience, 4:110, 2001. 
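# For reference, the immediate-dominator API these hunks reformat, rerun on
# the Domrel graph from the test above; the expected dicts are copied
# verbatim from its assertions. Each node maps to its immediate dominator,
# with the start node dominating itself.
import networkx as nx

G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)])
nx.immediate_dominators(G, 1)
# {1: 1, 2: 1, 3: 2, 4: 2, 5: 2, 6: 2}
# Postdominators come from the reversed graph, as the tests do:
nx.immediate_dominators(G.reverse(copy=False), 6)
# {1: 2, 2: 6, 3: 5, 4: 5, 5: 2, 6: 6}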
- edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), - (6, 4), (6, 5)] + edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), (6, 4), (6, 5)] G = nx.DiGraph(edges) - assert (nx.dominance_frontiers(G, 6) == - {1: {2}, 2: {1, 3}, 3: {2}, 4: {2, 3}, 5: {1}, 6: set()}) + assert nx.dominance_frontiers(G, 6) == { + 1: {2}, + 2: {1, 3}, + 3: {2}, + 4: {2, 3}, + 5: {1}, + 6: set(), + } def test_domrel_png(self): # Graph taken from https://commons.wikipedia.org/wiki/File:Domrel.png edges = [(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)] G = nx.DiGraph(edges) - assert (nx.dominance_frontiers(G, 1) == - {1: set(), 2: {2}, 3: {5}, 4: {5}, 5: {2}, 6: set()}) + assert nx.dominance_frontiers(G, 1) == { + 1: set(), + 2: {2}, + 3: {5}, + 4: {5}, + 5: {2}, + 6: set(), + } # Test postdominance. result = nx.dominance_frontiers(G.reverse(copy=False), 6) - assert (result == {1: set(), 2: {2}, 3: {2}, 4: {2}, 5: {2}, 6: set()}) + assert result == {1: set(), 2: {2}, 3: {2}, 4: {2}, 5: {2}, 6: set()} def test_boost_example(self): # Graph taken from Figure 1 of # http://www.boost.org/doc/libs/1_56_0/libs/graph/doc/lengauer_tarjan_dominator.htm - edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), - (5, 7), (6, 4)] + edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), (5, 7), (6, 4)] G = nx.DiGraph(edges) - assert (nx.dominance_frontiers(G, 0) == - {0: set(), 1: set(), 2: {7}, 3: {7}, - 4: {4, 7}, 5: {7}, 6: {4}, 7: set()}) + assert nx.dominance_frontiers(G, 0) == { + 0: set(), + 1: set(), + 2: {7}, + 3: {7}, + 4: {4, 7}, + 5: {7}, + 6: {4}, + 7: set(), + } # Test postdominance. result = nx.dominance_frontiers(G.reverse(copy=False), 7) - expected = {0: set(), 1: set(), 2: {1}, 3: {1}, - 4: {1, 4}, 5: {1}, 6: {4}, 7: set()} + expected = { + 0: set(), + 1: set(), + 2: {1}, + 3: {1}, + 4: {1, 4}, + 5: {1}, + 6: {4}, + 7: set(), + } assert result == expected def test_discard_issue(self): # https://github.com/networkx/networkx/issues/2071 g = nx.DiGraph() - g.add_edges_from([ - ('b0', 'b1'), - ('b1', 'b2'), - ('b2', 'b3'), - ('b3', 'b1'), - ('b1', 'b5'), - ('b5', 'b6'), - ('b5', 'b8'), - ('b6', 'b7'), - ('b8', 'b7'), - ('b7', 'b3'), - ('b3', 'b4') - ] + g.add_edges_from( + [ + ("b0", "b1"), + ("b1", "b2"), + ("b2", "b3"), + ("b3", "b1"), + ("b1", "b5"), + ("b5", "b6"), + ("b5", "b8"), + ("b6", "b7"), + ("b8", "b7"), + ("b7", "b3"), + ("b3", "b4"), + ] ) - df = nx.dominance_frontiers(g, 'b0') - assert df == {'b4': set(), 'b5': {'b3'}, 'b6': {'b7'}, - 'b7': {'b3'}, - 'b0': set(), 'b1': {'b1'}, 'b2': {'b3'}, - 'b3': {'b1'}, 'b8': {'b7'}} + df = nx.dominance_frontiers(g, "b0") + assert df == { + "b4": set(), + "b5": {"b3"}, + "b6": {"b7"}, + "b7": {"b3"}, + "b0": set(), + "b1": {"b1"}, + "b2": {"b3"}, + "b3": {"b1"}, + "b8": {"b7"}, + } def test_loop(self): g = nx.DiGraph() - g.add_edges_from([('a', 'b'), ('b', 'c'), ('b', 'a')]) - df = nx.dominance_frontiers(g, 'a') - assert df == {'a': set(), 'b': set(), 'c': set()} + g.add_edges_from([("a", "b"), ("b", "c"), ("b", "a")]) + df = nx.dominance_frontiers(g, "a") + assert df == {"a": set(), "b": set(), "c": set()} def test_missing_immediate_doms(self): # see https://github.com/networkx/networkx/issues/2070 g = nx.DiGraph() edges = [ - ('entry_1', 'b1'), - ('b1', 'b2'), - ('b2', 'b3'), - ('b3', 'exit'), - ('entry_2', 'b3') + ("entry_1", "b1"), + ("b1", "b2"), + ("b2", "b3"), + ("b3", "exit"), + ("entry_2", "b3"), ] # entry_1 @@ -221,35 +248,37 @@ class TestDominanceFrontiers: g.add_edges_from(edges) # formerly raised 
KeyError on entry_2 when parsing b3 # because entry_2 does not have immediate doms (no path) - nx.dominance_frontiers(g, 'entry_1') + nx.dominance_frontiers(g, "entry_1") def test_loops_larger(self): # from # http://ecee.colorado.edu/~waite/Darmstadt/motion.html g = nx.DiGraph() edges = [ - ('entry', 'exit'), - ('entry', '1'), - ('1', '2'), - ('2', '3'), - ('3', '4'), - ('4', '5'), - ('5', '6'), - ('6', 'exit'), - ('6', '2'), - ('5', '3'), - ('4', '4') + ("entry", "exit"), + ("entry", "1"), + ("1", "2"), + ("2", "3"), + ("3", "4"), + ("4", "5"), + ("5", "6"), + ("6", "exit"), + ("6", "2"), + ("5", "3"), + ("4", "4"), ] g.add_edges_from(edges) - df = nx.dominance_frontiers(g, 'entry') - answer = {'entry': set(), - '1': {'exit'}, - '2': {'exit', '2'}, - '3': {'exit', '3', '2'}, - '4': {'exit', '4', '3', '2'}, - '5': {'exit', '3', '2'}, - '6': {'exit', '2'}, - 'exit': set()} + df = nx.dominance_frontiers(g, "entry") + answer = { + "entry": set(), + "1": {"exit"}, + "2": {"exit", "2"}, + "3": {"exit", "3", "2"}, + "4": {"exit", "4", "3", "2"}, + "5": {"exit", "3", "2"}, + "6": {"exit", "2"}, + "exit": set(), + } for n in df: assert set(df[n]) == set(answer[n]) diff --git a/networkx/algorithms/tests/test_euler.py b/networkx/algorithms/tests/test_euler.py index 0b3bdb3a..f136ab0f 100644 --- a/networkx/algorithms/tests/test_euler.py +++ b/networkx/algorithms/tests/test_euler.py @@ -96,7 +96,9 @@ class TestEulerianCircuit: nodes = [u for u, v, k in edges] assert nodes == [0, 3, 2, 1, 2, 1] assert edges[:2] == [(0, 3, 0), (3, 2, 0)] - assert collections.Counter(edges[2:5]) == collections.Counter([(2, 1, 0), (1, 2, 1), (2, 1, 2)]) + assert collections.Counter(edges[2:5]) == collections.Counter( + [(2, 1, 0), (1, 2, 1), (2, 1, 2)] + ) assert edges[5:] == [(1, 0, 0)] def test_not_eulerian(self): diff --git a/networkx/algorithms/tests/test_graph_hashing.py b/networkx/algorithms/tests/test_graph_hashing.py index b378b857..719b5411 100644 --- a/networkx/algorithms/tests/test_graph_hashing.py +++ b/networkx/algorithms/tests/test_graph_hashing.py @@ -13,27 +13,26 @@ def test_empty_graph_hash(): def test_relabel(): G1 = nx.Graph() - G1.add_edges_from([(1, 2, {'label': 'A'}), - (2, 3, {'label': 'A'}), - (3, 1, {'label': 'A'}), - (1, 4, {'label': 'B'})]) - h_before = nx.weisfeiler_lehman_graph_hash(G1, edge_attr='label') + G1.add_edges_from( + [ + (1, 2, {"label": "A"}), + (2, 3, {"label": "A"}), + (3, 1, {"label": "A"}), + (1, 4, {"label": "B"}), + ] + ) + h_before = nx.weisfeiler_lehman_graph_hash(G1, edge_attr="label") G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()}) - h_after = nx.weisfeiler_lehman_graph_hash(G2, edge_attr='label') + h_after = nx.weisfeiler_lehman_graph_hash(G2, edge_attr="label") assert h_after == h_before def test_directed(): G1 = nx.DiGraph() - G1.add_edges_from([ - (1, 2), - (2, 3), - (3, 1), - (1, 5) - ]) + G1.add_edges_from([(1, 2), (2, 3), (3, 1), (1, 5)]) h_directed = nx.weisfeiler_lehman_graph_hash(G1) diff --git a/networkx/algorithms/tests/test_graphical.py b/networkx/algorithms/tests/test_graphical.py index 7dd2d78e..5c5a8819 100644 --- a/networkx/algorithms/tests/test_graphical.py +++ b/networkx/algorithms/tests/test_graphical.py @@ -4,12 +4,12 @@ import networkx as nx def test_valid_degree_sequence1(): n = 100 - p = .3 + p = 0.3 for i in range(10): G = nx.erdos_renyi_graph(n, p) deg = (d for n, d in G.degree()) - assert nx.is_graphical(deg, method='eg') - assert nx.is_graphical(deg, method='hh') + assert nx.is_graphical(deg, method="eg") + assert 
nx.is_graphical(deg, method="hh") def test_valid_degree_sequence2(): @@ -17,33 +17,33 @@ def test_valid_degree_sequence2(): for i in range(10): G = nx.barabasi_albert_graph(n, 1) deg = (d for n, d in G.degree()) - assert nx.is_graphical(deg, method='eg') - assert nx.is_graphical(deg, method='hh') + assert nx.is_graphical(deg, method="eg") + assert nx.is_graphical(deg, method="hh") def test_string_input(): - pytest.raises(nx.NetworkXException, nx.is_graphical, [], 'foo') - pytest.raises(nx.NetworkXException, nx.is_graphical, ['red'], 'hh') - pytest.raises(nx.NetworkXException, nx.is_graphical, ['red'], 'eg') + pytest.raises(nx.NetworkXException, nx.is_graphical, [], "foo") + pytest.raises(nx.NetworkXException, nx.is_graphical, ["red"], "hh") + pytest.raises(nx.NetworkXException, nx.is_graphical, ["red"], "eg") def test_non_integer_input(): - pytest.raises(nx.NetworkXException, nx.is_graphical, [72.5], 'eg') - pytest.raises(nx.NetworkXException, nx.is_graphical, [72.5], 'hh') + pytest.raises(nx.NetworkXException, nx.is_graphical, [72.5], "eg") + pytest.raises(nx.NetworkXException, nx.is_graphical, [72.5], "hh") def test_negative_input(): - assert not nx.is_graphical([-1], 'hh') - assert not nx.is_graphical([-1], 'eg') + assert not nx.is_graphical([-1], "hh") + assert not nx.is_graphical([-1], "eg") class TestAtlas: @classmethod def setup_class(cls): global atlas -# import platform -# if platform.python_implementation() == 'Jython': -# raise SkipTest('graph atlas not available under Jython.') + # import platform + # if platform.python_implementation() == 'Jython': + # raise SkipTest('graph atlas not available under Jython.') import networkx.generators.atlas as atlas cls.GAG = atlas.graph_atlas_g() @@ -51,32 +51,32 @@ class TestAtlas: def test_atlas(self): for graph in self.GAG: deg = (d for n, d in graph.degree()) - assert nx.is_graphical(deg, method='eg') - assert nx.is_graphical(deg, method='hh') + assert nx.is_graphical(deg, method="eg") + assert nx.is_graphical(deg, method="hh") def test_small_graph_true(): z = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] - assert nx.is_graphical(z, method='hh') - assert nx.is_graphical(z, method='eg') + assert nx.is_graphical(z, method="hh") + assert nx.is_graphical(z, method="eg") z = [10, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2] - assert nx.is_graphical(z, method='hh') - assert nx.is_graphical(z, method='eg') + assert nx.is_graphical(z, method="hh") + assert nx.is_graphical(z, method="eg") z = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4] - assert nx.is_graphical(z, method='hh') - assert nx.is_graphical(z, method='eg') + assert nx.is_graphical(z, method="hh") + assert nx.is_graphical(z, method="eg") def test_small_graph_false(): z = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] - assert not nx.is_graphical(z, method='hh') - assert not nx.is_graphical(z, method='eg') + assert not nx.is_graphical(z, method="hh") + assert not nx.is_graphical(z, method="eg") z = [6, 5, 4, 4, 2, 1, 1, 1] - assert not nx.is_graphical(z, method='hh') - assert not nx.is_graphical(z, method='eg') + assert not nx.is_graphical(z, method="hh") + assert not nx.is_graphical(z, method="eg") z = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] - assert not nx.is_graphical(z, method='hh') - assert not nx.is_graphical(z, method='eg') + assert not nx.is_graphical(z, method="hh") + assert not nx.is_graphical(z, method="eg") def test_directed_degree_sequence(): @@ -153,13 +153,13 @@ def test_pseudo_sequence(): def test_numpy_degree_sequence(): - numpy = pytest.importorskip('numpy') + numpy = pytest.importorskip("numpy") ds = numpy.array([1, 2, 2, 2, 
1], dtype=numpy.int64) - assert nx.is_graphical(ds, 'eg') - assert nx.is_graphical(ds, 'hh') + assert nx.is_graphical(ds, "eg") + assert nx.is_graphical(ds, "hh") ds = numpy.array([1, 2, 2, 2, 1], dtype=numpy.float64) - assert nx.is_graphical(ds, 'eg') - assert nx.is_graphical(ds, 'hh') + assert nx.is_graphical(ds, "eg") + assert nx.is_graphical(ds, "hh") ds = numpy.array([1.1, 2, 2, 2, 1], dtype=numpy.float64) - pytest.raises(nx.NetworkXException, nx.is_graphical, ds, 'eg') - pytest.raises(nx.NetworkXException, nx.is_graphical, ds, 'hh') + pytest.raises(nx.NetworkXException, nx.is_graphical, ds, "eg") + pytest.raises(nx.NetworkXException, nx.is_graphical, ds, "hh") diff --git a/networkx/algorithms/tests/test_hierarchy.py b/networkx/algorithms/tests/test_hierarchy.py index 52eaa236..9e7c6e34 100644 --- a/networkx/algorithms/tests/test_hierarchy.py +++ b/networkx/algorithms/tests/test_hierarchy.py @@ -25,10 +25,14 @@ def test_hierarchy_1(): def test_hierarchy_weight(): G = nx.DiGraph() - G.add_edges_from([(0, 1, {'weight': .3}), - (1, 2, {'weight': .1}), - (2, 3, {'weight': .1}), - (3, 1, {'weight': .1}), - (3, 4, {'weight': .3}), - (0, 4, {'weight': .3})]) - assert nx.flow_hierarchy(G, weight='weight') == .75 + G.add_edges_from( + [ + (0, 1, {"weight": 0.3}), + (1, 2, {"weight": 0.1}), + (2, 3, {"weight": 0.1}), + (3, 1, {"weight": 0.1}), + (3, 4, {"weight": 0.3}), + (0, 4, {"weight": 0.3}), + ] + ) + assert nx.flow_hierarchy(G, weight="weight") == 0.75 diff --git a/networkx/algorithms/tests/test_hybrid.py b/networkx/algorithms/tests/test_hybrid.py index a9fe20f7..6af00164 100644 --- a/networkx/algorithms/tests/test_hybrid.py +++ b/networkx/algorithms/tests/test_hybrid.py @@ -18,7 +18,7 @@ def test_small_graph(): G.add_edge(2, 3) assert nx.is_kl_connected(G, 2, 2) H = nx.kl_connected_subgraph(G, 2, 2) - (H, graphOK) = nx.kl_connected_subgraph(G, 2, 2, - low_memory=True, - same_as_graph=True) + (H, graphOK) = nx.kl_connected_subgraph( + G, 2, 2, low_memory=True, same_as_graph=True + ) assert graphOK diff --git a/networkx/algorithms/tests/test_link_prediction.py b/networkx/algorithms/tests/test_link_prediction.py index 0a21d31d..cb3e58c0 100644 --- a/networkx/algorithms/tests/test_link_prediction.py +++ b/networkx/algorithms/tests/test_link_prediction.py @@ -16,7 +16,7 @@ def _test_func(G, ebunch, expected, predict_func, **kwargs): assert nx.testing.almost_equal(exp_dict[p], res_dict[p]) -class TestResourceAllocationIndex(): +class TestResourceAllocationIndex: @classmethod def setup_class(cls): cls.func = staticmethod(nx.resource_allocation_index) @@ -35,12 +35,21 @@ class TestResourceAllocationIndex(): self.test(G, [(1, 2)], [(1, 2, 0.25)]) def test_notimplemented(self): - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.MultiGraph([(0, 1), (1, 2)]), [(0, 2)]) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), [(0, 2)]) + assert pytest.raises( + nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiDiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) def test_no_common_neighbor(self): G = nx.Graph() @@ -57,7 +66,7 @@ class TestResourceAllocationIndex(): self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)]) -class 
TestJaccardCoefficient(): +class TestJaccardCoefficient: @classmethod def setup_class(cls): cls.func = staticmethod(nx.jaccard_coefficient) @@ -72,12 +81,21 @@ class TestJaccardCoefficient(): self.test(G, [(0, 2)], [(0, 2, 0.5)]) def test_notimplemented(self): - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.MultiGraph([(0, 1), (1, 2)]), [(0, 2)]) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), [(0, 2)]) + assert pytest.raises( + nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiDiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) def test_no_common_neighbor(self): G = nx.Graph() @@ -95,7 +113,7 @@ class TestJaccardCoefficient(): self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)]) -class TestAdamicAdarIndex(): +class TestAdamicAdarIndex: @classmethod def setup_class(cls): cls.func = staticmethod(nx.adamic_adar_index) @@ -114,12 +132,21 @@ class TestAdamicAdarIndex(): self.test(G, [(1, 2)], [(1, 2, 1 / math.log(4))]) def test_notimplemented(self): - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.MultiGraph([(0, 1), (1, 2)]), [(0, 2)]) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), [(0, 2)]) + assert pytest.raises( + nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiDiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) def test_no_common_neighbor(self): G = nx.Graph() @@ -133,11 +160,12 @@ class TestAdamicAdarIndex(): def test_all_nonexistent_edges(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (2, 3)]) - self.test(G, None, [(0, 3, 1 / math.log(2)), (1, 2, 1 / math.log(2)), - (1, 3, 0)]) + self.test( + G, None, [(0, 3, 1 / math.log(2)), (1, 2, 1 / math.log(2)), (1, 3, 0)] + ) -class TestPreferentialAttachment(): +class TestPreferentialAttachment: @classmethod def setup_class(cls): cls.func = staticmethod(nx.preferential_attachment) @@ -156,12 +184,21 @@ class TestPreferentialAttachment(): self.test(G, [(0, 2)], [(0, 2, 4)]) def test_notimplemented(self): - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.MultiGraph([(0, 1), (1, 2)]), [(0, 2)]) - assert pytest.raises(nx.NetworkXNotImplemented, self.func, - nx.MultiDiGraph([(0, 1), (1, 2)]), [(0, 2)]) + assert pytest.raises( + nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)] + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) + assert pytest.raises( + nx.NetworkXNotImplemented, + self.func, + nx.MultiDiGraph([(0, 1), (1, 2)]), + [(0, 2)], + ) def test_zero_degrees(self): G = nx.Graph() @@ -174,36 +211,35 @@ class TestPreferentialAttachment(): self.test(G, None, [(0, 3, 2), (1, 2, 2), (1, 3, 1)]) -class TestCNSoundarajanHopcroft(): +class TestCNSoundarajanHopcroft: 
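# A minimal sketch of the link-prediction indices reformatted above, using
# the graph from test_all_nonexistent_edges; the scores match that test.
# With ebunch=None every non-edge is scored, and each Adamic-Adar score
# sums 1 / log(deg(w)) over the common neighbors w of the pair. Iteration
# order over non-edges is not guaranteed, hence the sort.
import math
import networkx as nx

G = nx.Graph([(0, 1), (0, 2), (2, 3)])
sorted(nx.adamic_adar_index(G))
# [(0, 3, 1.4426...), (1, 2, 1.4426...), (1, 3, 0)]  # 1.4426... == 1 / math.log(2)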
@classmethod def setup_class(cls): cls.func = staticmethod(nx.cn_soundarajan_hopcroft) - cls.test = partial(_test_func, predict_func=cls.func, - community='community') + cls.test = partial(_test_func, predict_func=cls.func, community="community") def test_K5(self): G = nx.complete_graph(5) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 1 self.test(G, [(0, 1)], [(0, 1, 5)]) def test_P3(self): G = nx.path_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 self.test(G, [(0, 2)], [(0, 2, 1)]) def test_S4(self): G = nx.star_graph(4) - G.nodes[0]['community'] = 1 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 1 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[0]["community"] = 1 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 1 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 2)]) def test_notimplemented(self): @@ -220,24 +256,24 @@ class TestCNSoundarajanHopcroft(): def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 self.test(G, [(0, 1)], [(0, 1, 0)]) def test_equal_nodes(self): G = nx.complete_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 self.test(G, [(0, 0)], [(0, 0, 4)]) def test_different_community(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 1 self.test(G, [(0, 3)], [(0, 3, 2)]) def test_no_community_information(self): @@ -247,69 +283,68 @@ class TestCNSoundarajanHopcroft(): def test_insufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[3]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[3]["community"] = 0 assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)])) def test_sufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)]) - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 self.test(G, [(1, 4)], [(1, 4, 4)]) def test_custom_community_attribute_name(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['cmty'] = 0 - G.nodes[1]['cmty'] = 0 - G.nodes[2]['cmty'] = 0 - G.nodes[3]['cmty'] = 1 - self.test(G, [(0, 3)], [(0, 3, 2)], community='cmty') + G.nodes[0]["cmty"] = 0 + G.nodes[1]["cmty"] = 0 + G.nodes[2]["cmty"] = 0 + G.nodes[3]["cmty"] = 1 + self.test(G, [(0, 3)], [(0, 3, 2)], community="cmty") def 
test_all_nonexistent_edges(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 self.test(G, None, [(0, 3, 2), (1, 2, 1), (1, 3, 0)]) -class TestRAIndexSoundarajanHopcroft(): +class TestRAIndexSoundarajanHopcroft: @classmethod def setup_class(cls): cls.func = staticmethod(nx.ra_index_soundarajan_hopcroft) - cls.test = partial(_test_func, predict_func=cls.func, - community='community') + cls.test = partial(_test_func, predict_func=cls.func, community="community") def test_K5(self): G = nx.complete_graph(5) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 1 self.test(G, [(0, 1)], [(0, 1, 0.5)]) def test_P3(self): G = nx.path_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 self.test(G, [(0, 2)], [(0, 2, 0)]) def test_S4(self): G = nx.star_graph(4) - G.nodes[0]['community'] = 1 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 1 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[0]["community"] = 1 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 1 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 0.25)]) def test_notimplemented(self): @@ -326,24 +361,24 @@ class TestRAIndexSoundarajanHopcroft(): def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 self.test(G, [(0, 1)], [(0, 1, 0)]) def test_equal_nodes(self): G = nx.complete_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 self.test(G, [(0, 0)], [(0, 0, 1)]) def test_different_community(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 1 self.test(G, [(0, 3)], [(0, 3, 0)]) def test_no_community_information(self): @@ -353,70 +388,71 @@ class TestRAIndexSoundarajanHopcroft(): def test_insufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[3]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[3]["community"] = 0 assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)])) def test_sufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)]) - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 
0 self.test(G, [(1, 4)], [(1, 4, 1)]) def test_custom_community_attribute_name(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['cmty'] = 0 - G.nodes[1]['cmty'] = 0 - G.nodes[2]['cmty'] = 0 - G.nodes[3]['cmty'] = 1 - self.test(G, [(0, 3)], [(0, 3, 0)], community='cmty') + G.nodes[0]["cmty"] = 0 + G.nodes[1]["cmty"] = 0 + G.nodes[2]["cmty"] = 0 + G.nodes[3]["cmty"] = 1 + self.test(G, [(0, 3)], [(0, 3, 0)], community="cmty") def test_all_nonexistent_edges(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 self.test(G, None, [(0, 3, 0.5), (1, 2, 0), (1, 3, 0)]) -class TestWithinInterCluster(): +class TestWithinInterCluster: @classmethod def setup_class(cls): cls.delta = 0.001 cls.func = staticmethod(nx.within_inter_cluster) - cls.test = partial(_test_func, predict_func=cls.func, - delta=cls.delta, community='community') + cls.test = partial( + _test_func, predict_func=cls.func, delta=cls.delta, community="community" + ) def test_K5(self): G = nx.complete_graph(5) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 1 self.test(G, [(0, 1)], [(0, 1, 2 / (1 + self.delta))]) def test_P3(self): G = nx.path_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 self.test(G, [(0, 2)], [(0, 2, 0)]) def test_S4(self): G = nx.star_graph(4) - G.nodes[0]['community'] = 1 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 1 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[0]["community"] = 1 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 1 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 self.test(G, [(1, 2)], [(1, 2, 1 / self.delta)]) def test_notimplemented(self): @@ -433,32 +469,32 @@ class TestWithinInterCluster(): def test_no_common_neighbor(self): G = nx.Graph() G.add_nodes_from([0, 1]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 self.test(G, [(0, 1)], [(0, 1, 0)]) def test_equal_nodes(self): G = nx.complete_graph(3) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 self.test(G, [(0, 0)], [(0, 0, 2 / self.delta)]) def test_different_community(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 1 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 1 self.test(G, [(0, 3)], [(0, 3, 0)]) def test_no_inter_cluster_common_neighbor(self): G = nx.complete_graph(4) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 
0 self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)]) def test_no_community_information(self): @@ -468,18 +504,18 @@ class TestWithinInterCluster(): def test_insufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 0 - G.nodes[3]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[3]["community"] = 0 assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)])) def test_sufficient_community_information(self): G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)]) - G.nodes[1]['community'] = 0 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 - G.nodes[4]['community'] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 self.test(G, [(1, 4)], [(1, 4, 2 / self.delta)]) def test_invalid_delta(self): @@ -490,17 +526,17 @@ class TestWithinInterCluster(): def test_custom_community_attribute_name(self): G = nx.complete_graph(4) - G.nodes[0]['cmty'] = 0 - G.nodes[1]['cmty'] = 0 - G.nodes[2]['cmty'] = 0 - G.nodes[3]['cmty'] = 0 - self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)], community='cmty') + G.nodes[0]["cmty"] = 0 + G.nodes[1]["cmty"] = 0 + G.nodes[2]["cmty"] = 0 + G.nodes[3]["cmty"] = 0 + self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)], community="cmty") def test_all_nonexistent_edges(self): G = nx.Graph() G.add_edges_from([(0, 1), (0, 2), (2, 3)]) - G.nodes[0]['community'] = 0 - G.nodes[1]['community'] = 1 - G.nodes[2]['community'] = 0 - G.nodes[3]['community'] = 0 + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 self.test(G, None, [(0, 3, 1 / self.delta), (1, 2, 0), (1, 3, 0)]) diff --git a/networkx/algorithms/tests/test_lowest_common_ancestors.py b/networkx/algorithms/tests/test_lowest_common_ancestors.py index 0455c224..fb09e1d6 100644 --- a/networkx/algorithms/tests/test_lowest_common_ancestors.py +++ b/networkx/algorithms/tests/test_lowest_common_ancestors.py @@ -23,21 +23,25 @@ class TestTreeLCA: cls.ans = dict(tree_all_pairs_lca(cls.DG, 0)) gold = {(n, n): n for n in cls.DG} gold.update({(0, i): 0 for i in range(1, 7)}) - gold.update({(1, 2): 0, - (1, 3): 1, - (1, 4): 1, - (1, 5): 0, - (1, 6): 0, - (2, 3): 0, - (2, 4): 0, - (2, 5): 2, - (2, 6): 2, - (3, 4): 1, - (3, 5): 0, - (3, 6): 0, - (4, 5): 0, - (4, 6): 0, - (5, 6): 2}) + gold.update( + { + (1, 2): 0, + (1, 3): 1, + (1, 4): 1, + (1, 5): 0, + (1, 6): 0, + (2, 3): 0, + (2, 4): 0, + (2, 5): 2, + (2, 6): 2, + (3, 4): 1, + (3, 5): 0, + (3, 6): 0, + (4, 5): 0, + (4, 6): 0, + (5, 6): 2, + } + ) cls.gold = gold @@ -59,8 +63,7 @@ class TestTreeLCA: def test_tree_all_pairs_lowest_common_ancestor3(self): """Specifying no pairs same as specifying all.""" - all_pairs = chain(combinations(self.DG, 2), - ((node, node) for node in self.DG)) + all_pairs = chain(combinations(self.DG, 2), ((node, node) for node in self.DG)) ans = dict(tree_all_pairs_lca(self.DG, 0, all_pairs)) self.assert_has_same_pairs(ans, self.ans) @@ -81,8 +84,11 @@ class TestTreeLCA: def test_tree_all_pairs_lowest_common_ancestor6(self): """Works on subtrees.""" ans = dict(tree_all_pairs_lca(self.DG, 1)) - gold = {pair: lca for (pair, lca) in self.gold.items() - if all(n in (1, 3, 4) for n in pair)} + gold = { + pair: lca + for (pair, lca) in self.gold.items() + if all(n in (1, 3, 4) for n in pair) + } self.assert_has_same_pairs(gold, ans) def 
test_tree_all_pairs_lowest_common_ancestor7(self): @@ -122,8 +128,9 @@ class TestTreeLCA: """Test that None as a node in the graph raises an error.""" G = nx.DiGraph([(None, 3)]) pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) - pytest.raises(nx.NodeNotFound, list, - tree_all_pairs_lca(self.DG, pairs=G.edges())) + pytest.raises( + nx.NodeNotFound, list, tree_all_pairs_lca(self.DG, pairs=G.edges()) + ) def test_tree_all_pairs_lowest_common_ancestor12(self): """Test that tree routine bails on DAGs.""" @@ -166,42 +173,44 @@ class TestDAGLCA: cls.root_distance = nx.shortest_path_length(cls.DG, source=0) - cls.gold = {(1, 1): 1, - (1, 2): 1, - (1, 3): 1, - (1, 4): 0, - (1, 5): 0, - (1, 6): 0, - (1, 7): 0, - (1, 8): 0, - (2, 2): 2, - (2, 3): 2, - (2, 4): 0, - (2, 5): 5, - (2, 6): 6, - (2, 7): 7, - (2, 8): 7, - (3, 3): 8, - (3, 4): 4, - (3, 5): 5, - (3, 6): 6, - (3, 7): 7, - (3, 8): 8, - (4, 4): 4, - (4, 5): 0, - (4, 6): 0, - (4, 7): 0, - (4, 8): 0, - (5, 5): 5, - (5, 6): 5, - (5, 7): 5, - (5, 8): 5, - (6, 6): 6, - (6, 7): 5, - (6, 8): 6, - (7, 7): 7, - (7, 8): 7, - (8, 8): 8} + cls.gold = { + (1, 1): 1, + (1, 2): 1, + (1, 3): 1, + (1, 4): 0, + (1, 5): 0, + (1, 6): 0, + (1, 7): 0, + (1, 8): 0, + (2, 2): 2, + (2, 3): 2, + (2, 4): 0, + (2, 5): 5, + (2, 6): 6, + (2, 7): 7, + (2, 8): 7, + (3, 3): 8, + (3, 4): 4, + (3, 5): 5, + (3, 6): 6, + (3, 7): 7, + (3, 8): 8, + (4, 4): 4, + (4, 5): 0, + (4, 6): 0, + (4, 7): 0, + (4, 8): 0, + (5, 5): 5, + (5, 6): 5, + (5, 7): 5, + (5, 8): 5, + (6, 6): 6, + (6, 7): 5, + (6, 8): 6, + (7, 7): 7, + (7, 8): 7, + (8, 8): 8, + } cls.gold.update(((0, n), 0) for n in cls.DG) def assert_lca_dicts_same(self, d1, d2, G=None): @@ -213,12 +222,13 @@ class TestDAGLCA: root_distance = self.root_distance else: roots = [n for n, deg in G.in_degree if deg == 0] - assert(len(roots) == 1) + assert len(roots) == 1 root_distance = nx.shortest_path_length(G, source=roots[0]) for a, b in ((min(pair), max(pair)) for pair in chain(d1, d2)): - assert (root_distance[get_pair(d1, a, b)] == - root_distance[get_pair(d2, a, b)]) + assert ( + root_distance[get_pair(d1, a, b)] == root_distance[get_pair(d2, a, b)] + ) def test_all_pairs_lowest_common_ancestor1(self): """Produces the correct results.""" @@ -269,14 +279,11 @@ class TestDAGLCA: def test_all_pairs_lowest_common_ancestor7(self): """Test that LCA on null graph bails.""" - pytest.raises(nx.NetworkXPointlessConcept, - all_pairs_lca, - nx.DiGraph()) + pytest.raises(nx.NetworkXPointlessConcept, all_pairs_lca, nx.DiGraph()) def test_all_pairs_lowest_common_ancestor8(self): """Test that LCA on non-dags bails.""" - pytest.raises(nx.NetworkXError, all_pairs_lca, - nx.DiGraph([(3, 4), (4, 3)])) + pytest.raises(nx.NetworkXError, all_pairs_lca, nx.DiGraph([(3, 4), (4, 3)])) def test_all_pairs_lowest_common_ancestor9(self): """Test that it works on non-empty graphs with no LCAs.""" @@ -289,15 +296,13 @@ class TestDAGLCA: """Test that it bails on None as a node.""" G = nx.DiGraph([(None, 3)]) pytest.raises(nx.NetworkXError, all_pairs_lca, G) - pytest.raises(nx.NodeNotFound, all_pairs_lca, - self.DG, pairs=G.edges()) + pytest.raises(nx.NodeNotFound, all_pairs_lca, self.DG, pairs=G.edges()) def test_lowest_common_ancestor1(self): """Test that the one-pair function works on default.""" G = nx.DiGraph([(0, 1), (2, 1)]) sentinel = object() - assert (nx.lowest_common_ancestor(G, 0, 2, default=sentinel) is - sentinel) + assert nx.lowest_common_ancestor(G, 0, 2, default=sentinel) is sentinel def test_lowest_common_ancestor2(self): """Test that 
the one-pair function works on identity.""" diff --git a/networkx/algorithms/tests/test_matching.py b/networkx/algorithms/tests/test_matching.py index 20f29808..5886ed64 100644 --- a/networkx/algorithms/tests/test_matching.py +++ b/networkx/algorithms/tests/test_matching.py @@ -27,16 +27,19 @@ class TestMaxWeightMatching: """Single edge""" G = nx.Graph() G.add_edge(0, 1) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({0: 1, 1: 0})) + assert_edges_equal( + nx.max_weight_matching(G), matching_dict_to_set({0: 1, 1: 0}) + ) def test_trivial4(self): """Small graph""" G = nx.Graph() - G.add_edge('one', 'two', weight=10) - G.add_edge('two', 'three', weight=11) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({'three': 'two', 'two': 'three'})) + G.add_edge("one", "two", weight=10) + G.add_edge("two", "three", weight=11) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({"three": "two", "two": "three"}), + ) def test_trivial5(self): """Path""" @@ -44,18 +47,22 @@ class TestMaxWeightMatching: G.add_edge(1, 2, weight=5) G.add_edge(2, 3, weight=11) G.add_edge(3, 4, weight=5) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({2: 3, 3: 2})) - assert_edges_equal(nx.max_weight_matching(G, 1), - matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3})) + assert_edges_equal( + nx.max_weight_matching(G), matching_dict_to_set({2: 3, 3: 2}) + ) + assert_edges_equal( + nx.max_weight_matching(G, 1), matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3}) + ) def test_trivial6(self): """Small graph with arbitrary weight attribute""" G = nx.Graph() - G.add_edge('one', 'two', weight=10, abcd=11) - G.add_edge('two', 'three', weight=11, abcd=10) - assert_edges_equal(nx.max_weight_matching(G, weight='abcd'), - matching_dict_to_set({'one': 'two', 'two': 'one'})) + G.add_edge("one", "two", weight=10, abcd=11) + G.add_edge("two", "three", weight=11, abcd=10) + assert_edges_equal( + nx.max_weight_matching(G, weight="abcd"), + matching_dict_to_set({"one": "two", "two": "one"}), + ) def test_floating_point_weights(self): """Floating point weights""" @@ -64,8 +71,9 @@ class TestMaxWeightMatching: G.add_edge(2, 3, weight=math.exp(1)) G.add_edge(1, 3, weight=3.0) G.add_edge(1, 4, weight=math.sqrt(2.0)) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 4, 2: 3, 3: 2, 4: 1})) + assert_edges_equal( + nx.max_weight_matching(G), matching_dict_to_set({1: 4, 2: 3, 3: 2, 4: 1}) + ) def test_negative_weights(self): """Negative weights""" @@ -75,46 +83,65 @@ class TestMaxWeightMatching: G.add_edge(2, 3, weight=1) G.add_edge(2, 4, weight=-1) G.add_edge(3, 4, weight=-6) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1})) - assert_edges_equal(nx.max_weight_matching(G, 1), - matching_dict_to_set({1: 3, 2: 4, 3: 1, 4: 2})) + assert_edges_equal( + nx.max_weight_matching(G), matching_dict_to_set({1: 2, 2: 1}) + ) + assert_edges_equal( + nx.max_weight_matching(G, 1), matching_dict_to_set({1: 3, 2: 4, 3: 1, 4: 2}) + ) def test_s_blossom(self): """Create S-blossom and use it for augmentation:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 8), (1, 3, 9), - (2, 3, 10), (3, 4, 7)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3})) + G.add_weighted_edges_from([(1, 2, 8), (1, 3, 9), (2, 3, 10), (3, 4, 7)]) + assert_edges_equal( + nx.max_weight_matching(G), matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3}) + ) G.add_weighted_edges_from([(1, 6, 5), (4, 5, 6)]) - 
assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1})) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}), + ) def test_s_t_blossom(self): """Create S-blossom, relabel as T-blossom, use for augmentation:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 9), (1, 3, 8), (2, 3, 10), - (1, 4, 5), (4, 5, 4), (1, 6, 3)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1})) + G.add_weighted_edges_from( + [(1, 2, 9), (1, 3, 8), (2, 3, 10), (1, 4, 5), (4, 5, 4), (1, 6, 3)] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}), + ) G.add_edge(4, 5, weight=3) G.add_edge(1, 6, weight=4) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1})) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1}), + ) G.remove_edge(1, 6) G.add_edge(3, 6, weight=4) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1, 3: 6, 4: 5, 5: 4, 6: 3})) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 2, 2: 1, 3: 6, 4: 5, 5: 4, 6: 3}), + ) def test_nested_s_blossom(self): """Create nested S-blossom, use for augmentation:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 9), (1, 3, 9), (2, 3, 10), - (2, 4, 8), (3, 5, 8), (4, 5, 10), - (5, 6, 6)]) + G.add_weighted_edges_from( + [ + (1, 2, 9), + (1, 3, 9), + (2, 3, 10), + (2, 4, 8), + (3, 5, 8), + (4, 5, 10), + (5, 6, 6), + ] + ) dict_format = {1: 3, 2: 4, 3: 1, 4: 2, 5: 6, 6: 5} expected = {frozenset(e) for e in matching_dict_to_set(dict_format)} answer = {frozenset(e) for e in nx.max_weight_matching(G)} @@ -123,101 +150,229 @@ class TestMaxWeightMatching: def test_nested_s_blossom_relabel(self): """Create S-blossom, relabel as S, include in nested S-blossom:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 10), (1, 7, 10), (2, 3, 12), - (3, 4, 20), (3, 5, 20), (4, 5, 25), - (5, 6, 10), (6, 7, 10), (7, 8, 8)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3, 5: 6, 6: 5, 7: 8, 8: 7})) + G.add_weighted_edges_from( + [ + (1, 2, 10), + (1, 7, 10), + (2, 3, 12), + (3, 4, 20), + (3, 5, 20), + (4, 5, 25), + (5, 6, 10), + (6, 7, 10), + (7, 8, 8), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3, 5: 6, 6: 5, 7: 8, 8: 7}), + ) def test_nested_s_blossom_expand(self): """Create nested S-blossom, augment, expand recursively:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 8), (1, 3, 8), (2, 3, 10), - (2, 4, 12), (3, 5, 12), (4, 5, 14), - (4, 6, 12), (5, 7, 12), (6, 7, 14), - (7, 8, 12)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 6, 5: 3, 6: 4, 7: 8, 8: 7})) + G.add_weighted_edges_from( + [ + (1, 2, 8), + (1, 3, 8), + (2, 3, 10), + (2, 4, 12), + (3, 5, 12), + (4, 5, 14), + (4, 6, 12), + (5, 7, 12), + (6, 7, 14), + (7, 8, 12), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 6, 5: 3, 6: 4, 7: 8, 8: 7}), + ) def test_s_blossom_relabel_expand(self): """Create S-blossom, relabel as T, expand:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 23), (1, 5, 22), (1, 6, 15), - (2, 3, 25), (3, 4, 22), (4, 5, 25), - (4, 8, 14), (5, 7, 13)]) - 
assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4})) + G.add_weighted_edges_from( + [ + (1, 2, 23), + (1, 5, 22), + (1, 6, 15), + (2, 3, 25), + (3, 4, 22), + (4, 5, 25), + (4, 8, 14), + (5, 7, 13), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4}), + ) def test_nested_s_blossom_relabel_expand(self): """Create nested S-blossom, relabel as T, expand:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 19), (1, 3, 20), (1, 8, 8), - (2, 3, 25), (2, 4, 18), (3, 5, 18), - (4, 5, 13), (4, 7, 7), (5, 6, 7)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 7, 5: 6, 6: 5, 7: 4, 8: 1})) + G.add_weighted_edges_from( + [ + (1, 2, 19), + (1, 3, 20), + (1, 8, 8), + (2, 3, 25), + (2, 4, 18), + (3, 5, 18), + (4, 5, 13), + (4, 7, 7), + (5, 6, 7), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 7, 5: 6, 6: 5, 7: 4, 8: 1}), + ) def test_nasty_blossom1(self): """Create blossom, relabel as T in more than one way, expand, augment: """ G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 45), (1, 5, 45), (2, 3, 50), - (3, 4, 45), (4, 5, 50), (1, 6, 30), - (3, 9, 35), (4, 8, 35), (5, 7, 26), - (9, 10, 5)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, - 6: 1, 7: 5, 8: 4, 9: 10, 10: 9})) + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 5, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 50), + (1, 6, 30), + (3, 9, 35), + (4, 8, 35), + (5, 7, 26), + (9, 10, 5), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set( + {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9} + ), + ) def test_nasty_blossom2(self): """Again but slightly different:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 45), (1, 5, 45), (2, 3, 50), - (3, 4, 45), (4, 5, 50), (1, 6, 30), - (3, 9, 35), (4, 8, 26), (5, 7, 40), - (9, 10, 5)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, - 6: 1, 7: 5, 8: 4, 9: 10, 10: 9})) + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 5, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 50), + (1, 6, 30), + (3, 9, 35), + (4, 8, 26), + (5, 7, 40), + (9, 10, 5), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set( + {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9} + ), + ) def test_nasty_blossom_least_slack(self): """Create blossom, relabel as T, expand such that a new least-slack S-to-free edge is produced, augment: """ G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 45), (1, 5, 45), (2, 3, 50), - (3, 4, 45), (4, 5, 50), (1, 6, 30), - (3, 9, 35), (4, 8, 28), (5, 7, 26), - (9, 10, 5)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, - 6: 1, 7: 5, 8: 4, 9: 10, 10: 9})) + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 5, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 50), + (1, 6, 30), + (3, 9, 35), + (4, 8, 28), + (5, 7, 26), + (9, 10, 5), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set( + {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9} + ), + ) def test_nasty_blossom_augmenting(self): """Create nested blossom, relabel as T in more than one way""" # expand outer blossom such that inner blossom ends up on an # augmenting path: G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 45),
(1, 7, 45), (2, 3, 50), - (3, 4, 45), (4, 5, 95), (4, 6, 94), - (5, 6, 94), (6, 7, 50), (1, 8, 30), - (3, 11, 35), (5, 9, 36), (7, 10, 26), - (11, 12, 5)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 6, 5: 9, 6: 4, - 7: 10, 8: 1, 9: 5, 10: 7, 11: 12, 12: 11})) + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 7, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 95), + (4, 6, 94), + (5, 6, 94), + (6, 7, 50), + (1, 8, 30), + (3, 11, 35), + (5, 9, 36), + (7, 10, 26), + (11, 12, 5), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set( + { + 1: 8, + 2: 3, + 3: 2, + 4: 6, + 5: 9, + 6: 4, + 7: 10, + 8: 1, + 9: 5, + 10: 7, + 11: 12, + 12: 11, + } + ), + ) def test_nasty_blossom_expand_recursively(self): """Create nested S-blossom, relabel as S, expand recursively:""" G = nx.Graph() - G.add_weighted_edges_from([(1, 2, 40), (1, 3, 40), (2, 3, 60), - (2, 4, 55), (3, 5, 55), (4, 5, 50), - (1, 8, 15), (5, 7, 30), (7, 6, 10), - (8, 10, 10), (4, 9, 30)]) - assert_edges_equal(nx.max_weight_matching(G), - matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 9, 5: 3, - 6: 7, 7: 6, 8: 10, 9: 4, 10: 8})) + G.add_weighted_edges_from( + [ + (1, 2, 40), + (1, 3, 40), + (2, 3, 60), + (2, 4, 55), + (3, 5, 55), + (4, 5, 50), + (1, 8, 15), + (5, 7, 30), + (7, 6, 10), + (8, 10, 10), + (4, 9, 30), + ] + ) + assert_edges_equal( + nx.max_weight_matching(G), + matching_dict_to_set( + {1: 2, 2: 1, 3: 5, 4: 9, 5: 3, 6: 7, 7: 6, 8: 10, 9: 4, 10: 8} + ), + ) class TestIsMatching: diff --git a/networkx/algorithms/tests/test_minors.py b/networkx/algorithms/tests/test_minors.py index 3400e320..2689f948 100644 --- a/networkx/algorithms/tests/test_minors.py +++ b/networkx/algorithms/tests/test_minors.py @@ -20,7 +20,7 @@ class TestQuotient: # neighbor set. def same_neighbors(u, v): - return (u not in G[v] and v not in G[u] and G[u] == G[v]) + return u not in G[v] and v not in G[u] and G[u] == G[v] expected = nx.complete_graph(3) actual = nx.quotient_graph(G, same_neighbors) @@ -38,7 +38,7 @@ class TestQuotient: # neighbor set. def same_neighbors(u, v): - return (u not in G[v] and v not in G[u] and G[u] == G[v]) + return u not in G[v] and v not in G[u] and G[u] == G[v] expected = nx.complete_graph(2) actual = nx.quotient_graph(G, same_neighbors) @@ -57,7 +57,7 @@ class TestQuotient: return u == v def same_parity(b, c): - return (arbitrary_element(b) % 2 == arbitrary_element(c) % 2) + return arbitrary_element(b) % 2 == arbitrary_element(c) % 2 actual = nx.quotient_graph(G, identity, same_parity) expected = nx.Graph() @@ -73,13 +73,34 @@ class TestQuotient: """ # This example graph comes from the file `test_strongly_connected.py`. G = nx.DiGraph() - G.add_edges_from([(1, 2), (2, 3), (2, 11), (2, 12), (3, 4), (4, 3), - (4, 5), (5, 6), (6, 5), (6, 7), (7, 8), (7, 9), - (7, 10), (8, 9), (9, 7), (10, 6), (11, 2), (11, 4), - (11, 6), (12, 6), (12, 11)]) + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 11), + (2, 12), + (3, 4), + (4, 3), + (4, 5), + (5, 6), + (6, 5), + (6, 7), + (7, 8), + (7, 9), + (7, 10), + (8, 9), + (9, 7), + (10, 6), + (11, 2), + (11, 4), + (11, 6), + (12, 6), + (12, 11), + ] + ) scc = list(nx.strongly_connected_components(G)) C = nx.condensation(G, scc) - component_of = C.graph['mapping'] + component_of = C.graph["mapping"] # Two nodes are equivalent if they are in the same connected component. 
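# A minimal sketch, assuming the NetworkX 2.x API, of what this test builds on:
# nx.condensation collapses each strongly connected component to a single node
# and records the node-to-component map in C.graph["mapping"].
import networkx as nx
G = nx.DiGraph([(1, 2), (2, 1), (2, 3)])  # {1, 2} forms an SCC, {3} is its own SCC
C = nx.condensation(G, nx.strongly_connected_components(G))
mapping = C.graph["mapping"]  # original node -> condensation node
assert len(C) == 2 and mapping[1] == mapping[2] != mapping[3]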
def same_component(u, v): @@ -95,9 +116,9 @@ class TestQuotient: assert_nodes_equal(M, [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M: - assert M.nodes[n]['nedges'] == 1 - assert M.nodes[n]['nnodes'] == 2 - assert M.nodes[n]['density'] == 1 + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 def test_multigraph_path(self): G = nx.MultiGraph(nx.path_graph(6)) @@ -106,9 +127,9 @@ class TestQuotient: assert_nodes_equal(M, [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M: - assert M.nodes[n]['nedges'] == 1 - assert M.nodes[n]['nnodes'] == 2 - assert M.nodes[n]['density'] == 1 + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 def test_directed_path(self): G = nx.DiGraph() @@ -118,9 +139,9 @@ class TestQuotient: assert_nodes_equal(M, [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M: - assert M.nodes[n]['nedges'] == 1 - assert M.nodes[n]['nnodes'] == 2 - assert M.nodes[n]['density'] == 0.5 + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 0.5 def test_directed_multigraph_path(self): G = nx.MultiDiGraph() @@ -130,9 +151,9 @@ class TestQuotient: assert_nodes_equal(M, [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M: - assert M.nodes[n]['nedges'] == 1 - assert M.nodes[n]['nnodes'] == 2 - assert M.nodes[n]['density'] == 0.5 + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 0.5 def test_overlapping_blocks(self): with pytest.raises(nx.NetworkXException): @@ -143,17 +164,17 @@ class TestQuotient: def test_weighted_path(self): G = nx.path_graph(6) for i in range(5): - G[i][i + 1]['weight'] = i + 1 + G[i][i + 1]["weight"] = i + 1 partition = [{0, 1}, {2, 3}, {4, 5}] M = nx.quotient_graph(G, partition, relabel=True) assert_nodes_equal(M, [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) - assert M[0][1]['weight'] == 2 - assert M[1][2]['weight'] == 4 + assert M[0][1]["weight"] == 2 + assert M[1][2]["weight"] == 4 for n in M: - assert M.nodes[n]['nedges'] == 1 - assert M.nodes[n]['nnodes'] == 2 - assert M.nodes[n]['density'] == 1 + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 def test_barbell(self): G = nx.barbell_graph(3, 0) @@ -162,9 +183,9 @@ class TestQuotient: assert_nodes_equal(M, [0, 1]) assert_edges_equal(M.edges(), [(0, 1)]) for n in M: - assert M.nodes[n]['nedges'] == 3 - assert M.nodes[n]['nnodes'] == 3 - assert M.nodes[n]['density'] == 1 + assert M.nodes[n]["nedges"] == 3 + assert M.nodes[n]["nnodes"] == 3 + assert M.nodes[n]["density"] == 1 def test_barbell_plus(self): G = nx.barbell_graph(3, 0) @@ -174,11 +195,11 @@ class TestQuotient: M = nx.quotient_graph(G, partition, relabel=True) assert_nodes_equal(M, [0, 1]) assert_edges_equal(M.edges(), [(0, 1)]) - assert M[0][1]['weight'] == 2 + assert M[0][1]["weight"] == 2 for n in M: - assert M.nodes[n]['nedges'] == 3 - assert M.nodes[n]['nnodes'] == 3 - assert M.nodes[n]['density'] == 1 + assert M.nodes[n]["nedges"] == 3 + assert M.nodes[n]["nnodes"] == 3 + assert M.nodes[n]["density"] == 1 def test_blockmodel(self): G = nx.path_graph(6) @@ -187,21 +208,20 @@ class TestQuotient: assert_nodes_equal(M.nodes(), [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M.nodes(): - assert M.nodes[n]['nedges'] == 1 - assert M.nodes[n]['nnodes'] == 2 - assert M.nodes[n]['density'] 
== 1.0 + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1.0 def test_multigraph_blockmodel(self): G = nx.MultiGraph(nx.path_graph(6)) partition = [[0, 1], [2, 3], [4, 5]] - M = nx.quotient_graph(G, partition, - create_using=nx.MultiGraph(), relabel=True) + M = nx.quotient_graph(G, partition, create_using=nx.MultiGraph(), relabel=True) assert_nodes_equal(M.nodes(), [0, 1, 2]) assert_edges_equal(M.edges(), [(0, 1), (1, 2)]) for n in M.nodes(): - assert M.nodes[n]['nedges'] == 1 - assert M.nodes[n]['nnodes'] == 2 - assert M.nodes[n]['density'] == 1.0 + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1.0 def test_quotient_graph_incomplete_partition(self): G = nx.path_graph(6) @@ -291,16 +311,16 @@ class TestContraction: """Tests that node contraction preserves node attributes.""" G = nx.cycle_graph(4) # Add some data to the two nodes being contracted. - G.nodes[0]['foo'] = 'bar' - G.nodes[1]['baz'] = 'xyzzy' + G.nodes[0]["foo"] = "bar" + G.nodes[1]["baz"] = "xyzzy" actual = nx.contracted_nodes(G, 0, 1) # We expect that contracting the nodes 0 and 1 in C_4 yields K_3, but # with nodes labeled 0, 2, and 3, and with a self-loop on 0. expected = nx.complete_graph(3) expected = nx.relabel_nodes(expected, {1: 2, 2: 3}) expected.add_edge(0, 0) - cdict = {1: {'baz': 'xyzzy'}} - expected.nodes[0].update(dict(foo='bar', contraction=cdict)) + cdict = {1: {"baz": "xyzzy"}} + expected.nodes[0].update(dict(foo="bar", contraction=cdict)) assert nx.is_isomorphic(actual, expected) assert actual.nodes == expected.nodes diff --git a/networkx/algorithms/tests/test_mis.py b/networkx/algorithms/tests/test_mis.py index 67294136..ad942004 100644 --- a/networkx/algorithms/tests/test_mis.py +++ b/networkx/algorithms/tests/test_mis.py @@ -11,26 +11,26 @@ import random class TestMaximalIndependantSet: def setup(self): self.florentine = nx.Graph() - self.florentine.add_edge('Acciaiuoli', 'Medici') - self.florentine.add_edge('Castellani', 'Peruzzi') - self.florentine.add_edge('Castellani', 'Strozzi') - self.florentine.add_edge('Castellani', 'Barbadori') - self.florentine.add_edge('Medici', 'Barbadori') - self.florentine.add_edge('Medici', 'Ridolfi') - self.florentine.add_edge('Medici', 'Tornabuoni') - self.florentine.add_edge('Medici', 'Albizzi') - self.florentine.add_edge('Medici', 'Salviati') - self.florentine.add_edge('Salviati', 'Pazzi') - self.florentine.add_edge('Peruzzi', 'Strozzi') - self.florentine.add_edge('Peruzzi', 'Bischeri') - self.florentine.add_edge('Strozzi', 'Ridolfi') - self.florentine.add_edge('Strozzi', 'Bischeri') - self.florentine.add_edge('Ridolfi', 'Tornabuoni') - self.florentine.add_edge('Tornabuoni', 'Guadagni') - self.florentine.add_edge('Albizzi', 'Ginori') - self.florentine.add_edge('Albizzi', 'Guadagni') - self.florentine.add_edge('Bischeri', 'Guadagni') - self.florentine.add_edge('Guadagni', 'Lamberteschi') + self.florentine.add_edge("Acciaiuoli", "Medici") + self.florentine.add_edge("Castellani", "Peruzzi") + self.florentine.add_edge("Castellani", "Strozzi") + self.florentine.add_edge("Castellani", "Barbadori") + self.florentine.add_edge("Medici", "Barbadori") + self.florentine.add_edge("Medici", "Ridolfi") + self.florentine.add_edge("Medici", "Tornabuoni") + self.florentine.add_edge("Medici", "Albizzi") + self.florentine.add_edge("Medici", "Salviati") + self.florentine.add_edge("Salviati", "Pazzi") + self.florentine.add_edge("Peruzzi", "Strozzi") + 
self.florentine.add_edge("Peruzzi", "Bischeri") + self.florentine.add_edge("Strozzi", "Ridolfi") + self.florentine.add_edge("Strozzi", "Bischeri") + self.florentine.add_edge("Ridolfi", "Tornabuoni") + self.florentine.add_edge("Tornabuoni", "Guadagni") + self.florentine.add_edge("Albizzi", "Ginori") + self.florentine.add_edge("Albizzi", "Guadagni") + self.florentine.add_edge("Bischeri", "Guadagni") + self.florentine.add_edge("Guadagni", "Lamberteschi") def test_random_seed(self): G = nx.complete_graph(5) @@ -52,10 +52,10 @@ class TestMaximalIndependantSet: def test_exception(self): """Bad input should raise exception.""" G = self.florentine - pytest.raises(nx.NetworkXUnfeasible, - nx.maximal_independent_set, G, ["Smith"]) - pytest.raises(nx.NetworkXUnfeasible, - nx.maximal_independent_set, G, ["Salviati", "Pazzi"]) + pytest.raises(nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Smith"]) + pytest.raises( + nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Salviati", "Pazzi"] + ) def test_digraph_exception(self): G = nx.DiGraph([(1, 2), (3, 4)]) @@ -64,9 +64,9 @@ class TestMaximalIndependantSet: def test_florentine_family(self): G = self.florentine indep = nx.maximal_independent_set(G, ["Medici", "Bischeri"]) - assert (sorted(indep) == - sorted(["Medici", "Bischeri", "Castellani", "Pazzi", - "Ginori", "Lamberteschi"])) + assert sorted(indep) == sorted( + ["Medici", "Bischeri", "Castellani", "Pazzi", "Ginori", "Lamberteschi"] + ) def test_bipartite(self): G = nx.complete_bipartite_graph(12, 34) diff --git a/networkx/algorithms/tests/test_non_randomness.py b/networkx/algorithms/tests/test_non_randomness.py index efbf3048..17925eb7 100644 --- a/networkx/algorithms/tests/test_non_randomness.py +++ b/networkx/algorithms/tests/test_non_randomness.py @@ -1,12 +1,14 @@ import networkx as nx import pytest -numpy = pytest.importorskip('numpy') -npt = pytest.importorskip('numpy.testing') + +numpy = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") def test_non_randomness(): G = nx.karate_club_graph() npt.assert_almost_equal(nx.non_randomness(G, 2)[0], 11.7, decimal=2) - npt.assert_almost_equal(nx.non_randomness(G)[0], - 7.21, decimal=2) # infers 3 communities + npt.assert_almost_equal( + nx.non_randomness(G)[0], 7.21, decimal=2 + ) # infers 3 communities diff --git a/networkx/algorithms/tests/test_planar_drawing.py b/networkx/algorithms/tests/test_planar_drawing.py index e8b8e866..025e8229 100644 --- a/networkx/algorithms/tests/test_planar_drawing.py +++ b/networkx/algorithms/tests/test_planar_drawing.py @@ -11,28 +11,47 @@ def test_graph1(): def test_graph2(): embedding_data = { - 0: [8, 6], 1: [2, 6, 9], 2: [8, 1, 7, 9, 6, 4], 3: [9], 4: [2], - 5: [6, 8], 6: [9, 1, 0, 5, 2], 7: [9, 2], 8: [0, 2, 5], - 9: [1, 6, 2, 7, 3] + 0: [8, 6], + 1: [2, 6, 9], + 2: [8, 1, 7, 9, 6, 4], + 3: [9], + 4: [2], + 5: [6, 8], + 6: [9, 1, 0, 5, 2], + 7: [9, 2], + 8: [0, 2, 5], + 9: [1, 6, 2, 7, 3], } check_embedding_data(embedding_data) def test_circle_graph(): embedding_data = { - 0: [1, 9], 1: [0, 2], 2: [1, 3], 3: [2, 4], 4: [3, 5], - 5: [4, 6], 6: [5, 7], 7: [6, 8], 8: [7, 9], 9: [8, 0] + 0: [1, 9], + 1: [0, 2], + 2: [1, 3], + 3: [2, 4], + 4: [3, 5], + 5: [4, 6], + 6: [5, 7], + 7: [6, 8], + 8: [7, 9], + 9: [8, 0], } check_embedding_data(embedding_data) def test_grid_graph(): embedding_data = { - (0, 1): [(0, 0), (1, 1), (0, 2)], (1, 2): [(1, 1), (2, 2), (0, 2)], - (0, 0): [(0, 1), (1, 0)], (2, 1): [(2, 0), (2, 2), (1, 1)], + (0, 1): [(0, 0), (1, 1), (0, 2)], + (1, 2): [(1, 1), (2, 
2), (0, 2)], + (0, 0): [(0, 1), (1, 0)], + (2, 1): [(2, 0), (2, 2), (1, 1)], (1, 1): [(2, 1), (1, 2), (0, 1), (1, 0)], - (2, 0): [(1, 0), (2, 1)], (2, 2): [(1, 2), (2, 1)], - (1, 0): [(0, 0), (2, 0), (1, 1)], (0, 2): [(1, 2), (0, 1)] + (2, 0): [(1, 0), (2, 1)], + (2, 2): [(1, 2), (2, 1)], + (1, 0): [(0, 0), (2, 0), (1, 1)], + (0, 2): [(1, 2), (0, 1)], } check_embedding_data(embedding_data) @@ -58,10 +77,7 @@ def test_multiple_component_graph1(): def test_multiple_component_graph2(): - embedding_data = { - 0: [1, 2], 1: [0, 2], 2: [0, 1], - 3: [4, 5], 4: [3, 5], 5: [3, 4] - } + embedding_data = {0: [1, 2], 1: [0, 2], 2: [0, 1], 3: [4, 5], 4: [3, 5], 5: [3, 4]} check_embedding_data(embedding_data) @@ -89,9 +105,13 @@ def test_triangulate_embedding2(): def check_triangulation(embedding, expected_embedding): res_embedding, _ = triangulate_embedding(embedding, True) - assert res_embedding.get_data() == expected_embedding, "Expected embedding incorrect" + assert ( + res_embedding.get_data() == expected_embedding + ), "Expected embedding incorrect" res_embedding, _ = triangulate_embedding(embedding, False) - assert res_embedding.get_data() == expected_embedding, "Expected embedding incorrect" + assert ( + res_embedding.get_data() == expected_embedding + ), "Expected embedding incorrect" def check_embedding_data(embedding_data): @@ -99,22 +119,19 @@ def check_embedding_data(embedding_data): embedding = nx.PlanarEmbedding() embedding.set_data(embedding_data) pos_fully = nx.combinatorial_embedding_to_pos(embedding, False) - msg = "Planar drawing does not conform to the embedding (fully " \ "triangulation)" + msg = "Planar drawing does not conform to the embedding (fully " "triangulation)" assert planar_drawing_conforms_to_embedding(embedding, pos_fully), msg check_edge_intersections(embedding, pos_fully) pos_internally = nx.combinatorial_embedding_to_pos(embedding, True) - msg = "Planar drawing does not conform to the embedding (internal " \ "triangulation)" - assert planar_drawing_conforms_to_embedding(embedding, - pos_internally), msg + msg = "Planar drawing does not conform to the embedding (internal " "triangulation)" + assert planar_drawing_conforms_to_embedding(embedding, pos_internally), msg check_edge_intersections(embedding, pos_internally) def is_close(a, b, rel_tol=1e-09, abs_tol=0.0): # Check if float numbers are basically equal, for python >=3.5 there is a # function for that in the standard library - return abs(a-b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol) + return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol) def point_in_between(a, b, p): @@ -122,10 +139,10 @@ def point_in_between(a, b, p): x1, y1 = a x2, y2 = b px, py = p - dist_1_2 = math.sqrt((x1 - x2)**2 + (y1 - y2)**2) - dist_1_p = math.sqrt((x1 - px)**2 + (y1 - py)**2) - dist_2_p = math.sqrt((x2 - px)**2 + (y2 - py)**2) - return is_close(dist_1_p+dist_2_p, dist_1_2) + dist_1_2 = math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2) + dist_1_p = math.sqrt((x1 - px) ** 2 + (y1 - py) ** 2) + dist_2_p = math.sqrt((x2 - px) ** 2 + (y2 - py) ** 2) + return is_close(dist_1_p + dist_2_p, dist_1_2) def check_edge_intersections(G, pos): @@ -152,23 +169,28 @@ def check_edge_intersections(G, pos): if determinant != 0: # the lines are not parallel # calculate intersection point, see: # https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection - px = ((x1 * y2 - y1 * x2) * (x3 - x4) - - (x1 - x2) * (x3 * y4 - y3 * x4) / float(determinant)) - py = ((x1 * y2 - y1 * x2) * (y3 - y4) - - (y1 - y2) * (x3 * y4 - y3 * x4) /
float(determinant)) + px = (x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * ( + x3 * y4 - y3 * x4 + ) / float(determinant) + py = (x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * ( + x3 * y4 - y3 * x4 + ) / float(determinant) # Check if intersection lies between the points - if (point_in_between(pos[a], pos[b], (px, py)) and - point_in_between(pos[c], pos[d], (px, py))): + if point_in_between(pos[a], pos[b], (px, py)) and point_in_between( + pos[c], pos[d], (px, py) + ): msg = f"There is an intersection at {px},{py}" raise nx.NetworkXException(msg) # Check overlap msg = "A node lies on an edge connecting two other nodes" - if (point_in_between(pos[a], pos[b], pos[c]) or - point_in_between(pos[a], pos[b], pos[d]) or - point_in_between(pos[c], pos[d], pos[a]) or - point_in_between(pos[c], pos[d], pos[b])): + if ( + point_in_between(pos[a], pos[b], pos[c]) + or point_in_between(pos[a], pos[b], pos[d]) + or point_in_between(pos[c], pos[d], pos[a]) + or point_in_between(pos[c], pos[d], pos[b]) + ): raise nx.NetworkXException(msg) # No edge intersection found @@ -179,7 +201,8 @@ class Vector: All vectors in direction [0, 1] are the smallest. The vectors grow in clockwise direction. """ - __slots__ = ['x', 'y', 'node', 'quadrant'] + + __slots__ = ["x", "y", "node", "quadrant"] def __init__(self, x, y, node): self.x = x @@ -195,8 +218,7 @@ def __init__(self, x, y, node): self.quadrant = 4 def __eq__(self, other): - return (self.quadrant == other.quadrant and - self.x * other.y == self.y * other.x) + return self.quadrant == other.quadrant and self.x * other.y == self.y * other.x def __lt__(self, other): if self.quadrant < other.quadrant: @@ -229,16 +251,17 @@ def planar_drawing_conforms_to_embedding(embedding, pos): nbr_vectors = [] v_pos = pos[v] for nbr in embedding[v]: - new_vector = Vector(pos[nbr][0] - v_pos[0], pos[nbr][1] - v_pos[1], - nbr) + new_vector = Vector(pos[nbr][0] - v_pos[0], pos[nbr][1] - v_pos[1], nbr) nbr_vectors.append(new_vector) # Sort neighbors according to their phi angle nbr_vectors.sort() for idx, nbr_vector in enumerate(nbr_vectors): cw_vector = nbr_vectors[(idx + 1) % len(nbr_vectors)] ccw_vector = nbr_vectors[idx - 1] - if (embedding[v][nbr_vector.node]['cw'] != cw_vector.node or - embedding[v][nbr_vector.node]['ccw'] != ccw_vector.node): + if ( + embedding[v][nbr_vector.node]["cw"] != cw_vector.node + or embedding[v][nbr_vector.node]["ccw"] != ccw_vector.node + ): return False if cw_vector.node != nbr_vector.node and cw_vector == nbr_vector: # Lines overlap diff --git a/networkx/algorithms/tests/test_planarity.py b/networkx/algorithms/tests/test_planarity.py index 833dd024..89fb8e7c 100644 --- a/networkx/algorithms/tests/test_planarity.py +++ b/networkx/algorithms/tests/test_planarity.py @@ -53,13 +53,37 @@ class TestLRPlanarity: check_counterexample(G, result_rec) def test_simple_planar_graph(self): - e = [(1, 2), (2, 3), (3, 4), (4, 6), (6, 7), (7, 1), (1, 5), - (5, 2), (2, 4), (4, 5), (5, 7)] + e = [ + (1, 2), + (2, 3), + (3, 4), + (4, 6), + (6, 7), + (7, 1), + (1, 5), + (5, 2), + (2, 4), + (4, 5), + (5, 7), + ] self.check_graph(nx.Graph(e), is_planar=True) def test_planar_with_selfloop(self): - e = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (1, 2), (1, 3), - (1, 5), (2, 5), (2, 4), (3, 4), (3, 5), (4, 5)] + e = [ + (1, 1), + (2, 2), + (3, 3), + (4, 4), + (5, 5), + (1, 2), + (1, 3), + (1, 5), + (2, 5), + (2, 4), + (3, 4), + (3, 5), + (4, 5), + ] self.check_graph(nx.Graph(e), is_planar=True) def test_k3_3(self): @@ -88,8 +112,18 @@ class TestLRPlanarity: def test_non_planar1(self): # tests a
graph that has no subgraph directly isomorphic to K5 or K3_3 - e = [(1, 5), (1, 6), (1, 7), (2, 6), (2, 3), (3, 5), (3, 7), (4, 5), - (4, 6), (4, 7)] + e = [ + (1, 5), + (1, 6), + (1, 7), + (2, 6), + (2, 3), + (3, 5), + (3, 7), + (4, 5), + (4, 6), + (4, 7), + ] self.check_graph(nx.Graph(e), is_planar=False) def test_loop(self): @@ -108,10 +142,33 @@ class TestLRPlanarity: def test_goldner_harary(self): # test goldner-harary graph (a maximal planar graph) e = [ - (1, 2), (1, 3), (1, 4), (1, 5), (1, 7), (1, 8), (1, 10), - (1, 11), (2, 3), (2, 4), (2, 6), (2, 7), (2, 9), (2, 10), - (2, 11), (3, 4), (4, 5), (4, 6), (4, 7), (5, 7), (6, 7), - (7, 8), (7, 9), (7, 10), (8, 10), (9, 10), (10, 11) + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (1, 7), + (1, 8), + (1, 10), + (1, 11), + (2, 3), + (2, 4), + (2, 6), + (2, 7), + (2, 9), + (2, 10), + (2, 11), + (3, 4), + (4, 5), + (4, 6), + (4, 7), + (5, 7), + (6, 7), + (7, 8), + (7, 9), + (7, 10), + (8, 10), + (9, 10), + (10, 11), ] G = nx.Graph(e) self.check_graph(G, is_planar=True) @@ -122,13 +179,11 @@ class TestLRPlanarity: def test_non_planar_multigraph(self): G = nx.MultiGraph(nx.complete_graph(5)) - G.add_edges_from([(1, 2)]*5) + G.add_edges_from([(1, 2)] * 5) self.check_graph(G, is_planar=False) def test_planar_digraph(self): - G = nx.DiGraph([ - (1, 2), (2, 3), (2, 4), (4, 1), (4, 2), (1, 4), (3, 2) - ]) + G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (4, 1), (4, 2), (1, 4), (3, 2)]) self.check_graph(G, is_planar=True) def test_non_planar_digraph(self): @@ -144,25 +199,65 @@ class TestLRPlanarity: self.check_graph(G, is_planar=True) def test_graph1(self): - G = nx.OrderedGraph([ - (3, 10), (2, 13), (1, 13), (7, 11), (0, 8), (8, 13), (0, 2), - (0, 7), (0, 10), (1, 7) - ]) + G = nx.OrderedGraph( + [ + (3, 10), + (2, 13), + (1, 13), + (7, 11), + (0, 8), + (8, 13), + (0, 2), + (0, 7), + (0, 10), + (1, 7), + ] + ) self.check_graph(G, is_planar=True) def test_graph2(self): - G = nx.OrderedGraph([ - (1, 2), (4, 13), (0, 13), (4, 5), (7, 10), (1, 7), (0, 3), (2, 6), - (5, 6), (7, 13), (4, 8), (0, 8), (0, 9), (2, 13), (6, 7), (3, 6), - (2, 8) - ]) + G = nx.OrderedGraph( + [ + (1, 2), + (4, 13), + (0, 13), + (4, 5), + (7, 10), + (1, 7), + (0, 3), + (2, 6), + (5, 6), + (7, 13), + (4, 8), + (0, 8), + (0, 9), + (2, 13), + (6, 7), + (3, 6), + (2, 8), + ] + ) self.check_graph(G, is_planar=False) def test_graph3(self): - G = nx.OrderedGraph([ - (0, 7), (3, 11), (3, 4), (8, 9), (4, 11), (1, 7), (1, 13), (1, 11), - (3, 5), (5, 7), (1, 3), (0, 4), (5, 11), (5, 13) - ]) + G = nx.OrderedGraph( + [ + (0, 7), + (3, 11), + (3, 4), + (8, 9), + (4, 11), + (1, 7), + (1, 13), + (1, 11), + (3, 5), + (5, 7), + (1, 3), + (0, 4), + (5, 11), + (5, 13), + ] + ) self.check_graph(G, is_planar=False) def test_counterexample_planar(self): @@ -200,15 +295,16 @@ def check_embedding(G, embedding): """ if not isinstance(embedding, nx.PlanarEmbedding): - raise nx.NetworkXException( - "Bad embedding. Not of type nx.PlanarEmbedding") + raise nx.NetworkXException("Bad embedding. Not of type nx.PlanarEmbedding") # Check structure embedding.check_structure() # Check that graphs are equivalent - assert set(G.nodes) == set(embedding.nodes), "Bad embedding. Nodes don't match the original graph." + assert set(G.nodes) == set( + embedding.nodes + ), "Bad embedding. Nodes don't match the original graph."
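# A minimal sketch, assuming the NetworkX 2.x API, of the certificate this helper
# validates: nx.check_planarity returns (is_planar, PlanarEmbedding), and the
# embedding's get_data() maps each node to its neighbors in clockwise order.
import networkx as nx
is_planar, embedding = nx.check_planarity(nx.complete_graph(4))  # K4 is planar
assert is_planar and isinstance(embedding, nx.PlanarEmbedding)
clockwise = embedding.get_data()  # {node: [neighbors in clockwise order], ...}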
# Check that the edges are equal g_edges = set() @@ -216,7 +312,9 @@ def check_embedding(G, embedding): if edge[0] != edge[1]: g_edges.add((edge[0], edge[1])) g_edges.add((edge[1], edge[0])) - assert g_edges == set(embedding.edges), "Bad embedding. Edges don't match the original graph." + assert g_edges == set( + embedding.edges + ), "Bad embedding. Edges don't match the original graph." def check_counterexample(G, sub_graph): diff --git a/networkx/algorithms/tests/test_richclub.py b/networkx/algorithms/tests/test_richclub.py index e2eb02e4..f933e61b 100644 --- a/networkx/algorithms/tests/test_richclub.py +++ b/networkx/algorithms/tests/test_richclub.py @@ -27,41 +27,44 @@ def test_richclub_normalized(): def test_richclub2(): T = nx.balanced_tree(2, 10) rc = nx.richclub.rich_club_coefficient(T, normalized=False) - assert rc == {0: 4092 / (2047 * 2046.0), - 1: (2044.0 / (1023 * 1022)), - 2: (2040.0 / (1022 * 1021))} + assert rc == { + 0: 4092 / (2047 * 2046.0), + 1: (2044.0 / (1023 * 1022)), + 2: (2040.0 / (1022 * 1021)), + } def test_richclub3(): # tests edgecase G = nx.karate_club_graph() rc = nx.rich_club_coefficient(G, normalized=False) - assert rc == {0: 156.0 / 1122, - 1: 154.0 / 1056, - 2: 110.0 / 462, - 3: 78.0 / 240, - 4: 44.0 / 90, - 5: 22.0 / 42, - 6: 10.0 / 20, - 7: 10.0 / 20, - 8: 10.0 / 20, - 9: 6.0 / 12, - 10: 2.0 / 6, - 11: 2.0 / 6, - 12: 0.0, - 13: 0.0, - 14: 0.0, - 15: 0.0, } + assert rc == { + 0: 156.0 / 1122, + 1: 154.0 / 1056, + 2: 110.0 / 462, + 3: 78.0 / 240, + 4: 44.0 / 90, + 5: 22.0 / 42, + 6: 10.0 / 20, + 7: 10.0 / 20, + 8: 10.0 / 20, + 9: 6.0 / 12, + 10: 2.0 / 6, + 11: 2.0 / 6, + 12: 0.0, + 13: 0.0, + 14: 0.0, + 15: 0.0, + } def test_richclub4(): G = nx.Graph() - G.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 4), (4, 5), (5, 9), (6, 9), (7, 9), (8, 9)]) + G.add_edges_from( + [(0, 1), (0, 2), (0, 3), (0, 4), (4, 5), (5, 9), (6, 9), (7, 9), (8, 9)] + ) rc = nx.rich_club_coefficient(G, normalized=False) - assert rc == {0: 18 / 90.0, - 1: 6 / 12.0, - 2: 0.0, - 3: 0.0} + assert rc == {0: 18 / 90.0, 1: 6 / 12.0, 2: 0.0, 3: 0.0} def test_richclub_exception(): diff --git a/networkx/algorithms/tests/test_similarity.py b/networkx/algorithms/tests/test_similarity.py index 6b9f1d20..229c2f53 100644 --- a/networkx/algorithms/tests/test_similarity.py +++ b/networkx/algorithms/tests/test_similarity.py @@ -4,13 +4,13 @@ import networkx as nx from networkx.algorithms.similarity import ( graph_edit_distance, optimal_edit_paths, - optimize_graph_edit_distance + optimize_graph_edit_distance, ) from networkx.generators.classic import ( circular_ladder_graph, cycle_graph, path_graph, - wheel_graph + wheel_graph, ) @@ -24,24 +24,23 @@ def ematch(e1, e2): def getCanonical(): G = nx.Graph() - G.add_node('A', label='A') - G.add_node('B', label='B') - G.add_node('C', label='C') - G.add_node('D', label='D') - G.add_edge('A', 'B', label='a-b') - G.add_edge('B', 'C', label='b-c') - G.add_edge('B', 'D', label='b-d') + G.add_node("A", label="A") + G.add_node("B", label="B") + G.add_node("C", label="C") + G.add_node("D", label="D") + G.add_edge("A", "B", label="a-b") + G.add_edge("B", "C", label="b-c") + G.add_edge("B", "D", label="b-d") return G class TestSimilarity: - @classmethod def setup_class(cls): global numpy global scipy - numpy = pytest.importorskip('numpy') - scipy = pytest.importorskip('scipy') + numpy = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") def test_graph_edit_distance_roots_and_timeout(self): G0 = nx.star_graph(5) @@ -89,83 +88,105 @@ class 
TestSimilarity: G1 = cycle_graph(5) G2 = cycle_graph(5) for n, attr in G1.nodes.items(): - attr['color'] = 'red' if n % 2 == 0 else 'blue' + attr["color"] = "red" if n % 2 == 0 else "blue" for n, attr in G2.nodes.items(): - attr['color'] = 'red' if n % 2 == 1 else 'blue' + attr["color"] = "red" if n % 2 == 1 else "blue" assert graph_edit_distance(G1, G2) == 0 - assert graph_edit_distance(G1, G2, node_match=lambda n1, n2: n1['color'] == n2['color']) == 1 + assert ( + graph_edit_distance( + G1, G2, node_match=lambda n1, n2: n1["color"] == n2["color"] + ) + == 1 + ) def test_graph_edit_distance_edge_match(self): G1 = path_graph(6) G2 = path_graph(6) for e, attr in G1.edges.items(): - attr['color'] = 'red' if min(e) % 2 == 0 else 'blue' + attr["color"] = "red" if min(e) % 2 == 0 else "blue" for e, attr in G2.edges.items(): - attr['color'] = 'red' if min(e) // 3 == 0 else 'blue' + attr["color"] = "red" if min(e) // 3 == 0 else "blue" assert graph_edit_distance(G1, G2) == 0 - assert graph_edit_distance(G1, G2, edge_match=lambda e1, e2: e1['color'] == e2['color']) == 2 + assert ( + graph_edit_distance( + G1, G2, edge_match=lambda e1, e2: e1["color"] == e2["color"] + ) + == 2 + ) def test_graph_edit_distance_node_cost(self): G1 = path_graph(6) G2 = path_graph(6) for n, attr in G1.nodes.items(): - attr['color'] = 'red' if n % 2 == 0 else 'blue' + attr["color"] = "red" if n % 2 == 0 else "blue" for n, attr in G2.nodes.items(): - attr['color'] = 'red' if n % 2 == 1 else 'blue' + attr["color"] = "red" if n % 2 == 1 else "blue" def node_subst_cost(uattr, vattr): - if uattr['color'] == vattr['color']: + if uattr["color"] == vattr["color"]: return 1 else: return 10 def node_del_cost(attr): - if attr['color'] == 'blue': + if attr["color"] == "blue": return 20 else: return 50 def node_ins_cost(attr): - if attr['color'] == 'blue': + if attr["color"] == "blue": return 40 else: return 100 - assert graph_edit_distance(G1, G2, - node_subst_cost=node_subst_cost, - node_del_cost=node_del_cost, - node_ins_cost=node_ins_cost) == 6 + assert ( + graph_edit_distance( + G1, + G2, + node_subst_cost=node_subst_cost, + node_del_cost=node_del_cost, + node_ins_cost=node_ins_cost, + ) + == 6 + ) def test_graph_edit_distance_edge_cost(self): G1 = path_graph(6) G2 = path_graph(6) for e, attr in G1.edges.items(): - attr['color'] = 'red' if min(e) % 2 == 0 else 'blue' + attr["color"] = "red" if min(e) % 2 == 0 else "blue" for e, attr in G2.edges.items(): - attr['color'] = 'red' if min(e) // 3 == 0 else 'blue' + attr["color"] = "red" if min(e) // 3 == 0 else "blue" def edge_subst_cost(gattr, hattr): - if gattr['color'] == hattr['color']: + if gattr["color"] == hattr["color"]: return 0.01 else: return 0.1 def edge_del_cost(attr): - if attr['color'] == 'blue': + if attr["color"] == "blue": return 0.2 else: return 0.5 def edge_ins_cost(attr): - if attr['color'] == 'blue': + if attr["color"] == "blue": return 0.4 else: return 1.0 - assert graph_edit_distance(G1, G2, - edge_subst_cost=edge_subst_cost, - edge_del_cost=edge_del_cost, - edge_ins_cost=edge_ins_cost) == 0.23 + assert ( + graph_edit_distance( + G1, + G2, + edge_subst_cost=edge_subst_cost, + edge_del_cost=edge_del_cost, + edge_ins_cost=edge_ins_cost, + ) + == 0.23 + ) def test_graph_edit_distance_upper_bound(self): G1 = circular_ladder_graph(2) @@ -182,16 +203,38 @@ class TestSimilarity: assert len(paths) == 6 def canonical(vertex_path, edge_path): - return tuple(sorted(vertex_path)), tuple(sorted(edge_path, key=lambda x: (None in x, x))) - - expected_paths = [([(0, 0), (1, 
1), (2, 2)], [((0, 1), (0, 1)), ((1, 2), (1, 2)), (None, (0, 2))]), - ([(0, 0), (1, 2), (2, 1)], [((0, 1), (0, 2)), ((1, 2), (1, 2)), (None, (0, 1))]), - ([(0, 1), (1, 0), (2, 2)], [((0, 1), (0, 1)), ((1, 2), (0, 2)), (None, (1, 2))]), - ([(0, 1), (1, 2), (2, 0)], [((0, 1), (1, 2)), ((1, 2), (0, 2)), (None, (0, 1))]), - ([(0, 2), (1, 0), (2, 1)], [((0, 1), (0, 2)), ((1, 2), (0, 1)), (None, (1, 2))]), - ([(0, 2), (1, 1), (2, 0)], [((0, 1), (1, 2)), ((1, 2), (0, 1)), (None, (0, 2))])] - assert ({canonical(*p) for p in paths} == - {canonical(*p) for p in expected_paths}) + return ( + tuple(sorted(vertex_path)), + tuple(sorted(edge_path, key=lambda x: (None in x, x))), + ) + + expected_paths = [ + ( + [(0, 0), (1, 1), (2, 2)], + [((0, 1), (0, 1)), ((1, 2), (1, 2)), (None, (0, 2))], + ), + ( + [(0, 0), (1, 2), (2, 1)], + [((0, 1), (0, 2)), ((1, 2), (1, 2)), (None, (0, 1))], + ), + ( + [(0, 1), (1, 0), (2, 2)], + [((0, 1), (0, 1)), ((1, 2), (0, 2)), (None, (1, 2))], + ), + ( + [(0, 1), (1, 2), (2, 0)], + [((0, 1), (1, 2)), ((1, 2), (0, 2)), (None, (0, 1))], + ), + ( + [(0, 2), (1, 0), (2, 1)], + [((0, 1), (0, 2)), ((1, 2), (0, 1)), (None, (1, 2))], + ), + ( + [(0, 2), (1, 1), (2, 0)], + [((0, 1), (1, 2)), ((1, 2), (0, 1)), (None, (0, 2))], + ), + ] + assert {canonical(*p) for p in paths} == {canonical(*p) for p in expected_paths} def test_optimize_graph_edit_distance(self): G1 = circular_ladder_graph(2) @@ -210,11 +253,11 @@ class TestSimilarity: def test_selfloops(self): G0 = nx.Graph() G1 = nx.Graph() - G1.add_edges_from((('A', 'A'), ('A', 'B'))) + G1.add_edges_from((("A", "A"), ("A", "B"))) G2 = nx.Graph() - G2.add_edges_from((('A', 'B'), ('B', 'B'))) + G2.add_edges_from((("A", "B"), ("B", "B"))) G3 = nx.Graph() - G3.add_edges_from((('A', 'A'), ('A', 'B'), ('B', 'B'))) + G3.add_edges_from((("A", "A"), ("A", "B"), ("B", "B"))) assert graph_edit_distance(G0, G0) == 0 assert graph_edit_distance(G0, G1) == 4 @@ -239,11 +282,11 @@ class TestSimilarity: def test_digraph(self): G0 = nx.DiGraph() G1 = nx.DiGraph() - G1.add_edges_from((('A', 'B'), ('B', 'C'), ('C', 'D'), ('D', 'A'))) + G1.add_edges_from((("A", "B"), ("B", "C"), ("C", "D"), ("D", "A"))) G2 = nx.DiGraph() - G2.add_edges_from((('A', 'B'), ('B', 'C'), ('C', 'D'), ('A', 'D'))) + G2.add_edges_from((("A", "B"), ("B", "C"), ("C", "D"), ("A", "D"))) G3 = nx.DiGraph() - G3.add_edges_from((('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'D'))) + G3.add_edges_from((("A", "B"), ("A", "C"), ("B", "D"), ("C", "D"))) assert graph_edit_distance(G0, G0) == 0 assert graph_edit_distance(G0, G1) == 8 @@ -268,11 +311,11 @@ class TestSimilarity: def test_multigraph(self): G0 = nx.MultiGraph() G1 = nx.MultiGraph() - G1.add_edges_from((('A', 'B'), ('B', 'C'), ('A', 'C'))) + G1.add_edges_from((("A", "B"), ("B", "C"), ("A", "C"))) G2 = nx.MultiGraph() - G2.add_edges_from((('A', 'B'), ('B', 'C'), ('B', 'C'), ('A', 'C'))) + G2.add_edges_from((("A", "B"), ("B", "C"), ("B", "C"), ("A", "C"))) G3 = nx.MultiGraph() - G3.add_edges_from((('A', 'B'), ('B', 'C'), ('A', 'C'), ('A', 'C'), ('A', 'C'))) + G3.add_edges_from((("A", "B"), ("B", "C"), ("A", "C"), ("A", "C"), ("A", "C"))) assert graph_edit_distance(G0, G0) == 0 assert graph_edit_distance(G0, G1) == 6 @@ -296,9 +339,23 @@ class TestSimilarity: def test_multidigraph(self): G1 = nx.MultiDiGraph() - G1.add_edges_from((('hardware', 'kernel'), ('kernel', 'hardware'), ('kernel', 'userspace'), ('userspace', 'kernel'))) + G1.add_edges_from( + ( + ("hardware", "kernel"), + ("kernel", "hardware"), + ("kernel", "userspace"), + 
("userspace", "kernel"), + ) + ) G2 = nx.MultiDiGraph() - G2.add_edges_from((('winter', 'spring'), ('spring', 'summer'), ('summer', 'autumn'), ('autumn', 'winter'))) + G2.add_edges_from( + ( + ("winter", "spring"), + ("spring", "summer"), + ("summer", "autumn"), + ("autumn", "winter"), + ) + ) assert graph_edit_distance(G1, G2) == 5 assert graph_edit_distance(G2, G1) == 5 @@ -306,216 +363,268 @@ class TestSimilarity: # by https://github.com/jfbeaumont def testCopy(self): G = nx.Graph() - G.add_node('A', label='A') - G.add_node('B', label='B') - G.add_edge('A', 'B', label='a-b') - assert graph_edit_distance(G, G.copy(), node_match=nmatch, edge_match=ematch) == 0 + G.add_node("A", label="A") + G.add_node("B", label="B") + G.add_edge("A", "B", label="a-b") + assert ( + graph_edit_distance(G, G.copy(), node_match=nmatch, edge_match=ematch) == 0 + ) def testSame(self): G1 = nx.Graph() - G1.add_node('A', label='A') - G1.add_node('B', label='B') - G1.add_edge('A', 'B', label='a-b') + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") G2 = nx.Graph() - G2.add_node('A', label='A') - G2.add_node('B', label='B') - G2.add_edge('A', 'B', label='a-b') + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_edge("A", "B", label="a-b") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 0 def testOneEdgeLabelDiff(self): G1 = nx.Graph() - G1.add_node('A', label='A') - G1.add_node('B', label='B') - G1.add_edge('A', 'B', label='a-b') + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") G2 = nx.Graph() - G2.add_node('A', label='A') - G2.add_node('B', label='B') - G2.add_edge('A', 'B', label='bad') + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_edge("A", "B", label="bad") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 def testOneNodeLabelDiff(self): G1 = nx.Graph() - G1.add_node('A', label='A') - G1.add_node('B', label='B') - G1.add_edge('A', 'B', label='a-b') + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") G2 = nx.Graph() - G2.add_node('A', label='Z') - G2.add_node('B', label='B') - G2.add_edge('A', 'B', label='a-b') + G2.add_node("A", label="Z") + G2.add_node("B", label="B") + G2.add_edge("A", "B", label="a-b") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 def testOneExtraNode(self): G1 = nx.Graph() - G1.add_node('A', label='A') - G1.add_node('B', label='B') - G1.add_edge('A', 'B', label='a-b') + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") G2 = nx.Graph() - G2.add_node('A', label='A') - G2.add_node('B', label='B') - G2.add_edge('A', 'B', label='a-b') - G2.add_node('C', label='C') + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_edge("A", "B", label="a-b") + G2.add_node("C", label="C") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 def testOneExtraEdge(self): G1 = nx.Graph() - G1.add_node('A', label='A') - G1.add_node('B', label='B') - G1.add_node('C', label='C') - G1.add_node('C', label='C') - G1.add_edge('A', 'B', label='a-b') + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_node("C", label="C") + G1.add_node("C", label="C") + G1.add_edge("A", "B", label="a-b") G2 = nx.Graph() - G2.add_node('A', label='A') - G2.add_node('B', label='B') - G2.add_node('C', label='C') - G2.add_edge('A', 'B', label='a-b') - G2.add_edge('A', 'C', 
label='a-c') + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("A", "C", label="a-c") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 def testOneExtraNodeAndEdge(self): G1 = nx.Graph() - G1.add_node('A', label='A') - G1.add_node('B', label='B') - G1.add_edge('A', 'B', label='a-b') + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") G2 = nx.Graph() - G2.add_node('A', label='A') - G2.add_node('B', label='B') - G2.add_node('C', label='C') - G2.add_edge('A', 'B', label='a-b') - G2.add_edge('A', 'C', label='a-c') + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("A", "C", label="a-c") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2 def testGraph1(self): G1 = getCanonical() G2 = nx.Graph() - G2.add_node('A', label='A') - G2.add_node('B', label='B') - G2.add_node('D', label='D') - G2.add_node('E', label='E') - G2.add_edge('A', 'B', label='a-b') - G2.add_edge('B', 'D', label='b-d') - G2.add_edge('D', 'E', label='d-e') + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("D", label="D") + G2.add_node("E", label="E") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "D", label="b-d") + G2.add_edge("D", "E", label="d-e") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 3 def testGraph2(self): G1 = getCanonical() G2 = nx.Graph() - G2.add_node('A', label='A') - G2.add_node('B', label='B') - G2.add_node('C', label='C') - G2.add_node('D', label='D') - G2.add_node('E', label='E') - G2.add_edge('A', 'B', label='a-b') - G2.add_edge('B', 'C', label='b-c') - G2.add_edge('C', 'D', label='c-d') - G2.add_edge('C', 'E', label='c-e') + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_node("E", label="E") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("C", "D", label="c-d") + G2.add_edge("C", "E", label="c-e") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 4 def testGraph3(self): G1 = getCanonical() G2 = nx.Graph() - G2.add_node('A', label='A') - G2.add_node('B', label='B') - G2.add_node('C', label='C') - G2.add_node('D', label='D') - G2.add_node('E', label='E') - G2.add_node('F', label='F') - G2.add_node('G', label='G') - G2.add_edge('A', 'C', label='a-c') - G2.add_edge('A', 'D', label='a-d') - G2.add_edge('D', 'E', label='d-e') - G2.add_edge('D', 'F', label='d-f') - G2.add_edge('D', 'G', label='d-g') - G2.add_edge('E', 'B', label='e-b') + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_node("E", label="E") + G2.add_node("F", label="F") + G2.add_node("G", label="G") + G2.add_edge("A", "C", label="a-c") + G2.add_edge("A", "D", label="a-d") + G2.add_edge("D", "E", label="d-e") + G2.add_edge("D", "F", label="d-f") + G2.add_edge("D", "G", label="d-g") + G2.add_edge("E", "B", label="e-b") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 12 def testGraph4(self): G1 = getCanonical() G2 = nx.Graph() - G2.add_node('A', label='A') - G2.add_node('B', label='B') - G2.add_node('C', label='C') - G2.add_node('D', label='D') - G2.add_edge('A', 'B', label='a-b') - G2.add_edge('B', 'C', label='b-c') - G2.add_edge('C', 'D', label='c-d') + 
G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("C", "D", label="c-d") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2 def testGraph4_a(self): G1 = getCanonical() G2 = nx.Graph() - G2.add_node('A', label='A') - G2.add_node('B', label='B') - G2.add_node('C', label='C') - G2.add_node('D', label='D') - G2.add_edge('A', 'B', label='a-b') - G2.add_edge('B', 'C', label='b-c') - G2.add_edge('A', 'D', label='a-d') + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("A", "D", label="a-d") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2 def testGraph4_b(self): G1 = getCanonical() G2 = nx.Graph() - G2.add_node('A', label='A') - G2.add_node('B', label='B') - G2.add_node('C', label='C') - G2.add_node('D', label='D') - G2.add_edge('A', 'B', label='a-b') - G2.add_edge('B', 'C', label='b-c') - G2.add_edge('B', 'D', label='bad') + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("B", "D", label="bad") assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 def test_simrank_no_source_no_target(self): G = nx.cycle_graph(5) - expected = {0: {0: 1, 1: 0.3951219505902448, 2: 0.5707317069281646, 3: 0.5707317069281646, 4: 0.3951219505902449}, 1: {0: 0.3951219505902448, 1: 1, 2: 0.3951219505902449, 3: 0.5707317069281646, 4: 0.5707317069281646}, 2: {0: 0.5707317069281646, 1: 0.3951219505902449, 2: 1, 3: 0.3951219505902449, 4: 0.5707317069281646}, 3: {0: 0.5707317069281646, 1: 0.5707317069281646, 2: 0.3951219505902449, 3: 1, 4: 0.3951219505902449}, 4: {0: 0.3951219505902449, 1: 0.5707317069281646, 2: 0.5707317069281646, 3: 0.3951219505902449, 4: 1}} + expected = { + 0: { + 0: 1, + 1: 0.3951219505902448, + 2: 0.5707317069281646, + 3: 0.5707317069281646, + 4: 0.3951219505902449, + }, + 1: { + 0: 0.3951219505902448, + 1: 1, + 2: 0.3951219505902449, + 3: 0.5707317069281646, + 4: 0.5707317069281646, + }, + 2: { + 0: 0.5707317069281646, + 1: 0.3951219505902449, + 2: 1, + 3: 0.3951219505902449, + 4: 0.5707317069281646, + }, + 3: { + 0: 0.5707317069281646, + 1: 0.5707317069281646, + 2: 0.3951219505902449, + 3: 1, + 4: 0.3951219505902449, + }, + 4: { + 0: 0.3951219505902449, + 1: 0.5707317069281646, + 2: 0.5707317069281646, + 3: 0.3951219505902449, + 4: 1, + }, + } actual = nx.simrank_similarity(G) assert expected == actual # For a DiGraph test, use the first graph from the paper cited in # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126 G = nx.DiGraph() - G.add_node(0, label='Univ') - G.add_node(1, label='ProfA') - G.add_node(2, label='ProfB') - G.add_node(3, label='StudentA') - G.add_node(4, label='StudentB') + G.add_node(0, label="Univ") + G.add_node(1, label="ProfA") + G.add_node(2, label="ProfB") + G.add_node(3, label="StudentA") + G.add_node(4, label="StudentB") G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)]) expected = { - 0: {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, - 4: 0.03387811817640443}, - 1: {0: 0.0, 1: 1, 2: 0.4135512472705618, 3: 0.0, - 4: 0.10586911930126384}, - 2: {0: 0.1323363991265798, 1: 0.4135512472705618, 2: 1, - 3: 
0.04234764772050554, 4: 0.08822426608438655}, - 3: {0: 0.0, 1: 0.0, 2: 0.04234764772050554, 3: 1, - 4: 0.3308409978164495}, - 4: {0: 0.03387811817640443, 1: 0.10586911930126384, - 2: 0.08822426608438655, 3: 0.3308409978164495, 4: 1} - } + 0: {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443}, + 1: {0: 0.0, 1: 1, 2: 0.4135512472705618, 3: 0.0, 4: 0.10586911930126384}, + 2: { + 0: 0.1323363991265798, + 1: 0.4135512472705618, + 2: 1, + 3: 0.04234764772050554, + 4: 0.08822426608438655, + }, + 3: {0: 0.0, 1: 0.0, 2: 0.04234764772050554, 3: 1, 4: 0.3308409978164495}, + 4: { + 0: 0.03387811817640443, + 1: 0.10586911930126384, + 2: 0.08822426608438655, + 3: 0.3308409978164495, + 4: 1, + }, + } # Use the importance_factor from the paper to get the same numbers. actual = nx.algorithms.similarity.simrank_similarity(G, importance_factor=0.8) assert expected == actual def test_simrank_source_no_target(self): G = nx.cycle_graph(5) - expected = {0: 1, 1: 0.3951219505902448, 2: 0.5707317069281646, 3: 0.5707317069281646, 4: 0.3951219505902449} + expected = { + 0: 1, + 1: 0.3951219505902448, + 2: 0.5707317069281646, + 3: 0.5707317069281646, + 4: 0.3951219505902449, + } actual = nx.simrank_similarity(G, source=0) assert expected == actual # For a DiGraph test, use the first graph from the paper cited in # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126 G = nx.DiGraph() - G.add_node(0, label='Univ') - G.add_node(1, label='ProfA') - G.add_node(2, label='ProfB') - G.add_node(3, label='StudentA') - G.add_node(4, label='StudentB') + G.add_node(0, label="Univ") + G.add_node(1, label="ProfA") + G.add_node(2, label="ProfB") + G.add_node(3, label="StudentA") + G.add_node(4, label="StudentB") G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)]) expected = {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443} # Use the importance_factor from the paper to get the same numbers. - actual = nx.algorithms.similarity.simrank_similarity(G, importance_factor=0.8, - source=0) + actual = nx.algorithms.similarity.simrank_similarity( + G, importance_factor=0.8, source=0 + ) assert expected == actual def test_simrank_source_and_target(self): @@ -526,36 +635,75 @@ class TestSimilarity: # For a DiGraph test, use the first graph from the paper cited in # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126 G = nx.DiGraph() - G.add_node(0, label='Univ') - G.add_node(1, label='ProfA') - G.add_node(2, label='ProfB') - G.add_node(3, label='StudentA') - G.add_node(4, label='StudentB') + G.add_node(0, label="Univ") + G.add_node(1, label="ProfA") + G.add_node(2, label="ProfB") + G.add_node(3, label="StudentA") + G.add_node(4, label="StudentB") G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)]) expected = 0.1323363991265798 # Use the importance_factor from the paper to get the same numbers. # Use the pair (0,2) because (0,0) and (0,1) have trivial results. 
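# A minimal sketch, assuming the NetworkX 2.x API, of the three calling styles
# exercised by these SimRank tests; importance_factor is the decay factor C from
# the paper cited above.
import networkx as nx
G = nx.cycle_graph(5)
full = nx.simrank_similarity(G)                         # dict of dicts for all pairs
row = nx.simrank_similarity(G, source=0)                # dict over targets of node 0
single = nx.simrank_similarity(G, source=0, target=2)   # a single float
assert full[0][2] == row[2] == single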
- actual = nx.algorithms.similarity.simrank_similarity(G, importance_factor=0.8, - source=0, target=2) + actual = nx.algorithms.similarity.simrank_similarity( + G, importance_factor=0.8, source=0, target=2 + ) assert expected == actual def test_simrank_numpy_no_source_no_target(self): G = nx.cycle_graph(5) - expected = numpy.array([ - [1.0, 0.3947180735764555, 0.570482097206368, 0.570482097206368, 0.3947180735764555], - [0.3947180735764555, 1.0, 0.3947180735764555, 0.570482097206368, 0.570482097206368], - [0.570482097206368, 0.3947180735764555, 1.0, 0.3947180735764555, 0.570482097206368], - [0.570482097206368, 0.570482097206368, 0.3947180735764555, 1.0, 0.3947180735764555], - [0.3947180735764555, 0.570482097206368, 0.570482097206368, 0.3947180735764555, 1.0] - ]) + expected = numpy.array( + [ + [ + 1.0, + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + 0.3947180735764555, + ], + [ + 0.3947180735764555, + 1.0, + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + ], + [ + 0.570482097206368, + 0.3947180735764555, + 1.0, + 0.3947180735764555, + 0.570482097206368, + ], + [ + 0.570482097206368, + 0.570482097206368, + 0.3947180735764555, + 1.0, + 0.3947180735764555, + ], + [ + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + 0.3947180735764555, + 1.0, + ], + ] + ) actual = nx.simrank_similarity_numpy(G) numpy.testing.assert_allclose(expected, actual, atol=1e-7) def test_simrank_numpy_source_no_target(self): G = nx.cycle_graph(5) expected = numpy.array( - [1.0, 0.3947180735764555, 0.570482097206368, 0.570482097206368, 0.3947180735764555], + [ + 1.0, + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + 0.3947180735764555, + ], ) actual = nx.simrank_similarity_numpy(G, source=0) numpy.testing.assert_allclose(expected, actual, atol=1e-7) diff --git a/networkx/algorithms/tests/test_simple_paths.py b/networkx/algorithms/tests/test_simple_paths.py index 99c4bf02..aedd5a74 100644 --- a/networkx/algorithms/tests/test_simple_paths.py +++ b/networkx/algorithms/tests/test_simple_paths.py @@ -39,7 +39,7 @@ class TestIsSimplePath: """ G = nx.trivial_graph() - assert not nx.is_simple_path(G, ['not a node']) + assert not nx.is_simple_path(G, ["not a node"]) def test_simple_path(self): G = nx.path_graph(2) @@ -148,16 +148,28 @@ def test_all_simple_paths_cutoff(): def test_all_simple_paths_on_non_trivial_graph(): - ''' you may need to draw this graph to make sure it is reasonable ''' + """ you may need to draw this graph to make sure it is reasonable """ G = nx.path_graph(5, create_using=nx.DiGraph()) G.add_edges_from([(0, 5), (1, 5), (1, 3), (5, 4), (4, 2), (4, 3)]) paths = nx.all_simple_paths(G, 1, [2, 3]) assert {tuple(p) for p in paths} == { - (1, 2), (1, 3, 4, 2), (1, 5, 4, 2), (1, 3), (1, 2, 3), (1, 5, 4, 3), - (1, 5, 4, 2, 3)} + (1, 2), + (1, 3, 4, 2), + (1, 5, 4, 2), + (1, 3), + (1, 2, 3), + (1, 5, 4, 3), + (1, 5, 4, 2, 3), + } paths = nx.all_simple_paths(G, 1, [2, 3], cutoff=3) assert {tuple(p) for p in paths} == { - (1, 2), (1, 3, 4, 2), (1, 5, 4, 2), (1, 3), (1, 2, 3), (1, 5, 4, 3)} + (1, 2), + (1, 3, 4, 2), + (1, 5, 4, 2), + (1, 3), + (1, 2, 3), + (1, 5, 4, 3), + } paths = nx.all_simple_paths(G, 1, [2, 3], cutoff=2) assert {tuple(p) for p in paths} == {(1, 2), (1, 3), (1, 2, 3)} @@ -211,6 +223,7 @@ def hamiltonian_path(G, source): def test_hamiltonian_path(): from itertools import permutations + G = nx.complete_graph(4) paths = [list(p) for p in hamiltonian_path(G, 0)] exact = [[0] + list(p) for p in permutations([1, 2, 3], 3)] @@ -251,8 +264,9 
@@ def test_all_simple_edge_paths_with_two_targets_emits_two_paths(): G.add_edge(2, 4) paths = nx.all_simple_edge_paths(G, 0, [3, 4]) assert {tuple(p) for p in paths} == { - ((0, 1), (1, 2), (2, 3)), ((0, 1), (1, 2), (2, 4)) - } + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } def test_digraph_all_simple_edge_paths_with_two_targets_emits_two_paths(): @@ -260,8 +274,9 @@ def test_digraph_all_simple_edge_paths_with_two_targets_emits_two_paths(): G.add_edge(2, 4) paths = nx.all_simple_edge_paths(G, 0, [3, 4]) assert {tuple(p) for p in paths} == { - ((0, 1), (1, 2), (2, 3)), ((0, 1), (1, 2), (2, 4)) - } + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } def test_all_simple_edge_paths_with_two_targets_cutoff(): @@ -269,8 +284,9 @@ def test_all_simple_edge_paths_with_two_targets_cutoff(): G.add_edge(2, 4) paths = nx.all_simple_edge_paths(G, 0, [3, 4], cutoff=3) assert {tuple(p) for p in paths} == { - ((0, 1), (1, 2), (2, 3)), ((0, 1), (1, 2), (2, 4)) - } + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } def test_digraph_all_simple_edge_paths_with_two_targets_cutoff(): @@ -278,8 +294,9 @@ def test_digraph_all_simple_edge_paths_with_two_targets_cutoff(): G.add_edge(2, 4) paths = nx.all_simple_edge_paths(G, 0, [3, 4], cutoff=3) assert {tuple(p) for p in paths} == { - ((0, 1), (1, 2), (2, 3)), ((0, 1), (1, 2), (2, 4)) - } + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } def test_all_simple_edge_paths_with_two_targets_in_line_emits_two_paths(): @@ -317,24 +334,30 @@ def test_all_simple_edge_paths_cutoff(): def test_all_simple_edge_paths_on_non_trivial_graph(): - ''' you may need to draw this graph to make sure it is reasonable ''' + """ you may need to draw this graph to make sure it is reasonable """ G = nx.path_graph(5, create_using=nx.DiGraph()) G.add_edges_from([(0, 5), (1, 5), (1, 3), (5, 4), (4, 2), (4, 3)]) paths = nx.all_simple_edge_paths(G, 1, [2, 3]) assert {tuple(p) for p in paths} == { - ((1, 2),), ((1, 3), (3, 4), (4, 2)), ((1, 5), (5, 4), (4, 2)), - ((1, 3),), ((1, 2), (2, 3)), ((1, 5), (5, 4), (4, 3)), - ((1, 5), (5, 4), (4, 2), (2, 3)) - } + ((1, 2),), + ((1, 3), (3, 4), (4, 2)), + ((1, 5), (5, 4), (4, 2)), + ((1, 3),), + ((1, 2), (2, 3)), + ((1, 5), (5, 4), (4, 3)), + ((1, 5), (5, 4), (4, 2), (2, 3)), + } paths = nx.all_simple_edge_paths(G, 1, [2, 3], cutoff=3) assert {tuple(p) for p in paths} == { - ((1, 2),), ((1, 3), (3, 4), (4, 2)), ((1, 5), (5, 4), (4, 2)), - ((1, 3),), ((1, 2), (2, 3)), ((1, 5), (5, 4), (4, 3)), - } + ((1, 2),), + ((1, 3), (3, 4), (4, 2)), + ((1, 5), (5, 4), (4, 2)), + ((1, 3),), + ((1, 2), (2, 3)), + ((1, 5), (5, 4), (4, 3)), + } paths = nx.all_simple_edge_paths(G, 1, [2, 3], cutoff=2) - assert {tuple(p) for p in paths} == { - ((1, 2),), ((1, 3),), ((1, 2), (2, 3)) - } + assert {tuple(p) for p in paths} == {((1, 2),), ((1, 3),), ((1, 2), (2, 3))} def test_all_simple_edge_paths_multigraph(): @@ -345,8 +368,10 @@ def test_all_simple_edge_paths_multigraph(): paths = list(nx.all_simple_edge_paths(G, 1, 2)) assert len(paths) == 3 assert {tuple(p) for p in paths} == { - ((1, 2, 0),), ((1, 2, 1),), ((1, 10, 0), (10, 2, 0)) - } + ((1, 2, 0),), + ((1, 2, 1),), + ((1, 10, 0), (10, 2, 0)), + } def test_all_simple_edge_paths_multigraph_with_cutoff(): @@ -388,6 +413,7 @@ def hamiltonian_edge_path(G, source): def test_hamiltonian__edge_path(): from itertools import permutations + G = nx.complete_graph(4) paths = hamiltonian_edge_path(G, 0) exact = [list(pairwise([0] + list(p))) for p in permutations([1, 2, 3], 3)] @@ -422,8 +448,9 @@ 
def test_shortest_simple_paths(): paths = nx.shortest_simple_paths(G, 1, 12) assert next(paths) == [1, 2, 3, 4, 8, 12] assert next(paths) == [1, 5, 6, 7, 8, 12] - assert ([len(path) for path in nx.shortest_simple_paths(G, 1, 12)] == - sorted([len(path) for path in nx.all_simple_paths(G, 1, 12)])) + assert [len(path) for path in nx.shortest_simple_paths(G, 1, 12)] == sorted( + [len(path) for path in nx.all_simple_paths(G, 1, 12)] + ) def test_shortest_simple_paths_directed(): @@ -435,17 +462,20 @@ def test_shortest_simple_paths_directed(): def test_shortest_simple_paths_directed_with_weight_fucntion(): def cost(u, v, x): return 1 + G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") paths = nx.shortest_simple_paths(G, 1, 12) assert next(paths) == [1, 2, 3, 4, 8, 12] assert next(paths) == [1, 5, 6, 7, 8, 12] - assert ([len(path) for path in nx.shortest_simple_paths(G, 1, 12, weight=cost)] - == sorted([len(path) for path in nx.all_simple_paths(G, 1, 12)])) + assert [ + len(path) for path in nx.shortest_simple_paths(G, 1, 12, weight=cost) + ] == sorted([len(path) for path in nx.all_simple_paths(G, 1, 12)]) def test_shortest_simple_paths_with_weight_fucntion(): def cost(u, v, x): return 1 + G = nx.cycle_graph(7, create_using=nx.DiGraph()) paths = nx.shortest_simple_paths(G, 0, 3, weight=cost) assert [path for path in paths] == [[0, 1, 2, 3]] @@ -460,20 +490,20 @@ def test_Greg_Bernstein(): g1.add_edge("N3", "N0", weight=10.0, capacity=50, name="L0") g1.add_edge("N2", "N3", weight=12.0, capacity=30, name="L2") g1.add_edge("N1", "N2", weight=15.0, capacity=42, name="L3") - solution = [['N1', 'N0', 'N3'], ['N1', 'N2', 'N3'], ['N1', 'N4', 'N0', 'N3']] - result = list(nx.shortest_simple_paths(g1, 'N1', 'N3', weight='weight')) + solution = [["N1", "N0", "N3"], ["N1", "N2", "N3"], ["N1", "N4", "N0", "N3"]] + result = list(nx.shortest_simple_paths(g1, "N1", "N3", weight="weight")) assert result == solution def test_weighted_shortest_simple_path(): def cost_func(path): - return sum(G.adj[u][v]['weight'] for (u, v) in zip(path, path[1:])) + return sum(G.adj[u][v]["weight"] for (u, v) in zip(path, path[1:])) G = nx.complete_graph(5) weight = {(u, v): random.randint(1, 100) for (u, v) in G.edges()} - nx.set_edge_attributes(G, weight, 'weight') + nx.set_edge_attributes(G, weight, "weight") cost = 0 - for path in nx.shortest_simple_paths(G, 0, 3, weight='weight'): + for path in nx.shortest_simple_paths(G, 0, 3, weight="weight"): this_cost = cost_func(path) assert cost <= this_cost cost = this_cost @@ -481,14 +511,14 @@ def test_weighted_shortest_simple_path(): def test_directed_weighted_shortest_simple_path(): def cost_func(path): - return sum(G.adj[u][v]['weight'] for (u, v) in zip(path, path[1:])) + return sum(G.adj[u][v]["weight"] for (u, v) in zip(path, path[1:])) G = nx.complete_graph(5) G = G.to_directed() weight = {(u, v): random.randint(1, 100) for (u, v) in G.edges()} - nx.set_edge_attributes(G, weight, 'weight') + nx.set_edge_attributes(G, weight, "weight") cost = 0 - for path in nx.shortest_simple_paths(G, 0, 3, weight='weight'): + for path in nx.shortest_simple_paths(G, 0, 3, weight="weight"): this_cost = cost_func(path) assert cost <= this_cost cost = this_cost @@ -496,44 +526,52 @@ def test_directed_weighted_shortest_simple_path(): def test_weighted_shortest_simple_path_issue2427(): G = nx.Graph() - G.add_edge('IN', 'OUT', weight=2) - G.add_edge('IN', 'A', weight=1) - G.add_edge('IN', 'B', weight=2) - G.add_edge('B', 'OUT', weight=2) - assert (list(nx.shortest_simple_paths(G, 
'IN', 'OUT', weight="weight")) == - [['IN', 'OUT'], ['IN', 'B', 'OUT']]) + G.add_edge("IN", "OUT", weight=2) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=2) + G.add_edge("B", "OUT", weight=2) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "OUT"], + ["IN", "B", "OUT"], + ] G = nx.Graph() - G.add_edge('IN', 'OUT', weight=10) - G.add_edge('IN', 'A', weight=1) - G.add_edge('IN', 'B', weight=1) - G.add_edge('B', 'OUT', weight=1) - assert (list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight="weight")) == - [['IN', 'B', 'OUT'], ['IN', 'OUT']]) + G.add_edge("IN", "OUT", weight=10) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=1) + G.add_edge("B", "OUT", weight=1) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "B", "OUT"], + ["IN", "OUT"], + ] def test_directed_weighted_shortest_simple_path_issue2427(): G = nx.DiGraph() - G.add_edge('IN', 'OUT', weight=2) - G.add_edge('IN', 'A', weight=1) - G.add_edge('IN', 'B', weight=2) - G.add_edge('B', 'OUT', weight=2) - assert (list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight="weight")) == - [['IN', 'OUT'], ['IN', 'B', 'OUT']]) + G.add_edge("IN", "OUT", weight=2) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=2) + G.add_edge("B", "OUT", weight=2) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "OUT"], + ["IN", "B", "OUT"], + ] G = nx.DiGraph() - G.add_edge('IN', 'OUT', weight=10) - G.add_edge('IN', 'A', weight=1) - G.add_edge('IN', 'B', weight=1) - G.add_edge('B', 'OUT', weight=1) - assert (list(nx.shortest_simple_paths(G, 'IN', 'OUT', weight="weight")) == - [['IN', 'B', 'OUT'], ['IN', 'OUT']]) + G.add_edge("IN", "OUT", weight=10) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=1) + G.add_edge("B", "OUT", weight=1) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "B", "OUT"], + ["IN", "OUT"], + ] def test_weight_name(): G = nx.cycle_graph(7) - nx.set_edge_attributes(G, 1, 'weight') - nx.set_edge_attributes(G, 1, 'foo') - G.adj[1][2]['foo'] = 7 - paths = list(nx.shortest_simple_paths(G, 0, 3, weight='foo')) + nx.set_edge_attributes(G, 1, "weight") + nx.set_edge_attributes(G, 1, "foo") + G.adj[1][2]["foo"] = 7 + paths = list(nx.shortest_simple_paths(G, 0, 3, weight="foo")) solution = [[0, 6, 5, 4, 3], [0, 1, 2, 3]] assert paths == solution @@ -583,8 +621,9 @@ def test_bidirectional_shortest_path_restricted_wheel(): assert path == [1, 2, 3] length, path = _bidirectional_shortest_path(wheel, 1, 3, ignore_nodes=[0, 2]) assert path == [1, 5, 4, 3] - length, path = _bidirectional_shortest_path(wheel, 1, 3, - ignore_edges=[(1, 0), (5, 0), (2, 3)]) + length, path = _bidirectional_shortest_path( + wheel, 1, 3, ignore_edges=[(1, 0), (5, 0), (2, 3)] + ) assert path in [[1, 2, 0, 3], [1, 5, 4, 3]] @@ -596,17 +635,20 @@ def test_bidirectional_shortest_path_restricted_directed_cycle(): nx.NetworkXNoPath, _bidirectional_shortest_path, directed_cycle, - 0, 3, + 0, + 3, ignore_nodes=[1], ) - length, path = _bidirectional_shortest_path(directed_cycle, 0, 3, - ignore_edges=[(2, 1)]) + length, path = _bidirectional_shortest_path( + directed_cycle, 0, 3, ignore_edges=[(2, 1)] + ) assert path == [0, 1, 2, 3] pytest.raises( nx.NetworkXNoPath, _bidirectional_shortest_path, directed_cycle, - 0, 3, + 0, + 3, ignore_edges=[(1, 2)], ) @@ -617,37 +659,26 @@ def test_bidirectional_shortest_path_ignore(): nx.add_path(G, [1, 3]) nx.add_path(G, [1, 4]) 
pytest.raises( - nx.NetworkXNoPath, - _bidirectional_shortest_path, - G, - 1, 2, - ignore_nodes=[1], + nx.NetworkXNoPath, _bidirectional_shortest_path, G, 1, 2, ignore_nodes=[1], ) pytest.raises( - nx.NetworkXNoPath, - _bidirectional_shortest_path, - G, - 1, 2, - ignore_nodes=[2], + nx.NetworkXNoPath, _bidirectional_shortest_path, G, 1, 2, ignore_nodes=[2], ) G = nx.Graph() nx.add_path(G, [1, 3]) nx.add_path(G, [1, 4]) nx.add_path(G, [3, 2]) pytest.raises( - nx.NetworkXNoPath, - _bidirectional_shortest_path, - G, - 1, 2, - ignore_nodes=[1, 2], + nx.NetworkXNoPath, _bidirectional_shortest_path, G, 1, 2, ignore_nodes=[1, 2], ) def validate_path(G, s, t, soln_len, path): assert path[0] == s assert path[-1] == t - assert soln_len == sum(G[u][v].get('weight', 1) - for u, v in zip(path[:-1], path[1:])) + assert soln_len == sum( + G[u][v].get("weight", 1) for u, v in zip(path[:-1], path[1:]) + ) def validate_length_path(G, s, t, soln_len, length, path): @@ -657,41 +688,58 @@ def validate_length_path(G, s, t, soln_len, length, path): def test_bidirectional_dijksta_restricted(): XG = nx.DiGraph() - XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5), - ('u', 'v', 1), ('u', 'x', 2), - ('v', 'y', 1), ('x', 'u', 3), - ('x', 'v', 5), ('x', 'y', 2), - ('y', 's', 7), ('y', 'v', 6)]) + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) XG3 = nx.Graph() - XG3.add_weighted_edges_from([[0, 1, 2], [1, 2, 12], - [2, 3, 1], [3, 4, 5], - [4, 5, 1], [5, 0, 10]]) - validate_length_path(XG, 's', 'v', 9, - *_bidirectional_dijkstra(XG, 's', 'v')) - validate_length_path(XG, 's', 'v', 10, - *_bidirectional_dijkstra(XG, 's', 'v', ignore_nodes=['u'])) - validate_length_path(XG, 's', 'v', 11, - *_bidirectional_dijkstra(XG, 's', 'v', - ignore_edges=[('s', 'x')])) + XG3.add_weighted_edges_from( + [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]] + ) + validate_length_path(XG, "s", "v", 9, *_bidirectional_dijkstra(XG, "s", "v")) + validate_length_path( + XG, "s", "v", 10, *_bidirectional_dijkstra(XG, "s", "v", ignore_nodes=["u"]) + ) + validate_length_path( + XG, + "s", + "v", + 11, + *_bidirectional_dijkstra(XG, "s", "v", ignore_edges=[("s", "x")]) + ) pytest.raises( nx.NetworkXNoPath, _bidirectional_dijkstra, XG, - 's', 'v', - ignore_nodes=['u'], - ignore_edges=[('s', 'x')], + "s", + "v", + ignore_nodes=["u"], + ignore_edges=[("s", "x")], ) validate_length_path(XG3, 0, 3, 15, *_bidirectional_dijkstra(XG3, 0, 3)) - validate_length_path(XG3, 0, 3, 16, - *_bidirectional_dijkstra(XG3, 0, 3, ignore_nodes=[1])) - validate_length_path(XG3, 0, 3, 16, - *_bidirectional_dijkstra(XG3, 0, 3, ignore_edges=[(2, 3)])) + validate_length_path( + XG3, 0, 3, 16, *_bidirectional_dijkstra(XG3, 0, 3, ignore_nodes=[1]) + ) + validate_length_path( + XG3, 0, 3, 16, *_bidirectional_dijkstra(XG3, 0, 3, ignore_edges=[(2, 3)]) + ) pytest.raises( nx.NetworkXNoPath, _bidirectional_dijkstra, XG3, - 0, 3, + 0, + 3, ignore_nodes=[1], ignore_edges=[(5, 4)], ) @@ -710,23 +758,11 @@ def test_bidirectional_dijkstra_ignore(): nx.add_path(G, [1, 2, 10]) nx.add_path(G, [1, 3, 10]) pytest.raises( - nx.NetworkXNoPath, - _bidirectional_dijkstra, - G, - 1, 2, - ignore_nodes=[1], + nx.NetworkXNoPath, _bidirectional_dijkstra, G, 1, 2, ignore_nodes=[1], ) pytest.raises( - nx.NetworkXNoPath, - _bidirectional_dijkstra, - G, - 1, 2, - ignore_nodes=[2], + nx.NetworkXNoPath, 
_bidirectional_dijkstra, G, 1, 2, ignore_nodes=[2], ) pytest.raises( - nx.NetworkXNoPath, - _bidirectional_dijkstra, - G, - 1, 2, - ignore_nodes=[1, 2], + nx.NetworkXNoPath, _bidirectional_dijkstra, G, 1, 2, ignore_nodes=[1, 2], ) diff --git a/networkx/algorithms/tests/test_smallworld.py b/networkx/algorithms/tests/test_smallworld.py index 33726b48..8b1b5377 100644 --- a/networkx/algorithms/tests/test_smallworld.py +++ b/networkx/algorithms/tests/test_smallworld.py @@ -1,5 +1,6 @@ import pytest -numpy = pytest.importorskip('numpy') + +numpy = pytest.importorskip("numpy") import random diff --git a/networkx/algorithms/tests/test_smetric.py b/networkx/algorithms/tests/test_smetric.py index b1d1e81e..b6c4570d 100644 --- a/networkx/algorithms/tests/test_smetric.py +++ b/networkx/algorithms/tests/test_smetric.py @@ -11,6 +11,8 @@ def test_smetric(): g.add_edge(1, 4) sm = nx.s_metric(g, normalized=False) assert sm == 19.0 + + # smNorm = nx.s_metric(g,normalized=True) # assert_equal(smNorm, 0.95) diff --git a/networkx/algorithms/tests/test_sparsifiers.py b/networkx/algorithms/tests/test_sparsifiers.py index 366c4d45..09e934d2 100644 --- a/networkx/algorithms/tests/test_sparsifiers.py +++ b/networkx/algorithms/tests/test_sparsifiers.py @@ -61,7 +61,7 @@ def _assign_random_weights(G, seed=None): See :ref:`Randomness<randomness>`. """ for u, v in G.edges(): - G[u][v]['weight'] = seed.random() + G[u][v]["weight"] = seed.random() def test_spanner_trivial(): @@ -70,7 +70,7 @@ def test_spanner_trivial(): spanner = nx.spanner(G, 1, seed=_seed) for u, v in G.edges: - assert(spanner.has_edge(u, v)) + assert spanner.has_edge(u, v) def test_spanner_unweighted_complete_graph(): @@ -89,11 +89,11 @@ def test_spanner_weighted_complete_graph(): G = nx.complete_graph(20) _assign_random_weights(G, seed=_seed) - spanner = nx.spanner(G, 4, weight='weight', seed=_seed) - _test_spanner(G, spanner, 4, weight='weight') + spanner = nx.spanner(G, 4, weight="weight", seed=_seed) + _test_spanner(G, spanner, 4, weight="weight") - spanner = nx.spanner(G, 10, weight='weight', seed=_seed) - _test_spanner(G, spanner, 10, weight='weight') + spanner = nx.spanner(G, 10, weight="weight", seed=_seed) + _test_spanner(G, spanner, 10, weight="weight") def test_spanner_unweighted_gnp_graph(): @@ -112,11 +112,11 @@ def test_spanner_weighted_gnp_graph(): G = nx.gnp_random_graph(20, 0.4, seed=_seed) _assign_random_weights(G, seed=_seed) - spanner = nx.spanner(G, 4, weight='weight', seed=_seed) - _test_spanner(G, spanner, 4, weight='weight') + spanner = nx.spanner(G, 4, weight="weight", seed=_seed) + _test_spanner(G, spanner, 4, weight="weight") - spanner = nx.spanner(G, 10, weight='weight', seed=_seed) - _test_spanner(G, spanner, 10, weight='weight') + spanner = nx.spanner(G, 10, weight="weight", seed=_seed) + _test_spanner(G, spanner, 10, weight="weight") def test_spanner_unweighted_disconnected_graph(): diff --git a/networkx/algorithms/tests/test_structuralholes.py b/networkx/algorithms/tests/test_structuralholes.py index bfe4252f..a0499e65 100644 --- a/networkx/algorithms/tests/test_structuralholes.py +++ b/networkx/algorithms/tests/test_structuralholes.py @@ -22,14 +22,31 @@ class TestStructuralHoles: self.D_weights = {(0, 1): 2, (0, 2): 2, (1, 0): 1, (2, 1): 1} # Example from http://www.analytictech.com/connections/v20(1)/holes.htm self.G = nx.Graph() - self.G.add_edges_from([ - ('A', 'B'), ('A', 'F'), ('A', 'G'), ('A', 'E'), ('E', 'G'), - ('F', 'G'), ('B', 'G'), ('B', 'D'), ('D', 'G'), ('G', 'C'), - ]) + self.G.add_edges_from( + [ + 
("A", "B"), + ("A", "F"), + ("A", "G"), + ("A", "E"), + ("E", "G"), + ("F", "G"), + ("B", "G"), + ("B", "D"), + ("D", "G"), + ("G", "C"), + ] + ) self.G_weights = { - ('A', 'B'): 2, ('A', 'F'): 3, ('A', 'G'): 5, ('A', 'E'): 2, - ('E', 'G'): 8, ('F', 'G'): 3, ('B', 'G'): 4, ('B', 'D'): 1, - ('D', 'G'): 3, ('G', 'C'): 10, + ("A", "B"): 2, + ("A", "F"): 3, + ("A", "G"): 5, + ("A", "E"): 2, + ("E", "G"): 8, + ("F", "G"): 3, + ("B", "G"): 4, + ("B", "D"): 1, + ("D", "G"): 3, + ("G", "C"): 10, } def test_constraint_directed(self): @@ -46,55 +63,55 @@ class TestStructuralHoles: def test_constraint_weighted_directed(self): D = self.D.copy() - nx.set_edge_attributes(D, self.D_weights, 'weight') - constraint = nx.constraint(D, weight='weight') + nx.set_edge_attributes(D, self.D_weights, "weight") + constraint = nx.constraint(D, weight="weight") assert almost_equal(constraint[0], 0.840, places=3) assert almost_equal(constraint[1], 1.143, places=3) assert almost_equal(constraint[2], 1.378, places=3) def test_effective_size_weighted_directed(self): D = self.D.copy() - nx.set_edge_attributes(D, self.D_weights, 'weight') - effective_size = nx.effective_size(D, weight='weight') + nx.set_edge_attributes(D, self.D_weights, "weight") + effective_size = nx.effective_size(D, weight="weight") assert almost_equal(effective_size[0], 1.567, places=3) assert almost_equal(effective_size[1], 1.083, places=3) assert almost_equal(effective_size[2], 1, places=3) def test_constraint_undirected(self): constraint = nx.constraint(self.G) - assert almost_equal(constraint['G'], 0.400, places=3) - assert almost_equal(constraint['A'], 0.595, places=3) - assert almost_equal(constraint['C'], 1, places=3) + assert almost_equal(constraint["G"], 0.400, places=3) + assert almost_equal(constraint["A"], 0.595, places=3) + assert almost_equal(constraint["C"], 1, places=3) def test_effective_size_undirected_borgatti(self): effective_size = nx.effective_size(self.G) - assert almost_equal(effective_size['G'], 4.67, places=2) - assert almost_equal(effective_size['A'], 2.50, places=2) - assert almost_equal(effective_size['C'], 1, places=2) + assert almost_equal(effective_size["G"], 4.67, places=2) + assert almost_equal(effective_size["A"], 2.50, places=2) + assert almost_equal(effective_size["C"], 1, places=2) def test_effective_size_undirected(self): G = self.G.copy() - nx.set_edge_attributes(G, 1, 'weight') - effective_size = nx.effective_size(G, weight='weight') - assert almost_equal(effective_size['G'], 4.67, places=2) - assert almost_equal(effective_size['A'], 2.50, places=2) - assert almost_equal(effective_size['C'], 1, places=2) + nx.set_edge_attributes(G, 1, "weight") + effective_size = nx.effective_size(G, weight="weight") + assert almost_equal(effective_size["G"], 4.67, places=2) + assert almost_equal(effective_size["A"], 2.50, places=2) + assert almost_equal(effective_size["C"], 1, places=2) def test_constraint_weighted_undirected(self): G = self.G.copy() - nx.set_edge_attributes(G, self.G_weights, 'weight') - constraint = nx.constraint(G, weight='weight') - assert almost_equal(constraint['G'], 0.299, places=3) - assert almost_equal(constraint['A'], 0.795, places=3) - assert almost_equal(constraint['C'], 1, places=3) + nx.set_edge_attributes(G, self.G_weights, "weight") + constraint = nx.constraint(G, weight="weight") + assert almost_equal(constraint["G"], 0.299, places=3) + assert almost_equal(constraint["A"], 0.795, places=3) + assert almost_equal(constraint["C"], 1, places=3) def test_effective_size_weighted_undirected(self): G 
= self.G.copy() - nx.set_edge_attributes(G, self.G_weights, 'weight') - effective_size = nx.effective_size(G, weight='weight') - assert almost_equal(effective_size['G'], 5.47, places=2) - assert almost_equal(effective_size['A'], 2.47, places=2) - assert almost_equal(effective_size['C'], 1, places=2) + nx.set_edge_attributes(G, self.G_weights, "weight") + effective_size = nx.effective_size(G, weight="weight") + assert almost_equal(effective_size["G"], 5.47, places=2) + assert almost_equal(effective_size["A"], 2.47, places=2) + assert almost_equal(effective_size["C"], 1, places=2) def test_constraint_isolated(self): G = self.G.copy() @@ -105,8 +122,8 @@ class TestStructuralHoles: def test_effective_size_isolated(self): G = self.G.copy() G.add_node(1) - nx.set_edge_attributes(G, self.G_weights, 'weight') - effective_size = nx.effective_size(G, weight='weight') + nx.set_edge_attributes(G, self.G_weights, "weight") + effective_size = nx.effective_size(G, weight="weight") assert math.isnan(effective_size[1]) def test_effective_size_borgatti_isolated(self): diff --git a/networkx/algorithms/tests/test_threshold.py b/networkx/algorithms/tests/test_threshold.py index 56a6e3f5..ff3a5aba 100644 --- a/networkx/algorithms/tests/test_threshold.py +++ b/networkx/algorithms/tests/test_threshold.py @@ -13,7 +13,7 @@ from networkx.testing import almost_equal cnlti = nx.convert_node_labels_to_integers -class TestGeneratorThreshold(): +class TestGeneratorThreshold: def test_threshold_sequence_graph_test(self): G = nx.star_graph(10) assert nxt.is_threshold_graph(G) @@ -41,42 +41,56 @@ class TestGeneratorThreshold(): cs0 = nxt.creation_sequence(deg) H0 = nxt.threshold_graph(cs0) - assert ''.join(cs0) == 'ddid' + assert "".join(cs0) == "ddid" cs1 = nxt.creation_sequence(deg, with_labels=True) H1 = nxt.threshold_graph(cs1) - assert cs1 == [(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')] + assert cs1 == [(1, "d"), (2, "d"), (3, "i"), (0, "d")] cs2 = nxt.creation_sequence(deg, compact=True) H2 = nxt.threshold_graph(cs2) assert cs2 == [2, 1, 1] - assert ''.join(nxt.uncompact(cs2)) == 'ddid' + assert "".join(nxt.uncompact(cs2)) == "ddid" assert graph_could_be_isomorphic(H0, G) assert graph_could_be_isomorphic(H0, H1) assert graph_could_be_isomorphic(H0, H2) def test_make_compact(self): - assert nxt.make_compact(['d', 'd', 'd', 'i', 'd', 'd']) == [3, 1, 2] + assert nxt.make_compact(["d", "d", "d", "i", "d", "d"]) == [3, 1, 2] assert nxt.make_compact([3, 1, 2]) == [3, 1, 2] - assert pytest.raises(TypeError, nxt.make_compact, [3., 1., 2.]) + assert pytest.raises(TypeError, nxt.make_compact, [3.0, 1.0, 2.0]) def test_uncompact(self): - assert nxt.uncompact([3, 1, 2]) == ['d', 'd', 'd', 'i', 'd', 'd'] - assert nxt.uncompact(['d', 'd', 'i', 'd']) == ['d', 'd', 'i', 'd'] - assert (nxt.uncompact(nxt.uncompact([(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')])) == - nxt.uncompact([(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')])) - assert pytest.raises(TypeError, nxt.uncompact, [3., 1., 2.]) + assert nxt.uncompact([3, 1, 2]) == ["d", "d", "d", "i", "d", "d"] + assert nxt.uncompact(["d", "d", "i", "d"]) == ["d", "d", "i", "d"] + assert nxt.uncompact( + nxt.uncompact([(1, "d"), (2, "d"), (3, "i"), (0, "d")]) + ) == nxt.uncompact([(1, "d"), (2, "d"), (3, "i"), (0, "d")]) + assert pytest.raises(TypeError, nxt.uncompact, [3.0, 1.0, 2.0]) def test_creation_sequence_to_weights(self): - assert nxt.creation_sequence_to_weights([3, 1, 2]) == [0.5, 0.5, 0.5, 0.25, 0.75, 0.75] - assert pytest.raises(TypeError, nxt.creation_sequence_to_weights, [3., 1., 2.]) + 
assert nxt.creation_sequence_to_weights([3, 1, 2]) == [ + 0.5, + 0.5, + 0.5, + 0.25, + 0.75, + 0.75, + ] + assert pytest.raises( + TypeError, nxt.creation_sequence_to_weights, [3.0, 1.0, 2.0] + ) def test_weights_to_creation_sequence(self): deg = [3, 2, 2, 1] with pytest.raises(ValueError): nxt.weights_to_creation_sequence(deg, with_labels=True, compact=True) - assert (nxt.weights_to_creation_sequence(deg, with_labels=True) == - [(3, 'd'), (1, 'd'), (2, 'd'), (0, 'd')]) + assert nxt.weights_to_creation_sequence(deg, with_labels=True) == [ + (3, "d"), + (1, "d"), + (2, "d"), + (0, "d"), + ] assert nxt.weights_to_creation_sequence(deg, compact=True) == [4] def test_find_alternating_4_cycle(self): @@ -88,10 +102,8 @@ class TestGeneratorThreshold(): deg = [3, 2, 2, 1] G = nx.generators.havel_hakimi_graph(deg) cs1 = nxt.creation_sequence(deg, with_labels=True) - for n, m in [(3, 0), (0, 3), (0, 2), (0, 1), (1, 3), - (3, 1), (1, 2), (2, 3)]: - assert (nxt.shortest_path(cs1, n, m) == - nx.shortest_path(G, n, m)) + for n, m in [(3, 0), (0, 3), (0, 2), (0, 1), (1, 3), (3, 1), (1, 2), (2, 3)]: + assert nxt.shortest_path(cs1, n, m) == nx.shortest_path(G, n, m) spl = nxt.shortest_path_length(cs1, 3) spl2 = nxt.shortest_path_length([t for v, t in cs1], 2) @@ -103,33 +115,55 @@ class TestGeneratorThreshold(): spld[n] = pl assert spld == nx.single_source_shortest_path_length(G, 3) - assert nxt.shortest_path(['d', 'd', 'd', 'i', 'd', 'd'], 1, 2) == [1, 2] + assert nxt.shortest_path(["d", "d", "d", "i", "d", "d"], 1, 2) == [1, 2] assert nxt.shortest_path([3, 1, 2], 1, 2) == [1, 2] - assert pytest.raises(TypeError, nxt.shortest_path, [3., 1., 2.], 1, 2) - assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], 'a', 2) - assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], 1, 'b') + assert pytest.raises(TypeError, nxt.shortest_path, [3.0, 1.0, 2.0], 1, 2) + assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], "a", 2) + assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], 1, "b") assert nxt.shortest_path([3, 1, 2], 1, 1) == [1] def test_shortest_path_length(self): assert nxt.shortest_path_length([3, 1, 2], 1) == [1, 0, 1, 2, 1, 1] - assert (nxt.shortest_path_length(['d', 'd', 'd', 'i', 'd', 'd'], 1) == - [1, 0, 1, 2, 1, 1]) - assert (nxt.shortest_path_length(('d', 'd', 'd', 'i', 'd', 'd'), 1) == - [1, 0, 1, 2, 1, 1]) - assert pytest.raises(TypeError, nxt.shortest_path, [3., 1., 2.], 1) + assert nxt.shortest_path_length(["d", "d", "d", "i", "d", "d"], 1) == [ + 1, + 0, + 1, + 2, + 1, + 1, + ] + assert nxt.shortest_path_length(("d", "d", "d", "i", "d", "d"), 1) == [ + 1, + 0, + 1, + 2, + 1, + 1, + ] + assert pytest.raises(TypeError, nxt.shortest_path, [3.0, 1.0, 2.0], 1) def random_threshold_sequence(self): assert len(nxt.random_threshold_sequence(10, 0.5)) == 10 - assert (nxt.random_threshold_sequence(10, 0.5, seed=42) == - ['d', 'i', 'd', 'd', 'd', 'i', 'i', 'i', 'd', 'd']) + assert nxt.random_threshold_sequence(10, 0.5, seed=42) == [ + "d", + "i", + "d", + "d", + "d", + "i", + "i", + "i", + "d", + "d", + ] assert pytest.raises(ValueError, nxt.random_threshold_sequence, 10, 1.5) def test_right_d_threshold_sequence(self): - assert nxt.right_d_threshold_sequence(3, 2) == ['d', 'i', 'd'] + assert nxt.right_d_threshold_sequence(3, 2) == ["d", "i", "d"] assert pytest.raises(ValueError, nxt.right_d_threshold_sequence, 2, 3) def test_left_d_threshold_sequence(self): - assert nxt.left_d_threshold_sequence(3, 2) == ['d', 'i', 'd'] + assert nxt.left_d_threshold_sequence(3, 2) == 
["d", "i", "d"] assert pytest.raises(ValueError, nxt.left_d_threshold_sequence, 2, 3) def test_weights_thresholds(self): @@ -140,22 +174,22 @@ class TestGeneratorThreshold(): assert cs == cs2 wseq = nxt.creation_sequence_to_weights(nxt.uncompact([3, 1, 2, 3, 3, 2, 3])) - assert (wseq == - [s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7]]) + assert wseq == [ + s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7] + ] wseq = nxt.creation_sequence_to_weights([3, 1, 2, 3, 3, 2, 3]) - assert (wseq == - [s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7]]) + assert wseq == [ + s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7] + ] - wseq = nxt.creation_sequence_to_weights(list(enumerate('ddidiiidididi'))) - assert (wseq == - [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]]) + wseq = nxt.creation_sequence_to_weights(list(enumerate("ddidiiidididi"))) + assert wseq == [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]] - wseq = nxt.creation_sequence_to_weights('ddidiiidididi') - assert (wseq == - [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]]) + wseq = nxt.creation_sequence_to_weights("ddidiiidididi") + assert wseq == [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]] - wseq = nxt.creation_sequence_to_weights('ddidiiidididid') + wseq = nxt.creation_sequence_to_weights("ddidiiidididid") ws = [s / float(12) for s in [6, 6, 5, 7, 4, 4, 4, 8, 3, 9, 2, 10, 1, 11]] assert sum([abs(c - d) for c, d in zip(wseq, ws)]) < 1e-14 @@ -179,11 +213,10 @@ class TestGeneratorThreshold(): assert nxt.find_creation_sequence(G) == cs def test_fast_versions_properties_threshold_graphs(self): - cs = 'ddiiddid' + cs = "ddiiddid" G = nxt.threshold_graph(cs) - assert nxt.density('ddiiddid') == nx.density(G) - assert (sorted(nxt.degree_sequence(cs)) == - sorted(d for n, d in G.degree())) + assert nxt.density("ddiiddid") == nx.density(G) + assert sorted(nxt.degree_sequence(cs)) == sorted(d for n, d in G.degree()) ts = nxt.triangle_sequence(cs) assert ts == list(nx.triangles(G).values()) @@ -201,11 +234,11 @@ class TestGeneratorThreshold(): # Degree Correlation assert abs(nxt.degree_correlation(cs) + 0.593038821954) < 1e-12 - assert nxt.degree_correlation('diiiddi') == -0.8 - assert nxt.degree_correlation('did') == -1.0 - assert nxt.degree_correlation('ddd') == 1.0 - assert nxt.eigenvalues('dddiii') == [0, 0, 0, 0, 3, 3] - assert nxt.eigenvalues('dddiiid') == [0, 1, 1, 1, 4, 4, 7] + assert nxt.degree_correlation("diiiddi") == -0.8 + assert nxt.degree_correlation("did") == -1.0 + assert nxt.degree_correlation("ddd") == 1.0 + assert nxt.eigenvalues("dddiii") == [0, 0, 0, 0, 3, 3] + assert nxt.eigenvalues("dddiiid") == [0, 1, 1, 1, 4, 4, 7] def test_tg_creation_routines(self): s = nxt.left_d_threshold_sequence(5, 7) @@ -214,27 +247,32 @@ class TestGeneratorThreshold(): s1 = nxt.swap_d(s, 1.0, 1.0, seed=1) def test_eigenvectors(self): - np = pytest.importorskip('numpy') + np = pytest.importorskip("numpy") eigenval = np.linalg.eigvals - scipy = pytest.importorskip('scipy') + scipy = pytest.importorskip("scipy") - cs = 'ddiiddid' + cs = "ddiiddid" G = nxt.threshold_graph(cs) (tgeval, tgevec) = nxt.eigenvectors(cs) dot = np.dot assert [abs(dot(lv, lv) - 1.0) < 1e-9 for lv in tgevec] == [True] * 8 lapl = nx.laplacian_matrix(G) -# tgev=[ dot(lv,dot(lapl,lv)) for lv in tgevec ] -# assert_true(sum([abs(c-d) for c,d in zip(tgev,tgeval)]) < 1e-9) -# tgev.sort() -# lev=list(eigenval(lapl)) -# lev.sort() -# 
assert_true(sum([abs(c-d) for c,d in zip(tgev,lev)]) < 1e-9) + + # tgev=[ dot(lv,dot(lapl,lv)) for lv in tgevec ] + # assert_true(sum([abs(c-d) for c,d in zip(tgev,tgeval)]) < 1e-9) + # tgev.sort() + # lev=list(eigenval(lapl)) + # lev.sort() + # assert_true(sum([abs(c-d) for c,d in zip(tgev,lev)]) < 1e-9) def test_create_using(self): - cs = 'ddiiddid' + cs = "ddiiddid" G = nxt.threshold_graph(cs) - assert pytest.raises(nx.exception.NetworkXError, - nxt.threshold_graph, cs, create_using=nx.DiGraph()) + assert pytest.raises( + nx.exception.NetworkXError, + nxt.threshold_graph, + cs, + create_using=nx.DiGraph(), + ) MG = nxt.threshold_graph(cs, create_using=nx.MultiGraph()) assert sorted(MG.edges()) == sorted(G.edges()) diff --git a/networkx/algorithms/tests/test_tournament.py b/networkx/algorithms/tests/test_tournament.py index b170ea37..8de6f7c3 100644 --- a/networkx/algorithms/tests/test_tournament.py +++ b/networkx/algorithms/tests/test_tournament.py @@ -53,6 +53,7 @@ class TestRandomTournament: function. """ + def test_graph_is_tournament(self): for n in range(10): G = random_tournament(5) diff --git a/networkx/algorithms/tests/test_triads.py b/networkx/algorithms/tests/test_triads.py index c9f43fd8..5ede8871 100644 --- a/networkx/algorithms/tests/test_triads.py +++ b/networkx/algorithms/tests/test_triads.py @@ -8,11 +8,25 @@ from random import sample def test_triadic_census(): """Tests the triadic_census function.""" G = nx.DiGraph() - G.add_edges_from(['01', '02', '03', '04', '05', '12', '16', '51', '56', - '65']) - expected = {'030T': 2, '120C': 1, '210': 0, '120U': 0, '012': 9, '102': 3, - '021U': 0, '111U': 0, '003': 8, '030C': 0, '021D': 9, '201': 0, - '111D': 1, '300': 0, '120D': 0, '021C': 2} + G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"]) + expected = { + "030T": 2, + "120C": 1, + "210": 0, + "120U": 0, + "012": 9, + "102": 3, + "021U": 0, + "111U": 0, + "003": 8, + "030C": 0, + "021D": 9, + "201": 0, + "111D": 1, + "300": 0, + "120D": 0, + "021C": 2, + } actual = nx.triadic_census(G) assert expected == actual @@ -30,11 +44,14 @@ def test_is_triad(): def test_all_triplets(): """Tests the all_triplets function.""" G = nx.DiGraph() - G.add_edges_from(['01', '02', '03', '04', '05', '12', '16', '51', '56', - '65']) - expected = [f"{i},{j},{k}" for i in range(7) for j in range(i + 1, 7) - for k in range(j + 1, 7)] - expected = [set(x.split(',')) for x in expected] + G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"]) + expected = [ + f"{i},{j},{k}" + for i in range(7) + for j in range(i + 1, 7) + for k in range(j + 1, 7) + ] + expected = [set(x.split(",")) for x in expected] actual = list(set(x) for x in nx.all_triplets(G)) assert all([any([s1 == s2 for s1 in expected]) for s2 in actual]) @@ -42,64 +59,65 @@ def test_all_triplets(): def test_all_triads(): """Tests the all_triplets function.""" G = nx.DiGraph() - G.add_edges_from(['01', '02', '03', '04', '05', '12', '16', '51', '56', - '65']) - expected = [f"{i},{j},{k}" for i in range(7) for j in range(i + 1, 7) - for k in range(j + 1, 7)] - expected = [G.subgraph(x.split(',')) for x in expected] + G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"]) + expected = [ + f"{i},{j},{k}" + for i in range(7) + for j in range(i + 1, 7) + for k in range(j + 1, 7) + ] + expected = [G.subgraph(x.split(",")) for x in expected] actual = list(nx.all_triads(G)) - assert all(any([nx.is_isomorphic(G1, G2) for G1 in expected]) - for G2 in actual) + assert 
all(any([nx.is_isomorphic(G1, G2) for G1 in expected]) for G2 in actual) def test_triad_type(): """Tests the triad_type function.""" # 0 edges (1 type) G = nx.DiGraph({0: [], 1: [], 2: []}) - assert nx.triad_type(G) == '003' + assert nx.triad_type(G) == "003" # 1 edge (1 type) G = nx.DiGraph({0: [1], 1: [], 2: []}) - assert nx.triad_type(G) == '012' + assert nx.triad_type(G) == "012" # 2 edges (4 types) G = nx.DiGraph([(0, 1), (0, 2)]) - assert nx.triad_type(G) == '021D' + assert nx.triad_type(G) == "021D" G = nx.DiGraph({0: [1], 1: [0], 2: []}) - assert nx.triad_type(G) == '102' + assert nx.triad_type(G) == "102" G = nx.DiGraph([(0, 1), (2, 1)]) - assert nx.triad_type(G) == '021U' + assert nx.triad_type(G) == "021U" G = nx.DiGraph([(0, 1), (1, 2)]) - assert nx.triad_type(G) == '021C' + assert nx.triad_type(G) == "021C" # 3 edges (4 types) G = nx.DiGraph([(0, 1), (1, 0), (2, 1)]) - assert nx.triad_type(G) == '111D' + assert nx.triad_type(G) == "111D" G = nx.DiGraph([(0, 1), (1, 0), (1, 2)]) - assert nx.triad_type(G) == '111U' + assert nx.triad_type(G) == "111U" G = nx.DiGraph([(0, 1), (1, 2), (0, 2)]) - assert nx.triad_type(G) == '030T' + assert nx.triad_type(G) == "030T" G = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) - assert nx.triad_type(G) == '030C' + assert nx.triad_type(G) == "030C" # 4 edges (4 types) G = nx.DiGraph([(0, 1), (1, 0), (2, 0), (0, 2)]) - assert nx.triad_type(G) == '201' + assert nx.triad_type(G) == "201" G = nx.DiGraph([(0, 1), (1, 0), (2, 0), (2, 1)]) - assert nx.triad_type(G) == '120D' + assert nx.triad_type(G) == "120D" G = nx.DiGraph([(0, 1), (1, 0), (0, 2), (1, 2)]) - assert nx.triad_type(G) == '120U' + assert nx.triad_type(G) == "120U" G = nx.DiGraph([(0, 1), (1, 0), (0, 2,), (2, 1)]) - assert nx.triad_type(G) == '120C' + assert nx.triad_type(G) == "120C" # 5 edges (1 type) G = nx.DiGraph([(0, 1), (1, 0), (2, 1), (1, 2), (0, 2)]) - assert nx.triad_type(G) == '210' + assert nx.triad_type(G) == "210" # 6 edges (1 type) G = nx.DiGraph([(0, 1), (1, 0), (1, 2), (2, 1), (0, 2), (2, 0)]) - assert nx.triad_type(G) == '300' + assert nx.triad_type(G) == "300" def test_triads_by_type(): """Tests the all_triplets function.""" G = nx.DiGraph() - G.add_edges_from(['01', '02', '03', '04', '05', '12', '16', '51', '56', - '65']) + G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"]) all_triads = nx.all_triads(G) expected = defaultdict(list) for triad in all_triads: diff --git a/networkx/algorithms/tests/test_vitality.py b/networkx/algorithms/tests/test_vitality.py index c0aa686e..248206e6 100644 --- a/networkx/algorithms/tests/test_vitality.py +++ b/networkx/algorithms/tests/test_vitality.py @@ -2,7 +2,6 @@ import networkx as nx class TestClosenessVitality: - def test_unweighted(self): G = nx.cycle_graph(3) vitality = nx.closeness_vitality(G) @@ -11,7 +10,7 @@ class TestClosenessVitality: def test_weighted(self): G = nx.Graph() nx.add_cycle(G, [0, 1, 2], weight=2) - vitality = nx.closeness_vitality(G, weight='weight') + vitality = nx.closeness_vitality(G, weight="weight") assert vitality == {0: 4, 1: 4, 2: 4} def test_unweighted_digraph(self): @@ -23,14 +22,14 @@ class TestClosenessVitality: G = nx.DiGraph() nx.add_cycle(G, [0, 1, 2], weight=2) nx.add_cycle(G, [2, 1, 0], weight=2) - vitality = nx.closeness_vitality(G, weight='weight') + vitality = nx.closeness_vitality(G, weight="weight") assert vitality == {0: 8, 1: 8, 2: 8} def test_weighted_multidigraph(self): G = nx.MultiDiGraph() nx.add_cycle(G, [0, 1, 2], weight=2) nx.add_cycle(G, [2, 1, 0], weight=2) - 
vitality = nx.closeness_vitality(G, weight='weight') + vitality = nx.closeness_vitality(G, weight="weight") assert vitality == {0: 8, 1: 8, 2: 8} def test_disconnecting_graph(self): @@ -39,4 +38,4 @@ class TestClosenessVitality: """ G = nx.path_graph(3) - assert nx.closeness_vitality(G, node=1) == -float('inf') + assert nx.closeness_vitality(G, node=1) == -float("inf") diff --git a/networkx/algorithms/tests/test_voronoi.py b/networkx/algorithms/tests/test_voronoi.py index 27f9e5e3..3269ae62 100644 --- a/networkx/algorithms/tests/test_voronoi.py +++ b/networkx/algorithms/tests/test_voronoi.py @@ -12,7 +12,7 @@ class TestVoronoiCells: """ G = nx.empty_graph(5) cells = nx.voronoi_cells(G, {0, 2, 4}) - expected = {0: {0}, 2: {2}, 4: {4}, 'unreachable': {1, 3}} + expected = {0: {0}, 2: {2}, 4: {4}, "unreachable": {1, 3}} assert expected == cells def test_undirected_unweighted(self): @@ -78,8 +78,7 @@ class TestVoronoiCells: assert G_cells == H_cells def test_multigraph_weighted(self): - edges = [(0, 1, 10), (0, 1, 10), (1, 2, 1), (1, 2, 100), (2, 3, 1), - (2, 3, 100)] + edges = [(0, 1, 10), (0, 1, 10), (1, 2, 1), (1, 2, 100), (2, 3, 1), (2, 3, 100)] G = nx.MultiGraph() G.add_weighted_edges_from(edges) cells = nx.voronoi_cells(G, {0, 3}) @@ -87,8 +86,16 @@ class TestVoronoiCells: assert expected == cells def test_multidigraph_weighted(self): - edges = [(0, 1, 10), (0, 1, 10), (1, 2, 1), (2, 3, 1), (3, 2, 10), - (3, 2, 1), (2, 1, 10), (2, 1, 1)] + edges = [ + (0, 1, 10), + (0, 1, 10), + (1, 2, 1), + (2, 3, 1), + (3, 2, 10), + (3, 2, 1), + (2, 1, 10), + (2, 1, 1), + ] G = nx.MultiDiGraph() G.add_weighted_edges_from(edges) cells = nx.voronoi_cells(G, {0, 3}) diff --git a/networkx/algorithms/tests/test_wiener.py b/networkx/algorithms/tests/test_wiener.py index c6ae2893..5402a403 100644 --- a/networkx/algorithms/tests/test_wiener.py +++ b/networkx/algorithms/tests/test_wiener.py @@ -16,7 +16,7 @@ class TestWienerIndex: positive infinity. """ - assert wiener_index(empty_graph(2)) == float('inf') + assert wiener_index(empty_graph(2)) == float("inf") def test_directed(self): """Tests that each pair of nodes in the directed graph is diff --git a/networkx/algorithms/threshold.py b/networkx/algorithms/threshold.py index 9add0456..1d0a34c3 100644 --- a/networkx/algorithms/threshold.py +++ b/networkx/algorithms/threshold.py @@ -5,7 +5,7 @@ from math import sqrt import networkx as nx from networkx.utils import py_random_state -__all__ = ['is_threshold_graph', 'find_threshold_graph'] +__all__ = ["is_threshold_graph", "find_threshold_graph"] def is_threshold_graph(G): @@ -28,12 +28,12 @@ def is_threshold_sequence(degree_sequence): ds = degree_sequence[:] # get a copy so we don't destroy original ds.sort() while ds: - if ds[0] == 0: # if isolated node - ds.pop(0) # remove it + if ds[0] == 0: # if isolated node + ds.pop(0) # remove it continue if ds[-1] != len(ds) - 1: # is the largest degree node dominating? 
- return False # no, not a threshold degree sequence - ds.pop() # yes, largest is the dominating node + return False # no, not a threshold degree sequence + ds.pop() # yes, largest is the dominating node ds = [d - 1 for d in ds] # remove it and decrement all degrees return True @@ -70,31 +70,31 @@ def creation_sequence(degree_sequence, with_labels=False, compact=False): raise ValueError("compact sequences cannot be labeled") # make an indexed copy - if isinstance(degree_sequence, dict): # labeled degree sequence + if isinstance(degree_sequence, dict): # labeled degree sequence ds = [[degree, label] for (label, degree) in degree_sequence.items()] else: ds = [[d, i] for i, d in enumerate(degree_sequence)] ds.sort() cs = [] # creation sequence while ds: - if ds[0][0] == 0: # isolated node + if ds[0][0] == 0: # isolated node (d, v) = ds.pop(0) - if len(ds) > 0: # make sure we start with a d - cs.insert(0, (v, 'i')) + if len(ds) > 0: # make sure we start with a d + cs.insert(0, (v, "i")) else: - cs.insert(0, (v, 'd')) + cs.insert(0, (v, "d")) continue - if ds[-1][0] != len(ds) - 1: # Not dominating node + if ds[-1][0] != len(ds) - 1: # Not dominating node return None # not a threshold degree sequence (d, v) = ds.pop() - cs.insert(0, (v, 'd')) - ds = [[d[0] - 1, d[1]] for d in ds] # decrement due to removing node + cs.insert(0, (v, "d")) + ds = [[d[0] - 1, d[1]] for d in ds] # decrement due to removing node if with_labels: return cs if compact: return make_compact(cs) - return [v[1] for v in cs] # not labeled + return [v[1] for v in cs] # not labeled def make_compact(creation_sequence): @@ -120,11 +120,11 @@ [3, 1, 2] """ first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation sequence cs = creation_sequence[:] - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence cs = [s[1] for s in creation_sequence] - elif isinstance(first, int): # compact creation sequence + elif isinstance(first, int): # compact creation sequence return creation_sequence else: raise TypeError("Not a valid creation sequence type") @@ -149,19 +149,19 @@ def uncompact(creation_sequence): See creation_sequence. 
""" first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation sequence return creation_sequence - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence return creation_sequence - elif isinstance(first, int): # compact creation sequence + elif isinstance(first, int): # compact creation sequence ccscopy = creation_sequence[:] else: raise TypeError("Not a valid creation sequence type") cs = [] while ccscopy: - cs.extend(ccscopy.pop(0) * ['d']) + cs.extend(ccscopy.pop(0) * ["d"]) if ccscopy: - cs.extend(ccscopy.pop(0) * ['i']) + cs.extend(ccscopy.pop(0) * ["i"]) return cs @@ -174,12 +174,12 @@ def creation_sequence_to_weights(creation_sequence): """ # Turn input sequence into a labeled creation sequence first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation sequence if isinstance(creation_sequence, list): wseq = creation_sequence[:] else: wseq = list(creation_sequence) # string like 'ddidid' - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence wseq = [v[1] for v in creation_sequence] elif isinstance(first, int): # compact creation sequence wseq = uncompact(creation_sequence) @@ -188,31 +188,33 @@ def creation_sequence_to_weights(creation_sequence): # pass through twice--first backwards wseq.reverse() w = 0 - prev = 'i' + prev = "i" for j, s in enumerate(wseq): - if s == 'i': + if s == "i": wseq[j] = w prev = s - elif prev == 'i': + elif prev == "i": prev = s w += 1 wseq.reverse() # now pass through forwards for j, s in enumerate(wseq): - if s == 'd': + if s == "d": wseq[j] = w prev = s - elif prev == 'd': + elif prev == "d": prev = s w += 1 # Now scale weights - if prev == 'd': + if prev == "d": w += 1 - wscale = 1. / float(w) + wscale = 1.0 / float(w) return [ww * wscale for ww in wseq] # return wseq -def weights_to_creation_sequence(weights, threshold=1, with_labels=False, compact=False): +def weights_to_creation_sequence( + weights, threshold=1, with_labels=False, compact=False +): """ Returns a creation sequence for a threshold graph determined by the weights and threshold given as input. 
@@ -244,7 +246,7 @@ def weights_to_creation_sequence(weights, threshold=1, with_labels=False, compac raise ValueError("compact sequences cannot be labeled") # make an indexed copy - if isinstance(weights, dict): # labeled weights + if isinstance(weights, dict): # labeled weights wseq = [[w, label] for (label, w) in weights.items()] else: wseq = [[w, i] for i, w in enumerate(weights)] @@ -252,16 +254,16 @@ def weights_to_creation_sequence(weights, threshold=1, with_labels=False, compac cs = [] # creation sequence cutoff = threshold - wseq[-1][0] while wseq: - if wseq[0][0] < cutoff: # isolated node + if wseq[0][0] < cutoff: # isolated node (w, label) = wseq.pop(0) - cs.append((label, 'i')) + cs.append((label, "i")) else: (w, label) = wseq.pop() - cs.append((label, 'd')) + cs.append((label, "d")) cutoff = threshold - wseq[-1][0] - if len(wseq) == 1: # make sure we start with a d + if len(wseq) == 1: # make sure we start with a d (w, label) = wseq.pop() - cs.append((label, 'd')) + cs.append((label, "d")) # put in correct order cs.reverse() @@ -269,7 +271,7 @@ def weights_to_creation_sequence(weights, threshold=1, with_labels=False, compac return cs if compact: return make_compact(cs) - return [v[1] for v in cs] # not labeled + return [v[1] for v in cs] # not labeled # Manipulating NetworkX.Graphs in context of threshold graphs @@ -291,9 +293,9 @@ def threshold_graph(creation_sequence, create_using=None): """ # Turn input sequence into a labeled creation sequence first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation sequence ci = list(enumerate(creation_sequence)) - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence ci = creation_sequence[:] elif isinstance(first, int): # compact creation sequence cs = uncompact(creation_sequence) @@ -313,7 +315,7 @@ def threshold_graph(creation_sequence, create_using=None): # if type is a d connect to everything previous while ci: (v, node_type) = ci.pop(0) - if node_type == 'd': # dominating type, connect to all existing nodes + if node_type == "d": # dominating type, connect to all existing nodes # We use `for u in list(G):` instead of # `for u in G:` because we edit the graph `G` in # the loop. Hence using an iterator will result in @@ -363,16 +365,16 @@ def find_creation_sequence(G): ds.sort() # Update threshold graph nodes if ds[-1][0] == 0: # all are isolated - cs.extend(zip(dsdict, ['i'] * (len(ds) - 1) + ['d'])) - break # Done! + cs.extend(zip(dsdict, ["i"] * (len(ds) - 1) + ["d"])) + break # Done! # pull off isolated nodes while ds[0][0] == 0: (d, iso) = ds.pop(0) - cs.append((iso, 'i')) + cs.append((iso, "i")) # find new biggest node (d, bigv) = ds.pop() # add edges of star to t_g - cs.append((bigv, 'd')) + cs.append((bigv, "d")) # form subgraph of neighbors of big node H = H.subgraph(H.neighbors(bigv)) cs.reverse() @@ -387,8 +389,8 @@ def triangles(creation_sequence): """ # shortcut algorithm that doesn't require computing number # of triangles at each node. 
- cs = creation_sequence # alias - dr = cs.count("d") # number of d's in sequence + cs = creation_sequence # alias + dr = cs.count("d") # number of d's in sequence ntri = dr * (dr - 1) * (dr - 2) / 6 # number of triangles in clique of nd d's # now add dr choose 2 triangles for every 'i' in sequence where # dr is the number of d's to the right of the current i @@ -407,22 +409,22 @@ def triangle_sequence(creation_sequence): """ cs = creation_sequence seq = [] - dr = cs.count("d") # number of d's to the right of the current pos + dr = cs.count("d") # number of d's to the right of the current pos dcur = (dr - 1) * (dr - 2) // 2 # number of triangles through a node of clique dr - irun = 0 # number of i's in the last run - drun = 0 # number of d's in the last run + irun = 0 # number of i's in the last run + drun = 0 # number of d's in the last run for i, sym in enumerate(cs): if sym == "d": drun += 1 - tri = dcur + (dr - 1) * irun # new triangles at this d + tri = dcur + (dr - 1) * irun # new triangles at this d else: # cs[i]="i": - if prevsym == "d": # new string of i's - dcur += (dr - 1) * irun # accumulate shared shortest paths - irun = 0 # reset i run counter - dr -= drun # reduce number of d's to right - drun = 0 # reset d run counter + if prevsym == "d": # new string of i's + dcur += (dr - 1) * irun # accumulate shared shortest paths + irun = 0 # reset i run counter + dr -= drun # reduce number of d's to right + drun = 0 # reset d run counter irun += 1 - tri = dr * (dr - 1) // 2 # new triangles at this i + tri = dr * (dr - 1) // 2 # new triangles at this i seq.append(tri) prevsym = sym return seq @@ -437,7 +439,7 @@ def cluster_sequence(creation_sequence): cseq = [] for i, deg in enumerate(degseq): tri = triseq[i] - if deg <= 1: # isolated vertex or single pair gets cc 0 + if deg <= 1: # isolated vertex or single pair gets cc 0 cseq.append(0) continue max_size = (deg * (deg - 1)) // 2 @@ -482,7 +484,7 @@ def degree_correlation(creation_sequence): s1 = 0 # deg_i*deg_j s2 = 0 # deg_i^2+deg_j^2 s3 = 0 # deg_i+deg_j - m = 0 # number of edges + m = 0 # number of edges rd = cs.count("d") # number of d nodes to the right rdi = [i for i, sym in enumerate(cs) if sym == "d"] # index of "d"s ds = degree_sequence(cs) @@ -496,11 +498,11 @@ def degree_correlation(creation_sequence): for dj in rdi: degj = ds[dj] s1 += degj * degi - s2 += degi**2 + degj**2 + s2 += degi ** 2 + degj ** 2 s3 += degi + degj m += 1 - denom = (2 * m * s2 - s3 * s3) - numer = (4 * m * s1 - s3 * s3) + denom = 2 * m * s2 - s3 * s3 + numer = 4 * m * s1 - s3 * s3 if denom == 0: if numer == 0: return 1 @@ -527,9 +529,9 @@ def shortest_path(creation_sequence, u, v): """ # Turn input sequence into a labeled creation sequence first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation sequence cs = [(i, creation_sequence[i]) for i in range(len(creation_sequence))] - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence cs = creation_sequence[:] elif isinstance(first, int): # compact creation sequence ci = uncompact(creation_sequence) @@ -549,13 +551,13 @@ def shortest_path(creation_sequence, u, v): uindex = verts.index(u) vindex = verts.index(v) bigind = max(uindex, vindex) - if cs[bigind][1] == 'd': + if cs[bigind][1] == "d": return [u, v] # must be that cs[bigind][1]=='i' cs = cs[bigind:] while cs: vert = cs.pop() - if vert[1] == 'd': + if vert[1] == "d": return [u, vert[0], v] # All after u are type 'i' so no 
connection return -1 @@ -575,12 +577,12 @@ """ # Turn input sequence into a labeled creation sequence first = creation_sequence[0] - if isinstance(first, str): # creation sequence + if isinstance(first, str): # creation sequence if isinstance(creation_sequence, list): cs = creation_sequence[:] else: cs = list(creation_sequence) - elif isinstance(first, tuple): # labeled creation sequence + elif isinstance(first, tuple): # labeled creation sequence cs = [v[1] for v in creation_sequence] i = [v[0] for v in creation_sequence].index(i) elif isinstance(first, int): # compact creation sequence @@ -590,13 +592,13 @@ # Compute N = len(cs) - spl = [2] * N # length 2 to every node - spl[i] = 0 # except self which is 0 + spl = [2] * N # length 2 to every node + spl[i] = 0 # except self which is 0 # 1 for all d's to the right for j in range(i + 1, N): if cs[j] == "d": spl[j] = 1 - if cs[i] == 'd': # 1 for all nodes to the left + if cs[i] == "d": # 1 for all nodes to the left for j in range(i): spl[j] = 1 # and -1 for any trailing i to indicate unreachable @@ -614,26 +616,26 @@ def betweenness_sequence(creation_sequence, normalized=True): to the interval [0,1] divide by (n-1)*(n-2). """ cs = creation_sequence - seq = [] # betweenness - lastchar = 'd' # first node is always a 'd' + seq = [] # betweenness + lastchar = "d" # first node is always a 'd' dr = float(cs.count("d")) # number of d's to the right of current pos - irun = 0 # number of i's in the last run - drun = 0 # number of d's in the last run - dlast = 0.0 # betweenness of last d + irun = 0 # number of i's in the last run + drun = 0 # number of d's in the last run + dlast = 0.0 # betweenness of last d for i, c in enumerate(cs): - if c == 'd': # cs[i]=="d": + if c == "d": # cs[i]=="d": # betweenness = amt shared with earlier d's and i's # + new isolated nodes covered # + new paths to all previous nodes b = dlast + (irun - 1) * irun / dr + 2 * irun * (i - drun - irun) / dr - drun += 1 # update counter - else: # cs[i]="i": - if lastchar == 'd': # if this is a new run of i's - dlast = b # accumulate betweenness - dr -= drun # update number of d's to the right - drun = 0 # reset d counter - irun = 0 # reset i counter - b = 0 # isolated nodes have zero betweenness + drun += 1 # update counter + else: # cs[i]="i": + if lastchar == "d": # if this is a new run of i's + dlast = b # accumulate betweenness + dr -= drun # update number of d's to the right + drun = 0 # reset d counter + irun = 0 # reset i counter + b = 0 # isolated nodes have zero betweenness irun += 1 # add another i to the run seq.append(float(b)) lastchar = c @@ -667,7 +669,7 @@ dr = sum(ccs[::2]) nn = ccs[0] - vec[0] = [1. / sqrt(N)] * N + vec[0] = [1.0 / sqrt(N)] * N val[0] = 0 e = dr dr -= nn @@ -675,7 +677,7 @@ i = 1 dd = 1 while dd < nn: - scale = 1. / sqrt(dd * dd + i) + scale = 1.0 / sqrt(dd * dd + i) vec[i] = i * [-scale] + [dd * scale] + [0] * (N - i - 1) val[i] = e i += 1 @@ -683,7 +685,7 @@ if len(ccs) == 1: return (val, vec) for nn in ccs[1:]: - scale = 1. / sqrt(nn * i * (i + nn)) + scale = 1.0 / sqrt(nn * i * (i + nn)) vec[i] = i * [-nn * scale] + nn * [i * scale] + [0] * (N - i - nn) # find eigenvalue type_d = not type_d @@ -697,7 +699,7 @@ i += 1 dd = 1 while dd < nn: - scale = 1. 
/ sqrt(i - st + dd * dd) + scale = 1.0 / sqrt(i - st + dd * dd) vec[i] = [0] * st + (i - st) * [-scale] + [dd * scale] + [0] * (N - i - 1) val[i] = e i += 1 @@ -748,7 +750,7 @@ """ degseq = degree_sequence(creation_sequence) degseq.sort() - eiglist = [] # zero is always one eigenvalue + eiglist = [] # zero is always one eigenvalue eig = 0 row = len(degseq) bigdeg = degseq.pop() @@ -767,6 +769,7 @@ # Threshold graph creation routines + @py_random_state(2) def random_threshold_sequence(n, p, seed=None): """ @@ -790,12 +793,12 @@ if not (0 <= p <= 1): raise ValueError("p must be in [0,1]") - cs = ['d'] # threshold sequences always start with a d + cs = ["d"] # threshold sequences always start with a d for i in range(1, n): if seed.random() < p: - cs.append('d') + cs.append("d") else: - cs.append('i') + cs.append("i") return cs @@ -813,11 +816,11 @@ FIXME: describe algorithm """ - cs = ['d'] + ['i'] * (n - 1) # create sequence with n isolated nodes + cs = ["d"] + ["i"] * (n - 1) # create sequence with n isolated nodes # m <n : not enough edges, make disconnected if m < n: - cs[m] = 'd' + cs[m] = "d" return cs # too many edges @@ -828,11 +831,11 @@ ind = n - 1 sum = n - 1 while sum < m: - cs[ind] = 'd' + cs[ind] = "d" ind -= 1 sum += ind ind = m - (sum - ind) - cs[ind] = 'd' + cs[ind] = "d" return cs @@ -847,11 +850,11 @@ FIXME: describe algorithm """ - cs = ['d'] + ['i'] * (n - 1) # create sequence with n isolated nodes + cs = ["d"] + ["i"] * (n - 1) # create sequence with n isolated nodes # m <n : not enough edges, make disconnected if m < n: - cs[m] = 'd' + cs[m] = "d" return cs # too many edges @@ -859,15 +862,15 @@ raise ValueError("Too many edges for this many nodes.") # Connected case when M>N-1 - cs[n - 1] = 'd' + cs[n - 1] = "d" sum = n - 1 ind = 1 while sum < m: - cs[ind] = 'd' + cs[ind] = "d" sum += ind ind += 1 - if sum > m: # be sure not to change the first vertex - cs[sum - m] = 'i' + if sum > m: # be sure not to change the first vertex + cs[sum - m] = "i" return cs @@ -891,31 +894,31 @@ def swap_d(cs, p_split=1.0, p_combine=1.0, seed=None): See :ref:`Randomness<randomness>`. 
""" # preprocess the creation sequence - dlist = [i for (i, node_type) in enumerate(cs[1:-1]) if node_type == 'd'] + dlist = [i for (i, node_type) in enumerate(cs[1:-1]) if node_type == "d"] # split if seed.random() < p_split: choice = seed.choice(dlist) split_to = seed.choice(range(choice)) flip_side = choice - split_to - if split_to != flip_side and cs[split_to] == 'i' and cs[flip_side] == 'i': - cs[choice] = 'i' - cs[split_to] = 'd' - cs[flip_side] = 'd' + if split_to != flip_side and cs[split_to] == "i" and cs[flip_side] == "i": + cs[choice] = "i" + cs[split_to] = "d" + cs[flip_side] = "d" dlist.remove(choice) # don't add or combine may reverse this action # dlist.extend([split_to,flip_side]) -# print >>sys.stderr,"split at %s to %s and %s"%(choice,split_to,flip_side) + # print >>sys.stderr,"split at %s to %s and %s"%(choice,split_to,flip_side) # combine if seed.random() < p_combine and dlist: first_choice = seed.choice(dlist) second_choice = seed.choice(dlist) target = first_choice + second_choice - if target >= len(cs) or cs[target] == 'd' or first_choice == second_choice: + if target >= len(cs) or cs[target] == "d" or first_choice == second_choice: return cs # OK to combine - cs[first_choice] = 'i' - cs[second_choice] = 'i' - cs[target] = 'd' -# print >>sys.stderr,"combine %s and %s to make %s."%(first_choice,second_choice,target) + cs[first_choice] = "i" + cs[second_choice] = "i" + cs[target] = "d" + # print >>sys.stderr,"combine %s and %s to make %s."%(first_choice,second_choice,target) return cs diff --git a/networkx/algorithms/tournament.py b/networkx/algorithms/tournament.py index d512e8c4..a76833bb 100644 --- a/networkx/algorithms/tournament.py +++ b/networkx/algorithms/tournament.py @@ -27,8 +27,14 @@ from networkx.utils import arbitrary_element from networkx.utils import not_implemented_for from networkx.utils import py_random_state -__all__ = ['hamiltonian_path', 'is_reachable', 'is_strongly_connected', - 'is_tournament', 'random_tournament', 'score_sequence'] +__all__ = [ + "hamiltonian_path", + "is_reachable", + "is_strongly_connected", + "is_tournament", + "random_tournament", + "score_sequence", +] def index_satisfying(iterable, condition): @@ -55,11 +61,11 @@ def index_satisfying(iterable, condition): try: return i + 1 except NameError as e: - raise ValueError('iterable must be non-empty') from e + raise ValueError("iterable must be non-empty") from e -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def is_tournament(G): """Returns True if and only if `G` is a tournament. @@ -84,12 +90,14 @@ def is_tournament(G): """ # In a tournament, there is exactly one directed edge joining each pair. - return (all((v in G[u]) ^ (u in G[v]) for u, v in combinations(G, 2)) and - nx.number_of_selfloops(G) == 0) + return ( + all((v in G[u]) ^ (u in G[v]) for u, v in combinations(G, 2)) + and nx.number_of_selfloops(G) == 0 + ) -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def hamiltonian_path(G): """Returns a Hamiltonian path in the given tournament graph. @@ -159,8 +167,8 @@ def random_tournament(n, seed=None): return nx.DiGraph(edges) -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def score_sequence(G): """Returns the score sequence for the given tournament graph. 
@@ -181,8 +189,8 @@ def score_sequence(G): return sorted(d for v, d in G.out_degree()) -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def tournament_matrix(G): r"""Returns the tournament matrix for the given tournament graph. @@ -223,8 +231,8 @@ def tournament_matrix(G): return A - A.T -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def is_reachable(G, s, t): """Decides whether there is a path from `s` to `t` in the tournament. @@ -282,9 +290,9 @@ def is_reachable(G, s, t): """ # TODO This is trivially parallelizable. - return {x for x in G - if x == v or x in G[v] or - any(is_path(G, [v, z, x]) for z in G)} + return { + x for x in G if x == v or x in G[v] or any(is_path(G, [v, z, x]) for z in G) + } def is_closed(G, nodes): """Decides whether the given set of nodes is closed. @@ -299,12 +307,11 @@ def is_reachable(G, s, t): # TODO This is trivially parallelizable. neighborhoods = [two_neighborhood(G, v) for v in G] - return all(not (is_closed(G, S) and s in S and t not in S) - for S in neighborhoods) + return all(not (is_closed(G, S) and s in S and t not in S) for S in neighborhoods) -@not_implemented_for('undirected') -@not_implemented_for('multigraph') +@not_implemented_for("undirected") +@not_implemented_for("multigraph") def is_strongly_connected(G): """Decides whether the given tournament is strongly connected. diff --git a/networkx/algorithms/traversal/beamsearch.py b/networkx/algorithms/traversal/beamsearch.py index 0840ed1b..101d68ea 100644 --- a/networkx/algorithms/traversal/beamsearch.py +++ b/networkx/algorithms/traversal/beamsearch.py @@ -2,7 +2,7 @@ from .breadth_first_search import generic_bfs_edges -__all__ = ['bfs_beam_edges'] +__all__ = ["bfs_beam_edges"] def bfs_beam_edges(G, source, value, width=None): diff --git a/networkx/algorithms/traversal/breadth_first_search.py b/networkx/algorithms/traversal/breadth_first_search.py index a8c11351..d2b6915c 100644 --- a/networkx/algorithms/traversal/breadth_first_search.py +++ b/networkx/algorithms/traversal/breadth_first_search.py @@ -3,8 +3,11 @@ import networkx as nx from collections import deque __all__ = [ - 'bfs_edges', 'bfs_tree', 'bfs_predecessors', 'bfs_successors', - 'descendants_at_distance' + "bfs_edges", + "bfs_tree", + "bfs_predecessors", + "bfs_successors", + "descendants_at_distance", ] @@ -216,8 +219,13 @@ def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None): """ T = nx.DiGraph() T.add_node(source) - edges_gen = bfs_edges(G, source, reverse=reverse, depth_limit=depth_limit, - sort_neighbors=sort_neighbors) + edges_gen = bfs_edges( + G, + source, + reverse=reverse, + depth_limit=depth_limit, + sort_neighbors=sort_neighbors, + ) T.add_edges_from(edges_gen) return T @@ -276,8 +284,9 @@ def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None): bfs_edges edge_bfs """ - for s, t in bfs_edges(G, source, depth_limit=depth_limit, - sort_neighbors=sort_neighbors): + for s, t in bfs_edges( + G, source, depth_limit=depth_limit, sort_neighbors=sort_neighbors + ): yield (t, s) @@ -337,8 +346,9 @@ def bfs_successors(G, source, depth_limit=None, sort_neighbors=None): """ parent = source children = [] - for p, c in bfs_edges(G, source, depth_limit=depth_limit, - sort_neighbors=sort_neighbors): + for p, c in bfs_edges( + G, source, depth_limit=depth_limit, sort_neighbors=sort_neighbors + ): if p == 
parent: children.append(c) continue diff --git a/networkx/algorithms/traversal/depth_first_search.py b/networkx/algorithms/traversal/depth_first_search.py index 2b0487a6..0a5b9871 100644 --- a/networkx/algorithms/traversal/depth_first_search.py +++ b/networkx/algorithms/traversal/depth_first_search.py @@ -2,10 +2,15 @@ import networkx as nx from collections import defaultdict -__all__ = ['dfs_edges', 'dfs_tree', - 'dfs_predecessors', 'dfs_successors', - 'dfs_preorder_nodes', 'dfs_postorder_nodes', - 'dfs_labeled_edges'] +__all__ = [ + "dfs_edges", + "dfs_tree", + "dfs_predecessors", + "dfs_successors", + "dfs_preorder_nodes", + "dfs_postorder_nodes", + "dfs_labeled_edges", +] def dfs_edges(G, source=None, depth_limit=None): @@ -282,7 +287,7 @@ def dfs_postorder_nodes(G, source=None, depth_limit=None): bfs_tree """ edges = nx.dfs_labeled_edges(G, source=source, depth_limit=depth_limit) - return (v for u, v, d in edges if d == 'reverse') + return (v for u, v, d in edges if d == "reverse") def dfs_preorder_nodes(G, source=None, depth_limit=None): @@ -333,7 +338,7 @@ def dfs_preorder_nodes(G, source=None, depth_limit=None): bfs_edges """ edges = nx.dfs_labeled_edges(G, source=source, depth_limit=depth_limit) - return (v for u, v, d in edges if d == 'forward') + return (v for u, v, d in edges if d == "forward") def dfs_labeled_edges(G, source=None, depth_limit=None): @@ -413,7 +418,7 @@ def dfs_labeled_edges(G, source=None, depth_limit=None): for start in nodes: if start in visited: continue - yield start, start, 'forward' + yield start, start, "forward" visited.add(start) stack = [(start, depth_limit, iter(G[start]))] while stack: @@ -421,14 +426,14 @@ def dfs_labeled_edges(G, source=None, depth_limit=None): try: child = next(children) if child in visited: - yield parent, child, 'nontree' + yield parent, child, "nontree" else: - yield parent, child, 'forward' + yield parent, child, "forward" visited.add(child) if depth_now > 1: stack.append((child, depth_now - 1, iter(G[child]))) except StopIteration: stack.pop() if stack: - yield stack[-1][0], parent, 'reverse' - yield start, start, 'reverse' + yield stack[-1][0], parent, "reverse" + yield start, start, "reverse" diff --git a/networkx/algorithms/traversal/edgebfs.py b/networkx/algorithms/traversal/edgebfs.py index 28ce4f1a..281c6d70 100644 --- a/networkx/algorithms/traversal/edgebfs.py +++ b/networkx/algorithms/traversal/edgebfs.py @@ -9,10 +9,10 @@ Algorithms for a breadth-first traversal of edges in a graph. 
from collections import deque import networkx as nx -FORWARD = 'forward' -REVERSE = 'reverse' +FORWARD = "forward" +REVERSE = "reverse" -__all__ = ['edge_bfs'] +__all__ = ["edge_bfs"] def edge_bfs(G, source=None, orientation=None): @@ -108,28 +108,36 @@ def edge_bfs(G, source=None, orientation=None): return directed = G.is_directed() - kwds = {'data': False} + kwds = {"data": False} if G.is_multigraph() is True: - kwds['keys'] = True + kwds["keys"] = True # set up edge lookup if orientation is None: + def edges_from(node): return iter(G.edges(node, **kwds)) - elif not directed or orientation == 'original': + + elif not directed or orientation == "original": + def edges_from(node): for e in G.edges(node, **kwds): yield e + (FORWARD,) - elif orientation == 'reverse': + + elif orientation == "reverse": + def edges_from(node): for e in G.in_edges(node, **kwds): yield e + (REVERSE,) - elif orientation == 'ignore': + + elif orientation == "ignore": + def edges_from(node): for e in G.edges(node, **kwds): yield e + (FORWARD,) for e in G.in_edges(node, **kwds): yield e + (REVERSE,) + else: raise nx.NetworkXError("invalid orientation argument.") @@ -146,7 +154,7 @@ def edge_bfs(G, source=None, orientation=None): def edge_id(edge): return (frozenset(edge[:2]),) + edge[2:] - check_reverse = directed and orientation in ('reverse', 'ignore') + check_reverse = directed and orientation in ("reverse", "ignore") # start BFS visited_nodes = {n for n in nodes} diff --git a/networkx/algorithms/traversal/edgedfs.py b/networkx/algorithms/traversal/edgedfs.py index e13f5719..01a38754 100644 --- a/networkx/algorithms/traversal/edgedfs.py +++ b/networkx/algorithms/traversal/edgedfs.py @@ -8,10 +8,10 @@ Algorithms for a depth-first traversal of edges in a graph. """ import networkx as nx -FORWARD = 'forward' -REVERSE = 'reverse' +FORWARD = "forward" +REVERSE = "reverse" -__all__ = ['edge_dfs'] +__all__ = ["edge_dfs"] def edge_dfs(G, source=None, orientation=None): @@ -96,43 +96,54 @@ def edge_dfs(G, source=None, orientation=None): return directed = G.is_directed() - kwds = {'data': False} + kwds = {"data": False} if G.is_multigraph() is True: - kwds['keys'] = True + kwds["keys"] = True # set up edge lookup if orientation is None: + def edges_from(node): return iter(G.edges(node, **kwds)) - elif not directed or orientation == 'original': + + elif not directed or orientation == "original": + def edges_from(node): for e in G.edges(node, **kwds): yield e + (FORWARD,) - elif orientation == 'reverse': + + elif orientation == "reverse": + def edges_from(node): for e in G.in_edges(node, **kwds): yield e + (REVERSE,) - elif orientation == 'ignore': + + elif orientation == "ignore": + def edges_from(node): for e in G.edges(node, **kwds): yield e + (FORWARD,) for e in G.in_edges(node, **kwds): yield e + (REVERSE,) + else: raise nx.NetworkXError("invalid orientation argument.") # set up formation of edge_id to easily look up if edge already returned if directed: + def edge_id(edge): # remove direction indicator return edge[:-1] if orientation is not None else edge + else: + def edge_id(edge): # single id for undirected requires frozenset on nodes return (frozenset(edge[:2]),) + edge[2:] # Basic setup - check_reverse = directed and orientation in ('reverse', 'ignore') + check_reverse = directed and orientation in ("reverse", "ignore") visited_edges = set() visited_nodes = set() diff --git a/networkx/algorithms/traversal/tests/test_bfs.py b/networkx/algorithms/traversal/tests/test_bfs.py index 068a65f8..b450de2f 100644 --- 
a/networkx/algorithms/traversal/tests/test_bfs.py +++ b/networkx/algorithms/traversal/tests/test_bfs.py @@ -3,7 +3,6 @@ import networkx as nx class TestBFS: - @classmethod def setup_class(cls): # simple graph @@ -12,12 +11,10 @@ class TestBFS: cls.G = G def test_successor(self): - assert (dict(nx.bfs_successors(self.G, source=0)) == - {0: [1], 1: [2, 3], 2: [4]}) + assert dict(nx.bfs_successors(self.G, source=0)) == {0: [1], 1: [2, 3], 2: [4]} def test_predecessor(self): - assert (dict(nx.bfs_predecessors(self.G, source=0)) == - {1: 0, 2: 1, 3: 1, 4: 2}) + assert dict(nx.bfs_predecessors(self.G, source=0)) == {1: 0, 2: 1, 3: 1, 4: 2} def test_bfs_tree(self): T = nx.bfs_tree(self.G, source=0) @@ -53,7 +50,6 @@ class TestBFS: class TestBreadthLimitedSearch: - @classmethod def setup_class(cls): # a tree @@ -68,19 +64,32 @@ class TestBreadthLimitedSearch: cls.D = D def test_limited_bfs_successor(self): - assert (dict(nx.bfs_successors(self.G, source=1, depth_limit=3)) == - {1: [0, 2], 2: [3, 7], 3: [4], 7: [8]}) - result = {n: sorted(s) for n, s in nx.bfs_successors(self.D, source=7, - depth_limit=2)} + assert dict(nx.bfs_successors(self.G, source=1, depth_limit=3)) == { + 1: [0, 2], + 2: [3, 7], + 3: [4], + 7: [8], + } + result = { + n: sorted(s) for n, s in nx.bfs_successors(self.D, source=7, depth_limit=2) + } assert result == {8: [9], 2: [3], 7: [2, 8]} def test_limited_bfs_predecessor(self): - assert (dict(nx.bfs_predecessors(self.G, source=1, - depth_limit=3)) == - {0: 1, 2: 1, 3: 2, 4: 3, 7: 2, 8: 7}) - assert (dict(nx.bfs_predecessors(self.D, source=7, - depth_limit=2)) == - {2: 7, 3: 2, 8: 7, 9: 8}) + assert dict(nx.bfs_predecessors(self.G, source=1, depth_limit=3)) == { + 0: 1, + 2: 1, + 3: 2, + 4: 3, + 7: 2, + 8: 7, + } + assert dict(nx.bfs_predecessors(self.D, source=7, depth_limit=2)) == { + 2: 7, + 3: 2, + 8: 7, + 9: 8, + } def test_limited_bfs_tree(self): T = nx.bfs_tree(self.G, source=3, depth_limit=1) @@ -88,6 +97,4 @@ class TestBreadthLimitedSearch: def test_limited_bfs_edges(self): edges = nx.bfs_edges(self.G, source=9, depth_limit=4) - assert list(edges) == [(9, 8), (9, 10), (8, 7), - (7, 2), (2, 1), (2, 3)] - + assert list(edges) == [(9, 8), (9, 10), (8, 7), (7, 2), (2, 1), (2, 3)] diff --git a/networkx/algorithms/traversal/tests/test_dfs.py b/networkx/algorithms/traversal/tests/test_dfs.py index eb35582a..1be3b4aa 100644 --- a/networkx/algorithms/traversal/tests/test_dfs.py +++ b/networkx/algorithms/traversal/tests/test_dfs.py @@ -2,7 +2,6 @@ import networkx as nx class TestDFS: - @classmethod def setup_class(cls): # simple graph @@ -15,23 +14,19 @@ class TestDFS: cls.D = D def test_preorder_nodes(self): - assert (list(nx.dfs_preorder_nodes(self.G, source=0)) == - [0, 1, 2, 4, 3]) + assert list(nx.dfs_preorder_nodes(self.G, source=0)) == [0, 1, 2, 4, 3] assert list(nx.dfs_preorder_nodes(self.D)) == [0, 1, 2, 3] def test_postorder_nodes(self): - assert (list(nx.dfs_postorder_nodes(self.G, source=0)) == - [3, 4, 2, 1, 0]) + assert list(nx.dfs_postorder_nodes(self.G, source=0)) == [3, 4, 2, 1, 0] assert list(nx.dfs_postorder_nodes(self.D)) == [1, 0, 3, 2] def test_successor(self): - assert (nx.dfs_successors(self.G, source=0) == - {0: [1], 1: [2], 2: [4], 4: [3]}) + assert nx.dfs_successors(self.G, source=0) == {0: [1], 1: [2], 2: [4], 4: [3]} assert nx.dfs_successors(self.D) == {0: [1], 2: [3]} def test_predecessor(self): - assert (nx.dfs_predecessors(self.G, source=0) == - {1: 0, 2: 1, 3: 4, 4: 2}) + assert nx.dfs_predecessors(self.G, source=0) == {1: 0, 2: 1, 3: 4, 4: 2} 
assert nx.dfs_predecessors(self.D) == {1: 0, 3: 2} def test_dfs_tree(self): @@ -58,12 +53,12 @@ class TestDFS: def test_dfs_labeled_edges(self): edges = list(nx.dfs_labeled_edges(self.G, source=0)) - forward = [(u, v) for (u, v, d) in edges if d == 'forward'] + forward = [(u, v) for (u, v, d) in edges if d == "forward"] assert forward == [(0, 0), (0, 1), (1, 2), (2, 4), (4, 3)] def test_dfs_labeled_disconnected_edges(self): edges = list(nx.dfs_labeled_edges(self.D)) - forward = [(u, v) for (u, v, d) in edges if d == 'forward'] + forward = [(u, v) for (u, v, d) in edges if d == "forward"] assert forward == [(0, 0), (0, 1), (2, 2), (2, 3)] def test_dfs_tree_isolates(self): @@ -79,7 +74,6 @@ class TestDFS: class TestDepthLimitedSearch: - @classmethod def setup_class(cls): # a tree @@ -94,30 +88,46 @@ class TestDepthLimitedSearch: cls.D = D def test_dls_preorder_nodes(self): - assert list(nx.dfs_preorder_nodes(self.G, source=0, - depth_limit=2)) == [0, 1, 2] - assert list(nx.dfs_preorder_nodes(self.D, source=1, - depth_limit=2)) == ([1, 0]) + assert list(nx.dfs_preorder_nodes(self.G, source=0, depth_limit=2)) == [0, 1, 2] + assert list(nx.dfs_preorder_nodes(self.D, source=1, depth_limit=2)) == ([1, 0]) def test_dls_postorder_nodes(self): - assert list(nx.dfs_postorder_nodes(self.G, - source=3, depth_limit=3)) == [1, 7, 2, 5, 4, 3] - assert list(nx.dfs_postorder_nodes(self.D, - source=2, depth_limit=2)) == ([3, 7, 2]) + assert list(nx.dfs_postorder_nodes(self.G, source=3, depth_limit=3)) == [ + 1, + 7, + 2, + 5, + 4, + 3, + ] + assert list(nx.dfs_postorder_nodes(self.D, source=2, depth_limit=2)) == ( + [3, 7, 2] + ) def test_dls_successor(self): result = nx.dfs_successors(self.G, source=4, depth_limit=3) - assert ({n: set(v) for n, v in result.items()} == - {2: {1, 7}, 3: {2}, 4: {3, 5}, 5: {6}}) + assert {n: set(v) for n, v in result.items()} == { + 2: {1, 7}, + 3: {2}, + 4: {3, 5}, + 5: {6}, + } result = nx.dfs_successors(self.D, source=7, depth_limit=2) - assert ({n: set(v) for n, v in result.items()} == - {8: {9}, 2: {3}, 7: {8, 2}}) + assert {n: set(v) for n, v in result.items()} == {8: {9}, 2: {3}, 7: {8, 2}} def test_dls_predecessor(self): - assert (nx.dfs_predecessors(self.G, source=0, depth_limit=3) == - {1: 0, 2: 1, 3: 2, 7: 2}) - assert (nx.dfs_predecessors(self.D, source=2, depth_limit=3) == - {8: 7, 9: 8, 3: 2, 7: 2}) + assert nx.dfs_predecessors(self.G, source=0, depth_limit=3) == { + 1: 0, + 2: 1, + 3: 2, + 7: 2, + } + assert nx.dfs_predecessors(self.D, source=2, depth_limit=3) == { + 8: 7, + 9: 8, + 3: 2, + 7: 2, + } def test_dls_tree(self): T = nx.dfs_tree(self.G, source=3, depth_limit=1) @@ -125,15 +135,14 @@ class TestDepthLimitedSearch: def test_dls_edges(self): edges = nx.dfs_edges(self.G, source=9, depth_limit=4) - assert list(edges) == [(9, 8), (8, 7), - (7, 2), (2, 1), (2, 3), (9, 10)] + assert list(edges) == [(9, 8), (8, 7), (7, 2), (2, 1), (2, 3), (9, 10)] def test_dls_labeled_edges(self): edges = list(nx.dfs_labeled_edges(self.G, source=5, depth_limit=1)) - forward = [(u, v) for (u, v, d) in edges if d == 'forward'] + forward = [(u, v) for (u, v, d) in edges if d == "forward"] assert forward == [(5, 5), (5, 4), (5, 6)] def test_dls_labeled_disconnected_edges(self): edges = list(nx.dfs_labeled_edges(self.G, source=6, depth_limit=2)) - forward = [(u, v) for (u, v, d) in edges if d == 'forward'] + forward = [(u, v) for (u, v, d) in edges if d == "forward"] assert forward == [(6, 6), (6, 5), (5, 4)] diff --git a/networkx/algorithms/traversal/tests/test_edgebfs.py 
b/networkx/algorithms/traversal/tests/test_edgebfs.py index 2628a0f1..170be25c 100644 --- a/networkx/algorithms/traversal/tests/test_edgebfs.py +++ b/networkx/algorithms/traversal/tests/test_edgebfs.py @@ -40,7 +40,7 @@ class TestEdgeBFS: def test_digraph_orientation_invalid(self): G = nx.DiGraph(self.edges) - edge_iterator = edge_bfs(G, self.nodes, orientation='hello') + edge_iterator = edge_bfs(G, self.nodes, orientation="hello") pytest.raises(nx.NetworkXError, list, edge_iterator) def test_digraph_orientation_none(self): @@ -51,9 +51,14 @@ class TestEdgeBFS: def test_digraph_orientation_original(self): G = nx.DiGraph(self.edges) - x = list(edge_bfs(G, self.nodes, orientation='original')) - x_ = [(0, 1, FORWARD), (1, 0, FORWARD), (2, 0, FORWARD), - (2, 1, FORWARD), (3, 1, FORWARD)] + x = list(edge_bfs(G, self.nodes, orientation="original")) + x_ = [ + (0, 1, FORWARD), + (1, 0, FORWARD), + (2, 0, FORWARD), + (2, 1, FORWARD), + (3, 1, FORWARD), + ] assert x == x_ def test_digraph2(self): @@ -65,15 +70,20 @@ class TestEdgeBFS: def test_digraph_rev(self): G = nx.DiGraph(self.edges) - x = list(edge_bfs(G, self.nodes, orientation='reverse')) - x_ = [(1, 0, REVERSE), (2, 0, REVERSE), (0, 1, REVERSE), - (2, 1, REVERSE), (3, 1, REVERSE)] + x = list(edge_bfs(G, self.nodes, orientation="reverse")) + x_ = [ + (1, 0, REVERSE), + (2, 0, REVERSE), + (0, 1, REVERSE), + (2, 1, REVERSE), + (3, 1, REVERSE), + ] assert x == x_ def test_digraph_rev2(self): G = nx.DiGraph() nx.add_path(G, range(4)) - x = list(edge_bfs(G, [3], orientation='reverse')) + x = list(edge_bfs(G, [3], orientation="reverse")) x_ = [(2, 3, REVERSE), (1, 2, REVERSE), (0, 1, REVERSE)] assert x == x_ @@ -97,34 +107,45 @@ class TestEdgeBFS: def test_multidigraph_rev(self): G = nx.MultiDiGraph(self.edges) - x = list(edge_bfs(G, self.nodes, orientation='reverse')) - x_ = [(1, 0, 0, REVERSE), - (1, 0, 1, REVERSE), - (2, 0, 0, REVERSE), - (0, 1, 0, REVERSE), - (2, 1, 0, REVERSE), - (3, 1, 0, REVERSE)] + x = list(edge_bfs(G, self.nodes, orientation="reverse")) + x_ = [ + (1, 0, 0, REVERSE), + (1, 0, 1, REVERSE), + (2, 0, 0, REVERSE), + (0, 1, 0, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] assert x == x_ def test_digraph_ignore(self): G = nx.DiGraph(self.edges) - x = list(edge_bfs(G, self.nodes, orientation='ignore')) - x_ = [(0, 1, FORWARD), (1, 0, REVERSE), (2, 0, REVERSE), - (2, 1, REVERSE), (3, 1, REVERSE)] + x = list(edge_bfs(G, self.nodes, orientation="ignore")) + x_ = [ + (0, 1, FORWARD), + (1, 0, REVERSE), + (2, 0, REVERSE), + (2, 1, REVERSE), + (3, 1, REVERSE), + ] assert x == x_ def test_digraph_ignore2(self): G = nx.DiGraph() nx.add_path(G, range(4)) - x = list(edge_bfs(G, [0], orientation='ignore')) + x = list(edge_bfs(G, [0], orientation="ignore")) x_ = [(0, 1, FORWARD), (1, 2, FORWARD), (2, 3, FORWARD)] assert x == x_ def test_multidigraph_ignore(self): G = nx.MultiDiGraph(self.edges) - x = list(edge_bfs(G, self.nodes, orientation='ignore')) - x_ = [(0, 1, 0, FORWARD), (1, 0, 0, REVERSE), - (1, 0, 1, REVERSE), (2, 0, 0, REVERSE), - (2, 1, 0, REVERSE), (3, 1, 0, REVERSE), - ] + x = list(edge_bfs(G, self.nodes, orientation="ignore")) + x_ = [ + (0, 1, 0, FORWARD), + (1, 0, 0, REVERSE), + (1, 0, 1, REVERSE), + (2, 0, 0, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] assert x == x_ diff --git a/networkx/algorithms/traversal/tests/test_edgedfs.py b/networkx/algorithms/traversal/tests/test_edgedfs.py index 95abc80d..6c12ae21 100644 --- a/networkx/algorithms/traversal/tests/test_edgedfs.py +++ 
b/networkx/algorithms/traversal/tests/test_edgedfs.py @@ -43,7 +43,7 @@ class TestEdgeDFS: def test_digraph_orientation_invalid(self): G = nx.DiGraph(self.edges) - edge_iterator = edge_dfs(G, self.nodes, orientation='hello') + edge_iterator = edge_dfs(G, self.nodes, orientation="hello") pytest.raises(nx.NetworkXError, list, edge_iterator) def test_digraph_orientation_none(self): @@ -54,9 +54,8 @@ class TestEdgeDFS: def test_digraph_orientation_original(self): G = nx.DiGraph(self.edges) - x = list(edge_dfs(G, self.nodes, orientation='original')) - x_ = [(0, 1, FORWARD), (1, 0, FORWARD), - (2, 1, FORWARD), (3, 1, FORWARD)] + x = list(edge_dfs(G, self.nodes, orientation="original")) + x_ = [(0, 1, FORWARD), (1, 0, FORWARD), (2, 1, FORWARD), (3, 1, FORWARD)] assert x == x_ def test_digraph2(self): @@ -68,15 +67,14 @@ class TestEdgeDFS: def test_digraph_rev(self): G = nx.DiGraph(self.edges) - x = list(edge_dfs(G, self.nodes, orientation='reverse')) - x_ = [(1, 0, REVERSE), (0, 1, REVERSE), - (2, 1, REVERSE), (3, 1, REVERSE)] + x = list(edge_dfs(G, self.nodes, orientation="reverse")) + x_ = [(1, 0, REVERSE), (0, 1, REVERSE), (2, 1, REVERSE), (3, 1, REVERSE)] assert x == x_ def test_digraph_rev2(self): G = nx.DiGraph() nx.add_path(G, range(4)) - x = list(edge_dfs(G, [3], orientation='reverse')) + x = list(edge_dfs(G, [3], orientation="reverse")) x_ = [(2, 3, REVERSE), (1, 2, REVERSE), (0, 1, REVERSE)] assert x == x_ @@ -100,32 +98,37 @@ class TestEdgeDFS: def test_multidigraph_rev(self): G = nx.MultiDiGraph(self.edges) - x = list(edge_dfs(G, self.nodes, orientation='reverse')) - x_ = [(1, 0, 0, REVERSE), - (0, 1, 0, REVERSE), - (1, 0, 1, REVERSE), - (2, 1, 0, REVERSE), - (3, 1, 0, REVERSE)] + x = list(edge_dfs(G, self.nodes, orientation="reverse")) + x_ = [ + (1, 0, 0, REVERSE), + (0, 1, 0, REVERSE), + (1, 0, 1, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] assert x == x_ def test_digraph_ignore(self): G = nx.DiGraph(self.edges) - x = list(edge_dfs(G, self.nodes, orientation='ignore')) - x_ = [(0, 1, FORWARD), (1, 0, FORWARD), - (2, 1, REVERSE), (3, 1, REVERSE)] + x = list(edge_dfs(G, self.nodes, orientation="ignore")) + x_ = [(0, 1, FORWARD), (1, 0, FORWARD), (2, 1, REVERSE), (3, 1, REVERSE)] assert x == x_ def test_digraph_ignore2(self): G = nx.DiGraph() nx.add_path(G, range(4)) - x = list(edge_dfs(G, [0], orientation='ignore')) + x = list(edge_dfs(G, [0], orientation="ignore")) x_ = [(0, 1, FORWARD), (1, 2, FORWARD), (2, 3, FORWARD)] assert x == x_ def test_multidigraph_ignore(self): G = nx.MultiDiGraph(self.edges) - x = list(edge_dfs(G, self.nodes, orientation='ignore')) - x_ = [(0, 1, 0, FORWARD), (1, 0, 0, FORWARD), - (1, 0, 1, REVERSE), (2, 1, 0, REVERSE), - (3, 1, 0, REVERSE)] + x = list(edge_dfs(G, self.nodes, orientation="ignore")) + x_ = [ + (0, 1, 0, FORWARD), + (1, 0, 0, FORWARD), + (1, 0, 1, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] assert x == x_ diff --git a/networkx/algorithms/tree/branchings.py b/networkx/algorithms/tree/branchings.py index 3322e423..c68564e0 100644 --- a/networkx/algorithms/tree/branchings.py +++ b/networkx/algorithms/tree/branchings.py @@ -38,26 +38,29 @@ from .recognition import is_arborescence, is_branching __all__ = [ - 'branching_weight', 'greedy_branching', - 'maximum_branching', 'minimum_branching', - 'maximum_spanning_arborescence', 'minimum_spanning_arborescence', - 'Edmonds' + "branching_weight", + "greedy_branching", + "maximum_branching", + "minimum_branching", + "maximum_spanning_arborescence", + 
"minimum_spanning_arborescence", + "Edmonds", ] -KINDS = {'max', 'min'} +KINDS = {"max", "min"} STYLES = { - 'branching': 'branching', - 'arborescence': 'arborescence', - 'spanning arborescence': 'arborescence' + "branching": "branching", + "arborescence": "arborescence", + "spanning arborescence": "arborescence", } -INF = float('inf') +INF = float("inf") @py_random_state(1) def random_string(L=15, seed=None): - return ''.join([seed.choice(string.ascii_letters) for n in range(L)]) + return "".join([seed.choice(string.ascii_letters) for n in range(L)]) def _min_weight(weight): @@ -68,7 +71,7 @@ def _max_weight(weight): return weight -def branching_weight(G, attr='weight', default=1): +def branching_weight(G, attr="weight", default=1): """ Returns the total weight of a branching. @@ -77,7 +80,7 @@ def branching_weight(G, attr='weight', default=1): @py_random_state(4) -def greedy_branching(G, attr='weight', default=1, kind='max', seed=None): +def greedy_branching(G, attr="weight", default=1, kind="max", seed=None): """ Returns a branching obtained through a greedy algorithm. @@ -113,7 +116,7 @@ def greedy_branching(G, attr='weight', default=1, kind='max', seed=None): if kind not in KINDS: raise nx.NetworkXException("Unknown value for `kind`.") - if kind == 'min': + if kind == "min": reverse = False else: reverse = True @@ -122,8 +125,7 @@ def greedy_branching(G, attr='weight', default=1, kind='max', seed=None): # Generate a random string the graph probably won't have. attr = random_string(seed=seed) - edges = [(u, v, data.get(attr, default)) - for (u, v, data) in G.edges(data=True)] + edges = [(u, v, data.get(attr, default)) for (u, v, data) in G.edges(data=True)] # We sort by weight, but also by nodes to normalize behavior across runs. try: @@ -219,7 +221,7 @@ class MultiDiGraph_EdgeKey(nx.MultiDiGraph): try: u, v, _ = self.edge_index[key] except KeyError as e: - raise KeyError(f'Invalid edge key {key!r}') from e + raise KeyError(f"Invalid edge key {key!r}") from e else: del self.edge_index[key] self._cls.remove_edge(u, v, key) @@ -268,7 +270,7 @@ class Edmonds: # Since we will be creating graphs with new nodes, we need to make # sure that our node names do not conflict with the real node names. - self.template = random_string(seed=seed) + '_{0}' + self.template = random_string(seed=seed) + "_{0}" def _init(self, attr, default, kind, style, preserve_attrs, seed): if kind not in KINDS: @@ -281,7 +283,7 @@ class Edmonds: self.style = style # Determine how we are going to transform the weights. - if kind == 'min': + if kind == "min": self.trans = trans = _min_weight else: self.trans = trans = _max_weight @@ -296,7 +298,7 @@ class Edmonds: # This attribute is used to store whether a particular edge is still # a candidate. We generate a random attr to remove clashes with # preserved edges - self.candidate_attr = 'candidate_' + random_string(seed=seed) + self.candidate_attr = "candidate_" + random_string(seed=seed) # The object we manipulate at each step is a multidigraph. self.G = G = MultiDiGraph_EdgeKey() @@ -321,8 +323,8 @@ class Edmonds: # graph B^i. So we will have strictly more B^i than the paper does. self.B = MultiDiGraph_EdgeKey() self.B.edge_index = {} - self.graphs = [] # G^i - self.branchings = [] # B^i + self.graphs = [] # G^i + self.branchings = [] # B^i self.uf = nx.utils.UnionFind() # A list of lists of edge indexes. Each list is a circuit for graph G^i. @@ -335,8 +337,15 @@ class Edmonds: # in circuit G^0 (depsite their weights being different). 
self.minedge_circuit = [] - def find_optimum(self, attr='weight', default=1, kind='max', - style='branching', preserve_attrs=False, seed=None): + def find_optimum( + self, + attr="weight", + default=1, + kind="max", + style="branching", + preserve_attrs=False, + seed=None, + ): """ Returns a branching from G. @@ -403,9 +412,9 @@ class Edmonds: # meet the break condition (b) from the paper: # (b) every node of G^i is in D^i and E^i is a branching # Construction guarantees that it's a branching. - assert(len(G) == len(B)) + assert len(G) == len(B) if len(B): - assert(is_branching(B)) + assert is_branching(B) if self.store: self.graphs.append(G.copy()) @@ -451,7 +460,7 @@ class Edmonds: # Conditions for adding the edge. # If weight < 0, then it cannot help in finding a maximum branching. - if self.style == 'branching' and weight <= 0: + if self.style == "branching" and weight <= 0: acceptable = False else: acceptable = True @@ -548,7 +557,7 @@ class Edmonds: """ if u not in G: # print(G.nodes(), u) - raise Exception(f'{u!r} not in G') + raise Exception(f"{u!r} not in G") for v in G.pred[u]: for edgekey in G.pred[u][v]: if edgekey in edgekeys: @@ -578,8 +587,7 @@ class Edmonds: # Note, we ask if it is a root in the full graph, not the branching. # The branching alone doesn't have all the edges. - isroot, edgekey = is_root(self.graphs[self.level + 1], - merged_node, edges) + isroot, edgekey = is_root(self.graphs[self.level + 1], merged_node, edges) edges.update(circuit) if isroot: minedge = self.minedge_circuit[self.level] @@ -630,38 +638,40 @@ class Edmonds: return H -def maximum_branching(G, attr='weight', default=1, preserve_attrs=False): +def maximum_branching(G, attr="weight", default=1, preserve_attrs=False): ed = Edmonds(G) - B = ed.find_optimum(attr, default, kind='max', style='branching', - preserve_attrs=preserve_attrs) + B = ed.find_optimum( + attr, default, kind="max", style="branching", preserve_attrs=preserve_attrs + ) return B -def minimum_branching(G, attr='weight', default=1, preserve_attrs=False): +def minimum_branching(G, attr="weight", default=1, preserve_attrs=False): ed = Edmonds(G) - B = ed.find_optimum(attr, default, kind='min', style='branching', - preserve_attrs=preserve_attrs) + B = ed.find_optimum( + attr, default, kind="min", style="branching", preserve_attrs=preserve_attrs + ) return B -def maximum_spanning_arborescence(G, attr='weight', default=1, - preserve_attrs=False): +def maximum_spanning_arborescence(G, attr="weight", default=1, preserve_attrs=False): ed = Edmonds(G) - B = ed.find_optimum(attr, default, kind='max', style='arborescence', - preserve_attrs=preserve_attrs) + B = ed.find_optimum( + attr, default, kind="max", style="arborescence", preserve_attrs=preserve_attrs + ) if not is_arborescence(B): - msg = 'No maximum spanning arborescence in G.' + msg = "No maximum spanning arborescence in G." raise nx.exception.NetworkXException(msg) return B -def minimum_spanning_arborescence(G, attr='weight', default=1, - preserve_attrs=False): +def minimum_spanning_arborescence(G, attr="weight", default=1, preserve_attrs=False): ed = Edmonds(G) - B = ed.find_optimum(attr, default, kind='min', style='arborescence', - preserve_attrs=preserve_attrs) + B = ed.find_optimum( + attr, default, kind="min", style="arborescence", preserve_attrs=preserve_attrs + ) if not is_arborescence(B): - msg = 'No minimum spanning arborescence in G.' + msg = "No minimum spanning arborescence in G." 
raise nx.exception.NetworkXException(msg) return B @@ -688,22 +698,29 @@ B : (multi)digraph-like A {kind} {style}. """ -docstring_arborescence = docstring_branching + """ +docstring_arborescence = ( + docstring_branching + + """ Raises ------ NetworkXException If the graph does not contain a {kind} {style}. """ +) -maximum_branching.__doc__ = \ - docstring_branching.format(kind='maximum', style='branching') +maximum_branching.__doc__ = docstring_branching.format( + kind="maximum", style="branching" +) -minimum_branching.__doc__ = \ - docstring_branching.format(kind='minimum', style='branching') +minimum_branching.__doc__ = docstring_branching.format( + kind="minimum", style="branching" +) -maximum_spanning_arborescence.__doc__ = \ - docstring_arborescence.format(kind='maximum', style='spanning arborescence') +maximum_spanning_arborescence.__doc__ = docstring_arborescence.format( + kind="maximum", style="spanning arborescence" +) -minimum_spanning_arborescence.__doc__ = \ - docstring_arborescence.format(kind='minimum', style='spanning arborescence') +minimum_spanning_arborescence.__doc__ = docstring_arborescence.format( + kind="minimum", style="spanning arborescence" +) diff --git a/networkx/algorithms/tree/coding.py b/networkx/algorithms/tree/coding.py index 110bbd60..0147e7ee 100644 --- a/networkx/algorithms/tree/coding.py +++ b/networkx/algorithms/tree/coding.py @@ -14,8 +14,13 @@ from itertools import chain import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['from_nested_tuple', 'from_prufer_sequence', 'NotATree', - 'to_nested_tuple', 'to_prufer_sequence'] +__all__ = [ + "from_nested_tuple", + "from_prufer_sequence", + "NotATree", + "to_nested_tuple", + "to_prufer_sequence", +] class NotATree(nx.NetworkXException): @@ -26,7 +31,7 @@ class NotATree(nx.NetworkXException): """ -@not_implemented_for('directed') +@not_implemented_for("directed") def to_nested_tuple(T, root, canonical_form=False): """Returns a nested tuple representation of the given tree. @@ -115,9 +120,9 @@ def to_nested_tuple(T, root, canonical_form=False): # Do some sanity checks on the input. if not nx.is_tree(T): - raise nx.NotATree('provided graph is not a tree') + raise nx.NotATree("provided graph is not a tree") if root not in T: - raise nx.NodeNotFound(f'Graph {T} contains no node {root}') + raise nx.NodeNotFound(f"Graph {T} contains no node {root}") return _make_tuple(T, root, None) @@ -205,7 +210,7 @@ def from_nested_tuple(sequence, sensible_relabeling=False): return T -@not_implemented_for('directed') +@not_implemented_for("directed") def to_prufer_sequence(T): r"""Returns the Prüfer sequence of the given tree. @@ -281,12 +286,12 @@ def to_prufer_sequence(T): # Perform some sanity checks on the input. 
n = len(T) if n < 2: - msg = 'Prüfer sequence undefined for trees with fewer than two nodes' + msg = "Prüfer sequence undefined for trees with fewer than two nodes" raise nx.NetworkXPointlessConcept(msg) if not nx.is_tree(T): - raise nx.NotATree('provided graph is not a tree') + raise nx.NotATree("provided graph is not a tree") if set(T) != set(range(n)): - raise KeyError('tree must have node labels {0, ..., n - 1}') + raise KeyError("tree must have node labels {0, ..., n - 1}") degree = dict(T.degree()) diff --git a/networkx/algorithms/tree/mst.py b/networkx/algorithms/tree/mst.py index ecb8615a..f3f352a7 100644 --- a/networkx/algorithms/tree/mst.py +++ b/networkx/algorithms/tree/mst.py @@ -11,14 +11,17 @@ import networkx as nx from networkx.utils import UnionFind, not_implemented_for __all__ = [ - 'minimum_spanning_edges', 'maximum_spanning_edges', - 'minimum_spanning_tree', 'maximum_spanning_tree', + "minimum_spanning_edges", + "maximum_spanning_edges", + "minimum_spanning_tree", + "maximum_spanning_tree", ] -@not_implemented_for('multigraph') -def boruvka_mst_edges(G, minimum=True, weight='weight', - keys=False, data=True, ignore_nan=False): +@not_implemented_for("multigraph") +def boruvka_mst_edges( + G, minimum=True, weight="weight", keys=False, data=True, ignore_nan=False +): """Iterate over edges of a Borůvka's algorithm min/max spanning tree. Parameters @@ -60,7 +63,7 @@ def boruvka_mst_edges(G, minimum=True, weight='weight', """ sign = 1 if minimum else -1 - minwt = float('inf') + minwt = float("inf") boundary = None for e in nx.edge_boundary(G, component, data=True): wt = e[-1].get(weight, 1) * sign @@ -112,8 +115,9 @@ def boruvka_mst_edges(G, minimum=True, weight='weight', forest.union(u, v) -def kruskal_mst_edges(G, minimum, weight='weight', - keys=True, data=True, ignore_nan=False): +def kruskal_mst_edges( + G, minimum, weight="weight", keys=True, data=True, ignore_nan=False +): """Iterate over edges of a Kruskal's algorithm min/max spanning tree. Parameters @@ -155,6 +159,7 @@ def kruskal_mst_edges(G, minimum, weight='weight', msg = f"NaN found as an edge weight. Edge {(u, v, k, d)}" raise ValueError(msg) yield wt, u, v, k, d + else: edges = G.edges(data=True) @@ -168,6 +173,7 @@ def kruskal_mst_edges(G, minimum, weight='weight', msg = f"NaN found as an edge weight. Edge {(u, v, d)}" raise ValueError(msg) yield wt, u, v, d + edges = sorted(filter_nan_edges(), key=itemgetter(0)) # Multigraphs need to handle edge keys in addition to edge data. if G.is_multigraph(): @@ -194,8 +200,7 @@ def kruskal_mst_edges(G, minimum, weight='weight', subtrees.union(u, v) -def prim_mst_edges(G, minimum, weight='weight', - keys=True, data=True, ignore_nan=False): +def prim_mst_edges(G, minimum, weight="weight", keys=True, data=True, ignore_nan=False): """Iterate over edges of Prim's algorithm min/max spanning tree. 
Parameters @@ -292,16 +297,17 @@ def prim_mst_edges(G, minimum, weight='weight', ALGORITHMS = { - 'boruvka': boruvka_mst_edges, - 'borůvka': boruvka_mst_edges, - 'kruskal': kruskal_mst_edges, - 'prim': prim_mst_edges + "boruvka": boruvka_mst_edges, + "borůvka": boruvka_mst_edges, + "kruskal": kruskal_mst_edges, + "prim": prim_mst_edges, } -@not_implemented_for('directed') -def minimum_spanning_edges(G, algorithm='kruskal', weight='weight', - keys=True, data=True, ignore_nan=False): +@not_implemented_for("directed") +def minimum_spanning_edges( + G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False +): """Generate edges in a minimum spanning forest of an undirected weighted graph. @@ -387,13 +393,15 @@ def minimum_spanning_edges(G, algorithm='kruskal', weight='weight', msg = f"{algorithm} is not a valid choice for an algorithm." raise ValueError(msg) from e - return algo(G, minimum=True, weight=weight, keys=keys, data=data, - ignore_nan=ignore_nan) + return algo( + G, minimum=True, weight=weight, keys=keys, data=data, ignore_nan=ignore_nan + ) -@not_implemented_for('directed') -def maximum_spanning_edges(G, algorithm='kruskal', weight='weight', - keys=True, data=True, ignore_nan=False): +@not_implemented_for("directed") +def maximum_spanning_edges( + G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False +): """Generate edges in a maximum spanning forest of an undirected weighted graph. @@ -478,12 +486,12 @@ def maximum_spanning_edges(G, algorithm='kruskal', weight='weight', msg = f"{algorithm} is not a valid choice for an algorithm." raise ValueError(msg) from e - return algo(G, minimum=False, weight=weight, keys=keys, data=data, - ignore_nan=ignore_nan) + return algo( + G, minimum=False, weight=weight, keys=keys, data=data, ignore_nan=ignore_nan + ) -def minimum_spanning_tree(G, weight='weight', algorithm='kruskal', - ignore_nan=False): +def minimum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): """Returns a minimum spanning tree or forest on an undirected graph `G`. Parameters @@ -532,8 +540,9 @@ def minimum_spanning_tree(G, weight='weight', algorithm='kruskal', Isolated nodes with self-loops are in the tree as edgeless isolated nodes. """ - edges = minimum_spanning_edges(G, algorithm, weight, keys=True, - data=True, ignore_nan=ignore_nan) + edges = minimum_spanning_edges( + G, algorithm, weight, keys=True, data=True, ignore_nan=ignore_nan + ) T = G.__class__() # Same graph class as G T.graph.update(G.graph) T.add_nodes_from(G.nodes.items()) @@ -541,8 +550,7 @@ def minimum_spanning_tree(G, weight='weight', algorithm='kruskal', return T -def maximum_spanning_tree(G, weight='weight', algorithm='kruskal', - ignore_nan=False): +def maximum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): """Returns a maximum spanning tree or forest on an undirected graph `G`. Parameters @@ -593,8 +601,9 @@ def maximum_spanning_tree(G, weight='weight', algorithm='kruskal', Isolated nodes with self-loops are in the tree as edgeless isolated nodes. 
""" - edges = maximum_spanning_edges(G, algorithm, weight, keys=True, - data=True, ignore_nan=ignore_nan) + edges = maximum_spanning_edges( + G, algorithm, weight, keys=True, data=True, ignore_nan=ignore_nan + ) edges = list(edges) T = G.__class__() # Same graph class as G T.graph.update(G.graph) diff --git a/networkx/algorithms/tree/operations.py b/networkx/algorithms/tree/operations.py index f520d686..1a017a5e 100644 --- a/networkx/algorithms/tree/operations.py +++ b/networkx/algorithms/tree/operations.py @@ -5,7 +5,7 @@ from itertools import chain import networkx as nx from itertools import accumulate -__all__ = ['join'] +__all__ = ["join"] def join(rooted_trees, label_attribute=None): @@ -66,22 +66,27 @@ def join(rooted_trees, label_attribute=None): # Relabel the nodes so that their union is the integers starting at 1. if label_attribute is None: - label_attribute = '_old' - relabel = partial(nx.convert_node_labels_to_integers, - label_attribute=label_attribute) + label_attribute = "_old" + relabel = partial( + nx.convert_node_labels_to_integers, label_attribute=label_attribute + ) lengths = (len(tree) for tree in trees[:-1]) first_labels = chain([0], accumulate(lengths)) - trees = [relabel(tree, first_label=first_label + 1) - for tree, first_label in zip(trees, first_labels)] + trees = [ + relabel(tree, first_label=first_label + 1) + for tree, first_label in zip(trees, first_labels) + ] # Get the relabeled roots. - roots = [next(v for v, d in tree.nodes(data=True) if d.get('_old') == root) - for tree, root in zip(trees, roots)] + roots = [ + next(v for v, d in tree.nodes(data=True) if d.get("_old") == root) + for tree, root in zip(trees, roots) + ] # Remove the old node labels. for tree in trees: for v in tree: - tree.nodes[v].pop('_old') + tree.nodes[v].pop("_old") # Add all sets of nodes and edges, with data. nodes = (tree.nodes(data=True) for tree in trees) diff --git a/networkx/algorithms/tree/recognition.py b/networkx/algorithms/tree/recognition.py index 3f5e5583..5fbff544 100644 --- a/networkx/algorithms/tree/recognition.py +++ b/networkx/algorithms/tree/recognition.py @@ -76,10 +76,10 @@ becomes a useful notion. import networkx as nx -__all__ = ['is_arborescence', 'is_branching', 'is_forest', 'is_tree'] +__all__ = ["is_arborescence", "is_branching", "is_forest", "is_tree"] -@nx.utils.not_implemented_for('undirected') +@nx.utils.not_implemented_for("undirected") def is_arborescence(G): """ Returns True if `G` is an arborescence. @@ -108,7 +108,7 @@ def is_arborescence(G): return is_tree(G) and max(d for n, d in G.in_degree()) <= 1 -@nx.utils.not_implemented_for('undirected') +@nx.utils.not_implemented_for("undirected") def is_branching(G): """ Returns True if `G` is a branching. 
@@ -168,7 +168,7 @@ def is_forest(G): """ if len(G) == 0: - raise nx.exception.NetworkXPointlessConcept('G has no nodes.') + raise nx.exception.NetworkXPointlessConcept("G has no nodes.") if G.is_directed(): components = (G.subgraph(c) for c in nx.weakly_connected_components(G)) @@ -209,7 +209,7 @@ def is_tree(G): """ if len(G) == 0: - raise nx.exception.NetworkXPointlessConcept('G has no nodes.') + raise nx.exception.NetworkXPointlessConcept("G has no nodes.") if G.is_directed(): is_connected = nx.is_weakly_connected diff --git a/networkx/algorithms/tree/tests/test_branchings.py b/networkx/algorithms/tree/tests/test_branchings.py index f6ca731b..6da3ba1e 100644 --- a/networkx/algorithms/tree/tests/test_branchings.py +++ b/networkx/algorithms/tree/tests/test_branchings.py @@ -1,5 +1,6 @@ import pytest -np = pytest.importorskip('numpy') + +np = pytest.importorskip("numpy") import networkx as nx @@ -54,8 +55,14 @@ def G2(): # also an optimal spanning arborescence. # optimal_arborescence_1 = [ - (0, 2, 12), (2, 1, 17), (2, 3, 21), (1, 5, 13), - (3, 4, 17), (3, 6, 18), (6, 8, 15), (8, 7, 18), + (0, 2, 12), + (2, 1, 17), + (2, 3, 21), + (1, 5, 13), + (3, 4, 17), + (3, 6, 18), + (6, 8, 15), + (8, 7, 18), ] # For G2, the optimal branching of G1 (with shifted weights) is no longer @@ -69,27 +76,55 @@ optimal_arborescence_1 = [ # # These are maximal branchings or arborescences. optimal_branching_2a = [ - (5, 6, 4), (6, 2, 11), (6, 8, 5), (8, 7, 8), - (2, 1, 7), (2, 3, 11), (3, 4, 7), + (5, 6, 4), + (6, 2, 11), + (6, 8, 5), + (8, 7, 8), + (2, 1, 7), + (2, 3, 11), + (3, 4, 7), ] optimal_branching_2b = [ - (8, 7, 8), (7, 3, 9), (3, 4, 7), (3, 6, 8), - (6, 2, 11), (2, 1, 7), (1, 5, 3), + (8, 7, 8), + (7, 3, 9), + (3, 4, 7), + (3, 6, 8), + (6, 2, 11), + (2, 1, 7), + (1, 5, 3), ] optimal_arborescence_2 = [ - (0, 2, 2), (2, 1, 7), (2, 3, 11), (1, 5, 3), - (3, 4, 7), (3, 6, 8), (6, 8, 5), (8, 7, 8), + (0, 2, 2), + (2, 1, 7), + (2, 3, 11), + (1, 5, 3), + (3, 4, 7), + (3, 6, 8), + (6, 8, 5), + (8, 7, 8), ] # Two suboptimal maximal branchings on G1 obtained from a greedy algorithm. # 1a matches what is shown in Figure G in Edmonds's paper. greedy_subopt_branching_1a = [ - (5, 6, 14), (6, 2, 21), (6, 8, 15), (8, 7, 18), - (2, 1, 17), (2, 3, 21), (3, 0, 5), (3, 4, 17), + (5, 6, 14), + (6, 2, 21), + (6, 8, 15), + (8, 7, 18), + (2, 1, 17), + (2, 3, 21), + (3, 0, 5), + (3, 4, 17), ] greedy_subopt_branching_1b = [ - (8, 7, 18), (7, 6, 15), (6, 2, 21), (2, 1, 17), - (2, 3, 21), (1, 5, 13), (3, 0, 5), (3, 4, 17), + (8, 7, 18), + (7, 6, 15), + (6, 2, 21), + (2, 1, 17), + (2, 3, 21), + (1, 5, 13), + (3, 0, 5), + (3, 4, 17), ] @@ -100,13 +135,13 @@ def build_branching(edges): return G -def sorted_edges(G, attr='weight', default=1): +def sorted_edges(G, attr="weight", default=1): edges = [(u, v, data.get(attr, default)) for (u, v, data) in G.edges(data=True)] edges = sorted(edges, key=lambda x: (x[2], x[1], x[0])) return edges -def assert_equal_branchings(G1, G2, attr='weight', default=1): +def assert_equal_branchings(G1, G2, attr="weight", default=1): edges1 = list(G1.edges(data=True)) edges2 = list(G2.edges(data=True)) assert len(edges1) == len(edges2) @@ -127,6 +162,7 @@ def assert_equal_branchings(G1, G2, attr='weight', default=1): ################ + def test_optimal_branching1(): G = build_branching(optimal_arborescence_1) assert recognition.is_arborescence(G), True @@ -178,13 +214,19 @@ def test_greedy_max2(): # Different default weight. 
# G = G1() - del G[1][0][0]['weight'] + del G[1][0][0]["weight"] B = branchings.greedy_branching(G, default=6) # Chosen so that edge (3,0,5) is not selected and (1,0,6) is instead. edges = [ - (1, 0, 6), (1, 5, 13), (7, 6, 15), (2, 1, 17), - (3, 4, 17), (8, 7, 18), (2, 3, 21), (6, 2, 21), + (1, 0, 6), + (1, 5, 13), + (7, 6, 15), + (2, 1, 17), + (3, 4, 17), + (8, 7, 18), + (2, 3, 21), + (6, 2, 21), ] B_ = build_branching(edges) assert_equal_branchings(B, B_) @@ -198,8 +240,14 @@ def test_greedy_max3(): # This is mostly arbitrary...the output was generated by running the algo. edges = [ - (2, 1, 1), (3, 0, 1), (3, 4, 1), (5, 8, 1), - (6, 2, 1), (7, 3, 1), (7, 6, 1), (8, 7, 1), + (2, 1, 1), + (3, 0, 1), + (3, 4, 1), + (5, 8, 1), + (6, 2, 1), + (7, 3, 1), + (7, 6, 1), + (8, 7, 1), ] B_ = build_branching(edges) assert_equal_branchings(B, B_, default=1) @@ -207,11 +255,17 @@ def test_greedy_max3(): def test_greedy_min(): G = G1() - B = branchings.greedy_branching(G, kind='min') + B = branchings.greedy_branching(G, kind="min") edges = [ - (1, 0, 4), (0, 2, 12), (0, 4, 12), (2, 5, 12), - (4, 7, 12), (5, 8, 12), (5, 6, 14), (7, 3, 19) + (1, 0, 4), + (0, 2, 12), + (0, 4, 12), + (2, 5, 12), + (4, 7, 12), + (5, 8, 12), + (5, 6, 14), + (7, 3, 19), ] B_ = build_branching(edges) assert_equal_branchings(B, B_) @@ -251,8 +305,14 @@ def test_edmonds2_minarbor(): # This was obtained from algorithm. Need to verify it independently. # Branch weight is: 96 edges = [ - (3, 0, 5), (0, 2, 12), (0, 4, 12), (2, 5, 12), - (4, 7, 12), (5, 8, 12), (5, 6, 14), (2, 1, 17) + (3, 0, 5), + (0, 2, 12), + (0, 4, 12), + (2, 5, 12), + (4, 7, 12), + (5, 8, 12), + (5, 6, 14), + (2, 1, 17), ] x_ = build_branching(edges) assert_equal_branchings(x, x_) @@ -274,6 +334,7 @@ def test_edmonds3_minbranch2(): x_ = build_branching(edges) assert_equal_branchings(x, x_) + # Need more tests @@ -281,34 +342,41 @@ def test_mst(): # Make sure we get the same results for undirected graphs. # Example from: https://en.wikipedia.org/wiki/Kruskal's_algorithm G = nx.Graph() - edgelist = [(0, 3, [('weight', 5)]), - (0, 1, [('weight', 7)]), - (1, 3, [('weight', 9)]), - (1, 2, [('weight', 8)]), - (1, 4, [('weight', 7)]), - (3, 4, [('weight', 15)]), - (3, 5, [('weight', 6)]), - (2, 4, [('weight', 5)]), - (4, 5, [('weight', 8)]), - (4, 6, [('weight', 9)]), - (5, 6, [('weight', 11)])] + edgelist = [ + (0, 3, [("weight", 5)]), + (0, 1, [("weight", 7)]), + (1, 3, [("weight", 9)]), + (1, 2, [("weight", 8)]), + (1, 4, [("weight", 7)]), + (3, 4, [("weight", 15)]), + (3, 5, [("weight", 6)]), + (2, 4, [("weight", 5)]), + (4, 5, [("weight", 8)]), + (4, 6, [("weight", 9)]), + (5, 6, [("weight", 11)]), + ] G.add_edges_from(edgelist) G = G.to_directed() x = branchings.minimum_spanning_arborescence(G) - edges = [({0, 1}, 7), ({0, 3}, 5), ({3, 5}, 6), - ({1, 4}, 7), ({4, 2}, 5), ({4, 6}, 9)] + edges = [ + ({0, 1}, 7), + ({0, 3}, 5), + ({3, 5}, 6), + ({1, 4}, 7), + ({4, 2}, 5), + ({4, 6}, 9), + ] assert x.number_of_edges() == len(edges) for u, v, d in x.edges(data=True): - assert ({u, v}, d['weight']) in edges + assert ({u, v}, d["weight"]) in edges def test_mixed_nodetypes(): # Smoke test to make sure no TypeError is raised for mixed node types. 
G = nx.Graph() - edgelist = [(0, 3, [('weight', 5)]), - (0, '1', [('weight', 5)])] + edgelist = [(0, 3, [("weight", 5)]), (0, "1", [("weight", 5)])] G.add_edges_from(edgelist) G = G.to_directed() x = branchings.minimum_spanning_arborescence(G) @@ -338,16 +406,18 @@ def test_edge_attribute_preservation_normal_graph(): # using the Edmonds class for normal graphs. G = nx.Graph() - edgelist = [(0, 1, [('weight', 5), ('otherattr', 1), ('otherattr2', 3)]), - (0, 2, [('weight', 5), ('otherattr', 2), ('otherattr2', 2)]), - (1, 2, [('weight', 6), ('otherattr', 3), ('otherattr2', 1)])] + edgelist = [ + (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]), + (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]), + (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]), + ] G.add_edges_from(edgelist) ed = branchings.Edmonds(G) - B = ed.find_optimum('weight', preserve_attrs=True, seed=1) + B = ed.find_optimum("weight", preserve_attrs=True, seed=1) - assert B[0][1]['otherattr'] == 1 - assert B[0][1]['otherattr2'] == 3 + assert B[0][1]["otherattr"] == 1 + assert B[0][1]["otherattr2"] == 3 def test_edge_attribute_preservation_multigraph(): @@ -356,30 +426,34 @@ def test_edge_attribute_preservation_multigraph(): # using the Edmonds class for multigraphs. G = nx.MultiGraph() - edgelist = [(0, 1, [('weight', 5), ('otherattr', 1), ('otherattr2', 3)]), - (0, 2, [('weight', 5), ('otherattr', 2), ('otherattr2', 2)]), - (1, 2, [('weight', 6), ('otherattr', 3), ('otherattr2', 1)])] + edgelist = [ + (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]), + (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]), + (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]), + ] G.add_edges_from(edgelist * 2) # Make sure we have duplicate edge paths ed = branchings.Edmonds(G) - B = ed.find_optimum('weight', preserve_attrs=True) + B = ed.find_optimum("weight", preserve_attrs=True) - assert B[0][1][0]['otherattr'] == 1 - assert B[0][1][0]['otherattr2'] == 3 + assert B[0][1][0]["otherattr"] == 1 + assert B[0][1][0]["otherattr2"] == 3 def test_edge_attribute_discard(): # Test that edge attributes are discarded if we do not specify to keep them G = nx.Graph() - edgelist = [(0, 1, [('weight', 5), ('otherattr', 1), ('otherattr2', 3)]), - (0, 2, [('weight', 5), ('otherattr', 2), ('otherattr2', 2)]), - (1, 2, [('weight', 6), ('otherattr', 3), ('otherattr2', 1)])] + edgelist = [ + (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]), + (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]), + (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]), + ] G.add_edges_from(edgelist) ed = branchings.Edmonds(G) - B = ed.find_optimum('weight', preserve_attrs=False) + B = ed.find_optimum("weight", preserve_attrs=False) edge_dict = B[0][1] with pytest.raises(KeyError): - _ = edge_dict['otherattr'] + _ = edge_dict["otherattr"] diff --git a/networkx/algorithms/tree/tests/test_coding.py b/networkx/algorithms/tree/tests/test_coding.py index 45b3333f..0bc2ce94 100644 --- a/networkx/algorithms/tree/tests/test_coding.py +++ b/networkx/algorithms/tree/tests/test_coding.py @@ -28,7 +28,7 @@ class TestPruferSequence: def test_bad_integer_labels(self): with pytest.raises(KeyError): - T = nx.Graph(nx.utils.pairwise('abc')) + T = nx.Graph(nx.utils.pairwise("abc")) nx.to_prufer_sequence(T) def test_encoding(self): @@ -85,7 +85,7 @@ class TestNestedTuple: def test_unknown_root(self): with pytest.raises(nx.NodeNotFound): G = nx.path_graph(2) - nx.to_nested_tuple(G, 'bogus') + nx.to_nested_tuple(G, 
"bogus") def test_encoding(self): T = nx.full_rary_tree(2, 2 ** 3 - 1) diff --git a/networkx/algorithms/tree/tests/test_mst.py b/networkx/algorithms/tree/tests/test_mst.py index 17fcc201..cc042e05 100644 --- a/networkx/algorithms/tree/tests/test_mst.py +++ b/networkx/algorithms/tree/tests/test_mst.py @@ -8,7 +8,7 @@ from networkx.testing import assert_nodes_equal, assert_edges_equal def test_unknown_algorithm(): with pytest.raises(ValueError): - nx.minimum_spanning_tree(nx.Graph(), algorithm='random') + nx.minimum_spanning_tree(nx.Graph(), algorithm="random") class MinimumSpanningTreeTestBase: @@ -32,23 +32,37 @@ class MinimumSpanningTreeTestBase: self.algo = self.algorithm # This example graph comes from Wikipedia: # https://en.wikipedia.org/wiki/Kruskal's_algorithm - edges = [(0, 1, 7), (0, 3, 5), (1, 2, 8), (1, 3, 9), (1, 4, 7), - (2, 4, 5), (3, 4, 15), (3, 5, 6), (4, 5, 8), (4, 6, 9), - (5, 6, 11)] + edges = [ + (0, 1, 7), + (0, 3, 5), + (1, 2, 8), + (1, 3, 9), + (1, 4, 7), + (2, 4, 5), + (3, 4, 15), + (3, 5, 6), + (4, 5, 8), + (4, 6, 9), + (5, 6, 11), + ] self.G = nx.Graph() self.G.add_weighted_edges_from(edges) - self.minimum_spanning_edgelist = [(0, 1, {'weight': 7}), - (0, 3, {'weight': 5}), - (1, 4, {'weight': 7}), - (2, 4, {'weight': 5}), - (3, 5, {'weight': 6}), - (4, 6, {'weight': 9})] - self.maximum_spanning_edgelist = [(0, 1, {'weight': 7}), - (1, 2, {'weight': 8}), - (1, 3, {'weight': 9}), - (3, 4, {'weight': 15}), - (4, 6, {'weight': 9}), - (5, 6, {'weight': 11})] + self.minimum_spanning_edgelist = [ + (0, 1, {"weight": 7}), + (0, 3, {"weight": 5}), + (1, 4, {"weight": 7}), + (2, 4, {"weight": 5}), + (3, 5, {"weight": 6}), + (4, 6, {"weight": 9}), + ] + self.maximum_spanning_edgelist = [ + (0, 1, {"weight": 7}), + (1, 2, {"weight": 8}), + (1, 3, {"weight": 9}), + (3, 4, {"weight": 15}), + (4, 6, {"weight": 9}), + (5, 6, {"weight": 11}), + ] def test_minimum_edges(self): edges = nx.minimum_spanning_edges(self.G, algorithm=self.algo) @@ -65,8 +79,7 @@ class MinimumSpanningTreeTestBase: assert_edges_equal(actual, self.maximum_spanning_edgelist) def test_without_data(self): - edges = nx.minimum_spanning_edges(self.G, algorithm=self.algo, - data=False) + edges = nx.minimum_spanning_edges(self.G, algorithm=self.algo, data=False) # Edges from the spanning edges functions don't come in sorted # orientation, so we need to sort each edge individually. actual = sorted((min(u, v), max(u, v)) for u, v in edges) @@ -76,15 +89,17 @@ class MinimumSpanningTreeTestBase: def test_nan_weights(self): # Edge weights NaN never appear in the spanning tree. 
see #2164 G = self.G - G.add_edge(0, 12, weight=float('nan')) - edges = nx.minimum_spanning_edges(G, algorithm=self.algo, - data=False, ignore_nan=True) + G.add_edge(0, 12, weight=float("nan")) + edges = nx.minimum_spanning_edges( + G, algorithm=self.algo, data=False, ignore_nan=True + ) actual = sorted((min(u, v), max(u, v)) for u, v in edges) expected = [(u, v) for u, v, d in self.minimum_spanning_edgelist] assert_edges_equal(actual, expected) # Now test for raising exception - edges = nx.minimum_spanning_edges(G, algorithm=self.algo, - data=False, ignore_nan=False) + edges = nx.minimum_spanning_edges( + G, algorithm=self.algo, data=False, ignore_nan=False + ) with pytest.raises(ValueError): list(edges) # test default for ignore_nan as False @@ -94,28 +109,50 @@ class MinimumSpanningTreeTestBase: def test_nan_weights_order(self): # now try again with a nan edge at the beginning of G.nodes - edges = [(0, 1, 7), (0, 3, 5), (1, 2, 8), (1, 3, 9), (1, 4, 7), - (2, 4, 5), (3, 4, 15), (3, 5, 6), (4, 5, 8), (4, 6, 9), - (5, 6, 11)] + edges = [ + (0, 1, 7), + (0, 3, 5), + (1, 2, 8), + (1, 3, 9), + (1, 4, 7), + (2, 4, 5), + (3, 4, 15), + (3, 5, 6), + (4, 5, 8), + (4, 6, 9), + (5, 6, 11), + ] G = nx.Graph() G.add_weighted_edges_from([(u + 1, v + 1, wt) for u, v, wt in edges]) - G.add_edge(0, 7, weight=float('nan')) - edges = nx.minimum_spanning_edges(G, algorithm=self.algo, - data=False, ignore_nan=True) + G.add_edge(0, 7, weight=float("nan")) + edges = nx.minimum_spanning_edges( + G, algorithm=self.algo, data=False, ignore_nan=True + ) actual = sorted((min(u, v), max(u, v)) for u, v in edges) shift = [(u + 1, v + 1) for u, v, d in self.minimum_spanning_edgelist] assert_edges_equal(actual, shift) def test_isolated_node(self): # now try again with an isolated node - edges = [(0, 1, 7), (0, 3, 5), (1, 2, 8), (1, 3, 9), (1, 4, 7), - (2, 4, 5), (3, 4, 15), (3, 5, 6), (4, 5, 8), (4, 6, 9), - (5, 6, 11)] + edges = [ + (0, 1, 7), + (0, 3, 5), + (1, 2, 8), + (1, 3, 9), + (1, 4, 7), + (2, 4, 5), + (3, 4, 15), + (3, 5, 6), + (4, 5, 8), + (4, 6, 9), + (5, 6, 11), + ] G = nx.Graph() G.add_weighted_edges_from([(u + 1, v + 1, wt) for u, v, wt in edges]) G.add_node(0) - edges = nx.minimum_spanning_edges(G, algorithm=self.algo, - data=False, ignore_nan=True) + edges = nx.minimum_spanning_edges( + G, algorithm=self.algo, data=False, ignore_nan=True + ) actual = sorted((min(u, v), max(u, v)) for u, v in edges) shift = [(u + 1, v + 1) for u, v, d in self.minimum_spanning_edgelist] assert_edges_equal(actual, shift) @@ -144,10 +181,10 @@ class MinimumSpanningTreeTestBase: def test_attributes(self): G = nx.Graph() - G.add_edge(1, 2, weight=1, color='red', distance=7) - G.add_edge(2, 3, weight=1, color='green', distance=2) - G.add_edge(1, 3, weight=10, color='blue', distance=1) - G.graph['foo'] = 'bar' + G.add_edge(1, 2, weight=1, color="red", distance=7) + G.add_edge(2, 3, weight=1, color="green", distance=2) + G.add_edge(1, 3, weight=10, color="blue", distance=1) + G.graph["foo"] = "bar" T = nx.minimum_spanning_tree(G, algorithm=self.algo) assert T.graph == G.graph assert_nodes_equal(T, G) @@ -160,10 +197,10 @@ class MinimumSpanningTreeTestBase: G.add_edge(0, 2, weight=30, distance=1) G.add_edge(1, 2, weight=1, distance=1) G.add_node(3) - T = nx.minimum_spanning_tree(G, algorithm=self.algo, weight='distance') + T = nx.minimum_spanning_tree(G, algorithm=self.algo, weight="distance") assert_nodes_equal(sorted(T), list(range(4))) assert_edges_equal(sorted(T.edges()), [(0, 2), (1, 2)]) - T = nx.maximum_spanning_tree(G, 
algorithm=self.algo, weight='distance') + T = nx.maximum_spanning_tree(G, algorithm=self.algo, weight="distance") assert_nodes_equal(sorted(T), list(range(4))) assert_edges_equal(sorted(T.edges()), [(0, 1), (0, 2)]) @@ -173,14 +210,15 @@ class TestBoruvka(MinimumSpanningTreeTestBase): using Borůvka's algorithm. """ - algorithm = 'boruvka' + + algorithm = "boruvka" def test_unicode_name(self): """Tests that using a Unicode string can correctly indicate Borůvka's algorithm. """ - edges = nx.minimum_spanning_edges(self.G, algorithm='borůvka') + edges = nx.minimum_spanning_edges(self.G, algorithm="borůvka") # Edges from the spanning edges functions don't come in sorted # orientation, so we need to sort each edge individually. actual = sorted((min(u, v), max(u, v), d) for u, v, d in edges) @@ -196,11 +234,11 @@ class MultigraphMSTTestBase(MinimumSpanningTreeTestBase): """ G = nx.MultiGraph() - G.add_edge(0, 1, key='a', weight=2) - G.add_edge(0, 1, key='b', weight=1) + G.add_edge(0, 1, key="a", weight=2) + G.add_edge(0, 1, key="b", weight=1) min_edges = nx.minimum_spanning_edges mst_edges = min_edges(G, algorithm=self.algo, data=False) - assert_edges_equal([(0, 1, 'b')], list(mst_edges)) + assert_edges_equal([(0, 1, "b")], list(mst_edges)) def test_multigraph_keys_max(self): """Tests that the maximum spanning edges of a multigraph @@ -208,11 +246,11 @@ class MultigraphMSTTestBase(MinimumSpanningTreeTestBase): """ G = nx.MultiGraph() - G.add_edge(0, 1, key='a', weight=2) - G.add_edge(0, 1, key='b', weight=1) + G.add_edge(0, 1, key="a", weight=2) + G.add_edge(0, 1, key="b", weight=1) max_edges = nx.maximum_spanning_edges mst_edges = max_edges(G, algorithm=self.algo, data=False) - assert_edges_equal([(0, 1, 'a')], list(mst_edges)) + assert_edges_equal([(0, 1, "a")], list(mst_edges)) class TestKruskal(MultigraphMSTTestBase): @@ -220,7 +258,8 @@ class TestKruskal(MultigraphMSTTestBase): using Kruskal's algorithm. """ - algorithm = 'kruskal' + + algorithm = "kruskal" class TestPrim(MultigraphMSTTestBase): @@ -228,18 +267,19 @@ class TestPrim(MultigraphMSTTestBase): using Prim's algorithm. """ - algorithm = 'prim' + + algorithm = "prim" def test_multigraph_keys_tree(self): G = nx.MultiGraph() - G.add_edge(0, 1, key='a', weight=2) - G.add_edge(0, 1, key='b', weight=1) + G.add_edge(0, 1, key="a", weight=2) + G.add_edge(0, 1, key="b", weight=1) T = nx.minimum_spanning_tree(G) - assert_edges_equal([(0, 1, 1)], list(T.edges(data='weight'))) + assert_edges_equal([(0, 1, 1)], list(T.edges(data="weight"))) def test_multigraph_keys_tree_max(self): G = nx.MultiGraph() - G.add_edge(0, 1, key='a', weight=2) - G.add_edge(0, 1, key='b', weight=1) + G.add_edge(0, 1, key="a", weight=2) + G.add_edge(0, 1, key="b", weight=1) T = nx.maximum_spanning_tree(G) - assert_edges_equal([(0, 1, 2)], list(T.edges(data='weight'))) + assert_edges_equal([(0, 1, 2)], list(T.edges(data="weight"))) diff --git a/networkx/algorithms/triads.py b/networkx/algorithms/triads.py index 01e08847..cf48932b 100644 --- a/networkx/algorithms/triads.py +++ b/networkx/algorithms/triads.py @@ -11,21 +11,106 @@ from random import sample import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['triadic_census', 'is_triad', 'all_triplets', 'all_triads', - 'triads_by_type', 'triad_type', 'random_triad'] +__all__ = [ + "triadic_census", + "is_triad", + "all_triplets", + "all_triads", + "triads_by_type", + "triad_type", + "random_triad", +] #: The integer codes representing each type of triad. 
#: #: Triads that are the same up to symmetry have the same code. -TRICODES = (1, 2, 2, 3, 2, 4, 6, 8, 2, 6, 5, 7, 3, 8, 7, 11, 2, 6, 4, 8, 5, 9, - 9, 13, 6, 10, 9, 14, 7, 14, 12, 15, 2, 5, 6, 7, 6, 9, 10, 14, 4, 9, - 9, 12, 8, 13, 14, 15, 3, 7, 8, 11, 7, 12, 14, 15, 8, 14, 13, 15, - 11, 15, 15, 16) +TRICODES = ( + 1, + 2, + 2, + 3, + 2, + 4, + 6, + 8, + 2, + 6, + 5, + 7, + 3, + 8, + 7, + 11, + 2, + 6, + 4, + 8, + 5, + 9, + 9, + 13, + 6, + 10, + 9, + 14, + 7, + 14, + 12, + 15, + 2, + 5, + 6, + 7, + 6, + 9, + 10, + 14, + 4, + 9, + 9, + 12, + 8, + 13, + 14, + 15, + 3, + 7, + 8, + 11, + 7, + 12, + 14, + 15, + 8, + 14, + 13, + 15, + 11, + 15, + 15, + 16, +) #: The names of each type of triad. The order of the elements is #: important: it corresponds to the tricodes given in :data:`TRICODES`. -TRIAD_NAMES = ('003', '012', '102', '021D', '021U', '021C', '111D', '111U', - '030T', '030C', '201', '120D', '120U', '120C', '210', '300') +TRIAD_NAMES = ( + "003", + "012", + "102", + "021D", + "021U", + "021C", + "111D", + "111U", + "030T", + "030C", + "201", + "120D", + "120U", + "120C", + "210", + "300", +) #: A dictionary mapping triad code to triad name. @@ -40,12 +125,11 @@ def _tricode(G, v, u, w): the binary representation of an integer. """ - combos = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16), - (w, u, 32)) + combos = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16), (w, u, 32)) return sum(x for u, v, x in combos if v in G[u]) -@not_implemented_for('undirected') +@not_implemented_for("undirected") def triadic_census(G): """Determines the triadic census of a directed graph. @@ -92,21 +176,21 @@ def triadic_census(G): neighbors = (vnbrs | set(G.succ[u]) | set(G.pred[u])) - {u, v} # Calculate dyadic triads instead of counting them. if v in G[u] and u in G[v]: - census['102'] += n - len(neighbors) - 2 + census["102"] += n - len(neighbors) - 2 else: - census['012'] += n - len(neighbors) - 2 + census["012"] += n - len(neighbors) - 2 # Count connected triads. for w in neighbors: - if m[u] < m[w] or (m[v] < m[w] < m[u] and - v not in G.pred[w] and - v not in G.succ[w]): + if m[u] < m[w] or ( + m[v] < m[w] < m[u] and v not in G.pred[w] and v not in G.succ[w] + ): code = _tricode(G, v, u, w) census[TRICODE_TO_NAME[code]] += 1 # null triads = total number of possible triads - all found triads # # Use integer division here, since we know this formula guarantees an # integral value. - census['003'] = ((n * (n - 1) * (n - 2)) // 6) - sum(census.values()) + census["003"] = ((n * (n - 1) * (n - 2)) // 6) - sum(census.values()) return census @@ -130,7 +214,7 @@ def is_triad(G): return False -@not_implemented_for('undirected') +@not_implemented_for("undirected") def all_triplets(G): """Returns a generator of all possible sets of 3 nodes in a DiGraph. @@ -148,7 +232,7 @@ def all_triplets(G): return triplets -@not_implemented_for('undirected') +@not_implemented_for("undirected") def all_triads(G): """A generator of all possible triads in G. @@ -167,7 +251,7 @@ def all_triads(G): yield G.subgraph(triplet).copy() -@not_implemented_for('undirected') +@not_implemented_for("undirected") def triads_by_type(G): """Returns a list of all triads for each triad type in a directed graph. @@ -191,7 +275,7 @@ def triads_by_type(G): return tri_by_type -@not_implemented_for('undirected') +@not_implemented_for("undirected") def triad_type(G): """Returns the sociological triad type for a triad. 
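
As a sanity check on the triad machinery reformatted here: triadic_census returns one count per name in TRIAD_NAMES, and the counts (including the null triad "003") sum to n(n-1)(n-2)/6, the number of node triples. A minimal doctest-style sketch; the four-node toy graph is illustrative and not taken from this patch:

    >>> import networkx as nx
    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0), (0, 3)])  # 4 nodes -> 4 triples
    >>> census = nx.triadic_census(G)
    >>> sorted(census) == sorted(nx.algorithms.triads.TRIAD_NAMES)
    True
    >>> sum(census.values())  # (4 * 3 * 2) // 6
    4
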
@@ -257,8 +341,7 @@ def triad_type(G): # e3[1] in e1: return "111D" elif set(e1).symmetric_difference(set(e2)) == set(e3): - if {e1[0], e2[0], e3[0]} == {e1[0], e2[0], - e3[0]} == set(G.nodes()): + if {e1[0], e2[0], e3[0]} == {e1[0], e2[0], e3[0]} == set(G.nodes()): return "030C" # e3 == (e1[0], e2[1]) and e2 == (e1[1], e3[1]): return "030T" @@ -280,9 +363,9 @@ def triad_type(G): return "300" -@not_implemented_for('undirected') +@not_implemented_for("undirected") def random_triad(G): - '''Returns a random triad from a directed graph. + """Returns a random triad from a directed graph. Parameters ---------- @@ -293,7 +376,7 @@ def random_triad(G): ------- G2 : subgraph A randomly selected triad (order-3 NetworkX DiGraph) - ''' + """ nodes = sample(G.nodes(), 3) G2 = G.subgraph(nodes) return G2 diff --git a/networkx/algorithms/vitality.py b/networkx/algorithms/vitality.py index 6f0a4d3e..88e174d5 100644 --- a/networkx/algorithms/vitality.py +++ b/networkx/algorithms/vitality.py @@ -5,7 +5,7 @@ from functools import partial import networkx as nx -__all__ = ['closeness_vitality'] +__all__ = ["closeness_vitality"] def closeness_vitality(G, node=None, weight=None, wiener_index=None): @@ -70,7 +70,6 @@ def closeness_vitality(G, node=None, weight=None, wiener_index=None): if node is not None: after = nx.wiener_index(G.subgraph(set(G) - {node}), weight=weight) return wiener_index - after - vitality = partial(closeness_vitality, G, weight=weight, - wiener_index=wiener_index) + vitality = partial(closeness_vitality, G, weight=weight, wiener_index=wiener_index) # TODO This can be trivially parallelized. return {v: vitality(node=v) for v in G} diff --git a/networkx/algorithms/voronoi.py b/networkx/algorithms/voronoi.py index bcaa8151..184afa2c 100644 --- a/networkx/algorithms/voronoi.py +++ b/networkx/algorithms/voronoi.py @@ -2,10 +2,10 @@ import networkx as nx from networkx.utils import groups -__all__ = ['voronoi_cells'] +__all__ = ["voronoi_cells"] -def voronoi_cells(G, center_nodes, weight='weight'): +def voronoi_cells(G, center_nodes, weight="weight"): """Returns the Voronoi cells centered at `center_nodes` with respect to the shortest-path distance metric. @@ -81,5 +81,5 @@ def voronoi_cells(G, center_nodes, weight='weight'): # We collect all unreachable nodes under a special key, if there are any. unreachable = set(G) - set(nearest) if unreachable: - cells['unreachable'] = unreachable + cells["unreachable"] = unreachable return cells diff --git a/networkx/algorithms/wiener.py b/networkx/algorithms/wiener.py index ee71479b..a1ebdf00 100644 --- a/networkx/algorithms/wiener.py +++ b/networkx/algorithms/wiener.py @@ -6,7 +6,7 @@ from .components import is_connected from .components import is_strongly_connected from .shortest_paths import shortest_path_length as spl -__all__ = ['wiener_index'] +__all__ = ["wiener_index"] #: Rename the :func:`chain.from_iterable` function for the sake of #: brevity. @@ -69,9 +69,10 @@ def wiener_index(G, weight=None): """ is_directed = G.is_directed() - if (is_directed and not is_strongly_connected(G)) or \ - (not is_directed and not is_connected(G)): - return float('inf') + if (is_directed and not is_strongly_connected(G)) or ( + not is_directed and not is_connected(G) + ): + return float("inf") total = sum(chaini(p.values() for v, p in spl(G, weight=weight))) # Need to account for double counting pairs of nodes in undirected graphs. 
return total if is_directed else total / 2 diff --git a/networkx/classes/coreviews.py b/networkx/classes/coreviews.py index 1bacea92..e22f6855 100644 --- a/networkx/classes/coreviews.py +++ b/networkx/classes/coreviews.py @@ -2,12 +2,19 @@ """ from collections.abc import Mapping -__all__ = ['AtlasView', 'AdjacencyView', 'MultiAdjacencyView', - 'UnionAtlas', 'UnionAdjacency', - 'UnionMultiInner', 'UnionMultiAdjacency', - 'FilterAtlas', 'FilterAdjacency', - 'FilterMultiInner', 'FilterMultiAdjacency', - ] +__all__ = [ + "AtlasView", + "AdjacencyView", + "MultiAdjacencyView", + "UnionAtlas", + "UnionAdjacency", + "UnionMultiInner", + "UnionMultiAdjacency", + "FilterAtlas", + "FilterAdjacency", + "FilterMultiInner", + "FilterMultiAdjacency", +] class AtlasView(Mapping): @@ -22,13 +29,14 @@ class AtlasView(Mapping): AdjacencyView - View into dict-of-dict-of-dict MultiAdjacencyView - View into dict-of-dict-of-dict-of-dict """ - __slots__ = ('_atlas',) + + __slots__ = ("_atlas",) def __getstate__(self): - return {'_atlas': self._atlas} + return {"_atlas": self._atlas} def __setstate__(self, state): - self._atlas = state['_atlas'] + self._atlas = state["_atlas"] def __init__(self, d): self._atlas = d @@ -49,7 +57,7 @@ class AtlasView(Mapping): return str(self._atlas) # {nbr: self[nbr] for nbr in self}) def __repr__(self): - return f'{self.__class__.__name__}({self._atlas!r})' + return f"{self.__class__.__name__}({self._atlas!r})" class AdjacencyView(AtlasView): @@ -64,7 +72,8 @@ class AdjacencyView(AtlasView): AtlasView - View into dict-of-dict MultiAdjacencyView - View into dict-of-dict-of-dict-of-dict """ - __slots__ = () # Still uses AtlasView slots names _atlas + + __slots__ = () # Still uses AtlasView slots names _atlas def __getitem__(self, name): return AtlasView(self._atlas[name]) @@ -85,7 +94,8 @@ class MultiAdjacencyView(AdjacencyView): AtlasView - View into dict-of-dict AdjacencyView - View into dict-of-dict-of-dict """ - __slots__ = () # Still uses AtlasView slots names _atlas + + __slots__ = () # Still uses AtlasView slots names _atlas def __getitem__(self, name): return AdjacencyView(self._atlas[name]) @@ -107,14 +117,15 @@ class UnionAtlas(Mapping): UnionAdjacency - View into dict-of-dict-of-dict UnionMultiAdjacency - View into dict-of-dict-of-dict-of-dict """ - __slots__ = ('_succ', '_pred') + + __slots__ = ("_succ", "_pred") def __getstate__(self): - return {'_succ': self._succ, '_pred': self._pred} + return {"_succ": self._succ, "_pred": self._pred} def __setstate__(self, state): - self._succ = state['_succ'] - self._pred = state['_pred'] + self._succ = state["_succ"] + self._pred = state["_pred"] def __init__(self, succ, pred): self._succ = succ @@ -145,7 +156,7 @@ class UnionAtlas(Mapping): return str({nbr: self[nbr] for nbr in self}) def __repr__(self): - return f'{self.__class__.__name__}({self._succ!r}, {self._pred!r})' + return f"{self.__class__.__name__}({self._succ!r}, {self._pred!r})" class UnionAdjacency(Mapping): @@ -165,18 +176,19 @@ class UnionAdjacency(Mapping): UnionAtlas - View into dict-of-dict UnionMultiAdjacency - View into dict-of-dict-of-dict-of-dict """ - __slots__ = ('_succ', '_pred') + + __slots__ = ("_succ", "_pred") def __getstate__(self): - return {'_succ': self._succ, '_pred': self._pred} + return {"_succ": self._succ, "_pred": self._pred} def __setstate__(self, state): - self._succ = state['_succ'] - self._pred = state['_pred'] + self._succ = state["_succ"] + self._pred = state["_pred"] def __init__(self, succ, pred): # keys must be the same for two 
input dicts - assert(len(set(succ.keys()) ^ set(pred.keys())) == 0) + assert len(set(succ.keys()) ^ set(pred.keys())) == 0 self._succ = succ self._pred = pred @@ -196,7 +208,7 @@ class UnionAdjacency(Mapping): return str({nbr: self[nbr] for nbr in self}) def __repr__(self): - return f'{self.__class__.__name__}({self._succ!r}, {self._pred!r})' + return f"{self.__class__.__name__}({self._succ!r}, {self._pred!r})" class UnionMultiInner(UnionAtlas): @@ -213,7 +225,8 @@ class UnionMultiInner(UnionAtlas): UnionAdjacency - View into dict-of-dict-of-dict UnionMultiAdjacency - View into dict-of-dict-of-dict-of-dict """ - __slots__ = () # Still uses UnionAtlas slots names _succ, _pred + + __slots__ = () # Still uses UnionAtlas slots names _succ, _pred def __getitem__(self, node): in_succ = node in self._succ @@ -241,7 +254,8 @@ class UnionMultiAdjacency(UnionAdjacency): UnionAtlas - View into dict-of-dict UnionMultiInner - View into dict-of-dict-of-dict """ - __slots__ = () # Still uses UnionAdjacency slots names _succ, _pred + + __slots__ = () # Still uses UnionAdjacency slots names _succ, _pred def __getitem__(self, node): return UnionMultiInner(self._succ[node], self._pred[node]) @@ -275,10 +289,8 @@ class FilterAtlas(Mapping): # nodedict, nbrdict, keydict except AttributeError: node_ok_shorter = False if node_ok_shorter: - return {u: self._atlas[u] for u in self.NODE_OK.nodes - if u in self._atlas} - return {u: d for u, d in self._atlas.items() - if self.NODE_OK(u)} + return {u: self._atlas[u] for u in self.NODE_OK.nodes if u in self._atlas} + return {u: d for u, d in self._atlas.items() if self.NODE_OK(u)} def __str__(self): return str({nbr: self[nbr] for nbr in self}) @@ -287,7 +299,7 @@ class FilterAtlas(Mapping): # nodedict, nbrdict, keydict return f"{self.__class__.__name__}({self._atlas!r}, {self.NODE_OK!r})" -class FilterAdjacency(Mapping): # edgedict +class FilterAdjacency(Mapping): # edgedict def __init__(self, d, NODE_OK, EDGE_OK): self._atlas = d self.NODE_OK = NODE_OK @@ -307,8 +319,10 @@ class FilterAdjacency(Mapping): # edgedict def __getitem__(self, node): if node in self._atlas and self.NODE_OK(node): + def new_node_ok(nbr): return self.NODE_OK(nbr) and self.EDGE_OK(node, nbr) + return FilterAtlas(self._atlas[node], new_node_ok) raise KeyError(f"Key {node} not found") @@ -318,13 +332,21 @@ class FilterAdjacency(Mapping): # edgedict except AttributeError: node_ok_shorter = False if node_ok_shorter: - return {u: {v: d for v, d in self._atlas[u].items() - if self.NODE_OK(v) if self.EDGE_OK(u, v)} - for u in self.NODE_OK.nodes if u in self._atlas} - return {u: {v: d for v, d in nbrs.items() if self.NODE_OK(v) - if self.EDGE_OK(u, v)} - for u, nbrs in self._atlas.items() - if self.NODE_OK(u)} + return { + u: { + v: d + for v, d in self._atlas[u].items() + if self.NODE_OK(v) + if self.EDGE_OK(u, v) + } + for u in self.NODE_OK.nodes + if u in self._atlas + } + return { + u: {v: d for v, d in nbrs.items() if self.NODE_OK(v) if self.EDGE_OK(u, v)} + for u, nbrs in self._atlas.items() + if self.NODE_OK(u) + } def __str__(self): return str({nbr: self[nbr] for nbr in self}) @@ -355,8 +377,10 @@ class FilterMultiInner(FilterAdjacency): # muliedge_seconddict def __getitem__(self, nbr): if nbr in self._atlas and self.NODE_OK(nbr): + def new_node_ok(key): return self.EDGE_OK(nbr, key) + return FilterAtlas(self._atlas[nbr], new_node_ok) raise KeyError(f"Key {nbr} not found") @@ -366,18 +390,25 @@ class FilterMultiInner(FilterAdjacency): # muliedge_seconddict except AttributeError: node_ok_shorter 
= False if node_ok_shorter: - return {v: {k: d for k, d in self._atlas[v].items() - if self.EDGE_OK(v, k)} - for v in self.NODE_OK.nodes if v in self._atlas} - return {v: {k: d for k, d in nbrs.items() if self.EDGE_OK(v, k)} - for v, nbrs in self._atlas.items() if self.NODE_OK(v)} + return { + v: {k: d for k, d in self._atlas[v].items() if self.EDGE_OK(v, k)} + for v in self.NODE_OK.nodes + if v in self._atlas + } + return { + v: {k: d for k, d in nbrs.items() if self.EDGE_OK(v, k)} + for v, nbrs in self._atlas.items() + if self.NODE_OK(v) + } class FilterMultiAdjacency(FilterAdjacency): # multiedgedict def __getitem__(self, node): if node in self._atlas and self.NODE_OK(node): + def edge_ok(nbr, key): return self.NODE_OK(nbr) and self.EDGE_OK(node, nbr, key) + return FilterMultiInner(self._atlas[node], self.NODE_OK, edge_ok) raise KeyError(f"Key {node} not found") @@ -388,11 +419,21 @@ class FilterMultiAdjacency(FilterAdjacency): # multiedgedict node_ok_shorter = False if node_ok_shorter: my_nodes = self.NODE_OK.nodes - return {u: {v: {k: d for k, d in kd.items() - if self.EDGE_OK(u, v, k)} - for v, kd in self._atlas[u].items() if v in my_nodes} - for u in my_nodes if u in self._atlas} - return {u: {v: {k: d for k, d in kd.items() - if self.EDGE_OK(u, v, k)} - for v, kd in nbrs.items() if self.NODE_OK(v)} - for u, nbrs in self._atlas.items() if self.NODE_OK(u)} + return { + u: { + v: {k: d for k, d in kd.items() if self.EDGE_OK(u, v, k)} + for v, kd in self._atlas[u].items() + if v in my_nodes + } + for u in my_nodes + if u in self._atlas + } + return { + u: { + v: {k: d for k, d in kd.items() if self.EDGE_OK(u, v, k)} + for v, kd in nbrs.items() + if self.NODE_OK(v) + } + for u, nbrs in self._atlas.items() + if self.NODE_OK(u) + } diff --git a/networkx/classes/digraph.py b/networkx/classes/digraph.py index f8b3c61d..10ab4e0f 100644 --- a/networkx/classes/digraph.py +++ b/networkx/classes/digraph.py @@ -4,8 +4,13 @@ from copy import deepcopy import networkx as nx from networkx.classes.graph import Graph from networkx.classes.coreviews import AdjacencyView -from networkx.classes.reportviews import OutEdgeView, InEdgeView, \ - DiDegreeView, InDegreeView, OutDegreeView +from networkx.classes.reportviews import ( + OutEdgeView, + InEdgeView, + DiDegreeView, + InDegreeView, + OutDegreeView, +) from networkx.exception import NetworkXError import networkx.convert as convert @@ -521,11 +526,11 @@ class DiGraph(Graph): except KeyError as e: # NetworkXError if n not in self raise NetworkXError(f"The node {n} is not in the digraph.") from e for u in nbrs: - del self._pred[u][n] # remove all edges n-u in digraph - del self._succ[n] # remove node from succ + del self._pred[u][n] # remove all edges n-u in digraph + del self._succ[n] # remove node from succ for u in self._pred[n]: - del self._succ[u][n] # remove all edges n-u in digraph - del self._pred[n] # remove node from pred + del self._succ[u][n] # remove all edges n-u in digraph + del self._pred[n] # remove node from pred def remove_nodes_from(self, nodes): """Remove multiple nodes. 
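
The comments preserved above spell out the invariant behind DiGraph.remove_node: every edge incident to n must be deleted from both the successor and predecessor dicts, or the two adjacency views drift apart. A short illustrative sketch (toy graph assumed, not from this patch):

    >>> import networkx as nx
    >>> G = nx.DiGraph([(1, 2), (2, 1), (2, 3)])
    >>> G.remove_node(2)  # drops 1->2, 2->1 and 2->3 from succ and pred alike
    >>> list(G.edges), list(G.nodes)
    ([], [1, 3])
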
@@ -556,11 +561,11 @@ class DiGraph(Graph): succs = self._succ[n] del self._node[n] for u in succs: - del self._pred[u][n] # remove all edges n-u in digraph - del self._succ[n] # now remove node + del self._pred[u][n] # remove all edges n-u in digraph + del self._succ[n] # now remove node for u in self._pred[n]: - del self._succ[u][n] # remove all edges n-u in digraph - del self._pred[n] # now remove node + del self._succ[u][n] # remove all edges n-u in digraph + del self._pred[n] # now remove node except KeyError: pass # silent failure on remove @@ -761,14 +766,14 @@ class DiGraph(Graph): This is true if graph has the edge u->v. """ - return (u in self._succ and v in self._succ[u]) + return u in self._succ and v in self._succ[u] def has_predecessor(self, u, v): """Returns True if node u has predecessor v. This is true if graph has the edge u<-v. """ - return (u in self._pred and v in self._pred[u]) + return u in self._pred and v in self._pred[u] def successors(self, n): """Returns an iterator over successor nodes of n. @@ -1176,14 +1181,18 @@ class DiGraph(Graph): G.graph.update(deepcopy(self.graph)) G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) if reciprocal is True: - G.add_edges_from((u, v, deepcopy(d)) - for u, nbrs in self._adj.items() - for v, d in nbrs.items() - if v in self._pred[u]) + G.add_edges_from( + (u, v, deepcopy(d)) + for u, nbrs in self._adj.items() + for v, d in nbrs.items() + if v in self._pred[u] + ) else: - G.add_edges_from((u, v, deepcopy(d)) - for u, nbrs in self._adj.items() - for v, d in nbrs.items()) + G.add_edges_from( + (u, v, deepcopy(d)) + for u, nbrs in self._adj.items() + for v, d in nbrs.items() + ) return G def reverse(self, copy=True): @@ -1203,7 +1212,6 @@ class DiGraph(Graph): H = self.__class__() H.graph.update(deepcopy(self.graph)) H.add_nodes_from((n, deepcopy(d)) for n, d in self.nodes.items()) - H.add_edges_from((v, u, deepcopy(d)) for u, v, d - in self.edges(data=True)) + H.add_edges_from((v, u, deepcopy(d)) for u, v, d in self.edges(data=True)) return H return nx.graphviews.reverse_view(self) diff --git a/networkx/classes/filters.py b/networkx/classes/filters.py index 067da1d1..aefcbdf3 100644 --- a/networkx/classes/filters.py +++ b/networkx/classes/filters.py @@ -2,13 +2,19 @@ These filters return the function used when creating `SubGraph`. 
""" -__all__ = ['no_filter', 'hide_nodes', - 'hide_edges', 'hide_multiedges', - 'hide_diedges', 'hide_multidiedges', - 'show_nodes', - 'show_edges', 'show_multiedges', - 'show_diedges', 'show_multidiedges', - ] +__all__ = [ + "no_filter", + "hide_nodes", + "hide_edges", + "hide_multiedges", + "hide_diedges", + "hide_multidiedges", + "show_nodes", + "show_edges", + "show_multiedges", + "show_diedges", + "show_multidiedges", +] def no_filter(*items): diff --git a/networkx/classes/function.py b/networkx/classes/function.py index 8e199e9a..a24b3926 100644 --- a/networkx/classes/function.py +++ b/networkx/classes/function.py @@ -9,20 +9,46 @@ from networkx.utils import pairwise, not_implemented_for from networkx.classes.graphviews import subgraph_view, reverse_view -__all__ = ['nodes', 'edges', 'degree', 'degree_histogram', 'neighbors', - 'number_of_nodes', 'number_of_edges', 'density', - 'is_directed', 'info', 'freeze', 'is_frozen', - 'subgraph', 'subgraph_view', 'induced_subgraph', 'reverse_view', - 'edge_subgraph', 'restricted_view', - 'to_directed', 'to_undirected', - 'add_star', 'add_path', 'add_cycle', - 'create_empty_copy', 'set_node_attributes', - 'get_node_attributes', 'set_edge_attributes', - 'get_edge_attributes', 'all_neighbors', 'non_neighbors', - 'non_edges', 'common_neighbors', 'is_weighted', - 'is_negatively_weighted', 'is_empty', - 'selfloop_edges', 'nodes_with_selfloops', 'number_of_selfloops', - ] +__all__ = [ + "nodes", + "edges", + "degree", + "degree_histogram", + "neighbors", + "number_of_nodes", + "number_of_edges", + "density", + "is_directed", + "info", + "freeze", + "is_frozen", + "subgraph", + "subgraph_view", + "induced_subgraph", + "reverse_view", + "edge_subgraph", + "restricted_view", + "to_directed", + "to_undirected", + "add_star", + "add_path", + "add_cycle", + "create_empty_copy", + "set_node_attributes", + "get_node_attributes", + "set_edge_attributes", + "get_edge_attributes", + "all_neighbors", + "non_neighbors", + "non_edges", + "common_neighbors", + "is_weighted", + "is_negatively_weighted", + "is_empty", + "selfloop_edges", + "nodes_with_selfloops", + "number_of_selfloops", +] def nodes(G): @@ -293,7 +319,9 @@ def add_cycle(G_to_add_to, nodes_for_cycle, **attr): except StopIteration: return G_to_add_to.add_node(first_node) - G_to_add_to.add_edges_from(pairwise(chain((first_node,), nlist), cyclic=True), **attr) + G_to_add_to.add_edges_from( + pairwise(chain((first_node,), nlist), cyclic=True), **attr + ) def subgraph(G, nbunch): @@ -548,7 +576,7 @@ def info(G, n=None): If n is not in the graph G """ - info = '' # append this all to a string + info = "" # append this all to a string if n is None: info += f"Name: {G.name}\n" type_name = [type(G).__name__] @@ -571,7 +599,7 @@ def info(G, n=None): info += f"Node {n} has the following properties:\n" info += f"Degree: {G.degree(n)}\n" info += "Neighbors: " - info += ' '.join(str(nbr) for nbr in G.neighbors(n)) + info += " ".join(str(nbr) for nbr in G.neighbors(n)) return info @@ -892,7 +920,7 @@ def non_edges(graph): yield (u, v) -@not_implemented_for('directed') +@not_implemented_for("directed") def common_neighbors(G, u, v): """Returns the common neighbors of two nodes in a graph. 
@@ -921,16 +949,16 @@ def common_neighbors(G, u, v): [2, 3, 4] """ if u not in G: - raise nx.NetworkXError('u is not in the graph.') + raise nx.NetworkXError("u is not in the graph.") if v not in G: - raise nx.NetworkXError('v is not in the graph.') + raise nx.NetworkXError("v is not in the graph.") # Return a generator explicitly instead of yielding so that the above # checks are executed eagerly. return (w for w in G[u] if w in G[v] and w not in (u, v)) -def is_weighted(G, edge=None, weight='weight'): +def is_weighted(G, edge=None, weight="weight"): """Returns True if `G` has weighted edges. Parameters @@ -972,7 +1000,7 @@ def is_weighted(G, edge=None, weight='weight'): if edge is not None: data = G.get_edge_data(*edge) if data is None: - msg = f'Edge {edge!r} does not exist.' + msg = f"Edge {edge!r} does not exist." raise nx.NetworkXError(msg) return weight in data @@ -983,7 +1011,7 @@ def is_weighted(G, edge=None, weight='weight'): return all(weight in data for u, v, data in G.edges(data=True)) -def is_negatively_weighted(G, edge=None, weight='weight'): +def is_negatively_weighted(G, edge=None, weight="weight"): """Returns True if `G` has negatively weighted edges. Parameters @@ -1029,12 +1057,11 @@ def is_negatively_weighted(G, edge=None, weight='weight'): if edge is not None: data = G.get_edge_data(*edge) if data is None: - msg = f'Edge {edge!r} does not exist.' + msg = f"Edge {edge!r} does not exist." raise nx.NetworkXError(msg) return weight in data and data[weight] < 0 - return any(weight in data and data[weight] < 0 - for u, v, data in G.edges(data=True)) + return any(weight in data and data[weight] < 0 for u, v, data in G.edges(data=True)) def is_empty(G): @@ -1130,38 +1157,56 @@ def selfloop_edges(G, data=False, keys=False, default=None): if data is True: if G.is_multigraph(): if keys is True: - return ((n, n, k, d) - for n, nbrs in G.adj.items() - if n in nbrs for k, d in nbrs[n].items()) + return ( + (n, n, k, d) + for n, nbrs in G.adj.items() + if n in nbrs + for k, d in nbrs[n].items() + ) else: - return ((n, n, d) - for n, nbrs in G.adj.items() - if n in nbrs for d in nbrs[n].values()) + return ( + (n, n, d) + for n, nbrs in G.adj.items() + if n in nbrs + for d in nbrs[n].values() + ) else: return ((n, n, nbrs[n]) for n, nbrs in G.adj.items() if n in nbrs) elif data is not False: if G.is_multigraph(): if keys is True: - return ((n, n, k, d.get(data, default)) - for n, nbrs in G.adj.items() - if n in nbrs for k, d in nbrs[n].items()) + return ( + (n, n, k, d.get(data, default)) + for n, nbrs in G.adj.items() + if n in nbrs + for k, d in nbrs[n].items() + ) else: - return ((n, n, d.get(data, default)) - for n, nbrs in G.adj.items() - if n in nbrs for d in nbrs[n].values()) + return ( + (n, n, d.get(data, default)) + for n, nbrs in G.adj.items() + if n in nbrs + for d in nbrs[n].values() + ) else: - return ((n, n, nbrs[n].get(data, default)) - for n, nbrs in G.adj.items() if n in nbrs) + return ( + (n, n, nbrs[n].get(data, default)) + for n, nbrs in G.adj.items() + if n in nbrs + ) else: if G.is_multigraph(): if keys is True: - return ((n, n, k) - for n, nbrs in G.adj.items() - if n in nbrs for k in nbrs[n]) + return ( + (n, n, k) for n, nbrs in G.adj.items() if n in nbrs for k in nbrs[n] + ) else: - return ((n, n) - for n, nbrs in G.adj.items() - if n in nbrs for d in nbrs[n].values()) + return ( + (n, n) + for n, nbrs in G.adj.items() + if n in nbrs + for d in nbrs[n].values() + ) else: return ((n, n) for n, nbrs in G.adj.items() if n in nbrs) diff --git 
a/networkx/classes/graph.py b/networkx/classes/graph.py index 8731525f..a25eafb8 100644 --- a/networkx/classes/graph.py +++ b/networkx/classes/graph.py @@ -261,6 +261,7 @@ class Graph: creating graph subclasses by overwriting the base class `dict` with a dictionary-like object. """ + node_dict_factory = dict node_attr_dict_factory = dict adjlist_outer_dict_factory = dict @@ -324,7 +325,7 @@ class Graph: self.adjlist_inner_dict_factory = self.adjlist_inner_dict_factory self.edge_attr_dict_factory = self.edge_attr_dict_factory - self.graph = self.graph_attr_dict_factory() # dictionary for graph attributes + self.graph = self.graph_attr_dict_factory() # dictionary for graph attributes self._node = self.node_dict_factory() # empty node attribute dict self._adj = self.adjlist_outer_dict_factory() # empty adjacency dict # attempt to load graph with data @@ -360,11 +361,11 @@ class Graph: keyed by the string `"name"`. as well as an attribute (technically a property) `G.name`. This is entirely user controlled. """ - return self.graph.get('name', '') + return self.graph.get("name", "") @name.setter def name(self, s): - self.graph['name'] = s + self.graph["name"] = s def __str__(self): """Returns the graph name. @@ -612,8 +613,8 @@ class Graph: except KeyError as e: # NetworkXError if n not in self raise NetworkXError(f"The node {n} is not in the graph.") from e for u in nbrs: - del adj[u][n] # remove all edges n-u in graph - del adj[n] # now remove node + del adj[u][n] # remove all edges n-u in graph + del adj[n] # now remove node def remove_nodes_from(self, nodes): """Remove multiple nodes. @@ -644,7 +645,7 @@ class Graph: for n in nodes: try: del self._node[n] - for u in list(adj[n]): # list handles self-loops + for u in list(adj[n]): # list handles self-loops del adj[u][n] # (allows mutation of dict in loop) del adj[n] except KeyError: @@ -745,7 +746,7 @@ class Graph: # Lazy View creation: overload the (class) property on the instance # Then future G.nodes use the existing View # setattr doesn't work because attribute already exists - self.__dict__['nodes'] = nodes + self.__dict__["nodes"] = nodes return nodes def number_of_nodes(self): @@ -937,7 +938,7 @@ class Graph: self._adj[u][v] = datadict self._adj[v][u] = datadict - def add_weighted_edges_from(self, ebunch_to_add, weight='weight', **attr): + def add_weighted_edges_from(self, ebunch_to_add, weight="weight", **attr): """Add weighted edges in `ebunch_to_add` with specified weight attr Parameters @@ -967,8 +968,7 @@ class Graph: >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_weighted_edges_from([(0, 1, 3.0), (1, 2, 7.5)]) """ - self.add_edges_from(((u, v, {weight: d}) for u, v, d in ebunch_to_add), - **attr) + self.add_edges_from(((u, v, {weight: d}) for u, v, d in ebunch_to_add), **attr) def remove_edge(self, u, v): """Remove the edge between u and v. 
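
add_weighted_edges_from, shown above, simply wraps each (u, v, d) triple into {weight: d} and delegates to add_edges_from, so a custom attribute name passes straight through. A minimal sketch; the "cost" key is an illustrative choice, not part of this patch:

    >>> import networkx as nx
    >>> G = nx.Graph()
    >>> G.add_weighted_edges_from([(0, 1, 3.0), (1, 2, 7.5)], weight="cost")
    >>> G[0][1]
    {'cost': 3.0}
    >>> G.remove_edge(0, 1)
    >>> list(G.edges(data="cost"))
    [(1, 2, 7.5)]
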
@@ -1511,9 +1511,11 @@ class Graph: G = self.__class__() G.graph.update(self.graph) G.add_nodes_from((n, d.copy()) for n, d in self._node.items()) - G.add_edges_from((u, v, datadict.copy()) - for u, nbrs in self._adj.items() - for v, datadict in nbrs.items()) + G.add_edges_from( + (u, v, datadict.copy()) + for u, nbrs in self._adj.items() + for v, datadict in nbrs.items() + ) return G def to_directed(self, as_view=False): @@ -1565,9 +1567,11 @@ class Graph: G = graph_class() G.graph.update(deepcopy(self.graph)) G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) - G.add_edges_from((u, v, deepcopy(data)) - for u, nbrs in self._adj.items() - for v, data in nbrs.items()) + G.add_edges_from( + (u, v, deepcopy(data)) + for u, nbrs in self._adj.items() + for v, data in nbrs.items() + ) return G def to_undirected(self, as_view=False): @@ -1620,9 +1624,11 @@ class Graph: G = graph_class() G.graph.update(deepcopy(self.graph)) G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) - G.add_edges_from((u, v, deepcopy(d)) - for u, nbrs in self._adj.items() - for v, d in nbrs.items()) + G.add_edges_from( + (u, v, deepcopy(d)) + for u, nbrs in self._adj.items() + for v, d in nbrs.items() + ) return G def subgraph(self, nodes): @@ -1685,7 +1691,7 @@ class Graph: induced_nodes = nx.filters.show_nodes(self.nbunch_iter(nodes)) # if already a subgraph, don't make a chain subgraph = nx.graphviews.subgraph_view - if hasattr(self, '_NODE_OK'): + if hasattr(self, "_NODE_OK"): return subgraph(self._graph, induced_nodes, self._EDGE_OK) return subgraph(self, induced_nodes) @@ -1864,11 +1870,12 @@ class Graph: or None, a :exc:`NetworkXError` is raised. Also, if any object in nbunch is not hashable, a :exc:`NetworkXError` is raised. """ - if nbunch is None: # include all nodes via iterator + if nbunch is None: # include all nodes via iterator bunch = iter(self._adj) elif nbunch in self: # if nbunch is a single node bunch = iter([nbunch]) - else: # if nbunch is a sequence of nodes + else: # if nbunch is a sequence of nodes + def bunch_iter(nlist, adj): try: for n in nlist: @@ -1877,14 +1884,15 @@ class Graph: except TypeError as e: message = e.args[0] # capture error for non-sequence/iterator nbunch. - if 'iter' in message: + if "iter" in message: msg = "nbunch is not a node or a sequence of nodes." raise NetworkXError(msg) from e # capture error for unhashable node. - elif 'hashable' in message: + elif "hashable" in message: msg = f"Node {n} in sequence nbunch is not a valid node." raise NetworkXError(msg) from e else: raise + bunch = bunch_iter(nbunch, self._adj) return bunch diff --git a/networkx/classes/graphviews.py b/networkx/classes/graphviews.py index b0633404..6d565a69 100644 --- a/networkx/classes/graphviews.py +++ b/networkx/classes/graphviews.py @@ -23,15 +23,20 @@ the chain is tricky and much harder with restricted_views than with induced subgraphs. Often it is easiest to use .copy() to avoid chains. 
""" -from networkx.classes.coreviews import UnionAdjacency, UnionMultiAdjacency, \ - FilterAtlas, FilterAdjacency, FilterMultiAdjacency +from networkx.classes.coreviews import ( + UnionAdjacency, + UnionMultiAdjacency, + FilterAtlas, + FilterAdjacency, + FilterMultiAdjacency, +) from networkx.classes.filters import no_filter from networkx.exception import NetworkXError from networkx.utils import not_implemented_for import networkx as nx -__all__ = ['generic_graph_view', 'subgraph_view', 'reverse_view'] +__all__ = ["generic_graph_view", "subgraph_view", "reverse_view"] def generic_graph_view(G, create_using=None): @@ -159,11 +164,15 @@ def subgraph_view(G, filter_node=no_filter, filter_edge=no_filter): if G.is_multigraph(): Adj = FilterMultiAdjacency - def reverse_edge(u, v, k): return filter_edge(v, u, k) + def reverse_edge(u, v, k): + return filter_edge(v, u, k) + else: Adj = FilterAdjacency - def reverse_edge(u, v): return filter_edge(v, u) + def reverse_edge(u, v): + return filter_edge(v, u) + if G.is_directed(): newG._succ = Adj(G._succ, filter_node, filter_edge) newG._pred = Adj(G._pred, filter_node, reverse_edge) @@ -173,7 +182,7 @@ def subgraph_view(G, filter_node=no_filter, filter_edge=no_filter): return newG -@not_implemented_for('undirected') +@not_implemented_for("undirected") def reverse_view(G): """ View of `G` with edge directions reversed diff --git a/networkx/classes/multidigraph.py b/networkx/classes/multidigraph.py index bff422d4..59a3f44d 100644 --- a/networkx/classes/multidigraph.py +++ b/networkx/classes/multidigraph.py @@ -5,8 +5,13 @@ import networkx as nx from networkx.classes.digraph import DiGraph from networkx.classes.multigraph import MultiGraph from networkx.classes.coreviews import MultiAdjacencyView -from networkx.classes.reportviews import OutMultiEdgeView, InMultiEdgeView, \ - DiMultiDegreeView, OutMultiDegreeView, InMultiDegreeView +from networkx.classes.reportviews import ( + OutMultiEdgeView, + InMultiEdgeView, + DiMultiDegreeView, + OutMultiDegreeView, + InMultiDegreeView, +) from networkx.exception import NetworkXError @@ -254,6 +259,7 @@ class MultiDiGraph(MultiGraph, DiGraph): creating graph subclasses by overwriting the base class `dict` with a dictionary-like object. 
""" + # node_dict_factory = dict # already assigned in Graph # adjlist_outer_dict_factory = dict # adjlist_inner_dict_factory = dict @@ -824,16 +830,20 @@ class MultiDiGraph(MultiGraph, DiGraph): G.graph.update(deepcopy(self.graph)) G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) if reciprocal is True: - G.add_edges_from((u, v, key, deepcopy(data)) - for u, nbrs in self._adj.items() - for v, keydict in nbrs.items() - for key, data in keydict.items() - if v in self._pred[u] and key in self._pred[u][v]) + G.add_edges_from( + (u, v, key, deepcopy(data)) + for u, nbrs in self._adj.items() + for v, keydict in nbrs.items() + for key, data in keydict.items() + if v in self._pred[u] and key in self._pred[u][v] + ) else: - G.add_edges_from((u, v, key, deepcopy(data)) - for u, nbrs in self._adj.items() - for v, keydict in nbrs.items() - for key, data in keydict.items()) + G.add_edges_from( + (u, v, key, deepcopy(data)) + for u, nbrs in self._adj.items() + for v, keydict in nbrs.items() + for key, data in keydict.items() + ) return G def reverse(self, copy=True): @@ -853,7 +863,9 @@ class MultiDiGraph(MultiGraph, DiGraph): H = self.__class__() H.graph.update(deepcopy(self.graph)) H.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) - H.add_edges_from((v, u, k, deepcopy(d)) for u, v, k, d - in self.edges(keys=True, data=True)) + H.add_edges_from( + (v, u, k, deepcopy(d)) + for u, v, k, d in self.edges(keys=True, data=True) + ) return H return nx.graphviews.reverse_view(self) diff --git a/networkx/classes/multigraph.py b/networkx/classes/multigraph.py index ebf70f44..d3685b14 100644 --- a/networkx/classes/multigraph.py +++ b/networkx/classes/multigraph.py @@ -252,6 +252,7 @@ class MultiGraph(Graph): creating graph subclasses by overwriting the base class `dict` with a dictionary-like object. 
""" + # node_dict_factory = dict # already assigned in Graph # adjlist_outer_dict_factory = dict # adjlist_inner_dict_factory = dict @@ -956,10 +957,12 @@ class MultiGraph(Graph): G = self.__class__() G.graph.update(self.graph) G.add_nodes_from((n, d.copy()) for n, d in self._node.items()) - G.add_edges_from((u, v, key, datadict.copy()) - for u, nbrs in self._adj.items() - for v, keydict in nbrs.items() - for key, datadict in keydict.items()) + G.add_edges_from( + (u, v, key, datadict.copy()) + for u, nbrs in self._adj.items() + for v, keydict in nbrs.items() + for key, datadict in keydict.items() + ) return G def to_directed(self, as_view=False): @@ -1011,10 +1014,12 @@ class MultiGraph(Graph): G = graph_class() G.graph.update(deepcopy(self.graph)) G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) - G.add_edges_from((u, v, key, deepcopy(datadict)) - for u, nbrs in self.adj.items() - for v, keydict in nbrs.items() - for key, datadict in keydict.items()) + G.add_edges_from( + (u, v, key, deepcopy(datadict)) + for u, nbrs in self.adj.items() + for v, keydict in nbrs.items() + for key, datadict in keydict.items() + ) return G def to_undirected(self, as_view=False): @@ -1062,10 +1067,12 @@ class MultiGraph(Graph): G = graph_class() G.graph.update(deepcopy(self.graph)) G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) - G.add_edges_from((u, v, key, deepcopy(datadict)) - for u, nbrs in self._adj.items() - for v, keydict in nbrs.items() - for key, datadict in keydict.items()) + G.add_edges_from( + (u, v, key, deepcopy(datadict)) + for u, nbrs in self._adj.items() + for v, keydict in nbrs.items() + for key, datadict in keydict.items() + ) return G def number_of_edges(self, u=None, v=None): diff --git a/networkx/classes/ordered.py b/networkx/classes/ordered.py index ba8e6d78..2cb03c71 100644 --- a/networkx/classes/ordered.py +++ b/networkx/classes/ordered.py @@ -36,16 +36,14 @@ from .multidigraph import MultiDiGraph __all__ = [] -__all__.extend([ - 'OrderedGraph', - 'OrderedDiGraph', - 'OrderedMultiGraph', - 'OrderedMultiDiGraph', -]) +__all__.extend( + ["OrderedGraph", "OrderedDiGraph", "OrderedMultiGraph", "OrderedMultiDiGraph",] +) class OrderedGraph(Graph): """Consistently ordered variant of :class:`~networkx.Graph`.""" + node_dict_factory = OrderedDict adjlist_outer_dict_factory = OrderedDict adjlist_inner_dict_factory = OrderedDict @@ -54,6 +52,7 @@ class OrderedGraph(Graph): class OrderedDiGraph(DiGraph): """Consistently ordered variant of :class:`~networkx.DiGraph`.""" + node_dict_factory = OrderedDict adjlist_outer_dict_factory = OrderedDict adjlist_inner_dict_factory = OrderedDict @@ -62,6 +61,7 @@ class OrderedDiGraph(DiGraph): class OrderedMultiGraph(MultiGraph): """Consistently ordered variant of :class:`~networkx.MultiGraph`.""" + node_dict_factory = OrderedDict adjlist_outer_dict_factory = OrderedDict adjlist_inner_dict_factory = OrderedDict @@ -71,6 +71,7 @@ class OrderedMultiGraph(MultiGraph): class OrderedMultiDiGraph(MultiDiGraph): """Consistently ordered variant of :class:`~networkx.MultiDiGraph`.""" + node_dict_factory = OrderedDict adjlist_outer_dict_factory = OrderedDict adjlist_inner_dict_factory = OrderedDict diff --git a/networkx/classes/reportviews.py b/networkx/classes/reportviews.py index 73f1eeb1..2b867f29 100644 --- a/networkx/classes/reportviews.py +++ b/networkx/classes/reportviews.py @@ -84,14 +84,30 @@ EdgeDataView """ from collections.abc import Mapping, Set -__all__ = ['NodeView', 'NodeDataView', - 'EdgeView', 'OutEdgeView', 
'InEdgeView', - 'EdgeDataView', 'OutEdgeDataView', 'InEdgeDataView', - 'MultiEdgeView', 'OutMultiEdgeView', 'InMultiEdgeView', - 'MultiEdgeDataView', 'OutMultiEdgeDataView', 'InMultiEdgeDataView', - 'DegreeView', 'DiDegreeView', 'InDegreeView', 'OutDegreeView', - 'MultiDegreeView', 'DiMultiDegreeView', - 'InMultiDegreeView', 'OutMultiDegreeView'] +__all__ = [ + "NodeView", + "NodeDataView", + "EdgeView", + "OutEdgeView", + "InEdgeView", + "EdgeDataView", + "OutEdgeDataView", + "InEdgeDataView", + "MultiEdgeView", + "OutMultiEdgeView", + "InMultiEdgeView", + "MultiEdgeDataView", + "OutMultiEdgeDataView", + "InMultiEdgeDataView", + "DegreeView", + "DiDegreeView", + "InDegreeView", + "OutDegreeView", + "MultiDegreeView", + "DiMultiDegreeView", + "InMultiDegreeView", + "OutMultiDegreeView", +] # NodeViews @@ -145,13 +161,14 @@ class NodeView(Mapping, Set): >>> NVdata[2] == NV[2] # NVdata gets 'color', NV gets datadict False """ - __slots__ = '_nodes', + + __slots__ = ("_nodes",) def __getstate__(self): - return {'_nodes': self._nodes} + return {"_nodes": self._nodes} def __setstate__(self, state): - self._nodes = state['_nodes'] + self._nodes = state["_nodes"] def __init__(self, graph): self._nodes = graph._node @@ -209,17 +226,16 @@ class NodeDataView(Set): data : bool or string (default=False) default : object (default=None) """ - __slots__ = ('_nodes', '_data', '_default') + + __slots__ = ("_nodes", "_data", "_default") def __getstate__(self): - return {'_nodes': self._nodes, - '_data': self._data, - '_default': self._default} + return {"_nodes": self._nodes, "_data": self._data, "_default": self._default} def __setstate__(self, state): - self._nodes = state['_nodes'] - self._data = state['_data'] - self._default = state['_default'] + self._nodes = state["_nodes"] + self._data = state["_data"] + self._default = state["_default"] def __init__(self, nodedict, data=False, default=None): self._nodes = nodedict @@ -245,8 +261,10 @@ class NodeDataView(Set): return iter(self._nodes) if data is True: return iter(self._nodes.items()) - return ((n, dd[data] if data in dd else self._default) - for n, dd in self._nodes.items()) + return ( + (n, dd[data] if data in dd else self._default) + for n, dd in self._nodes.items() + ) def __contains__(self, n): try: @@ -324,8 +342,7 @@ class DiDegreeView: self._graph = G self._succ = G._succ if hasattr(G, "_succ") else G._adj self._pred = G._pred if hasattr(G, "_pred") else G._adj - self._nodes = self._succ if nbunch is None \ - else list(G.nbunch_iter(nbunch)) + self._nodes = self._succ if nbunch is None else list(G.nbunch_iter(nbunch)) self._weight = weight def __call__(self, nbunch=None, weight=None): @@ -348,8 +365,9 @@ class DiDegreeView: preds = self._pred[n] if weight is None: return len(succs) + len(preds) - return sum(dd.get(weight, 1) for dd in succs.values()) + \ - sum(dd.get(weight, 1) for dd in preds.values()) + return sum(dd.get(weight, 1) for dd in succs.values()) + sum( + dd.get(weight, 1) for dd in preds.values() + ) def __iter__(self): weight = self._weight @@ -362,8 +380,9 @@ class DiDegreeView: for n in self._nodes: succs = self._succ[n] preds = self._pred[n] - deg = sum(dd.get(weight, 1) for dd in succs.values()) \ - + sum(dd.get(weight, 1) for dd in preds.values()) + deg = sum(dd.get(weight, 1) for dd in succs.values()) + sum( + dd.get(weight, 1) for dd in preds.values() + ) yield (n, deg) def __len__(self): @@ -425,8 +444,9 @@ class DegreeView(DiDegreeView): nbrs = self._succ[n] if weight is None: return len(nbrs) + (n in nbrs) - return 
sum(dd.get(weight, 1) for dd in nbrs.values()) + \ - (n in nbrs and nbrs[n].get(weight, 1)) + return sum(dd.get(weight, 1) for dd in nbrs.values()) + ( + n in nbrs and nbrs[n].get(weight, 1) + ) def __iter__(self): weight = self._weight @@ -437,8 +457,9 @@ class DegreeView(DiDegreeView): else: for n in self._nodes: nbrs = self._succ[n] - deg = sum(dd.get(weight, 1) for dd in nbrs.values()) + \ - (n in nbrs and nbrs[n].get(weight, 1)) + deg = sum(dd.get(weight, 1) for dd in nbrs.values()) + ( + n in nbrs and nbrs[n].get(weight, 1) + ) yield (n, deg) @@ -495,11 +516,13 @@ class MultiDegreeView(DiDegreeView): weight = self._weight nbrs = self._succ[n] if weight is None: - return sum(len(keys) for keys in nbrs.values()) + \ - (n in nbrs and len(nbrs[n])) + return sum(len(keys) for keys in nbrs.values()) + ( + n in nbrs and len(nbrs[n]) + ) # edge weighted graph - degree is sum of nbr edge weights - deg = sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values() + ) if n in nbrs: deg += sum(d.get(weight, 1) for d in nbrs[n].values()) return deg @@ -509,14 +532,18 @@ class MultiDegreeView(DiDegreeView): if weight is None: for n in self._nodes: nbrs = self._succ[n] - deg = sum(len(keys) for keys in nbrs.values()) + \ - (n in nbrs and len(nbrs[n])) + deg = sum(len(keys) for keys in nbrs.values()) + ( + n in nbrs and len(nbrs[n]) + ) yield (n, deg) else: for n in self._nodes: nbrs = self._succ[n] - deg = sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) + for key_dict in nbrs.values() + for d in key_dict.values() + ) if n in nbrs: deg += sum(d.get(weight, 1) for d in nbrs[n].values()) yield (n, deg) @@ -530,13 +557,15 @@ class DiMultiDegreeView(DiDegreeView): succs = self._succ[n] preds = self._pred[n] if weight is None: - return sum(len(keys) for keys in succs.values()) + \ - sum(len(keys) for keys in preds.values()) + return sum(len(keys) for keys in succs.values()) + sum( + len(keys) for keys in preds.values() + ) # edge weighted graph - degree is sum of nbr edge weights - deg = sum(d.get(weight, 1) for key_dict in succs.values() - for d in key_dict.values()) + \ - sum(d.get(weight, 1) for key_dict in preds.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) for key_dict in succs.values() for d in key_dict.values() + ) + sum( + d.get(weight, 1) for key_dict in preds.values() for d in key_dict.values() + ) return deg def __iter__(self): @@ -545,17 +574,23 @@ class DiMultiDegreeView(DiDegreeView): for n in self._nodes: succs = self._succ[n] preds = self._pred[n] - deg = sum(len(keys) for keys in succs.values()) + \ - sum(len(keys) for keys in preds.values()) + deg = sum(len(keys) for keys in succs.values()) + sum( + len(keys) for keys in preds.values() + ) yield (n, deg) else: for n in self._nodes: succs = self._succ[n] preds = self._pred[n] - deg = sum(d.get(weight, 1) for key_dict in succs.values() - for d in key_dict.values()) + \ - sum(d.get(weight, 1) for key_dict in preds.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) + for key_dict in succs.values() + for d in key_dict.values() + ) + sum( + d.get(weight, 1) + for key_dict in preds.values() + for d in key_dict.values() + ) yield (n, deg) @@ -568,8 +603,9 @@ class InMultiDegreeView(DiDegreeView): if weight is None: return sum(len(data) for data in nbrs.values()) # edge weighted graph - degree is sum of nbr edge weights - return 
sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + return sum( + d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values() + ) def __iter__(self): weight = self._weight @@ -581,8 +617,11 @@ class InMultiDegreeView(DiDegreeView): else: for n in self._nodes: nbrs = self._pred[n] - deg = sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) + for key_dict in nbrs.values() + for d in key_dict.values() + ) yield (n, deg) @@ -595,8 +634,9 @@ class OutMultiDegreeView(DiDegreeView): if weight is None: return sum(len(data) for data in nbrs.values()) # edge weighted graph - degree is sum of nbr edge weights - return sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + return sum( + d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values() + ) def __iter__(self): weight = self._weight @@ -608,22 +648,35 @@ class OutMultiDegreeView(DiDegreeView): else: for n in self._nodes: nbrs = self._succ[n] - deg = sum(d.get(weight, 1) for key_dict in nbrs.values() - for d in key_dict.values()) + deg = sum( + d.get(weight, 1) + for key_dict in nbrs.values() + for d in key_dict.values() + ) yield (n, deg) # EdgeDataViews class OutEdgeDataView: """EdgeDataView for outward edges of DiGraph; See EdgeDataView""" - __slots__ = ('_viewer', '_nbunch', '_data', '_default', - '_adjdict', '_nodes_nbrs', '_report') + + __slots__ = ( + "_viewer", + "_nbunch", + "_data", + "_default", + "_adjdict", + "_nodes_nbrs", + "_report", + ) def __getstate__(self): - return {'viewer': self._viewer, - 'nbunch': self._nbunch, - 'data': self._data, - 'default': self._default} + return { + "viewer": self._viewer, + "nbunch": self._nbunch, + "data": self._data, + "default": self._default, + } def __setstate__(self, state): self.__init__(**state) @@ -646,15 +699,21 @@ class OutEdgeDataView: elif data is False: self._report = lambda n, nbr, dd: (n, nbr) else: # data is attribute name - self._report = lambda n, nbr, dd: \ - (n, nbr, dd[data]) if data in dd else (n, nbr, default) + self._report = ( + lambda n, nbr, dd: (n, nbr, dd[data]) + if data in dd + else (n, nbr, default) + ) def __len__(self): return sum(len(nbrs) for n, nbrs in self._nodes_nbrs()) def __iter__(self): - return (self._report(n, nbr, dd) for n, nbrs in self._nodes_nbrs() - for nbr, dd in nbrs.items()) + return ( + self._report(n, nbr, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, dd in nbrs.items() + ) def __contains__(self, e): u, v = e[:2] @@ -701,6 +760,7 @@ class EdgeDataView(OutEdgeDataView): [(0, 1, 'biz'), (1, 2, 'bar')] >>> assert((0, 1, 'biz') in G.edges(data='foo', default='biz')) """ + __slots__ = () def __len__(self): @@ -728,11 +788,15 @@ class EdgeDataView(OutEdgeDataView): class InEdgeDataView(OutEdgeDataView): """An EdgeDataView class for outward edges of DiGraph; See EdgeDataView""" + __slots__ = () def __iter__(self): - return (self._report(nbr, n, dd) for n, nbrs in self._nodes_nbrs() - for nbr, dd in nbrs.items()) + return ( + self._report(nbr, n, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, dd in nbrs.items() + ) def __contains__(self, e): u, v = e[:2] @@ -745,23 +809,24 @@ class InEdgeDataView(OutEdgeDataView): return e == self._report(u, v, ddict) - class OutMultiEdgeDataView(OutEdgeDataView): """An EdgeDataView for outward edges of MultiDiGraph; See EdgeDataView""" - __slots__ = ('keys',) + + __slots__ = ("keys",) def __getstate__(self): - return {'viewer': self._viewer, - 'nbunch': 
self._nbunch, - 'keys': self.keys, - 'data': self._data, - 'default': self._default} + return { + "viewer": self._viewer, + "nbunch": self._nbunch, + "keys": self.keys, + "data": self._data, + "default": self._default, + } def __setstate__(self, state): self.__init__(**state) - def __init__(self, viewer, nbunch=None, - data=False, keys=False, default=None): + def __init__(self, viewer, nbunch=None, data=False, keys=False, default=None): self._viewer = viewer adjdict = self._adjdict = viewer._adjdict self.keys = keys @@ -787,18 +852,28 @@ class OutMultiEdgeDataView(OutEdgeDataView): self._report = lambda n, nbr, k, dd: (n, nbr) else: # data is attribute name if keys is True: - self._report = lambda n, nbr, k, dd: (n, nbr, k, dd[data]) \ - if data in dd else (n, nbr, k, default) + self._report = ( + lambda n, nbr, k, dd: (n, nbr, k, dd[data]) + if data in dd + else (n, nbr, k, default) + ) else: - self._report = lambda n, nbr, k, dd: (n, nbr, dd[data]) \ - if data in dd else (n, nbr, default) + self._report = ( + lambda n, nbr, k, dd: (n, nbr, dd[data]) + if data in dd + else (n, nbr, default) + ) def __len__(self): return sum(1 for e in self) def __iter__(self): - return (self._report(n, nbr, k, dd) for n, nbrs in self._nodes_nbrs() - for nbr, kd in nbrs.items() for k, dd in kd.items()) + return ( + self._report(n, nbr, k, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, kd in nbrs.items() + for k, dd in kd.items() + ) def __contains__(self, e): u, v = e[:2] @@ -823,6 +898,7 @@ class OutMultiEdgeDataView(OutEdgeDataView): class MultiEdgeDataView(OutMultiEdgeDataView): """An EdgeDataView class for edges of MultiGraph; See EdgeDataView""" + __slots__ = () def __iter__(self): @@ -861,11 +937,16 @@ class MultiEdgeDataView(OutMultiEdgeDataView): class InMultiEdgeDataView(OutMultiEdgeDataView): """An EdgeDataView for inward edges of MultiDiGraph; See EdgeDataView""" + __slots__ = () def __iter__(self): - return (self._report(nbr, n, k, dd) for n, nbrs in self._nodes_nbrs() - for nbr, kd in nbrs.items() for k, dd in kd.items()) + return ( + self._report(nbr, n, k, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, kd in nbrs.items() + for k, dd in kd.items() + ) def __contains__(self, e): u, v = e[:2] @@ -888,13 +969,14 @@ class InMultiEdgeDataView(OutMultiEdgeDataView): # EdgeViews have set operations and no data reported class OutEdgeView(Set, Mapping): """A EdgeView class for outward edges of a DiGraph""" - __slots__ = ('_adjdict', '_graph', '_nodes_nbrs') + + __slots__ = ("_adjdict", "_graph", "_nodes_nbrs") def __getstate__(self): - return {'_graph': self._graph} + return {"_graph": self._graph} def __setstate__(self, state): - self._graph = G = state['_graph'] + self._graph = G = state["_graph"] self._adjdict = G._succ if hasattr(G, "succ") else G._adj self._nodes_nbrs = self._adjdict.items @@ -1015,6 +1097,7 @@ class EdgeView(OutEdgeView): (2, 3, 0) (2, 3, 1) """ + __slots__ = () dataview = EdgeDataView @@ -1042,10 +1125,11 @@ class EdgeView(OutEdgeView): class InEdgeView(OutEdgeView): """A EdgeView class for inward edges of a DiGraph""" + __slots__ = () def __setstate__(self, state): - self._graph = G = state['_graph'] + self._graph = G = state["_graph"] self._adjdict = G._pred if hasattr(G, "pred") else G._adj self._nodes_nbrs = self._adjdict.items @@ -1075,13 +1159,15 @@ class InEdgeView(OutEdgeView): class OutMultiEdgeView(OutEdgeView): """A EdgeView class for outward edges of a MultiDiGraph""" + __slots__ = () dataview = OutMultiEdgeDataView def __len__(self): - return sum(len(kdict) 
for n, nbrs in self._nodes_nbrs() - for nbr, kdict in nbrs.items()) + return sum( + len(kdict) for n, nbrs in self._nodes_nbrs() for nbr, kdict in nbrs.items() + ) def __iter__(self): for n, nbrs in self._nodes_nbrs(): @@ -1120,6 +1206,7 @@ class OutMultiEdgeView(OutEdgeView): class MultiEdgeView(OutMultiEdgeView): """A EdgeView class for edges of a MultiGraph""" + __slots__ = () dataview = MultiEdgeDataView @@ -1140,10 +1227,11 @@ class MultiEdgeView(OutMultiEdgeView): class InMultiEdgeView(OutMultiEdgeView): """A EdgeView class for inward edges of a MultiDiGraph""" + __slots__ = () def __setstate__(self, state): - self._graph = G = state['_graph'] + self._graph = G = state["_graph"] self._adjdict = G._pred if hasattr(G, "pred") else G._adj self._nodes_nbrs = self._adjdict.items diff --git a/networkx/classes/tests/historical_tests.py b/networkx/classes/tests/historical_tests.py index 85e69f4d..8f53c4c5 100644 --- a/networkx/classes/tests/historical_tests.py +++ b/networkx/classes/tests/historical_tests.py @@ -6,7 +6,6 @@ from networkx.testing import assert_edges_equal, assert_nodes_equal class HistoricalTests: - @classmethod def setup_class(cls): cls.null = nx.null_graph() @@ -22,19 +21,19 @@ class HistoricalTests: def test_name(self): G = self.G(name="test") - assert str(G) == 'test' - assert G.name == 'test' + assert str(G) == "test" + assert G.name == "test" H = self.G() - assert H.name == '' + assert H.name == "" # Nodes def test_add_remove_node(self): G = self.G() - G.add_node('A') - assert G.has_node('A') - G.remove_node('A') - assert not G.has_node('A') + G.add_node("A") + assert G.has_node("A") + G.remove_node("A") + assert not G.has_node("A") def test_nonhashable_node(self): # Test if a non-hashable object is in the Graph. A python dict will @@ -42,47 +41,55 @@ class HistoricalTests: # returned (see Graph __contains__). If it cannot be a node then it is # not a node. 
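# Illustrative sketch, separate from the patch: the contract described in
# the comment above, in runnable form. "G_demo" and the node labels are
# arbitrary names chosen for this aside, not identifiers from the diff.
import networkx as nx

G_demo = nx.Graph()
G_demo.add_node("A")
assert "A" in G_demo
assert not G_demo.has_node(["A"])  # unhashable list: False, no TypeError
assert {"A": 1} not in G_demo      # unhashable dict: False, no TypeError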
G = self.G() - assert not G.has_node(['A']) - assert not G.has_node({'A': 1}) + assert not G.has_node(["A"]) + assert not G.has_node({"A": 1}) def test_add_nodes_from(self): G = self.G() G.add_nodes_from(list("ABCDEFGHIJKL")) assert G.has_node("L") - G.remove_nodes_from(['H', 'I', 'J', 'K', 'L']) + G.remove_nodes_from(["H", "I", "J", "K", "L"]) G.add_nodes_from([1, 2, 3, 4]) - assert (sorted(G.nodes(), key=str) == - [1, 2, 3, 4, 'A', 'B', 'C', 'D', 'E', 'F', 'G']) + assert sorted(G.nodes(), key=str) == [ + 1, + 2, + 3, + 4, + "A", + "B", + "C", + "D", + "E", + "F", + "G", + ] # test __iter__ - assert (sorted(G, key=str) == - [1, 2, 3, 4, 'A', 'B', 'C', 'D', 'E', 'F', 'G']) + assert sorted(G, key=str) == [1, 2, 3, 4, "A", "B", "C", "D", "E", "F", "G"] def test_contains(self): G = self.G() - G.add_node('A') - assert 'A' in G + G.add_node("A") + assert "A" in G assert not [] in G # never raise a Key or TypeError in this test assert not {1: 1} in G def test_add_remove(self): # Test add_node and remove_node acting for various nbunch G = self.G() - G.add_node('m') - assert G.has_node('m') - G.add_node('m') # no complaints - pytest.raises(nx.NetworkXError, G.remove_node, 'j') - G.remove_node('m') + G.add_node("m") + assert G.has_node("m") + G.add_node("m") # no complaints + pytest.raises(nx.NetworkXError, G.remove_node, "j") + G.remove_node("m") assert list(G) == [] def test_nbunch_is_list(self): G = self.G() G.add_nodes_from(list("ABCD")) G.add_nodes_from(self.P3) # add nbunch of nodes (nbunch=Graph) - assert (sorted(G.nodes(), key=str) == - [1, 2, 3, 'A', 'B', 'C', 'D']) + assert sorted(G.nodes(), key=str) == [1, 2, 3, "A", "B", "C", "D"] G.remove_nodes_from(self.P3) # remove nbunch of nodes (nbunch=Graph) - assert (sorted(G.nodes(), key=str) == - ['A', 'B', 'C', 'D']) + assert sorted(G.nodes(), key=str) == ["A", "B", "C", "D"] def test_nbunch_is_set(self): G = self.G() @@ -95,172 +102,191 @@ class HistoricalTests: G = self.G() nbunch = set("ABCDEFGHIJKL") G.add_nodes_from(nbunch) - nbunch = {'I': "foo", 'J': 2, 'K': True, 'L': "spam"} + nbunch = {"I": "foo", "J": 2, "K": True, "L": "spam"} G.remove_nodes_from(nbunch) - assert sorted(G.nodes(), key=str), ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + assert sorted(G.nodes(), key=str), ["A", "B", "C", "D", "E", "F", "G", "H"] def test_nbunch_iterator(self): G = self.G() - G.add_nodes_from(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + G.add_nodes_from(["A", "B", "C", "D", "E", "F", "G", "H"]) n_iter = self.P3.nodes() G.add_nodes_from(n_iter) - assert (sorted(G.nodes(), key=str) == - [1, 2, 3, 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + assert sorted(G.nodes(), key=str) == [ + 1, + 2, + 3, + "A", + "B", + "C", + "D", + "E", + "F", + "G", + "H", + ] n_iter = self.P3.nodes() # rebuild same iterator G.remove_nodes_from(n_iter) # remove nbunch of nodes (nbunch=iterator) - assert (sorted(G.nodes(), key=str) == - ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + assert sorted(G.nodes(), key=str) == ["A", "B", "C", "D", "E", "F", "G", "H"] def test_nbunch_graph(self): G = self.G() - G.add_nodes_from(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + G.add_nodes_from(["A", "B", "C", "D", "E", "F", "G", "H"]) nbunch = self.K3 G.add_nodes_from(nbunch) - assert sorted(G.nodes(), key=str), [1, 2, 3, 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + assert sorted(G.nodes(), key=str), [ + 1, + 2, + 3, + "A", + "B", + "C", + "D", + "E", + "F", + "G", + "H", + ] # Edges def test_add_edge(self): G = self.G() - pytest.raises(TypeError, G.add_edge, 'A') + pytest.raises(TypeError, G.add_edge, 
"A") - G.add_edge('A', 'B') # testing add_edge() - G.add_edge('A', 'B') # should fail silently - assert G.has_edge('A', 'B') - assert not G.has_edge('A', 'C') - assert G.has_edge(*('A', 'B')) + G.add_edge("A", "B") # testing add_edge() + G.add_edge("A", "B") # should fail silently + assert G.has_edge("A", "B") + assert not G.has_edge("A", "C") + assert G.has_edge(*("A", "B")) if G.is_directed(): - assert not G.has_edge('B', 'A') + assert not G.has_edge("B", "A") else: # G is undirected, so B->A is an edge - assert G.has_edge('B', 'A') + assert G.has_edge("B", "A") - G.add_edge('A', 'C') # test directedness - G.add_edge('C', 'A') - G.remove_edge('C', 'A') + G.add_edge("A", "C") # test directedness + G.add_edge("C", "A") + G.remove_edge("C", "A") if G.is_directed(): - assert G.has_edge('A', 'C') + assert G.has_edge("A", "C") else: - assert not G.has_edge('A', 'C') - assert not G.has_edge('C', 'A') + assert not G.has_edge("A", "C") + assert not G.has_edge("C", "A") def test_self_loop(self): G = self.G() - G.add_edge('A', 'A') # test self loops - assert G.has_edge('A', 'A') - G.remove_edge('A', 'A') - G.add_edge('X', 'X') - assert G.has_node('X') - G.remove_node('X') - G.add_edge('A', 'Z') # should add the node silently - assert G.has_node('Z') + G.add_edge("A", "A") # test self loops + assert G.has_edge("A", "A") + G.remove_edge("A", "A") + G.add_edge("X", "X") + assert G.has_node("X") + G.remove_node("X") + G.add_edge("A", "Z") # should add the node silently + assert G.has_node("Z") def test_add_edges_from(self): G = self.G() - G.add_edges_from([('B', 'C')]) # test add_edges_from() - assert G.has_edge('B', 'C') + G.add_edges_from([("B", "C")]) # test add_edges_from() + assert G.has_edge("B", "C") if G.is_directed(): - assert not G.has_edge('C', 'B') + assert not G.has_edge("C", "B") else: - assert G.has_edge('C', 'B') # undirected + assert G.has_edge("C", "B") # undirected - G.add_edges_from([('D', 'F'), ('B', 'D')]) - assert G.has_edge('D', 'F') - assert G.has_edge('B', 'D') + G.add_edges_from([("D", "F"), ("B", "D")]) + assert G.has_edge("D", "F") + assert G.has_edge("B", "D") if G.is_directed(): - assert not G.has_edge('D', 'B') + assert not G.has_edge("D", "B") else: - assert G.has_edge('D', 'B') # undirected + assert G.has_edge("D", "B") # undirected def test_add_edges_from2(self): G = self.G() # after failing silently, should add 2nd edge - G.add_edges_from([tuple('IJ'), list('KK'), tuple('JK')]) - assert G.has_edge(*('I', 'J')) - assert G.has_edge(*('K', 'K')) - assert G.has_edge(*('J', 'K')) + G.add_edges_from([tuple("IJ"), list("KK"), tuple("JK")]) + assert G.has_edge(*("I", "J")) + assert G.has_edge(*("K", "K")) + assert G.has_edge(*("J", "K")) if G.is_directed(): - assert not G.has_edge(*('K', 'J')) + assert not G.has_edge(*("K", "J")) else: - assert G.has_edge(*('K', 'J')) + assert G.has_edge(*("K", "J")) def test_add_edges_from3(self): G = self.G() - G.add_edges_from(zip(list('ACD'), list('CDE'))) - assert G.has_edge('D', 'E') - assert not G.has_edge('E', 'C') + G.add_edges_from(zip(list("ACD"), list("CDE"))) + assert G.has_edge("D", "E") + assert not G.has_edge("E", "C") def test_remove_edge(self): G = self.G() - G.add_nodes_from([1, 2, 3, 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) + G.add_nodes_from([1, 2, 3, "A", "B", "C", "D", "E", "F", "G", "H"]) - G.add_edges_from(zip(list('MNOP'), list('NOPM'))) - assert G.has_edge('O', 'P') - assert G.has_edge('P', 'M') - G.remove_node('P') # tests remove_node()'s handling of edges. 
- assert not G.has_edge('P', 'M') - pytest.raises(TypeError, G.remove_edge, 'M') + G.add_edges_from(zip(list("MNOP"), list("NOPM"))) + assert G.has_edge("O", "P") + assert G.has_edge("P", "M") + G.remove_node("P") # tests remove_node()'s handling of edges. + assert not G.has_edge("P", "M") + pytest.raises(TypeError, G.remove_edge, "M") - G.add_edge('N', 'M') - assert G.has_edge('M', 'N') - G.remove_edge('M', 'N') - assert not G.has_edge('M', 'N') + G.add_edge("N", "M") + assert G.has_edge("M", "N") + G.remove_edge("M", "N") + assert not G.has_edge("M", "N") # self loop fails silently - G.remove_edges_from([list('HI'), list('DF'), - tuple('KK'), tuple('JK')]) - assert not G.has_edge('H', 'I') - assert not G.has_edge('J', 'K') - G.remove_edges_from([list('IJ'), list('KK'), list('JK')]) - assert not G.has_edge('I', 'J') - G.remove_nodes_from(set('ZEFHIMNO')) - G.add_edge('J', 'K') + G.remove_edges_from([list("HI"), list("DF"), tuple("KK"), tuple("JK")]) + assert not G.has_edge("H", "I") + assert not G.has_edge("J", "K") + G.remove_edges_from([list("IJ"), list("KK"), list("JK")]) + assert not G.has_edge("I", "J") + G.remove_nodes_from(set("ZEFHIMNO")) + G.add_edge("J", "K") def test_edges_nbunch(self): # Test G.edges(nbunch) with various forms of nbunch G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) # node not in nbunch should be quietly ignored pytest.raises(nx.NetworkXError, G.edges, 6) - assert list(G.edges('Z')) == [] # iterable non-node + assert list(G.edges("Z")) == [] # iterable non-node # nbunch can be an empty list assert list(G.edges([])) == [] if G.is_directed(): - elist = [('A', 'B'), ('A', 'C'), ('B', 'D')] + elist = [("A", "B"), ("A", "C"), ("B", "D")] else: - elist = [('A', 'B'), ('A', 'C'), ('B', 'C'), ('B', 'D')] + elist = [("A", "B"), ("A", "C"), ("B", "C"), ("B", "D")] # nbunch can be a list - assert_edges_equal(list(G.edges(['A', 'B'])), elist) + assert_edges_equal(list(G.edges(["A", "B"])), elist) # nbunch can be a set - assert_edges_equal(G.edges({'A', 'B'}), elist) + assert_edges_equal(G.edges({"A", "B"}), elist) # nbunch can be a graph G1 = self.G() - G1.add_nodes_from('AB') + G1.add_nodes_from("AB") assert_edges_equal(G.edges(G1), elist) # nbunch can be a dict with nodes as keys - ndict = {'A': "thing1", 'B': "thing2"} + ndict = {"A": "thing1", "B": "thing2"} assert_edges_equal(G.edges(ndict), elist) # nbunch can be a single node - assert_edges_equal(list(G.edges('A')), [('A', 'B'), ('A', 'C')]) - assert_nodes_equal(sorted(G), ['A', 'B', 'C', 'D']) + assert_edges_equal(list(G.edges("A")), [("A", "B"), ("A", "C")]) + assert_nodes_equal(sorted(G), ["A", "B", "C", "D"]) # nbunch can be nothing (whole graph) assert_edges_equal( list(G.edges()), - [('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')] + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")], ) def test_degree(self): G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) - assert G.degree('A') == 2 + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + assert G.degree("A") == 2 # degree of single node in iterable container must return dict - assert list(G.degree(['A'])) == [('A', 2)] - assert sorted(d for n, d in G.degree(['A', 'B'])) == [2, 3] + assert list(G.degree(["A"])) == [("A", 2)] + assert sorted(d for n, d in G.degree(["A", "B"])) == [2, 3] assert sorted(d for n, d in G.degree()) == [2, 2, 3, 
3] def test_degree2(self): @@ -272,7 +298,7 @@ class HistoricalTests: P3 = nx.path_graph(3) P5 = nx.path_graph(5) # silently ignore nodes not in P3 - assert dict(d for n, d in P3.degree(['A', 'B'])) == {} + assert dict(d for n, d in P3.degree(["A", "B"])) == {} # nbunch can be a graph assert sorted(d for n, d in P5.degree(P3)) == [1, 2, 2] # nbunch can be a graph that's way too big @@ -287,34 +313,33 @@ class HistoricalTests: def test_order_size(self): G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) assert G.order() == 4 assert G.size() == 5 assert G.number_of_edges() == 5 - assert G.number_of_edges('A', 'B') == 1 - assert G.number_of_edges('A', 'D') == 0 + assert G.number_of_edges("A", "B") == 1 + assert G.number_of_edges("A", "D") == 0 def test_copy(self): G = self.G() - H = G.copy() # copy + H = G.copy() # copy assert H.adj == G.adj assert H.name == G.name assert H != G def test_subgraph(self): G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) - SG = G.subgraph(['A', 'B', 'D']) - assert_nodes_equal(list(SG), ['A', 'B', 'D']) - assert_edges_equal(list(SG.edges()), [('A', 'B'), ('B', 'D')]) + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + SG = G.subgraph(["A", "B", "D"]) + assert_nodes_equal(list(SG), ["A", "B", "D"]) + assert_edges_equal(list(SG.edges()), [("A", "B"), ("B", "D")]) def test_to_directed(self): G = self.G() if not G.is_directed(): - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) + G.add_edges_from( + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) DG = G.to_directed() assert DG != G # directed copy or copy @@ -322,60 +347,76 @@ class HistoricalTests: assert DG.is_directed() assert DG.name == G.name assert DG.adj == G.adj - assert (sorted(DG.out_edges(list('AB'))) == - [('A', 'B'), ('A', 'C'), ('B', 'A'), - ('B', 'C'), ('B', 'D')]) - DG.remove_edge('A', 'B') - assert DG.has_edge('B', 'A') # this removes B-A but not A-B - assert not DG.has_edge('A', 'B') + assert sorted(DG.out_edges(list("AB"))) == [ + ("A", "B"), + ("A", "C"), + ("B", "A"), + ("B", "C"), + ("B", "D"), + ] + DG.remove_edge("A", "B") + assert DG.has_edge("B", "A") # this removes B-A but not A-B + assert not DG.has_edge("A", "B") def test_to_undirected(self): G = self.G() if G.is_directed(): - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) - UG = G.to_undirected() # to_undirected + G.add_edges_from( + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) + UG = G.to_undirected() # to_undirected assert UG != G assert not UG.is_directed() assert G.is_directed() assert UG.name == G.name assert UG.adj != G.adj - assert (sorted(UG.edges(list('AB'))) == - [('A', 'B'), ('A', 'C'), ('B', 'C'), ('B', 'D')]) - assert (sorted(UG.edges(['A', 'B'])) == - [('A', 'B'), ('A', 'C'), ('B', 'C'), ('B', 'D')]) - UG.remove_edge('A', 'B') - assert not UG.has_edge('B', 'A') - assert not UG.has_edge('A', 'B') + assert sorted(UG.edges(list("AB"))) == [ + ("A", "B"), + ("A", "C"), + ("B", "C"), + ("B", "D"), + ] + assert sorted(UG.edges(["A", "B"])) == [ + ("A", "B"), + ("A", "C"), + ("B", "C"), + ("B", "D"), + ] + UG.remove_edge("A", "B") + assert not UG.has_edge("B", "A") + assert not UG.has_edge("A", "B") def test_neighbors(self): G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) - 
G.add_nodes_from('GJK') - assert sorted(G['A']) == ['B', 'C'] - assert sorted(G.neighbors('A')) == ['B', 'C'] - assert sorted(G.neighbors('A')) == ['B', 'C'] - assert sorted(G.neighbors('G')) == [] - pytest.raises(nx.NetworkXError, G.neighbors, 'j') + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + G.add_nodes_from("GJK") + assert sorted(G["A"]) == ["B", "C"] + assert sorted(G.neighbors("A")) == ["B", "C"] + assert sorted(G.neighbors("A")) == ["B", "C"] + assert sorted(G.neighbors("G")) == [] + pytest.raises(nx.NetworkXError, G.neighbors, "j") def test_iterators(self): G = self.G() - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('C', 'B'), ('C', 'D')]) - G.add_nodes_from('GJK') - assert (sorted(G.nodes()) == - ['A', 'B', 'C', 'D', 'G', 'J', 'K']) - assert_edges_equal(G.edges(), - [('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) - - assert (sorted([v for k, v in G.degree()]) == - [0, 0, 0, 2, 2, 3, 3]) - assert (sorted(G.degree(), key=str) == - [('A', 2), ('B', 3), ('C', 3), ('D', 2), - ('G', 0), ('J', 0), ('K', 0)]) - assert sorted(G.neighbors('A')) == ['B', 'C'] - pytest.raises(nx.NetworkXError, G.neighbors, 'X') + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + G.add_nodes_from("GJK") + assert sorted(G.nodes()) == ["A", "B", "C", "D", "G", "J", "K"] + assert_edges_equal( + G.edges(), [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) + + assert sorted([v for k, v in G.degree()]) == [0, 0, 0, 2, 2, 3, 3] + assert sorted(G.degree(), key=str) == [ + ("A", 2), + ("B", 3), + ("C", 3), + ("D", 2), + ("G", 0), + ("J", 0), + ("K", 0), + ] + assert sorted(G.neighbors("A")) == ["B", "C"] + pytest.raises(nx.NetworkXError, G.neighbors, "X") G.clear() assert nx.number_of_nodes(G) == 0 assert nx.number_of_edges(G) == 0 diff --git a/networkx/classes/tests/test_coreviews.py b/networkx/classes/tests/test_coreviews.py index 5cad6836..c9b259a0 100644 --- a/networkx/classes/tests/test_coreviews.py +++ b/networkx/classes/tests/test_coreviews.py @@ -7,7 +7,7 @@ import networkx as nx class TestAtlasView: # node->data def setup(self): - self.d = {0: {'color': 'blue', 'weight': 1.2}, 1: {}, 2: {'color': 1}} + self.d = {0: {"color": "blue", "weight": 1.2}, 1: {}, 2: {"color": 1}} self.av = nx.classes.coreviews.AtlasView(self.d) def test_pickle(self): @@ -27,7 +27,7 @@ class TestAtlasView: def test_getitem(self): assert self.av[1] is self.d[1] - assert self.av[2]['color'] == 1 + assert self.av[2]["color"] == 1 pytest.raises(KeyError, self.av.__getitem__, 3) def test_copy(self): @@ -39,13 +39,13 @@ class TestAtlasView: avcopy[5] = {} assert avcopy != self.av - avcopy[0]['ht'] = 4 + avcopy[0]["ht"] = 4 assert avcopy[0] != self.av[0] - self.av[0]['ht'] = 4 + self.av[0]["ht"] = 4 assert avcopy[0] == self.av[0] - del self.av[0]['ht'] + del self.av[0]["ht"] - assert not hasattr(self.av, '__setitem__') + assert not hasattr(self.av, "__setitem__") def test_items(self): assert sorted(self.av.items()) == sorted(self.d.items()) @@ -62,9 +62,9 @@ class TestAtlasView: class TestAdjacencyView: # node->nbr->data def setup(self): - dd = {'color': 'blue', 'weight': 1.2} - self.nd = {0: dd, 1: {}, 2: {'color': 1}} - self.adj = {3: self.nd, 0: {3: dd}, 1: {}, 2: {3: {'color': 1}}} + dd = {"color": "blue", "weight": 1.2} + self.nd = {0: dd, 1: {}, 2: {"color": 1}} + self.adj = {3: self.nd, 0: {3: dd}, 1: {}, 2: {3: {"color": 1}}} self.adjview = nx.classes.coreviews.AdjacencyView(self.adj) def test_pickle(self): @@ -82,7 +82,7 @@ 
class TestAdjacencyView: def test_getitem(self): assert self.adjview[1] is not self.adj[1] assert self.adjview[3][0] is self.adjview[0][3] - assert self.adjview[2][3]['color'] == 1 + assert self.adjview[2][3]["color"] == 1 pytest.raises(KeyError, self.adjview.__getitem__, 4) def test_copy(self): @@ -90,13 +90,13 @@ class TestAdjacencyView: assert avcopy[0] == self.adjview[0] assert avcopy[0] is not self.adjview[0] - avcopy[2][3]['ht'] = 4 + avcopy[2][3]["ht"] = 4 assert avcopy[2] != self.adjview[2] - self.adjview[2][3]['ht'] = 4 + self.adjview[2][3]["ht"] = 4 assert avcopy[2] == self.adjview[2] - del self.adjview[2][3]['ht'] + del self.adjview[2][3]["ht"] - assert not hasattr(self.adjview, '__setitem__') + assert not hasattr(self.adjview, "__setitem__") def test_items(self): view_items = sorted((n, dict(d)) for n, d in self.adjview.items()) @@ -114,16 +114,16 @@ class TestAdjacencyView: class TestMultiAdjacencyView(TestAdjacencyView): # node->nbr->key->data def setup(self): - dd = {'color': 'blue', 'weight': 1.2} - self.kd = {0: dd, 1: {}, 2: {'color': 1}} - self.nd = {3: self.kd, 0: {3: dd}, 1: {0: {}}, 2: {3: {'color': 1}}} + dd = {"color": "blue", "weight": 1.2} + self.kd = {0: dd, 1: {}, 2: {"color": 1}} + self.nd = {3: self.kd, 0: {3: dd}, 1: {0: {}}, 2: {3: {"color": 1}}} self.adj = {3: self.nd, 0: {3: {3: dd}}, 1: {}, 2: {3: {8: {}}}} self.adjview = nx.classes.coreviews.MultiAdjacencyView(self.adj) def test_getitem(self): assert self.adjview[1] is not self.adj[1] assert self.adjview[3][0][3] is self.adjview[0][3][3] - assert self.adjview[3][2][3]['color'] == 1 + assert self.adjview[3][2][3]["color"] == 1 pytest.raises(KeyError, self.adjview.__getitem__, 4) def test_copy(self): @@ -131,20 +131,20 @@ class TestMultiAdjacencyView(TestAdjacencyView): assert avcopy[0] == self.adjview[0] assert avcopy[0] is not self.adjview[0] - avcopy[2][3][8]['ht'] = 4 + avcopy[2][3][8]["ht"] = 4 assert avcopy[2] != self.adjview[2] - self.adjview[2][3][8]['ht'] = 4 + self.adjview[2][3][8]["ht"] = 4 assert avcopy[2] == self.adjview[2] - del self.adjview[2][3][8]['ht'] + del self.adjview[2][3][8]["ht"] - assert not hasattr(self.adjview, '__setitem__') + assert not hasattr(self.adjview, "__setitem__") class TestUnionAtlas: # node->data def setup(self): - self.s = {0: {'color': 'blue', 'weight': 1.2}, 1: {}, 2: {'color': 1}} - self.p = {3: {'color': 'blue', 'weight': 1.2}, 4: {}, 2: {'watch': 2}} + self.s = {0: {"color": "blue", "weight": 1.2}, 1: {}, 2: {"color": 1}} + self.p = {3: {"color": "blue", "weight": 1.2}, 4: {}, 2: {"watch": 2}} self.av = nx.classes.coreviews.UnionAtlas(self.s, self.p) def test_pickle(self): @@ -162,8 +162,8 @@ class TestUnionAtlas: def test_getitem(self): assert self.av[0] is self.s[0] assert self.av[4] is self.p[4] - assert self.av[2]['color'] == 1 - pytest.raises(KeyError, self.av[2].__getitem__, 'watch') + assert self.av[2]["color"] == 1 + pytest.raises(KeyError, self.av[2].__getitem__, "watch") pytest.raises(KeyError, self.av.__getitem__, 8) def test_copy(self): @@ -174,13 +174,13 @@ class TestUnionAtlas: avcopy[5] = {} assert avcopy != self.av - avcopy[0]['ht'] = 4 + avcopy[0]["ht"] = 4 assert avcopy[0] != self.av[0] - self.av[0]['ht'] = 4 + self.av[0]["ht"] = 4 assert avcopy[0] == self.av[0] - del self.av[0]['ht'] + del self.av[0]["ht"] - assert not hasattr(self.av, '__setitem__') + assert not hasattr(self.av, "__setitem__") def test_items(self): expected = dict(self.p.items()) @@ -199,10 +199,10 @@ class TestUnionAtlas: class TestUnionAdjacency: # node->nbr->data def 
setup(self): - dd = {'color': 'blue', 'weight': 1.2} - self.nd = {0: dd, 1: {}, 2: {'color': 1}} - self.s = {3: self.nd, 0: {}, 1: {}, 2: {3: {'color': 1}}} - self.p = {3: {}, 0: {3: dd}, 1: {0: {}}, 2: {1: {'color': 1}}} + dd = {"color": "blue", "weight": 1.2} + self.nd = {0: dd, 1: {}, 2: {"color": 1}} + self.s = {3: self.nd, 0: {}, 1: {}, 2: {3: {"color": 1}}} + self.p = {3: {}, 0: {3: dd}, 1: {0: {}}, 2: {1: {"color": 1}}} self.adjview = nx.classes.coreviews.UnionAdjacency(self.s, self.p) def test_pickle(self): @@ -220,7 +220,7 @@ class TestUnionAdjacency: def test_getitem(self): assert self.adjview[1] is not self.s[1] assert self.adjview[3][0] is self.adjview[0][3] - assert self.adjview[2][3]['color'] == 1 + assert self.adjview[2][3]["color"] == 1 pytest.raises(KeyError, self.adjview.__getitem__, 4) def test_copy(self): @@ -228,13 +228,13 @@ class TestUnionAdjacency: assert avcopy[0] == self.adjview[0] assert avcopy[0] is not self.adjview[0] - avcopy[2][3]['ht'] = 4 + avcopy[2][3]["ht"] = 4 assert avcopy[2] != self.adjview[2] - self.adjview[2][3]['ht'] = 4 + self.adjview[2][3]["ht"] = 4 assert avcopy[2] == self.adjview[2] - del self.adjview[2][3]['ht'] + del self.adjview[2][3]["ht"] - assert not hasattr(self.adjview, '__setitem__') + assert not hasattr(self.adjview, "__setitem__") def test_str(self): out = str(dict(self.adjview)) @@ -249,10 +249,10 @@ class TestUnionAdjacency: class TestUnionMultiInner(TestUnionAdjacency): # nbr->key->data def setup(self): - dd = {'color': 'blue', 'weight': 1.2} - self.kd = {7: {}, 'ekey': {}, 9: {'color': 1}} - self.s = {3: self.kd, 0: {7: dd}, 1: {}, 2: {'key': {'color': 1}}} - self.p = {3: {}, 0: {3: dd}, 1: {}, 2: {1: {'span': 2}}} + dd = {"color": "blue", "weight": 1.2} + self.kd = {7: {}, "ekey": {}, 9: {"color": 1}} + self.s = {3: self.kd, 0: {7: dd}, 1: {}, 2: {"key": {"color": 1}}} + self.p = {3: {}, 0: {3: dd}, 1: {}, 2: {1: {"span": 2}}} self.adjview = nx.classes.coreviews.UnionMultiInner(self.s, self.p) def test_len(self): @@ -261,32 +261,32 @@ class TestUnionMultiInner(TestUnionAdjacency): def test_getitem(self): assert self.adjview[1] is not self.s[1] assert self.adjview[0][7] is self.adjview[0][3] - assert self.adjview[2]['key']['color'] == 1 - assert self.adjview[2][1]['span'] == 2 + assert self.adjview[2]["key"]["color"] == 1 + assert self.adjview[2][1]["span"] == 2 pytest.raises(KeyError, self.adjview.__getitem__, 4) - pytest.raises(KeyError, self.adjview[1].__getitem__, 'key') + pytest.raises(KeyError, self.adjview[1].__getitem__, "key") def test_copy(self): avcopy = self.adjview.copy() assert avcopy[0] == self.adjview[0] assert avcopy[0] is not self.adjview[0] - avcopy[2][1]['width'] = 8 + avcopy[2][1]["width"] = 8 assert avcopy[2] != self.adjview[2] - self.adjview[2][1]['width'] = 8 + self.adjview[2][1]["width"] = 8 assert avcopy[2] == self.adjview[2] - del self.adjview[2][1]['width'] + del self.adjview[2][1]["width"] - assert not hasattr(self.adjview, '__setitem__') - assert hasattr(avcopy, '__setitem__') + assert not hasattr(self.adjview, "__setitem__") + assert hasattr(avcopy, "__setitem__") class TestUnionMultiAdjacency(TestUnionAdjacency): # node->nbr->key->data def setup(self): - dd = {'color': 'blue', 'weight': 1.2} - self.kd = {7: {}, 8: {}, 9: {'color': 1}} - self.nd = {3: self.kd, 0: {9: dd}, 1: {8: {}}, 2: {9: {'color': 1}}} + dd = {"color": "blue", "weight": 1.2} + self.kd = {7: {}, 8: {}, 9: {"color": 1}} + self.nd = {3: self.kd, 0: {9: dd}, 1: {8: {}}, 2: {9: {"color": 1}}} self.s = {3: self.nd, 0: {3: {7: dd}}, 1: 
{}, 2: {3: {8: {}}}} self.p = {3: {}, 0: {3: {9: dd}}, 1: {}, 2: {1: {8: {}}}} self.adjview = nx.classes.coreviews.UnionMultiAdjacency(self.s, self.p) @@ -294,7 +294,7 @@ class TestUnionMultiAdjacency(TestUnionAdjacency): def test_getitem(self): assert self.adjview[1] is not self.s[1] assert self.adjview[3][0][9] is self.adjview[0][3][9] - assert self.adjview[3][2][9]['color'] == 1 + assert self.adjview[3][2][9]["color"] == 1 pytest.raises(KeyError, self.adjview.__getitem__, 4) def test_copy(self): @@ -302,22 +302,19 @@ class TestUnionMultiAdjacency(TestUnionAdjacency): assert avcopy[0] == self.adjview[0] assert avcopy[0] is not self.adjview[0] - avcopy[2][3][8]['ht'] = 4 + avcopy[2][3][8]["ht"] = 4 assert avcopy[2] != self.adjview[2] - self.adjview[2][3][8]['ht'] = 4 + self.adjview[2][3][8]["ht"] = 4 assert avcopy[2] == self.adjview[2] - del self.adjview[2][3][8]['ht'] + del self.adjview[2][3][8]["ht"] - assert not hasattr(self.adjview, '__setitem__') - assert hasattr(avcopy, '__setitem__') + assert not hasattr(self.adjview, "__setitem__") + assert hasattr(avcopy, "__setitem__") class TestFilteredGraphs: def setup(self): - self.Graphs = [nx.Graph, - nx.DiGraph, - nx.MultiGraph, - nx.MultiDiGraph] + self.Graphs = [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph] self.SubGraphs = [nx.graphviews.subgraph_view] * 4 def test_hide_show_nodes(self): diff --git a/networkx/classes/tests/test_digraph.py b/networkx/classes/tests/test_digraph.py index ea475b60..76702827 100644 --- a/networkx/classes/tests/test_digraph.py +++ b/networkx/classes/tests/test_digraph.py @@ -52,11 +52,11 @@ class BaseDiGraphTester(BaseGraphTester): assert sorted(G.out_edges(2)) == [] def test_out_edges_data(self): - G = nx.DiGraph([(0, 1, {'data': 0}), (1, 0, {})]) - assert sorted(G.out_edges(data=True)) == [(0, 1, {'data': 0}), (1, 0, {})] - assert sorted(G.out_edges(0, data=True)) == [(0, 1, {'data': 0})] - assert sorted(G.out_edges(data='data')) == [(0, 1, 0), (1, 0, None)] - assert sorted(G.out_edges(0, data='data')) == [(0, 1, 0)] + G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})]) + assert sorted(G.out_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})] + assert sorted(G.out_edges(0, data=True)) == [(0, 1, {"data": 0})] + assert sorted(G.out_edges(data="data")) == [(0, 1, 0), (1, 0, None)] + assert sorted(G.out_edges(0, data="data")) == [(0, 1, 0)] def test_in_edges_dir(self): G = self.P3 @@ -65,19 +65,18 @@ class BaseDiGraphTester(BaseGraphTester): assert sorted(G.in_edges(2)) == [(1, 2)] def test_in_edges_data(self): - G = nx.DiGraph([(0, 1, {'data': 0}), (1, 0, {})]) - assert sorted(G.in_edges(data=True)) == [(0, 1, {'data': 0}), (1, 0, {})] - assert sorted(G.in_edges(1, data=True)) == [(0, 1, {'data': 0})] - assert sorted(G.in_edges(data='data')) == [(0, 1, 0), (1, 0, None)] - assert sorted(G.in_edges(1, data='data')) == [(0, 1, 0)] + G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})]) + assert sorted(G.in_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})] + assert sorted(G.in_edges(1, data=True)) == [(0, 1, {"data": 0})] + assert sorted(G.in_edges(data="data")) == [(0, 1, 0), (1, 0, None)] + assert sorted(G.in_edges(1, data="data")) == [(0, 1, 0)] def test_degree(self): G = self.K3 assert sorted(G.degree()) == [(0, 4), (1, 4), (2, 4)] assert dict(G.degree()) == {0: 4, 1: 4, 2: 4} assert G.degree(0) == 4 - assert list(G.degree(iter([0]))) == [ - (0, 4)] # run through iterator + assert list(G.degree(iter([0]))) == [(0, 4)] # run through iterator def test_in_degree(self): G = self.K3 @@ -124,6 +123,7 @@ 
class BaseDiGraphTester(BaseGraphTester): def test_reverse_hashable(self): class Foo: pass + x = Foo() y = Foo() G = nx.DiGraph() @@ -135,8 +135,14 @@ class BaseDiGraphTester(BaseGraphTester): class BaseAttrDiGraphTester(BaseDiGraphTester, BaseAttrGraphTester): def test_edges_data(self): G = self.K3 - all_edges = [(0, 1, {}), (0, 2, {}), (1, 0, {}), - (1, 2, {}), (2, 0, {}), (2, 1, {})] + all_edges = [ + (0, 1, {}), + (0, 2, {}), + (1, 0, {}), + (1, 2, {}), + (2, 0, {}), + (2, 1, {}), + ] assert sorted(G.edges(data=True)) == all_edges assert sorted(G.edges(0, data=True)) == all_edges[:2] assert sorted(G.edges([0, 1], data=True)) == all_edges[:4] @@ -146,24 +152,24 @@ class BaseAttrDiGraphTester(BaseDiGraphTester, BaseAttrGraphTester): def test_in_degree_weighted(self): G = self.K3.copy() G.add_edge(0, 1, weight=0.3, other=1.2) - assert sorted(G.in_degree(weight='weight')) == [(0, 2), (1, 1.3), (2, 2)] - assert dict(G.in_degree(weight='weight')) == {0: 2, 1: 1.3, 2: 2} - assert G.in_degree(1, weight='weight') == 1.3 - assert sorted(G.in_degree(weight='other')) == [(0, 2), (1, 2.2), (2, 2)] - assert dict(G.in_degree(weight='other')) == {0: 2, 1: 2.2, 2: 2} - assert G.in_degree(1, weight='other') == 2.2 - assert list(G.in_degree(iter([1]), weight='other')) == [(1, 2.2)] + assert sorted(G.in_degree(weight="weight")) == [(0, 2), (1, 1.3), (2, 2)] + assert dict(G.in_degree(weight="weight")) == {0: 2, 1: 1.3, 2: 2} + assert G.in_degree(1, weight="weight") == 1.3 + assert sorted(G.in_degree(weight="other")) == [(0, 2), (1, 2.2), (2, 2)] + assert dict(G.in_degree(weight="other")) == {0: 2, 1: 2.2, 2: 2} + assert G.in_degree(1, weight="other") == 2.2 + assert list(G.in_degree(iter([1]), weight="other")) == [(1, 2.2)] def test_out_degree_weighted(self): G = self.K3.copy() G.add_edge(0, 1, weight=0.3, other=1.2) - assert sorted(G.out_degree(weight='weight')) == [(0, 1.3), (1, 2), (2, 2)] - assert dict(G.out_degree(weight='weight')) == {0: 1.3, 1: 2, 2: 2} - assert G.out_degree(0, weight='weight') == 1.3 - assert sorted(G.out_degree(weight='other')) == [(0, 2.2), (1, 2), (2, 2)] - assert dict(G.out_degree(weight='other')) == {0: 2.2, 1: 2, 2: 2} - assert G.out_degree(0, weight='other') == 2.2 - assert list(G.out_degree(iter([0]), weight='other')) == [(0, 2.2)] + assert sorted(G.out_degree(weight="weight")) == [(0, 1.3), (1, 2), (2, 2)] + assert dict(G.out_degree(weight="weight")) == {0: 1.3, 1: 2, 2: 2} + assert G.out_degree(0, weight="weight") == 1.3 + assert sorted(G.out_degree(weight="other")) == [(0, 2.2), (1, 2), (2, 2)] + assert dict(G.out_degree(weight="other")) == {0: 2.2, 1: 2, 2: 2} + assert G.out_degree(0, weight="other") == 2.2 + assert list(G.out_degree(iter([0]), weight="other")) == [(0, 2.2)] class TestDiGraph(BaseAttrDiGraphTester, _TestGraph): @@ -215,10 +221,10 @@ class TestDiGraph(BaseAttrDiGraphTester, _TestGraph): def test_add_edges_from(self): G = self.Graph() - G.add_edges_from([(0, 1), (0, 2, {'data': 3})], data=2) - assert G.adj == {0: {1: {'data': 2}, 2: {'data': 3}}, 1: {}, 2: {}} - assert G.succ == {0: {1: {'data': 2}, 2: {'data': 3}}, 1: {}, 2: {}} - assert G.pred == {0: {}, 1: {0: {'data': 2}}, 2: {0: {'data': 3}}} + G.add_edges_from([(0, 1), (0, 2, {"data": 3})], data=2) + assert G.adj == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}} + assert G.succ == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}} + assert G.pred == {0: {}, 1: {0: {"data": 2}}, 2: {0: {"data": 3}}} with pytest.raises(nx.NetworkXError): G.add_edges_from([(0,)]) # too few in tuple @@ -244,7 
+250,7 @@ class TestDiGraph(BaseAttrDiGraphTester, _TestGraph): def test_clear(self): G = self.K3 - G.graph['name'] = 'K3' + G.graph["name"] = "K3" G.clear() assert list(G.nodes) == [] assert G.succ == {} @@ -253,7 +259,7 @@ class TestDiGraph(BaseAttrDiGraphTester, _TestGraph): def test_clear_edges(self): G = self.K3 - G.graph['name'] = 'K3' + G.graph["name"] = "K3" nodes = list(G.nodes) G.clear_edges() assert list(G.nodes) == nodes @@ -261,7 +267,7 @@ class TestDiGraph(BaseAttrDiGraphTester, _TestGraph): assert G.succ == expected assert G.pred == expected assert list(G.edges) == [] - assert G.graph['name'] == 'K3' + assert G.graph["name"] == "K3" class TestEdgeSubgraph(_TestGraphEdgeSubgraph): @@ -272,10 +278,10 @@ class TestEdgeSubgraph(_TestGraphEdgeSubgraph): G = nx.DiGraph(nx.path_graph(5)) # Add some node, edge, and graph attributes. for i in range(5): - G.nodes[i]['name'] = f'node{i}' - G.edges[0, 1]['name'] = 'edge01' - G.edges[3, 4]['name'] = 'edge34' - G.graph['name'] = 'graph' + G.nodes[i]["name"] = f"node{i}" + G.edges[0, 1]["name"] = "edge01" + G.edges[3, 4]["name"] = "edge34" + G.graph["name"] = "graph" # Get the subgraph induced by the first and last edges. self.G = G self.H = G.edge_subgraph([(0, 1), (3, 4)]) diff --git a/networkx/classes/tests/test_digraph_historical.py b/networkx/classes/tests/test_digraph_historical.py index 459da10f..7047bbf3 100644 --- a/networkx/classes/tests/test_digraph_historical.py +++ b/networkx/classes/tests/test_digraph_historical.py @@ -7,7 +7,6 @@ from .historical_tests import HistoricalTests class TestDiGraphHistorical(HistoricalTests): - @classmethod def setup_class(cls): HistoricalTests.setup_class() @@ -15,23 +14,34 @@ class TestDiGraphHistorical(HistoricalTests): def test_in_degree(self): G = self.G() - G.add_nodes_from('GJK') - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('B', 'C'), ('C', 'D')]) + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) assert sorted(d for n, d in G.in_degree()) == [0, 0, 0, 0, 1, 2, 2] - assert (dict(G.in_degree()) == - {'A': 0, 'C': 2, 'B': 1, 'D': 2, 'G': 0, 'K': 0, 'J': 0}) + assert dict(G.in_degree()) == { + "A": 0, + "C": 2, + "B": 1, + "D": 2, + "G": 0, + "K": 0, + "J": 0, + } def test_out_degree(self): G = self.G() - G.add_nodes_from('GJK') - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('B', 'C'), ('C', 'D')]) - assert (sorted([v for k, v in G.in_degree()]) == - [0, 0, 0, 0, 1, 2, 2]) - assert (dict(G.out_degree()) == - {'A': 2, 'C': 1, 'B': 2, 'D': 0, 'G': 0, 'K': 0, 'J': 0}) + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + assert sorted([v for k, v in G.in_degree()]) == [0, 0, 0, 0, 1, 2, 2] + assert dict(G.out_degree()) == { + "A": 2, + "C": 1, + "B": 2, + "D": 0, + "G": 0, + "K": 0, + "J": 0, + } def test_degree_digraph(self): H = nx.DiGraph() @@ -42,44 +52,41 @@ class TestDiGraphHistorical(HistoricalTests): def test_neighbors(self): G = self.G() - G.add_nodes_from('GJK') - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('B', 'C'), ('C', 'D')]) + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) - assert sorted(G.neighbors('C')) == ['D'] - assert sorted(G['C']) == ['D'] - assert sorted(G.neighbors('A')) == ['B', 'C'] - pytest.raises(nx.NetworkXError, G.neighbors, 'j') - pytest.raises(nx.NetworkXError, G.neighbors, 'j') + assert sorted(G.neighbors("C")) == ["D"] + assert sorted(G["C"]) == ["D"] + 
assert sorted(G.neighbors("A")) == ["B", "C"] + pytest.raises(nx.NetworkXError, G.neighbors, "j") + pytest.raises(nx.NetworkXError, G.neighbors, "j") def test_successors(self): G = self.G() - G.add_nodes_from('GJK') - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('B', 'C'), ('C', 'D')]) - assert sorted(G.successors('A')) == ['B', 'C'] - assert sorted(G.successors('A')) == ['B', 'C'] - assert sorted(G.successors('G')) == [] - assert sorted(G.successors('D')) == [] - assert sorted(G.successors('G')) == [] - pytest.raises(nx.NetworkXError, G.successors, 'j') - pytest.raises(nx.NetworkXError, G.successors, 'j') + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + assert sorted(G.successors("A")) == ["B", "C"] + assert sorted(G.successors("A")) == ["B", "C"] + assert sorted(G.successors("G")) == [] + assert sorted(G.successors("D")) == [] + assert sorted(G.successors("G")) == [] + pytest.raises(nx.NetworkXError, G.successors, "j") + pytest.raises(nx.NetworkXError, G.successors, "j") def test_predecessors(self): G = self.G() - G.add_nodes_from('GJK') - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), - ('B', 'C'), ('C', 'D')]) - assert sorted(G.predecessors('C')) == ['A', 'B'] - assert sorted(G.predecessors('C')) == ['A', 'B'] - assert sorted(G.predecessors('G')) == [] - assert sorted(G.predecessors('A')) == [] - assert sorted(G.predecessors('G')) == [] - assert sorted(G.predecessors('A')) == [] - assert sorted(G.successors('D')) == [] - - pytest.raises(nx.NetworkXError, G.predecessors, 'j') - pytest.raises(nx.NetworkXError, G.predecessors, 'j') + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + assert sorted(G.predecessors("C")) == ["A", "B"] + assert sorted(G.predecessors("C")) == ["A", "B"] + assert sorted(G.predecessors("G")) == [] + assert sorted(G.predecessors("A")) == [] + assert sorted(G.predecessors("G")) == [] + assert sorted(G.predecessors("A")) == [] + assert sorted(G.successors("D")) == [] + + pytest.raises(nx.NetworkXError, G.predecessors, "j") + pytest.raises(nx.NetworkXError, G.predecessors, "j") def test_reverse(self): G = nx.complete_graph(10) diff --git a/networkx/classes/tests/test_filters.py b/networkx/classes/tests/test_filters.py index 104d9779..b8fe40b6 100644 --- a/networkx/classes/tests/test_filters.py +++ b/networkx/classes/tests/test_filters.py @@ -16,7 +16,7 @@ class TestFilterFactory: assert not f(3) assert f(4) assert f(0) - assert f('a') + assert f("a") pytest.raises(TypeError, f, 1, 2) pytest.raises(TypeError, f) @@ -27,7 +27,7 @@ class TestFilterFactory: assert f(3) assert not f(4) assert not f(0) - assert not f('a') + assert not f("a") pytest.raises(TypeError, f, 1, 2) pytest.raises(TypeError, f) @@ -39,7 +39,7 @@ class TestFilterFactory: assert not f(4, 3) assert f(2, 3) assert f(0, -1) - assert f('a', 'b') + assert f("a", "b") pytest.raises(TypeError, f, 1, 2, 3) pytest.raises(TypeError, f, 1) pytest.raises(TypeError, f) @@ -54,7 +54,7 @@ class TestFilterFactory: assert f(4, 3) assert not f(2, 3) assert not f(0, -1) - assert not f('a', 'b') + assert not f("a", "b") pytest.raises(TypeError, f, 1, 2, 3) pytest.raises(TypeError, f, 1) pytest.raises(TypeError, f) @@ -69,7 +69,7 @@ class TestFilterFactory: assert f(4, 3) assert f(2, 3) assert f(0, -1) - assert f('a', 'b') + assert f("a", "b") pytest.raises(TypeError, f, 1, 2, 3) pytest.raises(TypeError, f, 1) pytest.raises(TypeError, f) @@ -84,7 +84,7 @@ class TestFilterFactory: assert 
not f(4, 3) assert not f(2, 3) assert not f(0, -1) - assert not f('a', 'b') + assert not f("a", "b") pytest.raises(TypeError, f, 1, 2, 3) pytest.raises(TypeError, f, 1) pytest.raises(TypeError, f) @@ -103,7 +103,7 @@ class TestFilterFactory: assert f(4, 3, 0) assert f(2, 3, 0) assert f(0, -1, 0) - assert f('a', 'b', 0) + assert f("a", "b", 0) pytest.raises(TypeError, f, 1, 2, 3, 4) pytest.raises(TypeError, f, 1, 2) pytest.raises(TypeError, f, 1) @@ -124,7 +124,7 @@ class TestFilterFactory: assert not f(4, 3, 0) assert not f(2, 3, 0) assert not f(0, -1, 0) - assert not f('a', 'b', 0) + assert not f("a", "b", 0) pytest.raises(TypeError, f, 1, 2, 3, 4) pytest.raises(TypeError, f, 1, 2) pytest.raises(TypeError, f, 1) @@ -145,7 +145,7 @@ class TestFilterFactory: assert f(4, 3, 0) assert f(2, 3, 0) assert f(0, -1, 0) - assert f('a', 'b', 0) + assert f("a", "b", 0) pytest.raises(TypeError, f, 1, 2, 3, 4) pytest.raises(TypeError, f, 1, 2) pytest.raises(TypeError, f, 1) @@ -166,7 +166,7 @@ class TestFilterFactory: assert not f(4, 3, 0) assert not f(2, 3, 0) assert not f(0, -1, 0) - assert not f('a', 'b', 0) + assert not f("a", "b", 0) pytest.raises(TypeError, f, 1, 2, 3, 4) pytest.raises(TypeError, f, 1, 2) pytest.raises(TypeError, f, 1) diff --git a/networkx/classes/tests/test_function.py b/networkx/classes/tests/test_function.py index 5310ef11..ed87e4d7 100644 --- a/networkx/classes/tests/test_function.py +++ b/networkx/classes/tests/test_function.py @@ -6,7 +6,7 @@ from networkx.testing.utils import assert_edges_equal, assert_nodes_equal class TestFunction: def setup_method(self): - self.G = nx.Graph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}, name='Test') + self.G = nx.Graph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}, name="Test") self.Gdegree = {0: 3, 1: 2, 2: 2, 3: 1, 4: 0} self.Gnodes = list(range(5)) self.Gedges = [(0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2)] @@ -23,22 +23,28 @@ class TestFunction: def test_edges(self): assert_edges_equal(self.G.edges(), list(nx.edges(self.G))) assert sorted(self.DG.edges()) == sorted(nx.edges(self.DG)) - assert_edges_equal(self.G.edges(nbunch=[0, 1, 3]), - list(nx.edges(self.G, nbunch=[0, 1, 3]))) - assert (sorted(self.DG.edges(nbunch=[0, 1, 3])) == - sorted(nx.edges(self.DG, nbunch=[0, 1, 3]))) + assert_edges_equal( + self.G.edges(nbunch=[0, 1, 3]), list(nx.edges(self.G, nbunch=[0, 1, 3])) + ) + assert sorted(self.DG.edges(nbunch=[0, 1, 3])) == sorted( + nx.edges(self.DG, nbunch=[0, 1, 3]) + ) def test_degree(self): assert_edges_equal(self.G.degree(), list(nx.degree(self.G))) assert sorted(self.DG.degree()) == sorted(nx.degree(self.DG)) - assert_edges_equal(self.G.degree(nbunch=[0, 1]), - list(nx.degree(self.G, nbunch=[0, 1]))) - assert (sorted(self.DG.degree(nbunch=[0, 1])) == - sorted(nx.degree(self.DG, nbunch=[0, 1]))) - assert_edges_equal(self.G.degree(weight='weight'), - list(nx.degree(self.G, weight='weight'))) - assert (sorted(self.DG.degree(weight='weight')) == - sorted(nx.degree(self.DG, weight='weight'))) + assert_edges_equal( + self.G.degree(nbunch=[0, 1]), list(nx.degree(self.G, nbunch=[0, 1])) + ) + assert sorted(self.DG.degree(nbunch=[0, 1])) == sorted( + nx.degree(self.DG, nbunch=[0, 1]) + ) + assert_edges_equal( + self.G.degree(weight="weight"), list(nx.degree(self.G, weight="weight")) + ) + assert sorted(self.DG.degree(weight="weight")) == sorted( + nx.degree(self.DG, weight="weight") + ) def test_neighbors(self): assert list(self.G.neighbors(1)) == list(nx.neighbors(self.G, 1)) @@ -64,10 +70,14 @@ class TestFunction: G = self.G.copy() 
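# Illustrative sketch, separate from the patch: nx.add_star (used just
# below) treats the first node as the hub and copies keyword attributes
# onto every spoke edge. "G_demo" is a throwaway graph for this aside.
import networkx as nx

G_demo = nx.Graph()
nx.add_star(G_demo, [12, 13, 14, 15], weight=2.0)
assert sorted(G_demo.edges(data="weight")) == [
    (12, 13, 2.0),
    (12, 14, 2.0),
    (12, 15, 2.0),
]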
nx.add_star(G, nlist, weight=2.0) - assert_edges_equal(G.edges(nlist, data=True), - [(12, 13, {'weight': 2.}), - (12, 14, {'weight': 2.}), - (12, 15, {'weight': 2.})]) + assert_edges_equal( + G.edges(nlist, data=True), + [ + (12, 13, {"weight": 2.0}), + (12, 14, {"weight": 2.0}), + (12, 15, {"weight": 2.0}), + ], + ) G = self.G.copy() nlist = [12] @@ -87,10 +97,14 @@ class TestFunction: assert_edges_equal(G.edges(nlist), [(12, 13), (13, 14), (14, 15)]) G = self.G.copy() nx.add_path(G, nlist, weight=2.0) - assert_edges_equal(G.edges(nlist, data=True), - [(12, 13, {'weight': 2.}), - (13, 14, {'weight': 2.}), - (14, 15, {'weight': 2.})]) + assert_edges_equal( + G.edges(nlist, data=True), + [ + (12, 13, {"weight": 2.0}), + (13, 14, {"weight": 2.0}), + (14, 15, {"weight": 2.0}), + ], + ) G = self.G.copy() nlist = [None] @@ -131,19 +145,27 @@ class TestFunction: def test_add_cycle(self): G = self.G.copy() nlist = [12, 13, 14, 15] - oklists = [[(12, 13), (12, 15), (13, 14), (14, 15)], - [(12, 13), (13, 14), (14, 15), (15, 12)]] + oklists = [ + [(12, 13), (12, 15), (13, 14), (14, 15)], + [(12, 13), (13, 14), (14, 15), (15, 12)], + ] nx.add_cycle(G, nlist) assert sorted(G.edges(nlist)) in oklists G = self.G.copy() - oklists = [[(12, 13, {'weight': 1.}), - (12, 15, {'weight': 1.}), - (13, 14, {'weight': 1.}), - (14, 15, {'weight': 1.})], - [(12, 13, {'weight': 1.}), - (13, 14, {'weight': 1.}), - (14, 15, {'weight': 1.}), - (15, 12, {'weight': 1.})]] + oklists = [ + [ + (12, 13, {"weight": 1.0}), + (12, 15, {"weight": 1.0}), + (13, 14, {"weight": 1.0}), + (14, 15, {"weight": 1.0}), + ], + [ + (12, 13, {"weight": 1.0}), + (13, 14, {"weight": 1.0}), + (14, 15, {"weight": 1.0}), + (15, 12, {"weight": 1.0}), + ], + ] nx.add_cycle(G, nlist, weight=1.0) assert sorted(G.edges(nlist, data=True)) in oklists @@ -159,24 +181,34 @@ class TestFunction: assert_edges_equal(G.edges, self.G.edges) def test_subgraph(self): - assert (self.G.subgraph([0, 1, 2, 4]).adj == - nx.subgraph(self.G, [0, 1, 2, 4]).adj) - assert (self.DG.subgraph([0, 1, 2, 4]).adj == - nx.subgraph(self.DG, [0, 1, 2, 4]).adj) - assert (self.G.subgraph([0, 1, 2, 4]).adj == - nx.induced_subgraph(self.G, [0, 1, 2, 4]).adj) - assert (self.DG.subgraph([0, 1, 2, 4]).adj == - nx.induced_subgraph(self.DG, [0, 1, 2, 4]).adj) + assert ( + self.G.subgraph([0, 1, 2, 4]).adj == nx.subgraph(self.G, [0, 1, 2, 4]).adj + ) + assert ( + self.DG.subgraph([0, 1, 2, 4]).adj == nx.subgraph(self.DG, [0, 1, 2, 4]).adj + ) + assert ( + self.G.subgraph([0, 1, 2, 4]).adj + == nx.induced_subgraph(self.G, [0, 1, 2, 4]).adj + ) + assert ( + self.DG.subgraph([0, 1, 2, 4]).adj + == nx.induced_subgraph(self.DG, [0, 1, 2, 4]).adj + ) # subgraph-subgraph chain is allowed in function interface H = nx.induced_subgraph(self.G.subgraph([0, 1, 2, 4]), [0, 1, 4]) assert H._graph is not self.G assert H.adj == self.G.subgraph([0, 1, 4]).adj def test_edge_subgraph(self): - assert (self.G.edge_subgraph([(1, 2), (0, 3)]).adj == - nx.edge_subgraph(self.G, [(1, 2), (0, 3)]).adj) - assert (self.DG.edge_subgraph([(1, 2), (0, 3)]).adj == - nx.edge_subgraph(self.DG, [(1, 2), (0, 3)]).adj) + assert ( + self.G.edge_subgraph([(1, 2), (0, 3)]).adj + == nx.edge_subgraph(self.G, [(1, 2), (0, 3)]).adj + ) + assert ( + self.DG.edge_subgraph([(1, 2), (0, 3)]).adj + == nx.edge_subgraph(self.DG, [(1, 2), (0, 3)]).adj + ) def test_restricted_view(self): H = nx.restricted_view(self.G, [0, 2, 5], [(1, 2), (3, 4)]) @@ -235,41 +267,47 @@ class TestFunction: G = nx.path_graph(5) G.name = "path_graph(5)" info = 
        nx.info(G)
-        expected_graph_info = '\n'.join(['Name: path_graph(5)',
-                                         'Type: Graph',
-                                         'Number of nodes: 5',
-                                         'Number of edges: 4',
-                                         'Average degree: 1.6000'])
+        expected_graph_info = "\n".join(
+            [
+                "Name: path_graph(5)",
+                "Type: Graph",
+                "Number of nodes: 5",
+                "Number of edges: 4",
+                "Average degree: 1.6000",
+            ]
+        )
        assert info == expected_graph_info

        info = nx.info(G, n=1)
        assert type(info) == str
-        expected_node_info = '\n'.join(
-            ['Node 1 has the following properties:',
-             'Degree: 2',
-             'Neighbors: 0 2'])
+        expected_node_info = "\n".join(
+            ["Node 1 has the following properties:", "Degree: 2", "Neighbors: 0 2"]
+        )
        assert info == expected_node_info
        # must raise an error for a non-existent node
        pytest.raises(nx.NetworkXError, nx.info, G, 1248)

    def test_info_digraph(self):
-        G = nx.DiGraph(name='path_graph(5)')
+        G = nx.DiGraph(name="path_graph(5)")
        nx.add_path(G, [0, 1, 2, 3, 4])
        info = nx.info(G)
-        expected_graph_info = '\n'.join(['Name: path_graph(5)',
-                                         'Type: DiGraph',
-                                         'Number of nodes: 5',
-                                         'Number of edges: 4',
-                                         'Average in degree: 0.8000',
-                                         'Average out degree: 0.8000'])
+        expected_graph_info = "\n".join(
+            [
+                "Name: path_graph(5)",
+                "Type: DiGraph",
+                "Number of nodes: 5",
+                "Number of edges: 4",
+                "Average in degree: 0.8000",
+                "Average out degree: 0.8000",
+            ]
+        )
        assert info == expected_graph_info

        info = nx.info(G, n=1)
-        expected_node_info = '\n'.join(
-            ['Node 1 has the following properties:',
-             'Degree: 2',
-             'Neighbors: 2'])
+        expected_node_info = "\n".join(
+            ["Node 1 has the following properties:", "Degree: 2", "Neighbors: 2"]
+        )
        assert info == expected_node_info

        pytest.raises(nx.NetworkXError, nx.info, G, n=-1)
@@ -362,15 +400,22 @@ class TestFunction:
        assert nx.is_weighted(G, (3, 4))

        G = nx.DiGraph()
-        G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
-                                   ('1', '0', -5), ('0', '2', 2),
-                                   ('1', '2', 4), ('2', '3', 1)])
+        G.add_weighted_edges_from(
+            [
+                ("0", "3", 3),
+                ("0", "1", -5),
+                ("1", "0", -5),
+                ("0", "2", 2),
+                ("1", "2", 4),
+                ("2", "3", 1),
+            ]
+        )
        assert nx.is_weighted(G)
-        assert nx.is_weighted(G, ('1', '0'))
+        assert nx.is_weighted(G, ("1", "0"))

        G = G.to_undirected()
        assert nx.is_weighted(G)
-        assert nx.is_weighted(G, ('1', '0'))
+        assert nx.is_weighted(G, ("1", "0"))

        pytest.raises(nx.NetworkXError, nx.is_weighted, G, (1, 2))
@@ -386,26 +431,33 @@ class TestFunction:
        assert not nx.is_negatively_weighted(G, (1, 2))

        G.add_edges_from([(1, 3), (2, 4), (2, 6)])
-        G[1][3]['color'] = 'blue'
+        G[1][3]["color"] = "blue"
        assert not nx.is_negatively_weighted(G)
        assert not nx.is_negatively_weighted(G, (1, 3))

-        G[2][4]['weight'] = -2
+        G[2][4]["weight"] = -2
        assert nx.is_negatively_weighted(G, (2, 4))
        assert nx.is_negatively_weighted(G)

        G = nx.DiGraph()
-        G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
-                                   ('1', '0', -2), ('0', '2', 2),
-                                   ('1', '2', -3), ('2', '3', 1)])
+        G.add_weighted_edges_from(
+            [
+                ("0", "3", 3),
+                ("0", "1", -5),
+                ("1", "0", -2),
+                ("0", "2", 2),
+                ("1", "2", -3),
+                ("2", "3", 1),
+            ]
+        )
        assert nx.is_negatively_weighted(G)
-        assert not nx.is_negatively_weighted(G, ('0', '3'))
-        assert nx.is_negatively_weighted(G, ('1', '0'))
+        assert not nx.is_negatively_weighted(G, ("0", "3"))
+        assert nx.is_negatively_weighted(G, ("1", "0"))

        pytest.raises(nx.NetworkXError, nx.is_negatively_weighted, G, (1, 4))


-class TestCommonNeighbors():
+class TestCommonNeighbors:
    @classmethod
    def setup_class(cls):
        cls.func = staticmethod(nx.common_neighbors)
@@ -413,6 +465,7 @@ class TestCommonNeighbors():
        def test_func(G, u, v, expected):
            result = sorted(cls.func(G, u, v))
            assert result == expected
+
        cls.test = staticmethod(test_func)

    def test_K5(self):
@@ -457,7 +510,7 @@ def test_set_node_attributes():
        # Test single value
        G = nx.path_graph(3, create_using=G)
        vals = 100
-        attr = 'hello'
+        attr = "hello"
        nx.set_node_attributes(G, vals, attr)
        assert G.nodes[0][attr] == vals
        assert G.nodes[1][attr] == vals
@@ -466,7 +519,7 @@
        # Test dictionary
        G = nx.path_graph(3, create_using=G)
        vals = dict(zip(sorted(G.nodes()), range(len(G))))
-        attr = 'hi'
+        attr = "hi"
        nx.set_node_attributes(G, vals, attr)
        assert G.nodes[0][attr] == 0
        assert G.nodes[1][attr] == 1
@@ -474,7 +527,7 @@
        # Test dictionary of dictionaries
        G = nx.path_graph(3, create_using=G)
-        d = {'hi': 0, 'hello': 200}
+        d = {"hi": 0, "hello": 200}
        vals = dict.fromkeys(G.nodes(), d)
        vals.pop(0)
        nx.set_node_attributes(G, vals)
@@ -488,7 +541,7 @@ def test_set_edge_attributes():
    for G in graphs:
        # Test single value
        G = nx.path_graph(3, create_using=G)
-        attr = 'hello'
+        attr = "hello"
        vals = 3
        nx.set_edge_attributes(G, vals, attr)
        assert G[0][1][attr] == vals
@@ -496,7 +549,7 @@
        # Test multiple values
        G = nx.path_graph(3, create_using=G)
-        attr = 'hi'
+        attr = "hi"
        edges = [(0, 1), (1, 2)]
        vals = dict(zip(edges, range(len(edges))))
        nx.set_edge_attributes(G, vals, attr)
@@ -505,12 +558,12 @@
        # Test dictionary of dictionaries
        G = nx.path_graph(3, create_using=G)
-        d = {'hi': 0, 'hello': 200}
+        d = {"hi": 0, "hello": 200}
        edges = [(0, 1)]
        vals = dict.fromkeys(edges, d)
        nx.set_edge_attributes(G, vals)
-        assert G[0][1]['hi'] == 0
-        assert G[0][1]['hello'] == 200
+        assert G[0][1]["hi"] == 0
+        assert G[0][1]["hello"] == 200
        assert G[1][2] == {}
@@ -519,7 +572,7 @@ def test_set_edge_attributes_multi():
    for G in graphs:
        # Test single value
        G = nx.path_graph(3, create_using=G)
-        attr = 'hello'
+        attr = "hello"
        vals = 3
        nx.set_edge_attributes(G, vals, attr)
        assert G[0][1][0][attr] == vals
@@ -527,7 +580,7 @@
        # Test multiple values
        G = nx.path_graph(3, create_using=G)
-        attr = 'hi'
+        attr = "hi"
        edges = [(0, 1, 0), (1, 2, 0)]
        vals = dict(zip(edges, range(len(edges))))
        nx.set_edge_attributes(G, vals, attr)
@@ -536,12 +589,12 @@
        # Test dictionary of dictionaries
        G = nx.path_graph(3, create_using=G)
-        d = {'hi': 0, 'hello': 200}
+        d = {"hi": 0, "hello": 200}
        edges = [(0, 1, 0)]
        vals = dict.fromkeys(edges, d)
        nx.set_edge_attributes(G, vals)
-        assert G[0][1][0]['hi'] == 0
-        assert G[0][1][0]['hello'] == 200
+        assert G[0][1][0]["hi"] == 0
+        assert G[0][1][0]["hello"] == 200
        assert G[1][2][0] == {}
@@ -549,7 +602,7 @@ def test_get_node_attributes():
    graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]
    for G in graphs:
        G = nx.path_graph(3, create_using=G)
-        attr = 'hello'
+        attr = "hello"
        vals = 100
        nx.set_node_attributes(G, vals, attr)
        attrs = nx.get_node_attributes(G, attr)
@@ -562,7 +615,7 @@ def test_get_edge_attributes():
    graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]
    for G in graphs:
        G = nx.path_graph(3, create_using=G)
-        attr = 'hello'
+        attr = "hello"
        vals = 100
        nx.set_edge_attributes(G, vals, attr)
        attrs = nx.get_edge_attributes(G, attr)
@@ -605,7 +658,9 @@ def test_selfloops():
        assert nx.number_of_selfloops(G) == 1
        # test selfloop attr
        G.add_edge(1, 1, weight=2)
-        assert_edges_equal(nx.selfloop_edges(G, data=True),
-                           [(0, 0, {}), (1, 1, {'weight': 2})])
-        assert_edges_equal(nx.selfloop_edges(G, data='weight'),
-                           [(0, 0, None), (1, 1, 2)])
+        assert_edges_equal(
+            nx.selfloop_edges(G, data=True), [(0, 0, {}), (1, 1, {"weight": 2})]
+        )
+        assert_edges_equal(
+            nx.selfloop_edges(G, data="weight"), [(0, 0, None), (1, 1, 2)]
+        )
diff --git a/networkx/classes/tests/test_graph.py b/networkx/classes/tests/test_graph.py
index bf5c87f9..03902b09 100644
--- a/networkx/classes/tests/test_graph.py
+++ b/networkx/classes/tests/test_graph.py
@@ -5,7 +5,7 @@ import networkx as nx
 from networkx.testing.utils import (
     assert_graphs_equal,
     assert_edges_equal,
-    assert_nodes_equal
+    assert_nodes_equal,
 )

 import pytest
@@ -16,11 +16,11 @@ class BaseGraphTester:
    def test_contains(self):
        G = self.K3
-        assert(1 in G)
-        assert(4 not in G)
-        assert('b' not in G)
-        assert([] not in G)  # no exception for nonhashable
-        assert({1: 1} not in G)  # no exception for nonhashable
+        assert 1 in G
+        assert 4 not in G
+        assert "b" not in G
+        assert [] not in G  # no exception for nonhashable
+        assert {1: 1} not in G  # no exception for nonhashable

    def test_order(self):
        G = self.K3
@@ -35,10 +35,10 @@ class BaseGraphTester:
    def test_has_node(self):
        G = self.K3
-        assert(G.has_node(1))
-        assert(not G.has_node(4))
-        assert(not G.has_node([]))  # no exception for nonhashable
-        assert(not G.has_node({1: 1}))  # no exception for nonhashable
+        assert G.has_node(1)
+        assert not G.has_node(4)
+        assert not G.has_node([])  # no exception for nonhashable
+        assert not G.has_node({1: 1})  # no exception for nonhashable

    def test_has_edge(self):
        G = self.K3
@@ -124,7 +124,7 @@ class BaseGraphTester:
        # For more information, see pull request #1813.
        G = self.Graph()
-        nbunch = [('x', set())]
+        nbunch = [("x", set())]
        with pytest.raises(nx.NetworkXError):
            list(G.nbunch_iter(nbunch))
@@ -135,7 +135,7 @@ class BaseGraphTester:
        assert dict(G.degree()) == {1: 2}
        assert G.degree(1) == 2
        assert sorted(G.degree([1])) == [(1, 2)]
-        assert G.degree(1, weight='weight') == 2
+        assert G.degree(1, weight="weight") == 2

    def test_selfloops(self):
        G = self.K3.copy()
@@ -160,29 +160,28 @@ class BaseAttrGraphTester(BaseGraphTester):
        G = self.Graph()
        G.add_edge(1, 2, weight=2, other=3)
        G.add_edge(2, 3, weight=3, other=4)
-        assert (sorted(d for n, d in G.degree(weight='weight')) ==
-                [2, 3, 5])
-        assert dict(G.degree(weight='weight')) == {1: 2, 2: 5, 3: 3}
-        assert G.degree(1, weight='weight') == 2
-        assert_nodes_equal((G.degree([1], weight='weight')), [(1, 2)])
+        assert sorted(d for n, d in G.degree(weight="weight")) == [2, 3, 5]
+        assert dict(G.degree(weight="weight")) == {1: 2, 2: 5, 3: 3}
+        assert G.degree(1, weight="weight") == 2
+        assert_nodes_equal((G.degree([1], weight="weight")), [(1, 2)])

-        assert_nodes_equal((d for n, d in G.degree(weight='other')), [3, 7, 4])
-        assert dict(G.degree(weight='other')) == {1: 3, 2: 7, 3: 4}
-        assert G.degree(1, weight='other') == 3
-        assert_edges_equal((G.degree([1], weight='other')), [(1, 3)])
+        assert_nodes_equal((d for n, d in G.degree(weight="other")), [3, 7, 4])
+        assert dict(G.degree(weight="other")) == {1: 3, 2: 7, 3: 4}
+        assert G.degree(1, weight="other") == 3
+        assert_edges_equal((G.degree([1], weight="other")), [(1, 3)])

    def add_attributes(self, G):
-        G.graph['foo'] = []
-        G.nodes[0]['foo'] = []
+        G.graph["foo"] = []
+        G.nodes[0]["foo"] = []
        G.remove_edge(1, 2)
        ll = []
        G.add_edge(1, 2, foo=ll)
        G.add_edge(2, 1, foo=ll)

    def test_name(self):
-        G = self.Graph(name='')
+        G = self.Graph(name="")
        assert G.name == ""
-        G = self.Graph(name='test')
+        G = self.Graph(name="test")
        assert G.__str__() == "test"
        assert G.name == "test"
@@ -244,19 +243,19 @@ class BaseAttrGraphTester(BaseGraphTester):
        self.deepcopy_edge_attr(H, G)

    def deepcopy_graph_attr(self, H, G):
-        assert G.graph['foo'] == H.graph['foo']
-        G.graph['foo'].append(1)
-        assert G.graph['foo'] != H.graph['foo']
+        assert G.graph["foo"] == H.graph["foo"]
+        G.graph["foo"].append(1)
+        assert G.graph["foo"] != H.graph["foo"]

    def deepcopy_node_attr(self, H, G):
-        assert G.nodes[0]['foo'] == H.nodes[0]['foo']
-        G.nodes[0]['foo'].append(1)
-        assert G.nodes[0]['foo'] != H.nodes[0]['foo']
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
+        G.nodes[0]["foo"].append(1)
+        assert G.nodes[0]["foo"] != H.nodes[0]["foo"]

    def deepcopy_edge_attr(self, H, G):
-        assert G[1][2]['foo'] == H[1][2]['foo']
-        G[1][2]['foo'].append(1)
-        assert G[1][2]['foo'] != H[1][2]['foo']
+        assert G[1][2]["foo"] == H[1][2]["foo"]
+        G[1][2]["foo"].append(1)
+        assert G[1][2]["foo"] != H[1][2]["foo"]

    def is_shallow_copy(self, H, G):
        self.graphs_equal(H, G)
@@ -268,44 +267,44 @@ class BaseAttrGraphTester(BaseGraphTester):
        self.shallow_copy_edge_attr(H, G)

    def shallow_copy_graph_attr(self, H, G):
-        assert G.graph['foo'] == H.graph['foo']
-        G.graph['foo'].append(1)
-        assert G.graph['foo'] == H.graph['foo']
+        assert G.graph["foo"] == H.graph["foo"]
+        G.graph["foo"].append(1)
+        assert G.graph["foo"] == H.graph["foo"]

    def shallow_copy_node_attr(self, H, G):
-        assert G.nodes[0]['foo'] == H.nodes[0]['foo']
-        G.nodes[0]['foo'].append(1)
-        assert G.nodes[0]['foo'] == H.nodes[0]['foo']
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
+        G.nodes[0]["foo"].append(1)
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]

    def shallow_copy_edge_attr(self, H, G):
-        assert G[1][2]['foo'] == H[1][2]['foo']
-        G[1][2]['foo'].append(1)
-        assert G[1][2]['foo'] == H[1][2]['foo']
+        assert G[1][2]["foo"] == H[1][2]["foo"]
+        G[1][2]["foo"].append(1)
+        assert G[1][2]["foo"] == H[1][2]["foo"]

    def same_attrdict(self, H, G):
-        old_foo = H[1][2]['foo']
-        H.adj[1][2]['foo'] = 'baz'
+        old_foo = H[1][2]["foo"]
+        H.adj[1][2]["foo"] = "baz"
        assert G.edges == H.edges
-        H.adj[1][2]['foo'] = old_foo
+        H.adj[1][2]["foo"] = old_foo
        assert G.edges == H.edges

-        old_foo = H.nodes[0]['foo']
-        H.nodes[0]['foo'] = 'baz'
+        old_foo = H.nodes[0]["foo"]
+        H.nodes[0]["foo"] = "baz"
        assert G.nodes == H.nodes
-        H.nodes[0]['foo'] = old_foo
+        H.nodes[0]["foo"] = old_foo
        assert G.nodes == H.nodes

    def different_attrdict(self, H, G):
-        old_foo = H[1][2]['foo']
-        H.adj[1][2]['foo'] = 'baz'
+        old_foo = H[1][2]["foo"]
+        H.adj[1][2]["foo"] = "baz"
        assert G._adj != H._adj
-        H.adj[1][2]['foo'] = old_foo
+        H.adj[1][2]["foo"] = old_foo
        assert G._adj == H._adj

-        old_foo = H.nodes[0]['foo']
-        H.nodes[0]['foo'] = 'baz'
+        old_foo = H.nodes[0]["foo"]
+        H.nodes[0]["foo"] = "baz"
        assert G._node != H._node
-        H.nodes[0]['foo'] = old_foo
+        H.nodes[0]["foo"] = old_foo
        assert G._node == H._node

    def graphs_equal(self, H, G):
@@ -330,86 +329,98 @@ class BaseAttrGraphTester(BaseGraphTester):
    def test_graph_attr(self):
        G = self.K3.copy()
-        G.graph['foo'] = 'bar'
-        assert G.graph['foo'] == 'bar'
-        del G.graph['foo']
+        G.graph["foo"] = "bar"
+        assert G.graph["foo"] == "bar"
+        del G.graph["foo"]
        assert G.graph == {}
-        H = self.Graph(foo='bar')
-        assert H.graph['foo'] == 'bar'
+        H = self.Graph(foo="bar")
+        assert H.graph["foo"] == "bar"

    def test_node_attr(self):
        G = self.K3.copy()
-        G.add_node(1, foo='bar')
+        G.add_node(1, foo="bar")
        assert_nodes_equal(G.nodes(), [0, 1, 2])
-        assert_nodes_equal(G.nodes(data=True),
-                           [(0, {}), (1, {'foo': 'bar'}), (2, {})])
-        G.nodes[1]['foo'] = 'baz'
-        assert_nodes_equal(G.nodes(data=True),
-                           [(0, {}), (1, {'foo': 'baz'}), (2, {})])
-        assert_nodes_equal(G.nodes(data='foo'),
-                           [(0, None), (1, 'baz'), (2, None)])
-        assert_nodes_equal(G.nodes(data='foo', default='bar'),
-                           [(0, 'bar'), (1, 'baz'), (2, 'bar')])
+        assert_nodes_equal(G.nodes(data=True), [(0, {}), (1, {"foo": "bar"}), (2, {})])
+        G.nodes[1]["foo"] = "baz"
+        assert_nodes_equal(G.nodes(data=True), [(0, {}), (1, {"foo": "baz"}), (2, {})])
+        assert_nodes_equal(G.nodes(data="foo"), [(0, None), (1, "baz"), (2, None)])
+        assert_nodes_equal(
+            G.nodes(data="foo", default="bar"), [(0, "bar"), (1, "baz"), (2, "bar")]
+        )

    def test_node_attr2(self):
        G = self.K3.copy()
-        a = {'foo': 'bar'}
+        a = {"foo": "bar"}
        G.add_node(3, **a)
        assert_nodes_equal(G.nodes(), [0, 1, 2, 3])
-        assert_nodes_equal(G.nodes(data=True),
-                           [(0, {}), (1, {}), (2, {}), (3, {'foo': 'bar'})])
+        assert_nodes_equal(
+            G.nodes(data=True), [(0, {}), (1, {}), (2, {}), (3, {"foo": "bar"})]
+        )

    def test_edge_lookup(self):
        G = self.Graph()
-        G.add_edge(1, 2, foo='bar')
-        assert_edges_equal(G.edges[1, 2], {'foo': 'bar'})
+        G.add_edge(1, 2, foo="bar")
+        assert_edges_equal(G.edges[1, 2], {"foo": "bar"})

    def test_edge_attr(self):
        G = self.Graph()
-        G.add_edge(1, 2, foo='bar')
-        assert_edges_equal(G.edges(data=True), [(1, 2, {'foo': 'bar'})])
-        assert_edges_equal(G.edges(data='foo'), [(1, 2, 'bar')])
+        G.add_edge(1, 2, foo="bar")
+        assert_edges_equal(G.edges(data=True), [(1, 2, {"foo": "bar"})])
+        assert_edges_equal(G.edges(data="foo"), [(1, 2, "bar")])

    def test_edge_attr2(self):
        G = self.Graph()
-        G.add_edges_from([(1, 2), (3, 4)], foo='foo')
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'foo': 'foo'}), (3, 4, {'foo': 'foo'})])
-        assert_edges_equal(G.edges(data='foo'),
-                           [(1, 2, 'foo'), (3, 4, 'foo')])
+        G.add_edges_from([(1, 2), (3, 4)], foo="foo")
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"foo": "foo"}), (3, 4, {"foo": "foo"})]
+        )
+        assert_edges_equal(G.edges(data="foo"), [(1, 2, "foo"), (3, 4, "foo")])

    def test_edge_attr3(self):
        G = self.Graph()
-        G.add_edges_from([(1, 2, {'weight': 32}),
-                          (3, 4, {'weight': 64})], foo='foo')
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'foo': 'foo', 'weight': 32}),
-                            (3, 4, {'foo': 'foo', 'weight': 64})])
+        G.add_edges_from([(1, 2, {"weight": 32}), (3, 4, {"weight": 64})], foo="foo")
+        assert_edges_equal(
+            G.edges(data=True),
+            [
+                (1, 2, {"foo": "foo", "weight": 32}),
+                (3, 4, {"foo": "foo", "weight": 64}),
+            ],
+        )

        G.remove_edges_from([(1, 2), (3, 4)])
-        G.add_edge(1, 2, data=7, spam='bar', bar='foo')
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 7, 'spam': 'bar', 'bar': 'foo'})])
+        G.add_edge(1, 2, data=7, spam="bar", bar="foo")
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})]
+        )

    def test_edge_attr4(self):
        G = self.Graph()
-        G.add_edge(1, 2, data=7, spam='bar', bar='foo')
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 7, 'spam': 'bar', 'bar': 'foo'})])
-        G[1][2]['data'] = 10  # OK to set data like this
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 10, 'spam': 'bar', 'bar': 'foo'})])
-
-        G.adj[1][2]['data'] = 20
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 20, 'spam': 'bar', 'bar': 'foo'})])
-        G.edges[1, 2]['data'] = 21  # another spelling, "edge"
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 21, 'spam': 'bar', 'bar': 'foo'})])
-        G.adj[1][2]['listdata'] = [20, 200]
-        G.adj[1][2]['weight'] = 20
-        dd = {'data': 21, 'spam': 'bar', 'bar': 'foo',
-              'listdata': [20, 200], 'weight': 20}
+        G.add_edge(1, 2, data=7, spam="bar", bar="foo")
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})]
+        )
+        G[1][2]["data"] = 10  # OK to set data like this
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 10, "spam": "bar", "bar": "foo"})]
+        )
+
+        G.adj[1][2]["data"] = 20
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 20, "spam": "bar", "bar": "foo"})]
+        )
+        G.edges[1, 2]["data"] = 21  # another spelling, "edge"
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 21, "spam": "bar", "bar": "foo"})]
+        )
+        G.adj[1][2]["listdata"] = [20, 200]
+        G.adj[1][2]["weight"] = 20
+        dd = {
+            "data": 21,
+            "spam": "bar",
+            "bar": "foo",
+            "listdata": [20, 200],
+            "weight": 20,
+        }
        assert_edges_equal(G.edges(data=True), [(1, 2, dd)])

    def test_to_undirected(self):
@@ -448,10 +459,12 @@ class BaseAttrGraphTester(BaseGraphTester):
        G = self.K3.copy()
        G.add_edge(0, 0)
        G.add_edge(1, 1, weight=2)
-        assert_edges_equal(nx.selfloop_edges(G, data=True),
-                           [(0, 0, {}), (1, 1, {'weight': 2})])
-        assert_edges_equal(nx.selfloop_edges(G, data='weight'),
-                           [(0, 0, None), (1, 1, 2)])
+        assert_edges_equal(
+            nx.selfloop_edges(G, data=True), [(0, 0, {}), (1, 1, {"weight": 2})]
+        )
+        assert_edges_equal(
+            nx.selfloop_edges(G, data="weight"), [(0, 0, None), (1, 1, 2)]
+        )


 class TestGraph(BaseAttrGraphTester):
@@ -461,9 +474,7 @@ class TestGraph(BaseAttrGraphTester):
        self.Graph = nx.Graph
        # build dict-of-dict-of-dict K3
        ed1, ed2, ed3 = ({}, {}, {})
-        self.k3adj = {0: {1: ed1, 2: ed2},
-                      1: {0: ed1, 2: ed3},
-                      2: {0: ed2, 1: ed3}}
+        self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}}
        self.k3edges = [(0, 1), (0, 2), (1, 2)]
        self.k3nodes = [0, 1, 2]
        self.K3 = self.Graph()
@@ -490,63 +501,66 @@ class TestGraph(BaseAttrGraphTester):
    def test_adjacency(self):
        G = self.K3
-        assert (dict(G.adjacency()) ==
-                {0: {1: {}, 2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}})
+        assert dict(G.adjacency()) == {
+            0: {1: {}, 2: {}},
+            1: {0: {}, 2: {}},
+            2: {0: {}, 1: {}},
+        }

    def test_getitem(self):
        G = self.K3
        assert G[0] == {1: {}, 2: {}}
        with pytest.raises(KeyError):
-            G.__getitem__('j')
+            G.__getitem__("j")
        with pytest.raises(TypeError):
-            G.__getitem__(['A'])
+            G.__getitem__(["A"])

    def test_add_node(self):
        G = self.Graph()
        G.add_node(0)
        assert G.adj == {0: {}}
        # test add attributes
-        G.add_node(1, c='red')
-        G.add_node(2, c='blue')
-        G.add_node(3, c='red')
-        assert G.nodes[1]['c'] == 'red'
-        assert G.nodes[2]['c'] == 'blue'
-        assert G.nodes[3]['c'] == 'red'
+        G.add_node(1, c="red")
+        G.add_node(2, c="blue")
+        G.add_node(3, c="red")
+        assert G.nodes[1]["c"] == "red"
+        assert G.nodes[2]["c"] == "blue"
+        assert G.nodes[3]["c"] == "red"
        # test updating attributes
-        G.add_node(1, c='blue')
-        G.add_node(2, c='red')
-        G.add_node(3, c='blue')
-        assert G.nodes[1]['c'] == 'blue'
-        assert G.nodes[2]['c'] == 'red'
-        assert G.nodes[3]['c'] == 'blue'
+        G.add_node(1, c="blue")
+        G.add_node(2, c="red")
+        G.add_node(3, c="blue")
+        assert G.nodes[1]["c"] == "blue"
+        assert G.nodes[2]["c"] == "red"
+        assert G.nodes[3]["c"] == "blue"

    def test_add_nodes_from(self):
        G = self.Graph()
        G.add_nodes_from([0, 1, 2])
        assert G.adj == {0: {}, 1: {}, 2: {}}
        # test add attributes
-        G.add_nodes_from([0, 1, 2], c='red')
-        assert G.nodes[0]['c'] == 'red'
-        assert G.nodes[2]['c'] == 'red'
+        G.add_nodes_from([0, 1, 2], c="red")
+        assert G.nodes[0]["c"] == "red"
+        assert G.nodes[2]["c"] == "red"
        # test that attribute dicts are not the same
-        assert(G.nodes[0] is not G.nodes[1])
+        assert G.nodes[0] is not G.nodes[1]
        # test updating attributes
-        G.add_nodes_from([0, 1, 2], c='blue')
-        assert G.nodes[0]['c'] == 'blue'
-        assert G.nodes[2]['c'] == 'blue'
-        assert(G.nodes[0] is not G.nodes[1])
+        G.add_nodes_from([0, 1, 2], c="blue")
+        assert G.nodes[0]["c"] == "blue"
+        assert G.nodes[2]["c"] == "blue"
+        assert G.nodes[0] is not G.nodes[1]
        # test tuple input
        H = self.Graph()
        H.add_nodes_from(G.nodes(data=True))
-        assert H.nodes[0]['c'] == 'blue'
-        assert H.nodes[2]['c'] == 'blue'
-        assert(H.nodes[0] is not H.nodes[1])
+        assert H.nodes[0]["c"] == "blue"
+        assert H.nodes[2]["c"] == "blue"
+        assert H.nodes[0] is not H.nodes[1]
        # specific overrides general
-        H.add_nodes_from([0, (1, {'c': 'green'}), (3, {'c': 'cyan'})], c='red')
-        assert H.nodes[0]['c'] == 'red'
-        assert H.nodes[1]['c'] == 'green'
-        assert H.nodes[2]['c'] == 'blue'
-        assert H.nodes[3]['c'] == 'cyan'
+        H.add_nodes_from([0, (1, {"c": "green"}), (3, {"c": "cyan"})], c="red")
+        assert H.nodes[0]["c"] == "red"
+        assert H.nodes[1]["c"] == "green"
+        assert H.nodes[2]["c"] == "blue"
+        assert H.nodes[3]["c"] == "cyan"

    def test_remove_node(self):
        G = self.K3.copy()
@@ -556,6 +570,7 @@ class TestGraph(BaseAttrGraphTester):
        G.remove_node(-1)

    # generator here to implement list,set,string...
+
    def test_remove_nodes_from(self):
        G = self.K3.copy()
        G.remove_nodes_from([0, 1])
@@ -572,16 +587,18 @@ class TestGraph(BaseAttrGraphTester):
    def test_add_edges_from(self):
        G = self.Graph()
-        G.add_edges_from([(0, 1), (0, 2, {'weight': 3})])
-        assert G.adj == {0: {1: {}, 2: {'weight': 3}}, 1: {0: {}},
-                         2: {0: {'weight': 3}}}
+        G.add_edges_from([(0, 1), (0, 2, {"weight": 3})])
+        assert G.adj == {
+            0: {1: {}, 2: {"weight": 3}},
+            1: {0: {}},
+            2: {0: {"weight": 3}},
+        }
        G = self.Graph()
-        G.add_edges_from([(0, 1), (0, 2, {'weight': 3}),
-                          (1, 2, {'data': 4})], data=2)
+        G.add_edges_from([(0, 1), (0, 2, {"weight": 3}), (1, 2, {"data": 4})], data=2)
        assert G.adj == {
-            0: {1: {'data': 2}, 2: {'weight': 3, 'data': 2}},
-            1: {0: {'data': 2}, 2: {'data': 4}},
-            2: {0: {'weight': 3, 'data': 2}, 1: {'data': 4}}
+            0: {1: {"data": 2}, 2: {"weight": 3, "data": 2}},
+            1: {0: {"data": 2}, 2: {"data": 4}},
+            2: {0: {"weight": 3, "data": 2}, 1: {"data": 4}},
        }

        with pytest.raises(nx.NetworkXError):
@@ -606,7 +623,7 @@ class TestGraph(BaseAttrGraphTester):
    def test_clear(self):
        G = self.K3.copy()
-        G.graph['name'] = 'K3'
+        G.graph["name"] = "K3"
        G.clear()
        assert list(G.nodes) == []
        assert G.adj == {}
@@ -614,13 +631,13 @@ class TestGraph(BaseAttrGraphTester):
    def test_clear_edges(self):
        G = self.K3.copy()
-        G.graph['name'] = 'K3'
+        G.graph["name"] = "K3"
        nodes = list(G.nodes)
        G.clear_edges()
        assert list(G.nodes) == nodes
        assert G.adj == {0: {}, 1: {}, 2: {}}
        assert list(G.edges) == []
-        assert G.graph['name'] == 'K3'
+        assert G.graph["name"] == "K3"

    def test_edges_data(self):
        G = self.K3
@@ -642,31 +659,50 @@
    def test_update(self):
        # specify both edges and nodes
        G = self.K3.copy()
-        G.update(nodes=[3, (4, {'size': 2})],
-                 edges=[(4, 5), (6, 7, {'weight': 2})])
-        nlist = [(0, {}), (1, {}), (2, {}), (3, {}),
-                 (4, {'size': 2}), (5, {}), (6, {}), (7, {})]
+        G.update(nodes=[3, (4, {"size": 2})], edges=[(4, 5), (6, 7, {"weight": 2})])
+        nlist = [
+            (0, {}),
+            (1, {}),
+            (2, {}),
+            (3, {}),
+            (4, {"size": 2}),
+            (5, {}),
+            (6, {}),
+            (7, {}),
+        ]
        assert sorted(G.nodes.data()) == nlist
        if G.is_directed():
-            elist = [(0, 1, {}), (0, 2, {}), (1, 0, {}), (1, 2, {}),
-                     (2, 0, {}), (2, 1, {}),
-                     (4, 5, {}), (6, 7, {'weight': 2})]
+            elist = [
+                (0, 1, {}),
+                (0, 2, {}),
+                (1, 0, {}),
+                (1, 2, {}),
+                (2, 0, {}),
+                (2, 1, {}),
+                (4, 5, {}),
+                (6, 7, {"weight": 2}),
+            ]
        else:
-            elist = [(0, 1, {}), (0, 2, {}), (1, 2, {}),
-                     (4, 5, {}), (6, 7, {'weight': 2})]
+            elist = [
+                (0, 1, {}),
+                (0, 2, {}),
+                (1, 2, {}),
+                (4, 5, {}),
+                (6, 7, {"weight": 2}),
+            ]
        assert sorted(G.edges.data()) == elist
        assert G.graph == {}

        # no keywords -- order is edges, nodes
        G = self.K3.copy()
-        G.update([(4, 5), (6, 7, {'weight': 2})], [3, (4, {'size': 2})])
+        G.update([(4, 5), (6, 7, {"weight": 2})], [3, (4, {"size": 2})])
        assert sorted(G.nodes.data()) == nlist
        assert sorted(G.edges.data()) == elist
        assert G.graph == {}

        # update using only a graph
        G = self.Graph()
-        G.graph['foo'] = 'bar'
+        G.graph["foo"] = "bar"
        G.add_node(2, data=4)
        G.add_edge(0, 1, weight=0.5)
        GG = G.copy()
@@ -701,10 +737,10 @@ class TestEdgeSubgraph:
        G = nx.path_graph(5)
        # Add some node, edge, and graph attributes.
        for i in range(5):
-            G.nodes[i]['name'] = f'node{i}'
-        G.edges[0, 1]['name'] = 'edge01'
-        G.edges[3, 4]['name'] = 'edge34'
-        G.graph['name'] = 'graph'
+            G.nodes[i]["name"] = f"node{i}"
+        G.edges[0, 1]["name"] = "edge01"
+        G.edges[3, 4]["name"] = "edge34"
+        G.graph["name"] = "graph"
        # Get the subgraph induced by the first and last edges.
        self.G = G
        self.H = G.edge_subgraph([(0, 1), (3, 4)])
@@ -715,8 +751,7 @@

    def test_correct_edges(self):
        """Tests that the subgraph has the correct edges."""
-        assert ([(0, 1, 'edge01'), (3, 4, 'edge34')] ==
-                sorted(self.H.edges(data='name')))
+        assert [(0, 1, "edge01"), (3, 4, "edge34")] == sorted(self.H.edges(data="name"))

    def test_add_node(self):
        """Tests that adding a node to the original graph does not
@@ -742,9 +777,9 @@
        for v in self.H:
            assert self.G.nodes[v] == self.H.nodes[v]
        # Making a change to G should make a change in H and vice versa.
-        self.G.nodes[0]['name'] = 'foo'
+        self.G.nodes[0]["name"] = "foo"
        assert self.G.nodes[0] == self.H.nodes[0]
-        self.H.nodes[1]['name'] = 'bar'
+        self.H.nodes[1]["name"] = "bar"
        assert self.G.nodes[1] == self.H.nodes[1]

    def test_edge_attr_dict(self):
@@ -755,12 +790,10 @@
        for u, v in self.H.edges():
            assert self.G.edges[u, v] == self.H.edges[u, v]
        # Making a change to G should make a change in H and vice versa.
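# ---- Editor's aside (illustrative sketch, not part of this commit) ----------
# What the TestEdgeSubgraph cases around here assert: G.edge_subgraph returns
# a live view whose attribute dicts are shared with the parent graph
# (NetworkX 2.x semantics).
import networkx as nx

G = nx.path_graph(5)
G.edges[0, 1]["name"] = "edge01"
H = G.edge_subgraph([(0, 1), (3, 4)])  # a view, not a copy
G.edges[0, 1]["name"] = "foo"          # mutate through G...
assert H.edges[0, 1]["name"] == "foo"  # ...and the view sees it immediately
# ------------------------------------------------------------------------------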
-        self.G.edges[0, 1]['name'] = 'foo'
-        assert (self.G.edges[0, 1]['name'] ==
-                self.H.edges[0, 1]['name'])
-        self.H.edges[3, 4]['name'] = 'bar'
-        assert (self.G.edges[3, 4]['name'] ==
-                self.H.edges[3, 4]['name'])
+        self.G.edges[0, 1]["name"] = "foo"
+        assert self.G.edges[0, 1]["name"] == self.H.edges[0, 1]["name"]
+        self.H.edges[3, 4]["name"] = "bar"
+        assert self.G.edges[3, 4]["name"] == self.H.edges[3, 4]["name"]

    def test_graph_attr_dict(self):
        """Tests that the graph attribute dictionary of the two graphs
diff --git a/networkx/classes/tests/test_graph_historical.py b/networkx/classes/tests/test_graph_historical.py
index d0c45fa5..7af081c4 100644
--- a/networkx/classes/tests/test_graph_historical.py
+++ b/networkx/classes/tests/test_graph_historical.py
@@ -6,7 +6,6 @@ from .historical_tests import HistoricalTests


 class TestGraphHistorical(HistoricalTests):
-
    @classmethod
    def setup_class(cls):
        HistoricalTests.setup_class()
diff --git a/networkx/classes/tests/test_graphviews.py b/networkx/classes/tests/test_graphviews.py
index 376896d5..2fd8849f 100644
--- a/networkx/classes/tests/test_graphviews.py
+++ b/networkx/classes/tests/test_graphviews.py
@@ -13,6 +13,7 @@ class TestReverseView:
    def test_pickle(self):
        import pickle
+
        rv = self.rv
        prv = pickle.loads(pickle.dumps(rv, -1))
        assert rv._node == prv._node
@@ -35,7 +36,6 @@ class TestReverseView:
    def test_subclass(self):
        class MyGraph(nx.DiGraph):
-
            def my_method(self):
                return "me"
@@ -61,6 +61,7 @@ class TestMultiReverseView:
    def test_pickle(self):
        import pickle
+
        rv = self.rv
        prv = pickle.loads(pickle.dumps(rv, -1))
        assert rv._node == prv._node
@@ -104,6 +105,7 @@ class TestToDirected:
    def test_pickle(self):
        import pickle
+
        dv = self.dv
        pdv = pickle.loads(pickle.dumps(dv, -1))
        assert dv._node == pdv._node
@@ -142,12 +144,13 @@ class TestToUndirected:
    def test_pickle(self):
        import pickle
+
        uv = self.uv
        puv = pickle.loads(pickle.dumps(uv, -1))
        assert uv._node == puv._node
        assert uv._adj == puv._adj
        assert uv.graph == puv.graph
-        assert hasattr(uv, '_graph')
+        assert hasattr(uv, "_graph")

    def test_contains(self):
        assert (2, 3) in self.DG.edges
@@ -173,14 +176,24 @@ class TestChainsOfViews:
        cls.MDGv = nx.to_directed(cls.MG)
        cls.Rv = cls.DG.reverse()
        cls.MRv = cls.MDG.reverse()
-        cls.graphs = [cls.G, cls.DG, cls.MG, cls.MDG,
-                      cls.Gv, cls.DGv, cls.MGv, cls.MDGv,
-                      cls.Rv, cls.MRv]
+        cls.graphs = [
+            cls.G,
+            cls.DG,
+            cls.MG,
+            cls.MDG,
+            cls.Gv,
+            cls.DGv,
+            cls.MGv,
+            cls.MDGv,
+            cls.Rv,
+            cls.MRv,
+        ]
        for G in cls.graphs:
            G.edges, G.nodes, G.degree

    def test_pickle(self):
        import pickle
+
        for G in self.graphs:
            H = pickle.loads(pickle.dumps(G, -1))
            assert_edges_equal(H.edges, G.edges)
@@ -278,42 +291,41 @@ class TestChainsOfViews:
        SG = G.subgraph([4, 5, 6])
        CSG = SG.copy(as_view=True)
        DCSG = SG.copy(as_view=False)
-        assert hasattr(CSG, '_graph')  # is a view
-        assert not hasattr(DCSG, '_graph')  # not a view
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view

    def test_copy_disubgraph(self):
        G = self.DG.copy()
        SG = G.subgraph([4, 5, 6])
        CSG = SG.copy(as_view=True)
        DCSG = SG.copy(as_view=False)
-        assert hasattr(CSG, '_graph')  # is a view
-        assert not hasattr(DCSG, '_graph')  # not a view
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view

    def test_copy_multidisubgraph(self):
        G = self.MDG.copy()
        SG = G.subgraph([4, 5, 6])
        CSG = SG.copy(as_view=True)
        DCSG = SG.copy(as_view=False)
-        assert hasattr(CSG, '_graph')  # is a view
-        assert not hasattr(DCSG, '_graph')  # not a view
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view

    def test_copy_multisubgraph(self):
        G = self.MG.copy()
        SG = G.subgraph([4, 5, 6])
        CSG = SG.copy(as_view=True)
        DCSG = SG.copy(as_view=False)
-        assert hasattr(CSG, '_graph')  # is a view
-        assert not hasattr(DCSG, '_graph')  # not a view
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view

    def test_copy_of_view(self):
        G = nx.OrderedMultiGraph(self.MGv)
-        assert G.__class__.__name__ == 'OrderedMultiGraph'
+        assert G.__class__.__name__ == "OrderedMultiGraph"
        G = G.copy(as_view=True)
-        assert G.__class__.__name__ == 'OrderedMultiGraph'
+        assert G.__class__.__name__ == "OrderedMultiGraph"

    def test_subclass(self):
        class MyGraph(nx.DiGraph):
-
            def my_method(self):
                return "me"
diff --git a/networkx/classes/tests/test_multidigraph.py b/networkx/classes/tests/test_multidigraph.py
index dba835ab..2bf184c6 100644
--- a/networkx/classes/tests/test_multidigraph.py
+++ b/networkx/classes/tests/test_multidigraph.py
@@ -16,114 +16,135 @@ class BaseMultiDiGraphTester(BaseMultiGraphTester):
    def test_edges_data(self):
        G = self.K3
-        edges = [(0, 1, {}), (0, 2, {}), (1, 0, {}),
-                 (1, 2, {}), (2, 0, {}), (2, 1, {})]
+        edges = [(0, 1, {}), (0, 2, {}), (1, 0, {}), (1, 2, {}), (2, 0, {}), (2, 1, {})]
        assert sorted(G.edges(data=True)) == edges
        assert sorted(G.edges(0, data=True)) == [(0, 1, {}), (0, 2, {})]
        pytest.raises((KeyError, nx.NetworkXError), G.neighbors, -1)

    def test_edges_multi(self):
        G = self.K3
-        assert (sorted(G.edges()) ==
-                [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
+        assert sorted(G.edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
        assert sorted(G.edges(0)) == [(0, 1), (0, 2)]
        G.add_edge(0, 1)
-        assert (sorted(G.edges()) ==
-                [(0, 1), (0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
+        assert sorted(G.edges()) == [
+            (0, 1),
+            (0, 1),
+            (0, 2),
+            (1, 0),
+            (1, 2),
+            (2, 0),
+            (2, 1),
+        ]

    def test_out_edges(self):
        G = self.K3
-        assert (sorted(G.out_edges()) ==
-                [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
+        assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
        assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)]
        pytest.raises((KeyError, nx.NetworkXError), G.out_edges, -1)
        assert sorted(G.out_edges(0, keys=True)) == [(0, 1, 0), (0, 2, 0)]

    def test_out_edges_multi(self):
        G = self.K3
-        assert (sorted(G.out_edges()) ==
-                [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
+        assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
        assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)]
        G.add_edge(0, 1, 2)
-        assert (sorted(G.out_edges()) ==
-                [(0, 1), (0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
+        assert sorted(G.out_edges()) == [
+            (0, 1),
+            (0, 1),
+            (0, 2),
+            (1, 0),
+            (1, 2),
+            (2, 0),
+            (2, 1),
+        ]

    def test_out_edges_data(self):
        G = self.K3
        assert sorted(G.edges(0, data=True)) == [(0, 1, {}), (0, 2, {})]
        G.remove_edge(0, 1)
        G.add_edge(0, 1, data=1)
-        assert (sorted(G.edges(0, data=True)) ==
-                [(0, 1, {'data': 1}), (0, 2, {})])
-        assert (sorted(G.edges(0, data='data')) ==
-                [(0, 1, 1), (0, 2, None)])
-        assert (sorted(G.edges(0, data='data', default=-1)) ==
-                [(0, 1, 1), (0, 2, -1)])
+        assert sorted(G.edges(0, data=True)) == [(0, 1, {"data": 1}), (0, 2, {})]
+        assert sorted(G.edges(0, data="data")) == [(0, 1, 1), (0, 2, None)]
+        assert sorted(G.edges(0, data="data", default=-1)) == [(0, 1, 1), (0, 2, -1)]

    def test_in_edges(self):
        G = self.K3
-        assert (sorted(G.in_edges()) ==
-                [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
+        assert sorted(G.in_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
        assert sorted(G.in_edges(0)) == [(1, 0), (2, 0)]
        pytest.raises((KeyError, nx.NetworkXError), G.in_edges, -1)
        G.add_edge(0, 1, 2)
-        assert (sorted(G.in_edges()) ==
-                [(0, 1), (0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
+        assert sorted(G.in_edges()) == [
+            (0, 1),
+            (0, 1),
+            (0, 2),
+            (1, 0),
+            (1, 2),
+            (2, 0),
+            (2, 1),
+        ]
        assert sorted(G.in_edges(0, keys=True)) == [(1, 0, 0), (2, 0, 0)]

    def test_in_edges_no_keys(self):
        G = self.K3
-        assert (sorted(G.in_edges()) ==
-                [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
+        assert sorted(G.in_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
        assert sorted(G.in_edges(0)) == [(1, 0), (2, 0)]
        G.add_edge(0, 1, 2)
-        assert (sorted(G.in_edges()) ==
-                [(0, 1), (0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)])
-
-        assert (sorted(G.in_edges(data=True, keys=False)) ==
-                [(0, 1, {}), (0, 1, {}), (0, 2, {}), (1, 0, {}),
-                 (1, 2, {}), (2, 0, {}), (2, 1, {})])
+        assert sorted(G.in_edges()) == [
+            (0, 1),
+            (0, 1),
+            (0, 2),
+            (1, 0),
+            (1, 2),
+            (2, 0),
+            (2, 1),
+        ]
+
+        assert sorted(G.in_edges(data=True, keys=False)) == [
+            (0, 1, {}),
+            (0, 1, {}),
+            (0, 2, {}),
+            (1, 0, {}),
+            (1, 2, {}),
+            (2, 0, {}),
+            (2, 1, {}),
+        ]

    def test_in_edges_data(self):
        G = self.K3
-        assert (sorted(G.in_edges(0, data=True)) ==
-                [(1, 0, {}), (2, 0, {})])
+        assert sorted(G.in_edges(0, data=True)) == [(1, 0, {}), (2, 0, {})]
        G.remove_edge(1, 0)
        G.add_edge(1, 0, data=1)
-        assert (sorted(G.in_edges(0, data=True)) ==
-                [(1, 0, {'data': 1}), (2, 0, {})])
-        assert (sorted(G.in_edges(0, data='data')) ==
-                [(1, 0, 1), (2, 0, None)])
-        assert (sorted(G.in_edges(0, data='data', default=-1)) ==
-                [(1, 0, 1), (2, 0, -1)])
+        assert sorted(G.in_edges(0, data=True)) == [(1, 0, {"data": 1}), (2, 0, {})]
+        assert sorted(G.in_edges(0, data="data")) == [(1, 0, 1), (2, 0, None)]
+        assert sorted(G.in_edges(0, data="data", default=-1)) == [(1, 0, 1), (2, 0, -1)]

    def is_shallow(self, H, G):
        # graph
-        assert G.graph['foo'] == H.graph['foo']
-        G.graph['foo'].append(1)
-        assert G.graph['foo'] == H.graph['foo']
+        assert G.graph["foo"] == H.graph["foo"]
+        G.graph["foo"].append(1)
+        assert G.graph["foo"] == H.graph["foo"]
        # node
-        assert G.nodes[0]['foo'] == H.nodes[0]['foo']
-        G.nodes[0]['foo'].append(1)
-        assert G.nodes[0]['foo'] == H.nodes[0]['foo']
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
+        G.nodes[0]["foo"].append(1)
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
        # edge
-        assert G[1][2][0]['foo'] == H[1][2][0]['foo']
-        G[1][2][0]['foo'].append(1)
-        assert G[1][2][0]['foo'] == H[1][2][0]['foo']
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
+        G[1][2][0]["foo"].append(1)
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]

    def is_deep(self, H, G):
        # graph
-        assert G.graph['foo'] == H.graph['foo']
-        G.graph['foo'].append(1)
-        assert G.graph['foo'] != H.graph['foo']
+        assert G.graph["foo"] == H.graph["foo"]
+        G.graph["foo"].append(1)
+        assert G.graph["foo"] != H.graph["foo"]
        # node
-        assert G.nodes[0]['foo'] == H.nodes[0]['foo']
-        G.nodes[0]['foo'].append(1)
-        assert G.nodes[0]['foo'] != H.nodes[0]['foo']
+        assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
+        G.nodes[0]["foo"].append(1)
+        assert G.nodes[0]["foo"] != H.nodes[0]["foo"]
        # edge
-        assert G[1][2][0]['foo'] == H[1][2][0]['foo']
-        G[1][2][0]['foo'].append(1)
-        assert G[1][2][0]['foo'] != H[1][2][0]['foo']
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
+        G[1][2][0]["foo"].append(1)
+        assert G[1][2][0]["foo"] != H[1][2][0]["foo"]

    def test_to_undirected(self):
        # MultiDiGraph -> MultiGraph changes number of edges so it is
@@ -168,10 +189,8 @@ class BaseMultiDiGraphTester(BaseMultiGraphTester):
        assert G.degree(0) == 4
        assert list(G.degree(iter([0]))) == [(0, 4)]
        G.add_edge(0, 1, weight=0.3, other=1.2)
-        assert (sorted(G.degree(weight='weight')) ==
-                [(0, 4.3), (1, 4.3), (2, 4)])
-        assert (sorted(G.degree(weight='other')) ==
-                [(0, 5.2), (1, 5.2), (2, 4)])
+        assert sorted(G.degree(weight="weight")) == [(0, 4.3), (1, 4.3), (2, 4)]
+        assert sorted(G.degree(weight="other")) == [(0, 5.2), (1, 5.2), (2, 4)]

    def test_in_degree(self):
        G = self.K3
@@ -179,7 +198,7 @@ class BaseMultiDiGraphTester(BaseMultiGraphTester):
        assert dict(G.in_degree()) == {0: 2, 1: 2, 2: 2}
        assert G.in_degree(0) == 2
        assert list(G.in_degree(iter([0]))) == [(0, 2)]
-        assert G.in_degree(0, weight='weight') == 2
+        assert G.in_degree(0, weight="weight") == 2

    def test_out_degree(self):
        G = self.K3
@@ -187,15 +206,15 @@ class BaseMultiDiGraphTester(BaseMultiGraphTester):
        assert dict(G.out_degree()) == {0: 2, 1: 2, 2: 2}
        assert G.out_degree(0) == 2
        assert list(G.out_degree(iter([0]))) == [(0, 2)]
-        assert G.out_degree(0, weight='weight') == 2
+        assert G.out_degree(0, weight="weight") == 2

    def test_size(self):
        G = self.K3
        assert G.size() == 6
        assert G.number_of_edges() == 6
        G.add_edge(0, 1, weight=0.3, other=1.2)
-        assert round(G.size(weight='weight'), 2) == 6.3
-        assert round(G.size(weight='other'), 2) == 7.2
+        assert round(G.size(weight="weight"), 2) == 6.3
+        assert round(G.size(weight="other"), 2) == 7.2

    def test_to_undirected_reciprocal(self):
        G = self.Graph()
@@ -256,26 +275,30 @@ class TestMultiDiGraph(BaseMultiDiGraphTester, _TestMultiGraph):
    def test_add_edges_from(self):
        G = self.Graph()
-        G.add_edges_from([(0, 1), (0, 1, {'weight': 3})])
-        assert G._adj == {0: {1: {0: {}, 1: {'weight': 3}}}, 1: {}}
-        assert G._succ == {0: {1: {0: {}, 1: {'weight': 3}}}, 1: {}}
-        assert G._pred == {0: {}, 1: {0: {0: {}, 1: {'weight': 3}}}}
-
-        G.add_edges_from([(0, 1), (0, 1, {'weight': 3})], weight=2)
-        assert G._succ == {0: {1: {0: {},
-                                   1: {'weight': 3},
-                                   2: {'weight': 2},
-                                   3: {'weight': 3}}},
-                           1: {}}
-        assert G._pred == {0: {}, 1: {0: {0: {}, 1: {'weight': 3},
-                                          2: {'weight': 2},
-                                          3: {'weight': 3}}}}
+        G.add_edges_from([(0, 1), (0, 1, {"weight": 3})])
+        assert G._adj == {0: {1: {0: {}, 1: {"weight": 3}}}, 1: {}}
+        assert G._succ == {0: {1: {0: {}, 1: {"weight": 3}}}, 1: {}}
+        assert G._pred == {0: {}, 1: {0: {0: {}, 1: {"weight": 3}}}}
+
+        G.add_edges_from([(0, 1), (0, 1, {"weight": 3})], weight=2)
+        assert G._succ == {
+            0: {1: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
+            1: {},
+        }
+        assert G._pred == {
+            0: {},
+            1: {0: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
+        }

        G = self.Graph()
-        edges = [(0, 1, {'weight': 3}), (0, 1, (('weight', 2),)),
-                 (0, 1, 5), (0, 1, 's')]
+        edges = [
+            (0, 1, {"weight": 3}),
+            (0, 1, (("weight", 2),)),
+            (0, 1, 5),
+            (0, 1, "s"),
+        ]
        G.add_edges_from(edges)
-        keydict = {0: {'weight': 3}, 1: {'weight': 2}, 5: {}, 's': {}}
+        keydict = {0: {"weight": 3}, 1: {"weight": 2}, 5: {}, "s": {}}
        assert G._succ == {0: {1: keydict}, 1: {}}
        assert G._pred == {1: {0: keydict}, 0: {}}
@@ -289,49 +312,66 @@ class TestMultiDiGraph(BaseMultiDiGraphTester, _TestMultiGraph):
    def test_remove_edge(self):
        G = self.K3
        G.remove_edge(0, 1)
-        assert G._succ == {0: {2: {0: {}}},
-                           1: {0: {0: {}}, 2: {0: {}}},
-                           2: {0: {0: {}}, 1: {0: {}}}}
-        assert G._pred == {0: {1: {0: {}}, 2: {0: {}}},
-                           1: {2: {0: {}}},
-                           2: {0: {0: {}}, 1: {0: {}}}}
+        assert G._succ == {
+            0: {2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+        assert G._pred == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
        pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0)
-        pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, 0, 2,
-                      key=1)
+        pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, 0, 2, key=1)

    def test_remove_multiedge(self):
        G = self.K3
-        G.add_edge(0, 1, key='parallel edge')
-        G.remove_edge(0, 1, key='parallel edge')
-        assert G._adj == {0: {1: {0: {}}, 2: {0: {}}},
-                          1: {0: {0: {}}, 2: {0: {}}},
-                          2: {0: {0: {}}, 1: {0: {}}}}
-
-        assert G._succ == {0: {1: {0: {}}, 2: {0: {}}},
-                           1: {0: {0: {}}, 2: {0: {}}},
-                           2: {0: {0: {}}, 1: {0: {}}}}
-
-        assert G._pred == {0: {1: {0: {}}, 2: {0: {}}},
-                           1: {0: {0: {}}, 2: {0: {}}},
-                           2: {0: {0: {}}, 1: {0: {}}}}
+        G.add_edge(0, 1, key="parallel edge")
+        G.remove_edge(0, 1, key="parallel edge")
+        assert G._adj == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+
+        assert G._succ == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+
+        assert G._pred == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
        G.remove_edge(0, 1)
-        assert G._succ == {0: {2: {0: {}}},
-                           1: {0: {0: {}}, 2: {0: {}}},
-                           2: {0: {0: {}}, 1: {0: {}}}}
-        assert G._pred == {0: {1: {0: {}}, 2: {0: {}}},
-                           1: {2: {0: {}}},
-                           2: {0: {0: {}}, 1: {0: {}}}}
+        assert G._succ == {
+            0: {2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+        assert G._pred == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
        pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0)

    def test_remove_edges_from(self):
        G = self.K3
        G.remove_edges_from([(0, 1)])
-        assert G._succ == {0: {2: {0: {}}},
-                           1: {0: {0: {}}, 2: {0: {}}},
-                           2: {0: {0: {}}, 1: {0: {}}}}
-        assert G._pred == {0: {1: {0: {}}, 2: {0: {}}},
-                           1: {2: {0: {}}},
-                           2: {0: {0: {}}, 1: {0: {}}}}
+        assert G._succ == {
+            0: {2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
+        assert G._pred == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
        G.remove_edges_from([(0, 0)])  # silent fail
@@ -347,12 +387,12 @@ class TestEdgeSubgraph(_TestMultiGraphEdgeSubgraph):
        nx.add_path(G, reversed(range(5)))
        # Add some node, edge, and graph attributes.
        for i in range(5):
-            G.nodes[i]['name'] = f'node{i}'
-        G.adj[0][1][0]['name'] = 'edge010'
-        G.adj[0][1][1]['name'] = 'edge011'
-        G.adj[3][4][0]['name'] = 'edge340'
-        G.adj[3][4][1]['name'] = 'edge341'
-        G.graph['name'] = 'graph'
+            G.nodes[i]["name"] = f"node{i}"
+        G.adj[0][1][0]["name"] = "edge010"
+        G.adj[0][1][1]["name"] = "edge011"
+        G.adj[3][4][0]["name"] = "edge340"
+        G.adj[3][4][1]["name"] = "edge341"
+        G.graph["name"] = "graph"
        # Get the subgraph induced by one of the first edges and one of
        # the last edges.
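# ---- Editor's aside (illustrative sketch, not part of this commit) ----------
# The keyed parallel-edge API the MultiDiGraph tests above exercise
# (NetworkX 2.x): keys disambiguate parallel edges between the same nodes.
import networkx as nx

G = nx.MultiDiGraph()
G.add_edge(0, 1)                          # gets automatic key 0
G.add_edge(0, 1, key="parallel edge")     # explicit key
assert G.number_of_edges(0, 1) == 2
G.remove_edge(0, 1, key="parallel edge")  # only the keyed copy is removed
assert G.number_of_edges(0, 1) == 1
# ------------------------------------------------------------------------------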
        self.G = G
diff --git a/networkx/classes/tests/test_multigraph.py b/networkx/classes/tests/test_multigraph.py
index fb409efd..5fee1897 100644
--- a/networkx/classes/tests/test_multigraph.py
+++ b/networkx/classes/tests/test_multigraph.py
@@ -25,20 +25,21 @@ class BaseMultiGraphTester(BaseAttrGraphTester):
    def test_adjacency(self):
        G = self.K3
-        assert (dict(G.adjacency()) ==
-                {0: {1: {0: {}}, 2: {0: {}}},
-                 1: {0: {0: {}}, 2: {0: {}}},
-                 2: {0: {0: {}}, 1: {0: {}}}})
+        assert dict(G.adjacency()) == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }

    def deepcopy_edge_attr(self, H, G):
-        assert G[1][2][0]['foo'] == H[1][2][0]['foo']
-        G[1][2][0]['foo'].append(1)
-        assert G[1][2][0]['foo'] != H[1][2][0]['foo']
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
+        G[1][2][0]["foo"].append(1)
+        assert G[1][2][0]["foo"] != H[1][2][0]["foo"]

    def shallow_copy_edge_attr(self, H, G):
-        assert G[1][2][0]['foo'] == H[1][2][0]['foo']
-        G[1][2][0]['foo'].append(1)
-        assert G[1][2][0]['foo'] == H[1][2][0]['foo']
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]
+        G[1][2][0]["foo"].append(1)
+        assert G[1][2][0]["foo"] == H[1][2][0]["foo"]

    def graphs_equal(self, H, G):
        assert G._adj == H._adj
@@ -62,30 +63,30 @@ class BaseMultiGraphTester(BaseAttrGraphTester):
    def same_attrdict(self, H, G):
        # same attrdict in the edgedata
-        old_foo = H[1][2][0]['foo']
-        H.adj[1][2][0]['foo'] = 'baz'
+        old_foo = H[1][2][0]["foo"]
+        H.adj[1][2][0]["foo"] = "baz"
        assert G._adj == H._adj
-        H.adj[1][2][0]['foo'] = old_foo
+        H.adj[1][2][0]["foo"] = old_foo
        assert G._adj == H._adj

-        old_foo = H.nodes[0]['foo']
-        H.nodes[0]['foo'] = 'baz'
+        old_foo = H.nodes[0]["foo"]
+        H.nodes[0]["foo"] = "baz"
        assert G._node == H._node
-        H.nodes[0]['foo'] = old_foo
+        H.nodes[0]["foo"] = old_foo
        assert G._node == H._node

    def different_attrdict(self, H, G):
        # used by graph_equal_but_different
-        old_foo = H[1][2][0]['foo']
-        H.adj[1][2][0]['foo'] = 'baz'
+        old_foo = H[1][2][0]["foo"]
+        H.adj[1][2][0]["foo"] = "baz"
        assert G._adj != H._adj
-        H.adj[1][2][0]['foo'] = old_foo
+        H.adj[1][2][0]["foo"] = old_foo
        assert G._adj == H._adj

-        old_foo = H.nodes[0]['foo']
-        H.nodes[0]['foo'] = 'baz'
+        old_foo = H.nodes[0]["foo"]
+        H.nodes[0]["foo"] = "baz"
        assert G._node != H._node
-        H.nodes[0]['foo'] = old_foo
+        H.nodes[0]["foo"] = old_foo
        assert G._node == H._node

    def test_to_undirected(self):
@@ -108,39 +109,56 @@ class BaseMultiGraphTester(BaseAttrGraphTester):
        G = self.K3
        G.add_edge(0, 0)
        G.add_edge(0, 0)
-        G.add_edge(0, 0, key='parallel edge')
-        G.remove_edge(0, 0, key='parallel edge')
+        G.add_edge(0, 0, key="parallel edge")
+        G.remove_edge(0, 0, key="parallel edge")
        assert G.number_of_edges(0, 0) == 2
        G.remove_edge(0, 0)
        assert G.number_of_edges(0, 0) == 1

    def test_edge_lookup(self):
        G = self.Graph()
-        G.add_edge(1, 2, foo='bar')
-        G.add_edge(1, 2, 'key', foo='biz')
-        assert_edges_equal(G.edges[1, 2, 0], {'foo': 'bar'})
-        assert_edges_equal(G.edges[1, 2, 'key'], {'foo': 'biz'})
+        G.add_edge(1, 2, foo="bar")
+        G.add_edge(1, 2, "key", foo="biz")
+        assert_edges_equal(G.edges[1, 2, 0], {"foo": "bar"})
+        assert_edges_equal(G.edges[1, 2, "key"], {"foo": "biz"})

    def test_edge_attr4(self):
        G = self.Graph()
-        G.add_edge(1, 2, key=0, data=7, spam='bar', bar='foo')
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 7, 'spam': 'bar', 'bar': 'foo'})])
-        G[1][2][0]['data'] = 10  # OK to set data like this
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 10, 'spam': 'bar', 'bar': 'foo'})])
-
-        G.adj[1][2][0]['data'] = 20
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 20, 'spam': 'bar', 'bar': 'foo'})])
-        G.edges[1, 2, 0]['data'] = 21  # another spelling, "edge"
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 21, 'spam': 'bar', 'bar': 'foo'})])
-        G.adj[1][2][0]['listdata'] = [20, 200]
-        G.adj[1][2][0]['weight'] = 20
-        assert_edges_equal(G.edges(data=True),
-                           [(1, 2, {'data': 21, 'spam': 'bar', 'bar': 'foo',
-                                    'listdata': [20, 200], 'weight':20})])
+        G.add_edge(1, 2, key=0, data=7, spam="bar", bar="foo")
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})]
+        )
+        G[1][2][0]["data"] = 10  # OK to set data like this
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 10, "spam": "bar", "bar": "foo"})]
+        )
+
+        G.adj[1][2][0]["data"] = 20
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 20, "spam": "bar", "bar": "foo"})]
+        )
+        G.edges[1, 2, 0]["data"] = 21  # another spelling, "edge"
+        assert_edges_equal(
+            G.edges(data=True), [(1, 2, {"data": 21, "spam": "bar", "bar": "foo"})]
+        )
+        G.adj[1][2][0]["listdata"] = [20, 200]
+        G.adj[1][2][0]["weight"] = 20
+        assert_edges_equal(
+            G.edges(data=True),
+            [
+                (
+                    1,
+                    2,
+                    {
+                        "data": 21,
+                        "spam": "bar",
+                        "bar": "foo",
+                        "listdata": [20, 200],
+                        "weight": 20,
+                    },
+                )
+            ],
+        )


 class TestMultiGraph(BaseMultiGraphTester, _TestGraph):
@@ -148,9 +166,7 @@ class TestMultiGraph(BaseMultiGraphTester, _TestGraph):
        self.Graph = nx.MultiGraph
        # build K3
        ed1, ed2, ed3 = ({0: {}}, {0: {}}, {0: {}})
-        self.k3adj = {0: {1: ed1, 2: ed2},
-                      1: {0: ed1, 2: ed3},
-                      2: {0: ed2, 1: ed3}}
+        self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}}
        self.k3edges = [(0, 1), (0, 2), (1, 2)]
        self.k3nodes = [0, 1, 2]
        self.K3 = self.Graph()
@@ -170,9 +186,9 @@ class TestMultiGraph(BaseMultiGraphTester, _TestGraph):
        G = self.K3
        assert G[0] == {1: {0: {}}, 2: {0: {}}}
        with pytest.raises(KeyError):
-            G.__getitem__('j')
+            G.__getitem__("j")
        with pytest.raises(TypeError):
-            G.__getitem__(['A'])
+            G.__getitem__(["A"])

    def test_remove_node(self):
        G = self.K3
@@ -201,19 +217,25 @@ class TestMultiGraph(BaseMultiGraphTester, _TestGraph):
    def test_add_edges_from(self):
        G = self.Graph()
-        G.add_edges_from([(0, 1), (0, 1, {'weight': 3})])
-        assert G.adj == {0: {1: {0: {}, 1: {'weight': 3}}},
-                         1: {0: {0: {}, 1: {'weight': 3}}}}
-        G.add_edges_from([(0, 1), (0, 1, {'weight': 3})], weight=2)
-        assert G.adj == {0: {1: {0: {}, 1: {'weight': 3},
-                                 2: {'weight': 2}, 3: {'weight': 3}}},
-                         1: {0: {0: {}, 1: {'weight': 3},
-                                 2: {'weight': 2}, 3: {'weight': 3}}}}
+        G.add_edges_from([(0, 1), (0, 1, {"weight": 3})])
+        assert G.adj == {
+            0: {1: {0: {}, 1: {"weight": 3}}},
+            1: {0: {0: {}, 1: {"weight": 3}}},
+        }
+        G.add_edges_from([(0, 1), (0, 1, {"weight": 3})], weight=2)
+        assert G.adj == {
+            0: {1: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
+            1: {0: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}},
+        }
        G = self.Graph()
-        edges = [(0, 1, {'weight': 3}), (0, 1, (('weight', 2),)),
-                 (0, 1, 5), (0, 1, 's')]
+        edges = [
+            (0, 1, {"weight": 3}),
+            (0, 1, (("weight", 2),)),
+            (0, 1, 5),
+            (0, 1, "s"),
+        ]
        G.add_edges_from(edges)
-        keydict = {0: {'weight': 3}, 1: {'weight': 2}, 5: {}, 's': {}}
+        keydict = {0: {"weight": 3}, 1: {"weight": 2}, 5: {}, "s": {}}
        assert G._adj == {0: {1: keydict}, 1: {0: keydict}}

        # too few in tuple
@@ -229,10 +251,7 @@ class TestMultiGraph(BaseMultiGraphTester, _TestGraph):
    def test_remove_edge(self):
        G = self.K3
        G.remove_edge(0, 1)
-        assert G.adj == {0: {2: {0: {}}},
-                         1: {2: {0: {}}},
-                         2: {0: {0: {}},
-                             1: {0: {}}}}
+        assert G.adj == {0: {2: {0: {}}}, 1: {2: {0: {}}}, 2: {0: {0: {}}, 1: {0: {}}}}

        with pytest.raises(nx.NetworkXError):
            G.remove_edge(-1, 0)
@@ -261,11 +280,13 @@ class TestMultiGraph(BaseMultiGraphTester, _TestGraph):
    def test_remove_multiedge(self):
        G = self.K3
-        G.add_edge(0, 1, key='parallel edge')
-        G.remove_edge(0, 1, key='parallel edge')
-        assert G.adj == {0: {1: {0: {}}, 2: {0: {}}},
-                         1: {0: {0: {}}, 2: {0: {}}},
-                         2: {0: {0: {}}, 1: {0: {}}}}
+        G.add_edge(0, 1, key="parallel edge")
+        G.remove_edge(0, 1, key="parallel edge")
+        assert G.adj == {
+            0: {1: {0: {}}, 2: {0: {}}},
+            1: {0: {0: {}}, 2: {0: {}}},
+            2: {0: {0: {}}, 1: {0: {}}},
+        }
        G.remove_edge(0, 1)
        kd = {0: {}}
        assert G.adj == {0: {2: kd}, 1: {2: kd}, 2: {0: kd, 1: kd}}
@@ -283,12 +304,12 @@ class TestEdgeSubgraph:
        nx.add_path(G, range(5))
        # Add some node, edge, and graph attributes.
        for i in range(5):
-            G.nodes[i]['name'] = f'node{i}'
-        G.adj[0][1][0]['name'] = 'edge010'
-        G.adj[0][1][1]['name'] = 'edge011'
-        G.adj[3][4][0]['name'] = 'edge340'
-        G.adj[3][4][1]['name'] = 'edge341'
-        G.graph['name'] = 'graph'
+            G.nodes[i]["name"] = f"node{i}"
+        G.adj[0][1][0]["name"] = "edge010"
+        G.adj[0][1][1]["name"] = "edge011"
+        G.adj[3][4][0]["name"] = "edge340"
+        G.adj[3][4][1]["name"] = "edge341"
+        G.graph["name"] = "graph"
        # Get the subgraph induced by one of the first edges and one of
        # the last edges.
        self.G = G
@@ -300,8 +321,9 @@

    def test_correct_edges(self):
        """Tests that the subgraph has the correct edges."""
-        assert ([(0, 1, 0, 'edge010'), (3, 4, 1, 'edge341')] ==
-                sorted(self.H.edges(keys=True, data='name')))
+        assert [(0, 1, 0, "edge010"), (3, 4, 1, "edge341")] == sorted(
+            self.H.edges(keys=True, data="name")
+        )

    def test_add_node(self):
        """Tests that adding a node to the original graph does not
@@ -327,9 +349,9 @@
        for v in self.H:
            assert self.G.nodes[v] == self.H.nodes[v]
        # Making a change to G should make a change in H and vice versa.
-        self.G.nodes[0]['name'] = 'foo'
+        self.G.nodes[0]["name"] = "foo"
        assert self.G.nodes[0] == self.H.nodes[0]
-        self.H.nodes[1]['name'] = 'bar'
+        self.H.nodes[1]["name"] = "bar"
        assert self.G.nodes[1] == self.H.nodes[1]

    def test_edge_attr_dict(self):
@@ -340,12 +362,10 @@
        for u, v, k in self.H.edges(keys=True):
            assert self.G._adj[u][v][k] == self.H._adj[u][v][k]
        # Making a change to G should make a change in H and vice versa.
-        self.G._adj[0][1][0]['name'] = 'foo'
-        assert (self.G._adj[0][1][0]['name'] ==
-                self.H._adj[0][1][0]['name'])
-        self.H._adj[3][4][1]['name'] = 'bar'
-        assert (self.G._adj[3][4][1]['name'] ==
-                self.H._adj[3][4][1]['name'])
+        self.G._adj[0][1][0]["name"] = "foo"
+        assert self.G._adj[0][1][0]["name"] == self.H._adj[0][1][0]["name"]
+        self.H._adj[3][4][1]["name"] = "bar"
+        assert self.G._adj[3][4][1]["name"] == self.H._adj[3][4][1]["name"]

    def test_graph_attr_dict(self):
        """Tests that the graph attribute dictionary of the two graphs
diff --git a/networkx/classes/tests/test_special.py b/networkx/classes/tests/test_special.py
index 1db0d86e..cb0142e7 100644
--- a/networkx/classes/tests/test_special.py
+++ b/networkx/classes/tests/test_special.py
@@ -32,6 +32,7 @@ def test_factories():
        adjlist_inner_dict_factory = mydict3
        edge_key_dict_factory = mydict4
        edge_attr_dict_factory = mydict5
+
    G = MyGraph()
    assert isinstance(G._node, mydict1)
    assert isinstance(G._adj, mydict2)
@@ -64,21 +65,22 @@ class TestOrderedGraph(_TestGraph):
            adjlist_outer_dict_factory = OrderedDict
            adjlist_inner_dict_factory = OrderedDict
            edge_attr_dict_factory = OrderedDict
+
        self.Graph = MyGraph


 class TestThinGraph(BaseGraphTester):
    def setup_method(self):
-        all_edge_dict = {'weight': 1}
+        all_edge_dict = {"weight": 1}

        class MyGraph(nx.Graph):
-            def edge_attr_dict_factory(self): return all_edge_dict
+            def edge_attr_dict_factory(self):
+                return all_edge_dict
+
        self.Graph = MyGraph
        # build dict-of-dict-of-dict K3
        ed1, ed2, ed3 = (all_edge_dict, all_edge_dict, all_edge_dict)
-        self.k3adj = {0: {1: ed1, 2: ed2},
-                      1: {0: ed1, 2: ed3},
-                      2: {0: ed2, 1: ed3}}
+        self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}}
        self.k3edges = [(0, 1), (0, 2), (1, 2)]
        self.k3nodes = [0, 1, 2]
        self.K3 = self.Graph()
@@ -104,15 +106,18 @@ class TestOrderedDiGraph(_TestDiGraph):
            adjlist_outer_dict_factory = OrderedDict
            adjlist_inner_dict_factory = OrderedDict
            edge_attr_dict_factory = OrderedDict
+
        self.Graph = MyGraph


 class TestThinDiGraph(BaseDiGraphTester):
    def setup_method(self):
-        all_edge_dict = {'weight': 1}
+        all_edge_dict = {"weight": 1}

        class MyGraph(nx.DiGraph):
-            def edge_attr_dict_factory(self): return all_edge_dict
+            def edge_attr_dict_factory(self):
+                return all_edge_dict
+
        self.Graph = MyGraph
        # build dict-of-dict-of-dict K3
        ed1, ed2, ed3 = (all_edge_dict, all_edge_dict, all_edge_dict)
@@ -155,6 +160,7 @@ class TestOrderedMultiGraph(_TestMultiGraph):
            adjlist_inner_dict_factory = OrderedDict
            edge_key_dict_factory = OrderedDict
            edge_attr_dict_factory = OrderedDict
+
        self.Graph = MyGraph
@@ -174,4 +180,5 @@ class TestOrderedMultiDiGraph(_TestMultiDiGraph):
            adjlist_inner_dict_factory = OrderedDict
            edge_key_dict_factory = OrderedDict
            edge_attr_dict_factory = OrderedDict
+
        self.Graph = MyGraph
diff --git a/networkx/classes/tests/test_subgraphviews.py b/networkx/classes/tests/test_subgraphviews.py
index 825af4cc..bcfeea54 100644
--- a/networkx/classes/tests/test_subgraphviews.py
+++ b/networkx/classes/tests/test_subgraphviews.py
@@ -213,8 +213,8 @@ class TestInducedSubGraph:
    @classmethod
    def setup_class(cls):
        cls.K3 = G = nx.complete_graph(3)
-        G.graph['foo'] = []
-        G.nodes[0]['foo'] = []
+        G.graph["foo"] = []
+        G.nodes[0]["foo"] = []
        G.remove_edge(1, 2)
        ll = []
        G.add_edge(1, 2, foo=ll)
@@ -237,15 +237,15 @@ class TestInducedSubGraph:
        assert dict(H.adj) == {0: {1: {}}, 1: {0: {}}}

    def same_attrdict(self, H, G):
-        old_foo = H[1][2]['foo']
-        H.edges[1, 2]['foo'] = 'baz'
+        old_foo = H[1][2]["foo"]
+        H.edges[1, 2]["foo"] = "baz"
        assert G.edges == H.edges
-        H.edges[1, 2]['foo'] = old_foo
+        H.edges[1, 2]["foo"] = old_foo
        assert G.edges == H.edges
-        old_foo = H.nodes[0]['foo']
-        H.nodes[0]['foo'] = 'baz'
+        old_foo = H.nodes[0]["foo"]
+        H.nodes[0]["foo"] = "baz"
        assert G.nodes == H.nodes
-        H.nodes[0]['foo'] = old_foo
+        H.nodes[0]["foo"] = old_foo
        assert G.nodes == H.nodes

    def graphs_equal(self, H, G):
@@ -277,10 +277,10 @@ class TestEdgeSubGraph:
        cls.G = G = nx.path_graph(5)
        # Add some node, edge, and graph attributes.
        for i in range(5):
-            G.nodes[i]['name'] = f'node{i}'
-        G.edges[0, 1]['name'] = 'edge01'
-        G.edges[3, 4]['name'] = 'edge34'
-        G.graph['name'] = 'graph'
+            G.nodes[i]["name"] = f"node{i}"
+        G.edges[0, 1]["name"] = "edge01"
+        G.edges[3, 4]["name"] = "edge34"
+        G.graph["name"] = "graph"
        # Get the subgraph induced by the first and last edges.
        cls.H = nx.edge_subgraph(G, [(0, 1), (3, 4)])
@@ -290,8 +290,7 @@

    def test_correct_edges(self):
        """Tests that the subgraph has the correct edges."""
-        assert ([(0, 1, 'edge01'), (3, 4, 'edge34')] ==
-                sorted(self.H.edges(data='name')))
+        assert [(0, 1, "edge01"), (3, 4, "edge34")] == sorted(self.H.edges(data="name"))

    def test_add_node(self):
        """Tests that adding a node to the original graph does not
@@ -319,9 +318,9 @@
        for v in self.H:
            assert self.G.nodes[v] == self.H.nodes[v]
        # Making a change to G should make a change in H and vice versa.
-        self.G.nodes[0]['name'] = 'foo'
+        self.G.nodes[0]["name"] = "foo"
        assert self.G.nodes[0] == self.H.nodes[0]
-        self.H.nodes[1]['name'] = 'bar'
+        self.H.nodes[1]["name"] = "bar"
        assert self.G.nodes[1] == self.H.nodes[1]

    def test_edge_attr_dict(self):
@@ -332,12 +331,10 @@
        for u, v in self.H.edges():
            assert self.G.edges[u, v] == self.H.edges[u, v]
        # Making a change to G should make a change in H and vice versa.
-        self.G.edges[0, 1]['name'] = 'foo'
-        assert (self.G.edges[0, 1]['name'] ==
-                self.H.edges[0, 1]['name'])
-        self.H.edges[3, 4]['name'] = 'bar'
-        assert (self.G.edges[3, 4]['name'] ==
-                self.H.edges[3, 4]['name'])
+        self.G.edges[0, 1]["name"] = "foo"
+        assert self.G.edges[0, 1]["name"] == self.H.edges[0, 1]["name"]
+        self.H.edges[3, 4]["name"] = "bar"
+        assert self.G.edges[3, 4]["name"] == self.H.edges[3, 4]["name"]

    def test_graph_attr_dict(self):
        """Tests that the graph attribute dictionary of the two graphs
diff --git a/networkx/conftest.py b/networkx/conftest.py
index dfdcd325..98d18009 100644
--- a/networkx/conftest.py
+++ b/networkx/conftest.py
@@ -28,27 +28,29 @@ def pytest_collection_modifyitems(config, items):

 @pytest.fixture(autouse=True)
 def set_warnings():
     warnings.filterwarnings(
-        "ignore", category=DeprecationWarning,
+        "ignore",
+        category=DeprecationWarning,
         message="literal_stringizer is deprecated*",
     )
     warnings.filterwarnings(
-        "ignore", category=DeprecationWarning,
+        "ignore",
+        category=DeprecationWarning,
         message="literal_destringizer is deprecated*",
     )
     warnings.filterwarnings(
-        "ignore", category=DeprecationWarning,
-        message="is_string_like is deprecated*"
+        "ignore", category=DeprecationWarning, message="is_string_like is deprecated*"
     )
     warnings.filterwarnings(
-        "ignore", category=DeprecationWarning,
-        message="make_str is deprecated*"
+        "ignore", category=DeprecationWarning, message="make_str is deprecated*"
     )
     warnings.filterwarnings(
-        "ignore", category=DeprecationWarning,
-        message="context_manager reversed is deprecated*"
+        "ignore",
+        category=DeprecationWarning,
+        message="context_manager reversed is deprecated*",
     )
     warnings.filterwarnings(
-        "ignore", category=PendingDeprecationWarning,
+        "ignore",
+        category=PendingDeprecationWarning,
         message="the matrix subclass is not the recommended way*",
     )
diff --git a/networkx/convert_matrix.py b/networkx/convert_matrix.py
index ea686552..1c670265 100644
--- a/networkx/convert_matrix.py
+++ b/networkx/convert_matrix.py
@@ -211,8 +211,9 @@ def from_pandas_adjacency(df, create_using=None):
     return G

-def to_pandas_edgelist(G, source="source", target="target", nodelist=None,
-                       dtype=None, order=None):
+def to_pandas_edgelist(
+    G, source="source", target="target", nodelist=None, dtype=None, order=None
+):
     """Returns the graph edge list as a Pandas DataFrame.

     Parameters
@@ -270,8 +271,9 @@ def to_pandas_edgelist(G, source="source", target="target", nodelist=None,
     return pd.DataFrame(edgelistdict)

-def from_pandas_edgelist(df, source="source", target="target", edge_attr=None,
-                         create_using=None):
+def from_pandas_edgelist(
+    df, source="source", target="target", edge_attr=None, create_using=None
+):
     """Returns a graph from Pandas DataFrame containing an edge list.

     The Pandas DataFrame should contain at least two columns of node names and
@@ -607,11 +609,20 @@ def from_numpy_matrix(A, parallel_edges=False, create_using=None):
     # handle numpy constructed data type
     if python_type == "void":
         # Sort the fields by their offset, then by dtype, then by name.
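# ---- Editor's aside (illustrative sketch, not part of this commit) ----------
# A typical round trip through the Pandas converters whose signatures were
# re-wrapped above; assumes pandas is available and the NetworkX 2.x defaults
# (source="source", target="target").
import networkx as nx
import pandas as pd

df = pd.DataFrame({"source": [0, 1], "target": [1, 2], "weight": [2.0, 3.5]})
G = nx.from_pandas_edgelist(df, edge_attr="weight")
assert G[1][2]["weight"] == 3.5
df2 = nx.to_pandas_edgelist(G)  # back to a DataFrame, attributes included
# ------------------------------------------------------------------------------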
-        fields = sorted((offset, dtype, name)
-                        for name, (dtype, offset) in A.dtype.fields.items())
-        triples = ((u, v, {name: kind_to_python_type[dtype.kind](val)
-                           for (_, dtype, name), val in zip(fields, A[u, v])})
-                   for u, v in edges)
+        fields = sorted(
+            (offset, dtype, name) for name, (dtype, offset) in A.dtype.fields.items()
+        )
+        triples = (
+            (
+                u,
+                v,
+                {
+                    name: kind_to_python_type[dtype.kind](val)
+                    for (_, dtype, name), val in zip(fields, A[u, v])
+                },
+            )
+            for u, v in edges
+        )
    # If the entries in the adjacency matrix are integers, the graph is a
    # multigraph, and parallel_edges is True, then create parallel edges, each
    # with weight 1, for each entry in the adjacency matrix. Otherwise, create
@@ -625,8 +636,9 @@ def from_numpy_matrix(A, parallel_edges=False, create_using=None):
        #     for d in range(A[u, v]):
        #         G.add_edge(u, v, weight=1)
        #
-        triples = chain(((u, v, {"weight": 1}) for d in range(A[u, v]))
-                        for (u, v) in edges)
+        triples = chain(
+            ((u, v, {"weight": 1}) for d in range(A[u, v])) for (u, v) in edges
+        )
    else:  # basic data type
        triples = ((u, v, dict(weight=python_type(A[u, v]))) for u, v in edges)
    # If we are creating an undirected multigraph, only add the edges from the
@@ -716,8 +728,7 @@ def to_numpy_recarray(G, nodelist=None, dtype=None, order=None):
    return M.view(np.recarray)

-def to_scipy_sparse_matrix(G, nodelist=None, dtype=None,
-                           weight="weight", format="csr"):
+def to_scipy_sparse_matrix(G, nodelist=None, dtype=None, weight="weight", format="csr"):
    """Returns the graph adjacency matrix as a SciPy sparse matrix.

    Parameters
@@ -827,8 +838,7 @@
        row, col, data = [], [], []

    if G.is_directed():
-        M = sparse.coo_matrix((data, (row, col)),
-                              shape=(nlen, nlen), dtype=dtype)
+        M = sparse.coo_matrix((data, (row, col)), shape=(nlen, nlen), dtype=dtype)
    else:
        # symmetrize matrix
        d = data + data
@@ -916,8 +926,9 @@ def _generate_weighted_edges(A):
    return _coo_gen_triples(A.tocoo())

-def from_scipy_sparse_matrix(A, parallel_edges=False, create_using=None,
-                             edge_attribute="weight"):
+def from_scipy_sparse_matrix(
+    A, parallel_edges=False, create_using=None, edge_attribute="weight"
+):
    """Creates a new graph from an adjacency matrix given as a SciPy sparse
    matrix.
diff --git a/networkx/drawing/layout.py b/networkx/drawing/layout.py
index 955375a0..99c138e6 100644
--- a/networkx/drawing/layout.py
+++ b/networkx/drawing/layout.py
@@ -18,18 +18,20 @@
 Warning: Most layout routines have only been tested in 2-dimensions.
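# ---- Editor's aside (illustrative sketch, not part of this commit) ----------
# The parallel_edges behavior described in the from_numpy_matrix comments
# above: with a multigraph target and an integer matrix, entry A[u, v] = n
# yields n parallel edges, each with weight 1 (NetworkX 2.x).
import networkx as nx
import numpy as np

A = np.array([[0, 3], [0, 0]])
G = nx.from_numpy_matrix(A, parallel_edges=True, create_using=nx.MultiDiGraph)
assert G.number_of_edges(0, 1) == 3
assert all(d["weight"] == 1 for d in G[0][1].values())
# ------------------------------------------------------------------------------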
import networkx as nx from networkx.utils import random_state -__all__ = ['bipartite_layout', - 'circular_layout', - 'kamada_kawai_layout', - 'random_layout', - 'rescale_layout', - 'shell_layout', - 'spring_layout', - 'spectral_layout', - 'planar_layout', - 'fruchterman_reingold_layout', - 'spiral_layout', - 'multipartite_layout'] +__all__ = [ + "bipartite_layout", + "circular_layout", + "kamada_kawai_layout", + "random_layout", + "rescale_layout", + "shell_layout", + "spring_layout", + "spectral_layout", + "planar_layout", + "fruchterman_reingold_layout", + "spiral_layout", + "multipartite_layout", +] def _process_params(G, center, dim): @@ -147,7 +149,7 @@ def circular_layout(G, scale=1, center=None, dim=2): import numpy as np if dim < 2: - raise ValueError('cannot handle dimensions < 2') + raise ValueError("cannot handle dimensions < 2") G, center = _process_params(G, center, dim) @@ -161,8 +163,9 @@ def circular_layout(G, scale=1, center=None, dim=2): # Discard the extra angle since it matches 0 radians. theta = np.linspace(0, 1, len(G) + 1)[:-1] * 2 * np.pi theta = theta.astype(np.float32) - pos = np.column_stack([np.cos(theta), np.sin(theta), - np.zeros((len(G), paddims))]) + pos = np.column_stack( + [np.cos(theta), np.sin(theta), np.zeros((len(G), paddims))] + ) pos = rescale_layout(pos, scale=scale) + center pos = dict(zip(G, pos)) @@ -220,7 +223,7 @@ def shell_layout(G, nlist=None, rotate=None, scale=1, center=None, dim=2): import numpy as np if dim != 2: - raise ValueError('can only handle 2 dimensions') + raise ValueError("can only handle 2 dimensions") G, center = _process_params(G, center, dim) @@ -248,8 +251,10 @@ def shell_layout(G, nlist=None, rotate=None, scale=1, center=None, dim=2): npos = {} for nodes in nlist: # Discard the last angle (endpoint=False) since 2*pi matches 0 radians - theta = np.linspace(0, 2 * np.pi, len(nodes), - endpoint=False, dtype=np.float32) + first_theta + theta = ( + np.linspace(0, 2 * np.pi, len(nodes), endpoint=False, dtype=np.float32) + + first_theta + ) pos = radius * np.column_stack([np.cos(theta), np.sin(theta)]) + center npos.update(zip(nodes, pos)) radius += radius_bump @@ -258,8 +263,9 @@ def shell_layout(G, nlist=None, rotate=None, scale=1, center=None, dim=2): return npos -def bipartite_layout(G, nodes, align='vertical', - scale=1, center=None, aspect_ratio=4/3): +def bipartite_layout( + G, nodes, align="vertical", scale=1, center=None, aspect_ratio=4 / 3 +): """Position nodes in two straight lines. Parameters @@ -309,13 +315,13 @@ def bipartite_layout(G, nodes, align='vertical', height = 1 width = aspect_ratio * height - offset = (width/2, height/2) + offset = (width / 2, height / 2) top = set(nodes) bottom = set(G) - top nodes = list(top) + list(bottom) - if align == 'vertical': + if align == "vertical": left_xs = np.repeat(0, len(top)) right_xs = np.repeat(width, len(bottom)) left_ys = np.linspace(0, height, len(top)) @@ -329,7 +335,7 @@ def bipartite_layout(G, nodes, align='vertical', pos = dict(zip(nodes, pos)) return pos - if align == 'horizontal': + if align == "horizontal": top_ys = np.repeat(height, len(top)) bottom_ys = np.repeat(0, len(bottom)) top_xs = np.linspace(0, width, len(top)) @@ -343,22 +349,24 @@ def bipartite_layout(G, nodes, align='vertical', pos = dict(zip(nodes, pos)) return pos - msg = 'align must be either vertical or horizontal.' + msg = "align must be either vertical or horizontal." 
raise ValueError(msg) @random_state(10) -def fruchterman_reingold_layout(G, - k=None, - pos=None, - fixed=None, - iterations=50, - threshold=1e-4, - weight='weight', - scale=1, - center=None, - dim=2, - seed=None): +def fruchterman_reingold_layout( + G, + k=None, + pos=None, + fixed=None, + iterations=50, + threshold=1e-4, + weight="weight", + scale=1, + center=None, + dim=2, + seed=None, +): """Position nodes using Fruchterman-Reingold force-directed algorithm. The algorithm simulates a force-directed representation of the network @@ -444,10 +452,10 @@ def fruchterman_reingold_layout(G, if fixed is not None: if pos is None: - raise ValueError('nodes are fixed without positions given') + raise ValueError("nodes are fixed without positions given") for node in fixed: if node not in pos: - raise ValueError('nodes are fixed without positions given') + raise ValueError("nodes are fixed without positions given") nfixed = {node: i for i, node in enumerate(G)} fixed = np.asarray([nfixed[node] for node in fixed]) @@ -474,22 +482,23 @@ def fruchterman_reingold_layout(G, # Sparse matrix if len(G) < 500: # sparse solver for large graphs raise ValueError - A = nx.to_scipy_sparse_matrix(G, weight=weight, dtype='f') + A = nx.to_scipy_sparse_matrix(G, weight=weight, dtype="f") if k is None and fixed is not None: # We must adjust k by domain size for layouts not near 1x1 nnodes, _ = A.shape k = dom_size / np.sqrt(nnodes) - pos = _sparse_fruchterman_reingold(A, k, pos_arr, fixed, - iterations, threshold, - dim, seed) + pos = _sparse_fruchterman_reingold( + A, k, pos_arr, fixed, iterations, threshold, dim, seed + ) except ValueError: A = nx.to_numpy_array(G, weight=weight) if k is None and fixed is not None: # We must adjust k by domain size for layouts not near 1x1 nnodes, _ = A.shape k = dom_size / np.sqrt(nnodes) - pos = _fruchterman_reingold(A, k, pos_arr, fixed, iterations, - threshold, dim, seed) + pos = _fruchterman_reingold( + A, k, pos_arr, fixed, iterations, threshold, dim, seed + ) if fixed is None and scale is not None: pos = rescale_layout(pos, scale=scale) + center pos = dict(zip(G, pos)) @@ -500,8 +509,9 @@ spring_layout = fruchterman_reingold_layout @random_state(7) -def _fruchterman_reingold(A, k=None, pos=None, fixed=None, iterations=50, - threshold=1e-4, dim=2, seed=None): +def _fruchterman_reingold( + A, k=None, pos=None, fixed=None, iterations=50, threshold=1e-4, dim=2, seed=None +): # Position nodes in adjacency matrix A using Fruchterman-Reingold # Entry point for NetworkX graph is fruchterman_reingold_layout() import numpy as np @@ -542,13 +552,13 @@ def _fruchterman_reingold(A, k=None, pos=None, fixed=None, iterations=50, # enforce minimum distance of 0.01 np.clip(distance, 0.01, None, out=distance) # displacement "force" - displacement = np.einsum('ijk,ij->ik', - delta, - (k * k / distance**2 - A * distance / k)) + displacement = np.einsum( + "ijk,ij->ik", delta, (k * k / distance ** 2 - A * distance / k) + ) # update positions length = np.linalg.norm(displacement, axis=-1) length = np.where(length < 0.01, 0.1, length) - delta_pos = np.einsum('ij,i->ij', displacement, t / length) + delta_pos = np.einsum("ij,i->ij", displacement, t / length) if fixed is not None: # don't change positions of fixed nodes delta_pos[fixed] = 0.0 @@ -562,9 +572,9 @@ def _fruchterman_reingold(A, k=None, pos=None, fixed=None, iterations=50, @random_state(7) -def _sparse_fruchterman_reingold(A, k=None, pos=None, fixed=None, - iterations=50, threshold=1e-4, dim=2, - seed=None): +def 
_sparse_fruchterman_reingold( + A, k=None, pos=None, fixed=None, iterations=50, threshold=1e-4, dim=2, seed=None +): # Position nodes in adjacency matrix A using Fruchterman-Reingold # Entry point for NetworkX graph is fruchterman_reingold_layout() # Sparse version @@ -617,16 +627,17 @@ def _sparse_fruchterman_reingold(A, k=None, pos=None, fixed=None, # difference between this row's node position and all others delta = (pos[i] - pos).T # distance between points - distance = np.sqrt((delta**2).sum(axis=0)) + distance = np.sqrt((delta ** 2).sum(axis=0)) # enforce minimum distance of 0.01 distance = np.where(distance < 0.01, 0.01, distance) # the adjacency matrix row Ai = np.asarray(A.getrowview(i).toarray()) # displacement "force" - displacement[:, i] +=\ - (delta * (k * k / distance**2 - Ai * distance / k)).sum(axis=1) + displacement[:, i] += ( + delta * (k * k / distance ** 2 - Ai * distance / k) + ).sum(axis=1) # update positions - length = np.sqrt((displacement**2).sum(axis=0)) + length = np.sqrt((displacement ** 2).sum(axis=0)) length = np.where(length < 0.01, 0.1, length) delta_pos = (displacement * t / length).T pos += delta_pos @@ -638,12 +649,9 @@ def _sparse_fruchterman_reingold(A, k=None, pos=None, fixed=None, return pos -def kamada_kawai_layout(G, dist=None, - pos=None, - weight='weight', - scale=1, - center=None, - dim=2): +def kamada_kawai_layout( + G, dist=None, pos=None, weight="weight", scale=1, center=None, dim=2 +): """Position nodes using Kamada-Kawai path-length cost-function. Parameters @@ -727,11 +735,15 @@ def _kamada_kawai_solve(dist_mtx, pos_arr, dim): from scipy.optimize import minimize meanwt = 1e-3 - costargs = (np, 1 / (dist_mtx + np.eye(dist_mtx.shape[0]) * 1e-3), - meanwt, dim) + costargs = (np, 1 / (dist_mtx + np.eye(dist_mtx.shape[0]) * 1e-3), meanwt, dim) - optresult = minimize(_kamada_kawai_costfn, pos_arr.ravel(), - method='L-BFGS-B', args=costargs, jac=True) + optresult = minimize( + _kamada_kawai_costfn, + pos_arr.ravel(), + method="L-BFGS-B", + args=costargs, + jac=True, + ) return optresult.x.reshape((-1, dim)) @@ -743,16 +755,15 @@ def _kamada_kawai_costfn(pos_vec, np, invdist, meanweight, dim): delta = pos_arr[:, np.newaxis, :] - pos_arr[np.newaxis, :, :] nodesep = np.linalg.norm(delta, axis=-1) - direction = np.einsum('ijk,ij->ijk', - delta, - 1 / (nodesep + np.eye(nNodes) * 1e-3)) + direction = np.einsum("ijk,ij->ijk", delta, 1 / (nodesep + np.eye(nNodes) * 1e-3)) offset = nodesep * invdist - 1.0 offset[np.diag_indices(nNodes)] = 0 cost = 0.5 * np.sum(offset ** 2) - grad = (np.einsum('ij,ij,ijk->ik', invdist, offset, direction) - - np.einsum('ij,ij,ijk->jk', invdist, offset, direction)) + grad = np.einsum("ij,ij,ijk->ik", invdist, offset, direction) - np.einsum( + "ij,ij,ijk->jk", invdist, offset, direction + ) # Additional parabolic term to encourage mean position to be near origin: sumpos = np.sum(pos_arr, axis=0) @@ -762,7 +773,7 @@ def _kamada_kawai_costfn(pos_vec, np, invdist, meanweight, dim): return (cost, grad.ravel()) -def spectral_layout(G, weight='weight', scale=1, center=None, dim=2): +def spectral_layout(G, weight="weight", scale=1, center=None, dim=2): """Position nodes using the eigenvectors of the graph Laplacian. 
Using the unnormalized Laplacian, the layout shows possible clusters of @@ -823,7 +834,7 @@ def spectral_layout(G, weight='weight', scale=1, center=None, dim=2): # Sparse matrix if len(G) < 500: # dense solver is faster for small graphs raise ValueError - A = nx.to_scipy_sparse_matrix(G, weight=weight, dtype='d') + A = nx.to_scipy_sparse_matrix(G, weight=weight, dtype="d") # Symmetrize directed graphs if G.is_directed(): A = A + np.transpose(A) @@ -858,7 +869,7 @@ def _spectral(A, dim=2): eigenvalues, eigenvectors = np.linalg.eig(L) # sort and keep smallest nonzero - index = np.argsort(eigenvalues)[1:dim + 1] # 0 index is zero eigenvalue + index = np.argsort(eigenvalues)[1 : dim + 1] # 0 index is zero eigenvalue return np.real(eigenvectors[:, index]) @@ -885,7 +896,7 @@ def _sparse_spectral(A, dim=2): # number of Lanczos vectors for ARPACK solver.What is the right scaling? ncv = max(2 * k + 1, int(np.sqrt(nnodes))) # return smallest k eigenvalues and eigenvectors - eigenvalues, eigenvectors = eigsh(L, k, which='SM', ncv=ncv) + eigenvalues, eigenvectors = eigsh(L, k, which="SM", ncv=ncv) index = np.argsort(eigenvalues)[1:k] # 0 index is zero eigenvalue return np.real(eigenvectors[:, index]) @@ -932,7 +943,7 @@ def planar_layout(G, scale=1, center=None, dim=2): import numpy as np if dim != 2: - raise ValueError('can only handle 2 dimensions') + raise ValueError("can only handle 2 dimensions") G, center = _process_params(G, center, dim) @@ -953,8 +964,7 @@ def planar_layout(G, scale=1, center=None, dim=2): return dict(zip(node_list, pos)) -def spiral_layout(G, scale=1, center=None, dim=2, - resolution=0.35, equidistant=False): +def spiral_layout(G, scale=1, center=None, dim=2, resolution=0.35, equidistant=False): """Position nodes in a spiral layout. Parameters @@ -995,7 +1005,7 @@ def spiral_layout(G, scale=1, center=None, dim=2, import numpy as np if dim != 2: - raise ValueError('can only handle 2 dimensions') + raise ValueError("can only handle 2 dimensions") G, center = _process_params(G, center, dim) @@ -1034,8 +1044,7 @@ def spiral_layout(G, scale=1, center=None, dim=2, return pos -def multipartite_layout(G, subset_key='subset', align='vertical', - scale=1, center=None): +def multipartite_layout(G, subset_key="subset", align="vertical", scale=1, center=None): """Position nodes in layers of straight lines. Parameters @@ -1091,13 +1100,13 @@ def multipartite_layout(G, subset_key='subset', align='vertical', pos = None nodes = [] - if align == 'vertical': + if align == "vertical": width = len(layers) for i, layer in layers.items(): height = len(layer) xs = np.repeat(i, height) ys = np.arange(0, height, dtype=float) - offset = ((width-1)/2, (height-1)/2) + offset = ((width - 1) / 2, (height - 1) / 2) layer_pos = np.column_stack([xs, ys]) - offset if pos is None: pos = layer_pos @@ -1108,13 +1117,13 @@ def multipartite_layout(G, subset_key='subset', align='vertical', pos = dict(zip(nodes, pos)) return pos - if align == 'horizontal': + if align == "horizontal": height = len(layers) for i, layer in layers.items(): width = len(layer) xs = np.arange(0, width, dtype=float) ys = np.repeat(i, width) - offset = ((width-1)/2, (height-1)/2) + offset = ((width - 1) / 2, (height - 1) / 2) layer_pos = np.column_stack([xs, ys]) - offset if pos is None: pos = layer_pos @@ -1125,9 +1134,10 @@ def multipartite_layout(G, subset_key='subset', align='vertical', pos = dict(zip(nodes, pos)) return pos - msg = 'align must be either vertical or horizontal.' + msg = "align must be either vertical or horizontal." 
raise ValueError(msg) + def rescale_layout(pos, scale=1): """Returns scaled position array to (-scale, scale) in all axes. diff --git a/networkx/drawing/nx_agraph.py b/networkx/drawing/nx_agraph.py index db619386..aaa399ea 100644 --- a/networkx/drawing/nx_agraph.py +++ b/networkx/drawing/nx_agraph.py @@ -19,11 +19,15 @@ import os import tempfile import networkx as nx -__all__ = ['from_agraph', 'to_agraph', - 'write_dot', 'read_dot', - 'graphviz_layout', - 'pygraphviz_layout', - 'view_pygraphviz'] +__all__ = [ + "from_agraph", + "to_agraph", + "write_dot", + "read_dot", + "graphviz_layout", + "pygraphviz_layout", + "view_pygraphviz", +] def from_agraph(A, create_using=None): @@ -88,16 +92,16 @@ def from_agraph(A, create_using=None): str_attr = {str(k): v for k, v in attr.items()} if not N.is_multigraph(): if e.name is not None: - str_attr['key'] = e.name + str_attr["key"] = e.name N.add_edge(u, v, **str_attr) else: N.add_edge(u, v, key=e.name, **str_attr) # add default attributes for graph, nodes, and edges # hang them on N.graph_attr - N.graph['graph'] = dict(A.graph_attr) - N.graph['node'] = dict(A.node_attr) - N.graph['edge'] = dict(A.edge_attr) + N.graph["graph"] = dict(A.graph_attr) + N.graph["node"] = dict(A.node_attr) + N.graph["edge"] = dict(A.edge_attr) return N @@ -124,19 +128,19 @@ def to_agraph(N): try: import pygraphviz except ImportError as e: - raise ImportError('requires pygraphviz ' - 'http://pygraphviz.github.io/') from e + raise ImportError("requires pygraphviz " "http://pygraphviz.github.io/") from e directed = N.is_directed() strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph() A = pygraphviz.AGraph(name=N.name, strict=strict, directed=directed) # default graph attributes - A.graph_attr.update(N.graph.get('graph', {})) - A.node_attr.update(N.graph.get('node', {})) - A.edge_attr.update(N.graph.get('edge', {})) + A.graph_attr.update(N.graph.get("graph", {})) + A.node_attr.update(N.graph.get("node", {})) + A.edge_attr.update(N.graph.get("edge", {})) - A.graph_attr.update((k, v) for k, v in N.graph.items() - if k not in ('graph', 'node', 'edge')) + A.graph_attr.update( + (k, v) for k, v in N.graph.items() if k not in ("graph", "node", "edge") + ) # add nodes for n, nodedata in N.nodes(data=True): @@ -148,8 +152,7 @@ def to_agraph(N): # loop over edges if N.is_multigraph(): for u, v, key, edgedata in N.edges(data=True, keys=True): - str_edgedata = {k: str(v) for k, v in edgedata.items() - if k != 'key'} + str_edgedata = {k: str(v) for k, v in edgedata.items() if k != "key"} A.add_edge(u, v, key=str(key)) if edgedata is not None: a = A.get_edge(u, v) @@ -193,13 +196,14 @@ def read_dot(path): try: import pygraphviz except ImportError as e: - raise ImportError('read_dot() requires pygraphviz ' - 'http://pygraphviz.github.io/') from e + raise ImportError( + "read_dot() requires pygraphviz " "http://pygraphviz.github.io/" + ) from e A = pygraphviz.AGraph(file=path) return from_agraph(A) -def graphviz_layout(G, prog='neato', root=None, args=''): +def graphviz_layout(G, prog="neato", root=None, args=""): """Create node positions for G using Graphviz. Parameters @@ -230,7 +234,7 @@ def graphviz_layout(G, prog='neato', root=None, args=''): return pygraphviz_layout(G, prog=prog, root=root, args=args) -def pygraphviz_layout(G, prog='neato', root=None, args=''): +def pygraphviz_layout(G, prog="neato", root=None, args=""): """Create node positions for G using Graphviz. 
Parameters @@ -269,8 +273,7 @@ def pygraphviz_layout(G, prog='neato', root=None, args=''): try: import pygraphviz except ImportError as e: - raise ImportError('requires pygraphviz ' - 'http://pygraphviz.github.io/') from e + raise ImportError("requires pygraphviz " "http://pygraphviz.github.io/") from e if root is not None: args += f"-Groot={root}" A = to_agraph(G) @@ -279,7 +282,7 @@ def pygraphviz_layout(G, prog='neato', root=None, args=''): for n in G: node = pygraphviz.Node(A, n) try: - xs = node.attr["pos"].split(',') + xs = node.attr["pos"].split(",") node_pos[n] = tuple(float(x) for x in xs) except: print("no position for node", n) @@ -287,9 +290,8 @@ def pygraphviz_layout(G, prog='neato', root=None, args=''): return node_pos -@nx.utils.open_file(5, 'w+b') -def view_pygraphviz(G, edgelabel=None, prog='dot', args='', - suffix='', path=None): +@nx.utils.open_file(5, "w+b") +def view_pygraphviz(G, edgelabel=None, prog="dot", args="", suffix="", path=None): """Views the graph G using the specified layout algorithm. Parameters @@ -337,18 +339,20 @@ def view_pygraphviz(G, edgelabel=None, prog='dot', args='', # then they inherit no value and are set only if explicitly set. # to_agraph() uses these values. - attrs = ['edge', 'node', 'graph'] + attrs = ["edge", "node", "graph"] for attr in attrs: if attr not in G.graph: G.graph[attr] = {} # These are the default values. - edge_attrs = {'fontsize': '10'} - node_attrs = {'style': 'filled', - 'fillcolor': '#0000FF40', - 'height': '0.75', - 'width': '0.75', - 'shape': 'circle'} + edge_attrs = {"fontsize": "10"} + node_attrs = { + "style": "filled", + "fillcolor": "#0000FF40", + "height": "0.75", + "width": "0.75", + "shape": "circle", + } graph_attrs = {} def update_attrs(which, attrs): @@ -367,23 +371,25 @@ def view_pygraphviz(G, edgelabel=None, prog='dot', args='', del G.graph[which] # Update all default values - update_attrs('edge', edge_attrs) - update_attrs('node', node_attrs) - update_attrs('graph', graph_attrs) + update_attrs("edge", edge_attrs) + update_attrs("node", node_attrs) + update_attrs("graph", graph_attrs) # Convert to agraph, so we inherit default values A = to_agraph(G) # Remove the default values we added to the original graph. - clean_attrs('edge', edge_attrs) - clean_attrs('node', node_attrs) - clean_attrs('graph', graph_attrs) + clean_attrs("edge", edge_attrs) + clean_attrs("node", node_attrs) + clean_attrs("graph", graph_attrs) # If the user passed in an edgelabel, we update the labels for all edges. if edgelabel is not None: - if not hasattr(edgelabel, '__call__'): + if not hasattr(edgelabel, "__call__"): + def func(data): - return ''.join([" ", str(data[edgelabel]), " "]) + return "".join([" ", str(data[edgelabel]), " "]) + else: func = edgelabel @@ -392,14 +398,14 @@ def view_pygraphviz(G, edgelabel=None, prog='dot', args='', for u, v, key, data in G.edges(keys=True, data=True): # PyGraphviz doesn't convert the key to a string. 
See #339 edge = A.get_edge(u, v, str(key)) - edge.attr['label'] = str(func(data)) + edge.attr["label"] = str(func(data)) else: for u, v, data in G.edges(data=True): edge = A.get_edge(u, v) - edge.attr['label'] = str(func(data)) + edge.attr["label"] = str(func(data)) if path is None: - ext = 'png' + ext = "png" if suffix: suffix = f"_{suffix}.{ext}" else: @@ -414,7 +420,7 @@ def view_pygraphviz(G, edgelabel=None, prog='dot', args='', return path.name, A -def display_pygraphviz(graph, path, format=None, prog=None, args=''): +def display_pygraphviz(graph, path, format=None, prog=None, args=""): """Internal function to display a graph in OS dependent manner. Parameters diff --git a/networkx/drawing/nx_pydot.py b/networkx/drawing/nx_pydot.py index 3ebadcc3..f534cd95 100644 --- a/networkx/drawing/nx_pydot.py +++ b/networkx/drawing/nx_pydot.py @@ -17,11 +17,17 @@ from locale import getpreferredencoding from networkx.utils import open_file import networkx as nx -__all__ = ['write_dot', 'read_dot', 'graphviz_layout', 'pydot_layout', - 'to_pydot', 'from_pydot'] +__all__ = [ + "write_dot", + "read_dot", + "graphviz_layout", + "pydot_layout", + "to_pydot", + "from_pydot", +] -@open_file(1, mode='w') +@open_file(1, mode="w") def write_dot(G, path): """Write NetworkX graph G to Graphviz dot format on path. @@ -32,7 +38,7 @@ def write_dot(G, path): return -@open_file(0, mode='r') +@open_file(0, mode="r") def read_dot(path): """Returns a NetworkX :class:`MultiGraph` or :class:`MultiDiGraph` from the dot file with the passed path. @@ -56,6 +62,7 @@ def read_dot(path): :class:`MultiGraph`. """ import pydot + data = path.read() # List of one or more "pydot.Dot" instances deserialized from this file. @@ -93,7 +100,7 @@ def from_pydot(P): else: multiedges = True - if P.get_type() == 'graph': # undirected + if P.get_type() == "graph": # undirected if multiedges: N = nx.MultiGraph() else: @@ -106,13 +113,13 @@ def from_pydot(P): # assign defaults name = P.get_name().strip('"') - if name != '': + if name != "": N.name = name # add nodes, attributes to N.node_attr for p in P.get_node_list(): n = p.get_name().strip('"') - if n in ('node', 'graph', 'edge'): + if n in ("node", "graph", "edge"): continue N.add_node(n, **p.get_attributes()) @@ -127,13 +134,13 @@ def from_pydot(P): if isinstance(u, str): s.append(u.strip('"')) else: - for unodes in u['nodes']: + for unodes in u["nodes"]: s.append(unodes.strip('"')) if isinstance(v, str): d.append(v.strip('"')) else: - for vnodes in v['nodes']: + for vnodes in v["nodes"]: d.append(vnodes.strip('"')) for source_node in s: @@ -143,13 +150,13 @@ def from_pydot(P): # add default attributes for graph, nodes, edges pattr = P.get_attributes() if pattr: - N.graph['graph'] = pattr + N.graph["graph"] = pattr try: - N.graph['node'] = P.get_node_defaults()[0] + N.graph["node"] = P.get_node_defaults()[0] except (IndexError, TypeError): pass # N.graph['node']={} try: - N.graph['edge'] = P.get_edge_defaults()[0] + N.graph["edge"] = P.get_edge_defaults()[0] except (IndexError, TypeError): pass # N.graph['edge']={} return N @@ -176,24 +183,25 @@ def to_pydot(N): # set Graphviz graph type if N.is_directed(): - graph_type = 'digraph' + graph_type = "digraph" else: - graph_type = 'graph' + graph_type = "graph" strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph() name = N.name - graph_defaults = N.graph.get('graph', {}) - if name == '': - P = pydot.Dot('', graph_type=graph_type, strict=strict, - **graph_defaults) + graph_defaults = N.graph.get("graph", {}) + if name == "": + P = 
pydot.Dot("", graph_type=graph_type, strict=strict, **graph_defaults) else: - P = pydot.Dot(f'"{name}"', graph_type=graph_type, strict=strict, **graph_defaults) + P = pydot.Dot( + f'"{name}"', graph_type=graph_type, strict=strict, **graph_defaults + ) try: - P.set_node_defaults(**N.graph['node']) + P.set_node_defaults(**N.graph["node"]) except KeyError: pass try: - P.set_edge_defaults(**N.graph['edge']) + P.set_edge_defaults(**N.graph["edge"]) except KeyError: pass @@ -204,10 +212,8 @@ def to_pydot(N): if N.is_multigraph(): for u, v, key, edgedata in N.edges(data=True, keys=True): - str_edgedata = {k: str(v) for k, v in edgedata.items() - if k != 'key'} - edge = pydot.Edge(str(u), str(v), - key=str(key), **str_edgedata) + str_edgedata = {k: str(v) for k, v in edgedata.items() if k != "key"} + edge = pydot.Edge(str(u), str(v), key=str(key), **str_edgedata) P.add_edge(edge) else: @@ -218,7 +224,7 @@ def to_pydot(N): return P -def graphviz_layout(G, prog='neato', root=None): +def graphviz_layout(G, prog="neato", root=None): """Create node positions using Pydot and Graphviz. Returns a dictionary of positions keyed by node. @@ -251,7 +257,7 @@ def graphviz_layout(G, prog='neato', root=None): return pydot_layout(G=G, prog=prog, root=root) -def pydot_layout(G, prog='neato', root=None): +def pydot_layout(G, prog="neato", root=None): """Create node positions using :mod:`pydot` and Graphviz. Parameters @@ -290,6 +296,7 @@ def pydot_layout(G, prog='neato', root=None): """ import pydot + P = to_pydot(G) if root is not None: P.set("root", str(root)) @@ -306,7 +313,7 @@ def pydot_layout(G, prog='neato', root=None): print() print("To debug what happened try:") print("P = nx.nx_pydot.to_pydot(G)") - print("P.write_dot(\"file.dot\")") + print('P.write_dot("file.dot")') print(f"And then run {prog} on file.dot") return diff --git a/networkx/drawing/nx_pylab.py b/networkx/drawing/nx_pylab.py index 59c5b98c..eb7be33b 100644 --- a/networkx/drawing/nx_pylab.py +++ b/networkx/drawing/nx_pylab.py @@ -15,23 +15,31 @@ pygraphviz: http://pygraphviz.github.io/ """ from numbers import Number import networkx as nx -from networkx.drawing.layout import shell_layout, \ - circular_layout, kamada_kawai_layout, spectral_layout, \ - spring_layout, random_layout, planar_layout - -__all__ = ['draw', - 'draw_networkx', - 'draw_networkx_nodes', - 'draw_networkx_edges', - 'draw_networkx_labels', - 'draw_networkx_edge_labels', - 'draw_circular', - 'draw_kamada_kawai', - 'draw_random', - 'draw_spectral', - 'draw_spring', - 'draw_planar', - 'draw_shell'] +from networkx.drawing.layout import ( + shell_layout, + circular_layout, + kamada_kawai_layout, + spectral_layout, + spring_layout, + random_layout, + planar_layout, +) + +__all__ = [ + "draw", + "draw_networkx", + "draw_networkx_nodes", + "draw_networkx_edges", + "draw_networkx_labels", + "draw_networkx_edge_labels", + "draw_circular", + "draw_kamada_kawai", + "draw_random", + "draw_spectral", + "draw_spring", + "draw_planar", + "draw_shell", +] def draw(G, pos=None, ax=None, **kwds): @@ -103,15 +111,15 @@ def draw(G, pos=None, ax=None, **kwds): cf = plt.gcf() else: cf = ax.get_figure() - cf.set_facecolor('w') + cf.set_facecolor("w") if ax is None: if cf._axstack() is None: ax = cf.add_axes((0, 0, 1, 1)) else: ax = cf.gca() - if 'with_labels' not in kwds: - kwds['with_labels'] = 'labels' in kwds + if "with_labels" not in kwds: + kwds["with_labels"] = "labels" in kwds draw_networkx(G, pos=pos, ax=ax, **kwds) ax.set_axis_off() @@ -263,24 +271,59 @@ def draw_networkx(G, pos=None, 
arrows=True, with_labels=True, **kwds): print("Matplotlib unable to open display") raise - valid_node_kwds = ('nodelist', 'node_size', 'node_color', 'node_shape', - 'alpha', 'cmap', 'vmin', 'vmax', 'ax', 'linewidths', - 'edgecolors', 'label') - - valid_edge_kwds = ('edgelist', 'width', 'edge_color', 'style', 'alpha', - 'arrowstyle', 'arrowsize', 'edge_cmap', 'edge_vmin', - 'edge_vmax', 'ax', 'label', 'node_size', 'nodelist', - 'node_shape', 'connectionstyle', 'min_source_margin', - 'min_target_margin') - - valid_label_kwds = ('labels', 'font_size', 'font_color', 'font_family', - 'font_weight', 'alpha', 'bbox', 'ax', - 'horizontalalignment', 'verticalalignment') + valid_node_kwds = ( + "nodelist", + "node_size", + "node_color", + "node_shape", + "alpha", + "cmap", + "vmin", + "vmax", + "ax", + "linewidths", + "edgecolors", + "label", + ) + + valid_edge_kwds = ( + "edgelist", + "width", + "edge_color", + "style", + "alpha", + "arrowstyle", + "arrowsize", + "edge_cmap", + "edge_vmin", + "edge_vmax", + "ax", + "label", + "node_size", + "nodelist", + "node_shape", + "connectionstyle", + "min_source_margin", + "min_target_margin", + ) + + valid_label_kwds = ( + "labels", + "font_size", + "font_color", + "font_family", + "font_weight", + "alpha", + "bbox", + "ax", + "horizontalalignment", + "verticalalignment", + ) valid_kwds = valid_node_kwds + valid_edge_kwds + valid_label_kwds if any([k not in valid_kwds for k in kwds]): - invalid_args = ', '.join([k for k in kwds if k not in valid_kwds]) + invalid_args = ", ".join([k for k in kwds if k not in valid_kwds]) raise ValueError(f"Received invalid argument(s): {invalid_args}") node_kwds = {k: v for k, v in kwds.items() if k in valid_node_kwds} @@ -297,19 +340,22 @@ def draw_networkx(G, pos=None, arrows=True, with_labels=True, **kwds): plt.draw_if_interactive() -def draw_networkx_nodes(G, pos, - nodelist=None, - node_size=300, - node_color='#1f78b4', - node_shape='o', - alpha=None, - cmap=None, - vmin=None, - vmax=None, - ax=None, - linewidths=None, - edgecolors=None, - label=None): +def draw_networkx_nodes( + G, + pos, + nodelist=None, + node_size=300, + node_color="#1f78b4", + node_shape="o", + alpha=None, + cmap=None, + vmin=None, + vmax=None, + ax=None, + linewidths=None, + edgecolors=None, + label=None, +): """Draw the nodes of the graph G. This draws only the nodes of the graph G. 
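# draw_networkx() above is a thin dispatcher over the three helpers; they can
# also be called individually when each layer needs its own styling. A minimal
# sketch (the graph, seed, and styling values are illustrative):
import matplotlib.pyplot as plt
import networkx as nx

G = nx.karate_club_graph()
pos = nx.spring_layout(G, seed=42)
nx.draw_networkx_nodes(G, pos, node_size=50)
nx.draw_networkx_edges(G, pos, alpha=0.4)
nx.draw_networkx_labels(G, pos, font_size=6)
plt.show()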
@@ -387,6 +433,7 @@ def draw_networkx_nodes(G, pos, draw_networkx_edge_labels() """ from collections.abc import Iterable + try: import matplotlib.pyplot as plt from matplotlib.collections import PathCollection @@ -411,55 +458,62 @@ def draw_networkx_nodes(G, pos, except KeyError as e: raise nx.NetworkXError(f"Node {e} has no position.") from e except ValueError as e: - raise nx.NetworkXError('Bad value in node positions.') from e + raise nx.NetworkXError("Bad value in node positions.") from e if isinstance(alpha, Iterable): node_color = apply_alpha(node_color, alpha, nodelist, cmap, vmin, vmax) alpha = None - node_collection = ax.scatter(xy[:, 0], xy[:, 1], - s=node_size, - c=node_color, - marker=node_shape, - cmap=cmap, - vmin=vmin, - vmax=vmax, - alpha=alpha, - linewidths=linewidths, - edgecolors=edgecolors, - label=label) + node_collection = ax.scatter( + xy[:, 0], + xy[:, 1], + s=node_size, + c=node_color, + marker=node_shape, + cmap=cmap, + vmin=vmin, + vmax=vmax, + alpha=alpha, + linewidths=linewidths, + edgecolors=edgecolors, + label=label, + ) ax.tick_params( - axis='both', - which='both', + axis="both", + which="both", bottom=False, left=False, labelbottom=False, - labelleft=False) + labelleft=False, + ) node_collection.set_zorder(2) return node_collection -def draw_networkx_edges(G, pos, - edgelist=None, - width=1.0, - edge_color='k', - style='solid', - alpha=None, - arrowstyle='-|>', - arrowsize=10, - edge_cmap=None, - edge_vmin=None, - edge_vmax=None, - ax=None, - arrows=True, - label=None, - node_size=300, - nodelist=None, - node_shape="o", - connectionstyle=None, - min_source_margin=0, - min_target_margin=0): +def draw_networkx_edges( + G, + pos, + edgelist=None, + width=1.0, + edge_color="k", + style="solid", + alpha=None, + arrowstyle="-|>", + arrowsize=10, + edge_cmap=None, + edge_vmin=None, + edge_vmax=None, + ax=None, + arrows=True, + label=None, + node_size=300, + nodelist=None, + node_shape="o", + connectionstyle=None, + min_source_margin=0, + min_target_margin=0, +): """Draw the edges of the graph G. This draws only the edges of the graph G. @@ -597,17 +651,20 @@ def draw_networkx_edges(G, pos, # FancyArrowPatch handles color=None different from LineCollection if edge_color is None: - edge_color = 'k' + edge_color = "k" # set edge positions edge_pos = np.asarray([(pos[e[0]], pos[e[1]]) for e in edgelist]) # Check if edge_color is an array of floats and map to edge_cmap. 
# This is the only case handled differently from matplotlib - if np.iterable(edge_color) and (len(edge_color) == len(edge_pos)) \ - and np.alltrue([isinstance(c, Number) for c in edge_color]): + if ( + np.iterable(edge_color) + and (len(edge_color) == len(edge_pos)) + and np.alltrue([isinstance(c, Number) for c in edge_color]) + ): if edge_cmap is not None: - assert(isinstance(edge_cmap, Colormap)) + assert isinstance(edge_cmap, Colormap) else: edge_cmap = plt.get_cmap() if edge_vmin is None: @@ -617,15 +674,16 @@ def draw_networkx_edges(G, pos, color_normal = Normalize(vmin=edge_vmin, vmax=edge_vmax) edge_color = [edge_cmap(color_normal(e)) for e in edge_color] - if (not G.is_directed() or not arrows): - edge_collection = LineCollection(edge_pos, - colors=edge_color, - linewidths=width, - antialiaseds=(1,), - linestyle=style, - transOffset=ax.transData, - alpha=alpha - ) + if not G.is_directed() or not arrows: + edge_collection = LineCollection( + edge_pos, + colors=edge_color, + linewidths=width, + antialiaseds=(1,), + linestyle=style, + transOffset=ax.transData, + alpha=alpha, + ) edge_collection.set_cmap(edge_cmap) edge_collection.set_clim(edge_vmin, edge_vmax) @@ -690,16 +748,19 @@ def draw_networkx_edges(G, pos, else: line_width = width - arrow = FancyArrowPatch((x1, y1), (x2, y2), - arrowstyle=arrowstyle, - shrinkA=shrink_source, - shrinkB=shrink_target, - mutation_scale=mutation_scale, - color=arrow_color, - linewidth=line_width, - connectionstyle=connectionstyle, - linestyle=style, - zorder=1) # arrows go behind nodes + arrow = FancyArrowPatch( + (x1, y1), + (x2, y2), + arrowstyle=arrowstyle, + shrinkA=shrink_source, + shrinkB=shrink_target, + mutation_scale=mutation_scale, + color=arrow_color, + linewidth=line_width, + connectionstyle=connectionstyle, + linestyle=style, + zorder=1, + ) # arrows go behind nodes # There seems to be a bug in matplotlib to make collections of # FancyArrowPatch instances. Until fixed, the patches are added @@ -715,33 +776,37 @@ def draw_networkx_edges(G, pos, w = maxx - minx h = maxy - miny - padx, pady = 0.05 * w, 0.05 * h + padx, pady = 0.05 * w, 0.05 * h corners = (minx - padx, miny - pady), (maxx + padx, maxy + pady) ax.update_datalim(corners) ax.autoscale_view() ax.tick_params( - axis='both', - which='both', + axis="both", + which="both", bottom=False, left=False, labelbottom=False, - labelleft=False) + labelleft=False, + ) return arrow_collection -def draw_networkx_labels(G, pos, - labels=None, - font_size=12, - font_color='k', - font_family='sans-serif', - font_weight='normal', - alpha=None, - bbox=None, - horizontalalignment='center', - verticalalignment='center', - ax=None): +def draw_networkx_labels( + G, + pos, + labels=None, + font_size=12, + font_color="k", + font_family="sans-serif", + font_weight="normal", + alpha=None, + bbox=None, + horizontalalignment="center", + verticalalignment="center", + ax=None, +): """Draw node labels on the graph G. 
Parameters @@ -823,45 +888,51 @@ def draw_networkx_labels(G, pos, (x, y) = pos[n] if not isinstance(label, str): label = str(label) # this makes "1" and 1 labeled the same - t = ax.text(x, y, - label, - size=font_size, - color=font_color, - family=font_family, - weight=font_weight, - alpha=alpha, - horizontalalignment=horizontalalignment, - verticalalignment=verticalalignment, - transform=ax.transData, - bbox=bbox, - clip_on=True, - ) + t = ax.text( + x, + y, + label, + size=font_size, + color=font_color, + family=font_family, + weight=font_weight, + alpha=alpha, + horizontalalignment=horizontalalignment, + verticalalignment=verticalalignment, + transform=ax.transData, + bbox=bbox, + clip_on=True, + ) text_items[n] = t ax.tick_params( - axis='both', - which='both', + axis="both", + which="both", bottom=False, left=False, labelbottom=False, - labelleft=False) + labelleft=False, + ) return text_items -def draw_networkx_edge_labels(G, pos, - edge_labels=None, - label_pos=0.5, - font_size=10, - font_color='k', - font_family='sans-serif', - font_weight='normal', - alpha=None, - bbox=None, - horizontalalignment='center', - verticalalignment='center', - ax=None, - rotate=True): +def draw_networkx_edge_labels( + G, + pos, + edge_labels=None, + label_pos=0.5, + font_size=10, + font_color="k", + font_family="sans-serif", + font_weight="normal", + alpha=None, + bbox=None, + horizontalalignment="center", + verticalalignment="center", + ax=None, + rotate=True, +): """Draw edge labels. Parameters @@ -954,8 +1025,10 @@ def draw_networkx_edge_labels(G, pos, for (n1, n2), label in labels.items(): (x1, y1) = pos[n1] (x2, y2) = pos[n2] - (x, y) = (x1 * label_pos + x2 * (1.0 - label_pos), - y1 * label_pos + y2 * (1.0 - label_pos)) + (x, y) = ( + x1 * label_pos + x2 * (1.0 - label_pos), + y1 * label_pos + y2 * (1.0 - label_pos), + ) if rotate: # in degrees @@ -963,47 +1036,48 @@ def draw_networkx_edge_labels(G, pos, # make label orientation "right-side-up" if angle > 90: angle -= 180 - if angle < - 90: + if angle < -90: angle += 180 # transform data coordinate angle to screen coordinate angle xy = np.array((x, y)) - trans_angle = ax.transData.transform_angles(np.array((angle,)), - xy.reshape((1, 2)))[0] + trans_angle = ax.transData.transform_angles( + np.array((angle,)), xy.reshape((1, 2)) + )[0] else: trans_angle = 0.0 # use default box of white with white border if bbox is None: - bbox = dict(boxstyle='round', - ec=(1.0, 1.0, 1.0), - fc=(1.0, 1.0, 1.0), - ) + bbox = dict(boxstyle="round", ec=(1.0, 1.0, 1.0), fc=(1.0, 1.0, 1.0),) if not isinstance(label, str): label = str(label) # this makes "1" and 1 labeled the same - t = ax.text(x, y, - label, - size=font_size, - color=font_color, - family=font_family, - weight=font_weight, - alpha=alpha, - horizontalalignment=horizontalalignment, - verticalalignment=verticalalignment, - rotation=trans_angle, - transform=ax.transData, - bbox=bbox, - zorder=1, - clip_on=True, - ) + t = ax.text( + x, + y, + label, + size=font_size, + color=font_color, + family=font_family, + weight=font_weight, + alpha=alpha, + horizontalalignment=horizontalalignment, + verticalalignment=verticalalignment, + rotation=trans_angle, + transform=ax.transData, + bbox=bbox, + zorder=1, + clip_on=True, + ) text_items[(n1, n2)] = t ax.tick_params( - axis='both', - which='both', + axis="both", + which="both", bottom=False, left=False, labelbottom=False, - labelleft=False) + labelleft=False, + ) return text_items @@ -1106,9 +1180,9 @@ def draw_shell(G, **kwargs): with the exception of the pos 
parameter which is not used by this function. """ - nlist = kwargs.get('nlist', None) + nlist = kwargs.get("nlist", None) if nlist is not None: - del(kwargs['nlist']) + del kwargs["nlist"] draw(G, shell_layout(G, nlist=nlist), **kwargs) @@ -1188,8 +1262,7 @@ def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None): try: rgba_colors = np.array([colorConverter.to_rgba(colors)]) except ValueError: - rgba_colors = np.array([colorConverter.to_rgba(color) - for color in colors]) + rgba_colors = np.array([colorConverter.to_rgba(color) for color in colors]) # Set the final column of the rgba_colors to have the relevant alpha values try: # If alpha is longer than the number of colors, resize to the number of @@ -1201,7 +1274,7 @@ def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None): rgba_colors[1:, 0] = rgba_colors[0, 0] rgba_colors[1:, 1] = rgba_colors[0, 1] rgba_colors[1:, 2] = rgba_colors[0, 2] - rgba_colors[:, 3] = list(islice(cycle(alpha), len(rgba_colors))) + rgba_colors[:, 3] = list(islice(cycle(alpha), len(rgba_colors))) except TypeError: rgba_colors[:, -1] = alpha return rgba_colors diff --git a/networkx/drawing/tests/test_agraph.py b/networkx/drawing/tests/test_agraph.py index 47a53571..b55e8876 100644 --- a/networkx/drawing/tests/test_agraph.py +++ b/networkx/drawing/tests/test_agraph.py @@ -2,28 +2,27 @@ import os import tempfile import pytest -pygraphviz = pytest.importorskip('pygraphviz') +pygraphviz = pytest.importorskip("pygraphviz") -from networkx.testing import assert_edges_equal, assert_nodes_equal, \ - assert_graphs_equal + +from networkx.testing import assert_edges_equal, assert_nodes_equal, assert_graphs_equal import networkx as nx class TestAGraph: - def build_graph(self, G): - edges = [('A', 'B'), ('A', 'C'), ('A', 'C'), ('B', 'C'), ('A', 'D')] + edges = [("A", "B"), ("A", "C"), ("A", "C"), ("B", "C"), ("A", "D")] G.add_edges_from(edges) - G.add_node('E') - G.graph['metal'] = 'bronze' + G.add_node("E") + G.graph["metal"] = "bronze" return G def assert_equal(self, G1, G2): assert_nodes_equal(G1.nodes(), G2.nodes()) assert_edges_equal(G1.edges(), G2.edges()) - assert G1.graph['metal'] == G2.graph['metal'] + assert G1.graph["metal"] == G2.graph["metal"] def agraph_checks(self, G): G = self.build_graph(G) @@ -38,7 +37,7 @@ class TestAGraph: self.assert_equal(H, Hin) (fd, fname) = tempfile.mkstemp() - with open(fname, 'w') as fh: + with open(fname, "w") as fh: nx.drawing.nx_agraph.write_dot(H, fh) with open(fname) as fh: @@ -47,10 +46,10 @@ class TestAGraph: self.assert_equal(H, Hin) def test_from_agraph_name(self): - G = nx.Graph(name='test') + G = nx.Graph(name="test") A = nx.nx_agraph.to_agraph(G) H = nx.nx_agraph.from_agraph(A) - assert G.name == 'test' + assert G.name == "test" def test_undirected(self): self.agraph_checks(nx.Graph()) @@ -74,7 +73,7 @@ class TestAGraph: G = nx.Graph() G.add_edge(1, 2, weight=7) G.add_edge(2, 3, weight=8) - nx.nx_agraph.view_pygraphviz(G, edgelabel='weight') + nx.nx_agraph.view_pygraphviz(G, edgelabel="weight") def test_graph_with_reserved_keywords(self): # test attribute/keyword clash case for #1582 @@ -82,9 +81,9 @@ class TestAGraph: # edges: u,v G = nx.Graph() G = self.build_graph(G) - G.nodes['E']['n'] = 'keyword' - G.edges[('A', 'B')]['u'] = 'keyword' - G.edges[('A', 'B')]['v'] = 'keyword' + G.nodes["E"]["n"] = "keyword" + G.edges[("A", "B")]["u"] = "keyword" + G.edges[("A", "B")]["v"] = "keyword" A = nx.nx_agraph.to_agraph(G) def test_round_trip(self): @@ -95,16 +94,16 @@ class TestAGraph: AA = 
nx.nx_agraph.to_agraph(H) HH = nx.nx_agraph.from_agraph(AA) assert_graphs_equal(H, HH) - G.graph['graph'] = {} - G.graph['node'] = {} - G.graph['edge'] = {} + G.graph["graph"] = {} + G.graph["node"] = {} + G.graph["edge"] = {} assert_graphs_equal(G, HH) def test_2d_layout(self): G = nx.Graph() G = self.build_graph(G) G.graph["dimen"] = 2 - pos = nx.nx_agraph.pygraphviz_layout(G, prog='neato') + pos = nx.nx_agraph.pygraphviz_layout(G, prog="neato") pos = list(pos.values()) assert len(pos) == 5 assert len(pos[0]) == 2 @@ -113,7 +112,7 @@ class TestAGraph: G = nx.Graph() G = self.build_graph(G) G.graph["dimen"] = 3 - pos = nx.nx_agraph.pygraphviz_layout(G, prog='neato') + pos = nx.nx_agraph.pygraphviz_layout(G, prog="neato") pos = list(pos.values()) assert len(pos) == 5 assert len(pos[0]) == 3 diff --git a/networkx/drawing/tests/test_layout.py b/networkx/drawing/tests/test_layout.py index 04ff2d98..43c7b701 100644 --- a/networkx/drawing/tests/test_layout.py +++ b/networkx/drawing/tests/test_layout.py @@ -3,17 +3,17 @@ import networkx as nx from networkx.testing import almost_equal import pytest -numpy = pytest.importorskip('numpy') -test_smoke_empty_graphscipy = pytest.importorskip('scipy') +numpy = pytest.importorskip("numpy") +test_smoke_empty_graphscipy = pytest.importorskip("scipy") -class TestLayout: +class TestLayout: @classmethod def setup_class(cls): cls.Gi = nx.grid_2d_graph(5, 5) cls.Gs = nx.Graph() - nx.add_path(cls.Gs, 'abcdef') + nx.add_path(cls.Gs, "abcdef") cls.bigG = nx.grid_2d_graph(25, 25) # > 500 nodes for sparse @staticmethod @@ -37,6 +37,7 @@ class TestLayout: def test_spring_init_pos(self): # Tests GH #2448 import math + G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (2, 0), (2, 3)]) @@ -44,7 +45,7 @@ class TestLayout: fixed_pos = [0] pos = nx.fruchterman_reingold_layout(G, pos=init_pos, fixed=fixed_pos) has_nan = any(math.isnan(c) for coords in pos.values() for c in coords) - assert not has_nan, 'values should not be nan' + assert not has_nan, "values should not be nan" def test_smoke_empty_graph(self): G = [] @@ -119,7 +120,6 @@ class TestLayout: c = (2, 3, 5) sc(nx.kamada_kawai_layout(G, dim=3, scale=2, center=c), scale=2, center=c) - def test_planar_layout_non_planar_input(self): G = nx.complete_graph(9) pytest.raises(nx.NetworkXException, nx.planar_layout, G) @@ -145,7 +145,6 @@ class TestLayout: c = (0, 0, 0) sc(nx.kamada_kawai_layout(G, dim=3), scale=1, center=c) - def test_circular_planar_and_shell_dim_error(self): G = nx.path_graph(4) pytest.raises(ValueError, nx.circular_layout, G, dim=1) @@ -164,7 +163,7 @@ class TestLayout: assert pos.shape == (6, 2) def test_adjacency_interface_scipy(self): - A = nx.to_scipy_sparse_matrix(self.Gs, dtype='d') + A = nx.to_scipy_sparse_matrix(self.Gs, dtype="d") pos = nx.drawing.layout._sparse_fruchterman_reingold(A) assert pos.shape == (6, 2) pos = nx.drawing.layout._sparse_spectral(A) @@ -270,11 +269,9 @@ class TestLayout: for node in bottom: assert vpos[node][0] == bottom_x - vpos = nx.bipartite_layout(G, top, - align='horizontal', - center=(2, 2), - scale=2, - aspect_ratio=1) + vpos = nx.bipartite_layout( + G, top, align="horizontal", center=(2, 2), scale=2, aspect_ratio=1 + ) assert len(vpos) == len(G) top_y = vpos[list(top)[0]][1] @@ -284,7 +281,7 @@ class TestLayout: for node in bottom: assert vpos[node][1] == bottom_y - pytest.raises(ValueError, nx.bipartite_layout, G, top, align='foo') + pytest.raises(ValueError, nx.bipartite_layout, G, top, align="foo") def test_multipartite_layout(self): sizes = (0, 5, 7, 2, 8) @@ 
-295,23 +292,20 @@ class TestLayout: start = 0 for n in sizes: - end = start+n - assert all(vpos[start][0]==vpos[i][0] for i in range(start+1,end)) - start+=n - - vpos = nx.multipartite_layout(G, - align='horizontal', - scale=2, - center=(2,2)) + end = start + n + assert all(vpos[start][0] == vpos[i][0] for i in range(start + 1, end)) + start += n + + vpos = nx.multipartite_layout(G, align="horizontal", scale=2, center=(2, 2)) assert len(vpos) == len(G) start = 0 for n in sizes: - end = start+n - assert all(vpos[start][1]==vpos[i][1] for i in range(start+1,end)) - start+=n + end = start + n + assert all(vpos[start][1] == vpos[i][1] for i in range(start + 1, end)) + start += n - pytest.raises(ValueError, nx.multipartite_layout, G, align='foo') + pytest.raises(ValueError, nx.multipartite_layout, G, align="foo") def test_kamada_kawai_costfn_1d(self): costfn = nx.drawing.layout._kamada_kawai_costfn @@ -328,8 +322,7 @@ class TestLayout: def check_kamada_kawai_costfn(self, pos, invdist, meanwt, dim): costfn = nx.drawing.layout._kamada_kawai_costfn - cost, grad = costfn(pos.ravel(), numpy, invdist, - meanweight=meanwt, dim=dim) + cost, grad = costfn(pos.ravel(), numpy, invdist, meanweight=meanwt, dim=dim) expected_cost = 0.5 * meanwt * numpy.sum(numpy.sum(pos, axis=0) ** 2) for i in range(pos.shape[0]): @@ -346,37 +339,31 @@ class TestLayout: pos0 = pos.flatten() pos0[idx] += dx - cplus = costfn(pos0, numpy, invdist, - meanweight=meanwt, dim=pos.shape[1])[0] + cplus = costfn( + pos0, numpy, invdist, meanweight=meanwt, dim=pos.shape[1] + )[0] pos0[idx] -= 2 * dx - cminus = costfn(pos0, numpy, invdist, - meanweight=meanwt, dim=pos.shape[1])[0] + cminus = costfn( + pos0, numpy, invdist, meanweight=meanwt, dim=pos.shape[1] + )[0] - assert almost_equal(grad[idx], (cplus - cminus) / (2 * dx), - places=5) + assert almost_equal(grad[idx], (cplus - cminus) / (2 * dx), places=5) def test_kamada_kawai_costfn(self): - invdist = 1 / numpy.array([[0.1, 2.1, 1.7], - [2.1, 0.2, 0.6], - [1.7, 0.6, 0.3]]) + invdist = 1 / numpy.array([[0.1, 2.1, 1.7], [2.1, 0.2, 0.6], [1.7, 0.6, 0.3]]) meanwt = 0.3 # 2d - pos = numpy.array([[1.3, -3.2], - [2.7, -0.3], - [5.1, 2.5]]) + pos = numpy.array([[1.3, -3.2], [2.7, -0.3], [5.1, 2.5]]) self.check_kamada_kawai_costfn(pos, invdist, meanwt, 2) # 3d - pos = numpy.array([[0.9, 8.6, -8.7], - [-10, -0.5, -7.1], - [9.1, -8.1, 1.6]]) + pos = numpy.array([[0.9, 8.6, -8.7], [-10, -0.5, -7.1], [9.1, -8.1, 1.6]]) self.check_kamada_kawai_costfn(pos, invdist, meanwt, 3) - def test_spiral_layout(self): G = self.Gs @@ -397,7 +384,5 @@ class TestLayout: for d in range(1, len(distances_equidistant) - 1): # test similarity to two decimal places assert almost_equal( - distances_equidistant[d], - distances_equidistant[d+1], - 2 + distances_equidistant[d], distances_equidistant[d + 1], 2 ) diff --git a/networkx/drawing/tests/test_pydot.py b/networkx/drawing/tests/test_pydot.py index 29a1703e..04a2d79f 100644 --- a/networkx/drawing/tests/test_pydot.py +++ b/networkx/drawing/tests/test_pydot.py @@ -5,23 +5,24 @@ import networkx as nx from networkx.testing import assert_graphs_equal import pytest -pydot = pytest.importorskip('pydot') + +pydot = pytest.importorskip("pydot") class TestPydot: def pydot_checks(self, G, prog): - ''' + """ Validate :mod:`pydot`-based usage of the passed NetworkX graph with the passed basename of an external GraphViz command (e.g., `dot`, `neato`). - ''' + """ # Set the name of this graph to... "G". Failing to do so will # subsequently trip an assertion expecting this name. 
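# In miniature, the round trip this helper exercises (assuming pydot is
# installed; the graph contents are illustrative):
#
#     G = nx.Graph(name="G")
#     G.add_edge("A", "B")
#     P = nx.nx_pydot.to_pydot(G)    # NetworkX -> pydot.Dot
#     H = nx.nx_pydot.from_pydot(P)  # back again; node names stay strings
#     assert sorted(H.nodes()) == ["A", "B"]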
- G.graph['name'] = 'G' + G.graph["name"] = "G" # Add arbitrary nodes and edges to the passed empty graph. - G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'C'), ('A', 'D')]) - G.add_node('E') + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("A", "D")]) + G.add_node("E") # Validate layout of this graph with the passed GraphViz command. graph_layout = nx.nx_pydot.pydot_layout(G, prog=prog) @@ -41,7 +42,7 @@ class TestPydot: P.write_raw(fname) # Deserialize a list of new "pydot.Dot" instances back from this file. - Pin_list = pydot.graph_from_dot_file(path=fname, encoding='utf-8') + Pin_list = pydot.graph_from_dot_file(path=fname, encoding="utf-8") # Validate this file to contain only one graph. assert len(Pin_list) == 1 @@ -59,12 +60,12 @@ class TestPydot: assert n1 == n2 # Sorted list of all edges in the original "pydot.Dot" instance. - e1 = sorted([ - (e.get_source(), e.get_destination()) for e in P.get_edge_list()]) + e1 = sorted([(e.get_source(), e.get_destination()) for e in P.get_edge_list()]) # Sorted list of all edges in the original "pydot.Dot" instance. - e2 = sorted([(e.get_source(), e.get_destination()) - for e in Pin.get_edge_list()]) + e2 = sorted( + [(e.get_source(), e.get_destination()) for e in Pin.get_edge_list()] + ) # Validate these instances to contain the same edges. assert e1 == e2 @@ -77,15 +78,15 @@ class TestPydot: assert_graphs_equal(G, Hin) def test_undirected(self): - self.pydot_checks(nx.Graph(), prog='neato') + self.pydot_checks(nx.Graph(), prog="neato") def test_directed(self): - self.pydot_checks(nx.DiGraph(), prog='dot') + self.pydot_checks(nx.DiGraph(), prog="dot") def test_read_write(self): G = nx.MultiGraph() - G.graph['name'] = 'G' - G.add_edge('1', '2', key='0') # read assumes strings + G.graph["name"] = "G" + G.add_edge("1", "2", key="0") # read assumes strings fh = StringIO() nx.nx_pydot.write_dot(G, fh) fh.seek(0) diff --git a/networkx/drawing/tests/test_pylab.py b/networkx/drawing/tests/test_pylab.py index d9186d0f..7a07acd9 100644 --- a/networkx/drawing/tests/test_pylab.py +++ b/networkx/drawing/tests/test_pylab.py @@ -3,41 +3,38 @@ import os import itertools import pytest -mpl = pytest.importorskip('matplotlib') -mpl.use('PS') -plt = pytest.importorskip('matplotlib.pyplot') -plt.rcParams['text.usetex'] = False +mpl = pytest.importorskip("matplotlib") +mpl.use("PS") +plt = pytest.importorskip("matplotlib.pyplot") +plt.rcParams["text.usetex"] = False import networkx as nx class TestPylab: - @classmethod def setup_class(cls): cls.G = nx.barbell_graph(4, 6) def test_draw(self): try: - functions = [nx.draw_circular, - nx.draw_kamada_kawai, - nx.draw_planar, - nx.draw_random, - nx.draw_spectral, - nx.draw_spring, - nx.draw_shell] - options = [{ - 'node_color': 'black', - 'node_size': 100, - 'width': 3, - }] + functions = [ + nx.draw_circular, + nx.draw_kamada_kawai, + nx.draw_planar, + nx.draw_random, + nx.draw_spectral, + nx.draw_spring, + nx.draw_shell, + ] + options = [{"node_color": "black", "node_size": 100, "width": 3,}] for function, option in itertools.product(functions, options): function(self.G, **option) - plt.savefig('test.ps') + plt.savefig("test.ps") finally: try: - os.unlink('test.ps') + os.unlink("test.ps") except OSError: pass @@ -45,17 +42,18 @@ class TestPylab: try: nlist = [list(range(4)), list(range(4, 10)), list(range(10, 14))] nx.draw_shell(self.G, nlist=nlist) - plt.savefig('test.ps') + plt.savefig("test.ps") finally: try: - os.unlink('test.ps') + os.unlink("test.ps") except OSError: pass def test_edge_colormap(self): 
colors = range(self.G.number_of_edges()) - nx.draw_spring(self.G, edge_color=colors, width=4, - edge_cmap=plt.cm.Blues, with_labels=True) + nx.draw_spring( + self.G, edge_color=colors, width=4, edge_cmap=plt.cm.Blues, with_labels=True + ) plt.show() def test_arrows(self): @@ -68,54 +66,75 @@ class TestPylab: nx.draw_networkx_nodes(G, pos, node_color=[(1.0, 1.0, 0.2, 0.5)]) nx.draw_networkx_labels(G, pos) # edge with default color and width - nx.draw_networkx_edges(G, pos, edgelist=[(0, 1)], - width=None, - edge_color=None) + nx.draw_networkx_edges( + G, pos, edgelist=[(0, 1)], width=None, edge_color=None + ) # edges with global color strings and widths in lists - nx.draw_networkx_edges(G, pos, edgelist=[(0, 2), (0, 3)], - width=[3], - edge_color=['r']) + nx.draw_networkx_edges( + G, pos, edgelist=[(0, 2), (0, 3)], width=[3], edge_color=["r"] + ) # edges with color strings and widths for each edge - nx.draw_networkx_edges(G, pos, edgelist=[(0, 2), (0, 3)], - width=[1, 3], - edge_color=['r', 'b']) + nx.draw_networkx_edges( + G, pos, edgelist=[(0, 2), (0, 3)], width=[1, 3], edge_color=["r", "b"] + ) # edges with fewer color strings and widths than edges - nx.draw_networkx_edges(G, pos, - edgelist=[(1, 2), (1, 3), (2, 3), (3, 4)], - width=[1, 3], - edge_color=['g', 'm', 'c']) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(1, 2), (1, 3), (2, 3), (3, 4)], + width=[1, 3], + edge_color=["g", "m", "c"], + ) # edges with more color strings and widths than edges - nx.draw_networkx_edges(G, pos, edgelist=[(3, 4)], - width=[1, 2, 3, 4], - edge_color=['r', 'b', 'g', 'k']) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(3, 4)], + width=[1, 2, 3, 4], + edge_color=["r", "b", "g", "k"], + ) # with rgb tuple and 3 edges - is interpreted with cmap - nx.draw_networkx_edges(G, pos, edgelist=[(4, 5), (5, 6), (6, 7)], - edge_color=(1.0, 0.4, 0.3)) + nx.draw_networkx_edges( + G, pos, edgelist=[(4, 5), (5, 6), (6, 7)], edge_color=(1.0, 0.4, 0.3) + ) # with rgb tuple in list - nx.draw_networkx_edges(G, pos, edgelist=[(7, 8), (8, 9)], - edge_color=[(0.4, 1.0, 0.0)]) + nx.draw_networkx_edges( + G, pos, edgelist=[(7, 8), (8, 9)], edge_color=[(0.4, 1.0, 0.0)] + ) # with rgba tuple and 4 edges - is interpretted with cmap - nx.draw_networkx_edges(G, pos, edgelist=[(9, 10), (10, 11), - (10, 12), (10, 13)], - edge_color=(0.0, 1.0, 1.0, 0.5)) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(9, 10), (10, 11), (10, 12), (10, 13)], + edge_color=(0.0, 1.0, 1.0, 0.5), + ) # with rgba tuple in list - nx.draw_networkx_edges(G, pos, edgelist=[(9, 10), (10, 11), - (10, 12), (10, 13)], - edge_color=[(0.0, 1.0, 1.0, 0.5)]) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(9, 10), (10, 11), (10, 12), (10, 13)], + edge_color=[(0.0, 1.0, 1.0, 0.5)], + ) # with color string and global alpha - nx.draw_networkx_edges(G, pos, edgelist=[(11, 12), (11, 13)], - edge_color='purple', alpha=0.2) + nx.draw_networkx_edges( + G, pos, edgelist=[(11, 12), (11, 13)], edge_color="purple", alpha=0.2 + ) # with color string in a list - nx.draw_networkx_edges(G, pos, edgelist=[(11, 12), (11, 13)], - edge_color=['purple']) + nx.draw_networkx_edges( + G, pos, edgelist=[(11, 12), (11, 13)], edge_color=["purple"] + ) # with single edge and hex color string - nx.draw_networkx_edges(G, pos, edgelist=[(12, 13)], - edge_color='#1f78b4f0') + nx.draw_networkx_edges(G, pos, edgelist=[(12, 13)], edge_color="#1f78b4f0") # edge_color as numeric using vmin, vmax - nx.draw_networkx_edges(G, pos, edgelist=[(7, 8), (8, 9)], - edge_color=[0.2, 0.5], - edge_vmin=0.1, 
edge_vmax=0.6) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(7, 8), (8, 9)], + edge_color=[0.2, 0.5], + edge_vmin=0.1, + edge_vmax=0.6, + ) plt.show() @@ -123,41 +142,57 @@ class TestPylab: G = nx.cubical_graph() pos = nx.spring_layout(G) # positions for all nodes # nodes - nx.draw_networkx_nodes(G, pos, - nodelist=[0, 1, 2, 3], - node_color='r', - node_size=500, - alpha=0.75) - nx.draw_networkx_nodes(G, pos, - nodelist=[4, 5, 6, 7], - node_color='b', - node_size=500, - alpha=[0.25, 0.5, 0.75, 1.0]) + nx.draw_networkx_nodes( + G, pos, nodelist=[0, 1, 2, 3], node_color="r", node_size=500, alpha=0.75 + ) + nx.draw_networkx_nodes( + G, + pos, + nodelist=[4, 5, 6, 7], + node_color="b", + node_size=500, + alpha=[0.25, 0.5, 0.75, 1.0], + ) # edges nx.draw_networkx_edges(G, pos, width=1.0, alpha=0.5) - nx.draw_networkx_edges(G, pos, - edgelist=[(0, 1), (1, 2), (2, 3), (3, 0)], - width=8, alpha=0.5, edge_color='r') - nx.draw_networkx_edges(G, pos, - edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)], - width=8, alpha=0.5, edge_color='b') - nx.draw_networkx_edges(G, pos, - edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)], - min_source_margin=0.5, min_target_margin=0.75, - width=8, edge_color='b') + nx.draw_networkx_edges( + G, + pos, + edgelist=[(0, 1), (1, 2), (2, 3), (3, 0)], + width=8, + alpha=0.5, + edge_color="r", + ) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)], + width=8, + alpha=0.5, + edge_color="b", + ) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)], + min_source_margin=0.5, + min_target_margin=0.75, + width=8, + edge_color="b", + ) # some math labels labels = {} - labels[0] = r'$a$' - labels[1] = r'$b$' - labels[2] = r'$c$' - labels[3] = r'$d$' - labels[4] = r'$\alpha$' - labels[5] = r'$\beta$' - labels[6] = r'$\gamma$' - labels[7] = r'$\delta$' + labels[0] = r"$a$" + labels[1] = r"$b$" + labels[2] = r"$c$" + labels[3] = r"$d$" + labels[4] = r"$\alpha$" + labels[5] = r"$\beta$" + labels[6] = r"$\gamma$" + labels[7] = r"$\delta$" nx.draw_networkx_labels(G, pos, labels, font_size=16) nx.draw_networkx_edge_labels(G, pos, edge_labels=None, rotate=False) - nx.draw_networkx_edge_labels(G, pos, edge_labels={(4, 5): '4-5'}) + nx.draw_networkx_edge_labels(G, pos, edge_labels={(4, 5): "4-5"}) plt.show() def test_axes(self): @@ -171,6 +206,7 @@ class TestPylab: def test_draw_empty_nodes_return_values(self): # See Issue #3833 from matplotlib.collections import PathCollection, LineCollection + G = nx.Graph([(1, 2), (2, 3)]) DG = nx.DiGraph([(1, 2), (2, 3)]) pos = nx.circular_layout(G) @@ -178,12 +214,15 @@ class TestPylab: assert isinstance(nx.draw_networkx_nodes(DG, pos, nodelist=[]), PathCollection) # drawing empty edges either return an empty LineCollection or empty list. 
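# (With a DiGraph and arrows=True, draw_networkx_edges builds a plain list
# of FancyArrowPatch objects instead of a LineCollection, so an empty
# edgelist yields []; every other combination returns a LineCollection.)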
- assert isinstance(nx.draw_networkx_edges(G, pos, edgelist=[], arrows=True), - LineCollection) - assert isinstance(nx.draw_networkx_edges(G, pos, edgelist=[], arrows=False), - LineCollection) - assert isinstance(nx.draw_networkx_edges(DG, pos, edgelist=[], arrows=False), - LineCollection) + assert isinstance( + nx.draw_networkx_edges(G, pos, edgelist=[], arrows=True), LineCollection + ) + assert isinstance( + nx.draw_networkx_edges(G, pos, edgelist=[], arrows=False), LineCollection + ) + assert isinstance( + nx.draw_networkx_edges(DG, pos, edgelist=[], arrows=False), LineCollection + ) assert nx.draw_networkx_edges(DG, pos, edgelist=[], arrows=True) == [] def test_multigraph_edgelist_tuples(self): @@ -210,4 +249,4 @@ class TestPylab: def test_error_invalid_kwds(self): with pytest.raises(ValueError, match="Received invalid argument"): - nx.draw(self.G, foo='bar') + nx.draw(self.G, foo="bar") diff --git a/networkx/generators/atlas.py b/networkx/generators/atlas.py index 2fce3ad9..c5104e14 100644 --- a/networkx/generators/atlas.py +++ b/networkx/generators/atlas.py @@ -8,7 +8,7 @@ import os.path import networkx as nx -__all__ = ['graph_atlas', 'graph_atlas_g'] +__all__ = ["graph_atlas", "graph_atlas_g"] #: The total number of graphs in the atlas. #: @@ -51,7 +51,7 @@ THIS_DIR = os.path.dirname(os.path.abspath(__file__)) #: f.write(bytes(f'NODES {len(G)}\n', encoding='utf-8')) #: write_edgelist(G, f, data=False) #: -ATLAS_FILE = os.path.join(THIS_DIR, 'atlas.dat.gz') +ATLAS_FILE = os.path.join(THIS_DIR, "atlas.dat.gz") def _generate_graphs(): @@ -61,9 +61,9 @@ def _generate_graphs(): This function reads the file given in :data:`.ATLAS_FILE`. """ - with gzip.open(ATLAS_FILE, 'rb') as f: + with gzip.open(ATLAS_FILE, "rb") as f: line = f.readline() - while line and line.startswith(b'GRAPH'): + while line and line.startswith(b"GRAPH"): # The first two lines of each entry tell us the index of the # graph in the list and the number of nodes in the graph. # They look like this: @@ -78,11 +78,11 @@ def _generate_graphs(): # GRAPH line (or until the end of the file). 
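# A sketch of the atlas.dat record layout that this loop parses (the values
# are illustrative, not actual Atlas entries): two header lines per graph,
# then one whitespace-separated edge per line until the next GRAPH header.
#
#     GRAPH 12
#     NODES 4
#     0 1
#     1 2
#     2 3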
edgelist = [] line = f.readline() - while line and not line.startswith(b'GRAPH'): + while line and not line.startswith(b"GRAPH"): edgelist.append(line.rstrip()) line = f.readline() G = nx.Graph() - G.name = f'G{graph_index}' + G.name = f"G{graph_index}" G.add_nodes_from(range(num_nodes)) G.add_edges_from(tuple(map(int, e.split())) for e in edgelist) yield G @@ -122,7 +122,7 @@ def graph_atlas(i): """ if not (0 <= i < NUM_GRAPHS): - raise ValueError(f'index must be between 0 and {NUM_GRAPHS}') + raise ValueError(f"index must be between 0 and {NUM_GRAPHS}") return next(islice(_generate_graphs(), i, None)) diff --git a/networkx/generators/classic.py b/networkx/generators/classic.py index 075d2f2c..fadbc064 100644 --- a/networkx/generators/classic.py +++ b/networkx/generators/classic.py @@ -19,31 +19,34 @@ from itertools import accumulate from networkx.utils import nodes_or_number from networkx.utils import pairwise -__all__ = ['balanced_tree', - 'barbell_graph', - 'binomial_tree', - 'complete_graph', - 'complete_multipartite_graph', - 'circular_ladder_graph', - 'circulant_graph', - 'cycle_graph', - 'dorogovtsev_goltsev_mendes_graph', - 'empty_graph', - 'full_rary_tree', - 'ladder_graph', - 'lollipop_graph', - 'null_graph', - 'path_graph', - 'star_graph', - 'trivial_graph', - 'turan_graph', - 'wheel_graph'] +__all__ = [ + "balanced_tree", + "barbell_graph", + "binomial_tree", + "complete_graph", + "complete_multipartite_graph", + "circular_ladder_graph", + "circulant_graph", + "cycle_graph", + "dorogovtsev_goltsev_mendes_graph", + "empty_graph", + "full_rary_tree", + "ladder_graph", + "lollipop_graph", + "null_graph", + "path_graph", + "star_graph", + "trivial_graph", + "turan_graph", + "wheel_graph", +] # ------------------------------------------------------------------- # Some Classic Graphs # ------------------------------------------------------------------- + def _tree_edges(n, r): if n == 0: return @@ -162,11 +165,9 @@ def barbell_graph(m1, m2, create_using=None): """ if m1 < 2: - raise NetworkXError( - "Invalid graph description, m1 should be >=2") + raise NetworkXError("Invalid graph description, m1 should be >=2") if m2 < 0: - raise NetworkXError( - "Invalid graph description, m2 should be >=0") + raise NetworkXError("Invalid graph description, m2 should be >=0") # left barbell G = complete_graph(m1, create_using) @@ -178,8 +179,9 @@ def barbell_graph(m1, m2, create_using=None): if m2 > 1: G.add_edges_from(pairwise(range(m1, m1 + m2))) # right barbell - G.add_edges_from((u, v) for u in range(m1 + m2, 2 * m1 + m2) - for v in range(u + 1, 2 * m1 + m2)) + G.add_edges_from( + (u, v) for u in range(m1 + m2, 2 * m1 + m2) for v in range(u + 1, 2 * m1 + m2) + ) # connect it up G.add_edge(m1 - 1, m1) if m2 > 0: @@ -363,7 +365,7 @@ def dorogovtsev_goltsev_mendes_graph(n, create_using=None): G.add_edge(0, 1) if n == 0: return G - new_node = 2 # next node to be added + new_node = 2 # next node to be added for i in range(1, n + 1): # iterate over number of generations. 
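# A minimal sketch of the barbell_graph contract enforced earlier in this
# file (two complete graphs on m1 nodes joined by an m2-node path; m1 < 2
# triggers the NetworkXError shown above):
#
#     >>> import networkx as nx
#     >>> nx.barbell_graph(5, 2).number_of_nodes()   # 5 + 2 + 5
#     12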
last_generation_edges = list(G.edges()) number_of_edges_in_last_generation = len(last_generation_edges) @@ -454,7 +456,7 @@ def empty_graph(n=0, create_using=None, default=nx.Graph): """ if create_using is None: G = default() - elif hasattr(create_using, '_adj'): + elif hasattr(create_using, "_adj"): # create_using is a NetworkX style Graph create_using.clear() G = create_using @@ -518,11 +520,9 @@ def lollipop_graph(m, n, create_using=None): if isinstance(m, int): n_nodes = [len(m_nodes) + i for i in n_nodes] if M < 2: - raise NetworkXError( - "Invalid graph description, m should be >=2") + raise NetworkXError("Invalid graph description, m should be >=2") if N < 0: - raise NetworkXError( - "Invalid graph description, n should be >=0") + raise NetworkXError("Invalid graph description, n should be >=0") # the ball G = complete_graph(m_nodes, create_using) diff --git a/networkx/generators/cographs.py b/networkx/generators/cographs.py index 606b0348..e8763586 100644 --- a/networkx/generators/cographs.py +++ b/networkx/generators/cographs.py @@ -14,7 +14,7 @@ References import networkx as nx from networkx.utils import py_random_state -__all__ = ['random_cograph'] +__all__ = ["random_cograph"] @py_random_state(1) diff --git a/networkx/generators/community.py b/networkx/generators/community.py index a3e9ec66..0f987481 100644 --- a/networkx/generators/community.py +++ b/networkx/generators/community.py @@ -10,7 +10,10 @@ try: def zeta(x, q, tolerance): return _zeta(x, q) + + except ImportError: + def zeta(x, q, tolerance): """The Hurwitz zeta function, or the Riemann zeta function of two arguments. @@ -21,7 +24,7 @@ except ImportError: convergence, as decided by ``tolerance``. """ z = 0 - z_prev = -float('inf') + z_prev = -float("inf") k = 0 while abs(z - z_prev) > tolerance: z_prev = z @@ -29,11 +32,19 @@ except ImportError: k += 1 return z -__all__ = ['caveman_graph', 'connected_caveman_graph', - 'relaxed_caveman_graph', 'random_partition_graph', - 'planted_partition_graph', 'gaussian_random_partition_graph', - 'ring_of_cliques', 'windmill_graph', 'stochastic_block_model', - 'LFR_benchmark_graph'] + +__all__ = [ + "caveman_graph", + "connected_caveman_graph", + "relaxed_caveman_graph", + "random_partition_graph", + "planted_partition_graph", + "gaussian_random_partition_graph", + "ring_of_cliques", + "windmill_graph", + "stochastic_block_model", + "LFR_benchmark_graph", +] def caveman_graph(l, k): @@ -124,8 +135,9 @@ def connected_caveman_graph(l, k): Amer. J. Soc. 105, 493-527, 1999. """ if k < 2: - raise nx.NetworkXError('The size of cliques in a connected caveman graph ' - 'must be at least 2.') + raise nx.NetworkXError( + "The size of cliques in a connected caveman graph " "must be at least 2." 
+ ) G = nx.caveman_graph(l, k) for start in range(0, l * k, k): @@ -252,9 +264,15 @@ def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False): for r in range(num_blocks): p[r][r] = p_in - return stochastic_block_model(sizes, p, nodelist=None, seed=seed, - directed=directed, selfloops=False, - sparse=True) + return stochastic_block_model( + sizes, + p, + nodelist=None, + seed=seed, + directed=directed, + selfloops=False, + sparse=True, + ) @py_random_state(4) @@ -313,8 +331,7 @@ def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False): @py_random_state(6) -def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, - seed=None): +def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, seed=None): """Generate a Gaussian random partition graph. A Gaussian random partition graph is created by creating k partitions @@ -428,18 +445,19 @@ def ring_of_cliques(num_cliques, clique_size): simply adds the link without removing any link from the cliques. """ if num_cliques < 2: - raise nx.NetworkXError('A ring of cliques must have at least ' - 'two cliques') + raise nx.NetworkXError("A ring of cliques must have at least " "two cliques") if clique_size < 2: - raise nx.NetworkXError('The cliques must have at least two nodes') + raise nx.NetworkXError("The cliques must have at least two nodes") G = nx.Graph() for i in range(num_cliques): - edges = itertools.combinations(range(i * clique_size, i * clique_size + - clique_size), 2) + edges = itertools.combinations( + range(i * clique_size, i * clique_size + clique_size), 2 + ) G.add_edges_from(edges) - G.add_edge(i * clique_size + 1, (i + 1) * clique_size % - (num_cliques * clique_size)) + G.add_edge( + i * clique_size + 1, (i + 1) * clique_size % (num_cliques * clique_size) + ) return G @@ -481,21 +499,24 @@ def windmill_graph(n, k): are in the opposite order as the parameters of this method. """ if n < 2: - msg = 'A windmill graph must have at least two cliques' + msg = "A windmill graph must have at least two cliques" raise nx.NetworkXError(msg) if k < 2: - raise nx.NetworkXError('The cliques must have at least two nodes') + raise nx.NetworkXError("The cliques must have at least two nodes") - G = nx.disjoint_union_all(itertools.chain([nx.complete_graph(k)], - (nx.complete_graph(k - 1) - for _ in range(n - 1)))) + G = nx.disjoint_union_all( + itertools.chain( + [nx.complete_graph(k)], (nx.complete_graph(k - 1) for _ in range(n - 1)) + ) + ) G.add_edges_from((0, i) for i in range(k, G.number_of_nodes())) return G @py_random_state(3) -def stochastic_block_model(sizes, p, nodelist=None, seed=None, - directed=False, selfloops=False, sparse=True): +def stochastic_block_model( + sizes, p, nodelist=None, seed=None, directed=False, selfloops=False, sparse=True +): """Returns a stochastic block model graph. This model partitions the nodes in blocks of arbitrary sizes, and places @@ -612,17 +633,19 @@ def stochastic_block_model(sizes, p, nodelist=None, seed=None, block_iter = itertools.combinations_with_replacement(block_range, 2) # Split nodelist in a partition (list of sets). 
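# A minimal sketch of calling the block-model generator being edited here
# (block sizes and edge probabilities are illustrative; the "partition"
# graph attribute is filled in just below):
#
#     >>> import networkx as nx
#     >>> sizes = [75, 75, 300]
#     >>> probs = [[0.25, 0.05, 0.02],
#     ...          [0.05, 0.35, 0.07],
#     ...          [0.02, 0.07, 0.40]]
#     >>> G = nx.stochastic_block_model(sizes, probs, seed=0)
#     >>> len(G.graph["partition"])
#     3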
size_cumsum = [sum(sizes[0:x]) for x in range(0, len(sizes) + 1)] - g.graph['partition'] = [set(nodelist[size_cumsum[x]:size_cumsum[x + 1]]) - for x in range(0, len(size_cumsum) - 1)] + g.graph["partition"] = [ + set(nodelist[size_cumsum[x] : size_cumsum[x + 1]]) + for x in range(0, len(size_cumsum) - 1) + ] # Setup nodes and graph name - for block_id, nodes in enumerate(g.graph['partition']): + for block_id, nodes in enumerate(g.graph["partition"]): for node in nodes: g.add_node(node, block=block_id) g.name = "stochastic_block_model" # Test for edge existence - parts = g.graph['partition'] + parts = g.graph["partition"] for i, j in block_iter: if i == j: if directed: @@ -705,8 +728,7 @@ def _powerlaw_sequence(gamma, low, high, condition, length, max_iters, seed): # TODO Needs documentation. -def _generate_min_degree(gamma, average_degree, max_degree, tolerance, - max_iters): +def _generate_min_degree(gamma, average_degree, max_degree, tolerance, max_iters): """Returns a minimum degree from the given average degree.""" min_deg_top = max_degree min_deg_bot = 1 @@ -718,8 +740,7 @@ def _generate_min_degree(gamma, average_degree, max_degree, tolerance, raise nx.ExceededMaxIterations("Could not match average_degree") mid_avg_deg = 0 for x in range(int(min_deg_mid), max_degree + 1): - mid_avg_deg += (x ** (-gamma + 1)) / zeta(gamma, min_deg_mid, - tolerance) + mid_avg_deg += (x ** (-gamma + 1)) / zeta(gamma, min_deg_mid, tolerance) if mid_avg_deg > average_degree: min_deg_top = min_deg_mid min_deg_mid = (min_deg_top - min_deg_bot) / 2 + min_deg_bot @@ -778,15 +799,25 @@ def _generate_communities(degree_seq, community_sizes, mu, max_iters, seed): free.append(result[c].pop()) if not free: return result - msg = 'Could not assign communities; try increasing min_community' + msg = "Could not assign communities; try increasing min_community" raise nx.ExceededMaxIterations(msg) @py_random_state(11) -def LFR_benchmark_graph(n, tau1, tau2, mu, average_degree=None, - min_degree=None, max_degree=None, min_community=None, - max_community=None, tol=1.0e-7, max_iters=500, - seed=None): +def LFR_benchmark_graph( + n, + tau1, + tau2, + mu, + average_degree=None, + min_degree=None, + max_degree=None, + min_community=None, + max_community=None, + tol=1.0e-7, + max_iters=500, + seed=None, +): r"""Returns the LFR benchmark graph. This algorithm proceeds as follows: @@ -968,20 +999,24 @@ def LFR_benchmark_graph(n, tau1, tau2, mu, average_degree=None, elif not 0 < max_degree <= n: raise nx.NetworkXError("max_degree must be in the interval (0, n]") if not ((min_degree is None) ^ (average_degree is None)): - raise nx.NetworkXError("Must assign exactly one of min_degree and" - " average_degree") + raise nx.NetworkXError( + "Must assign exactly one of min_degree and" " average_degree" + ) if min_degree is None: - min_degree = _generate_min_degree(tau1, average_degree, max_degree, - tol, max_iters) + min_degree = _generate_min_degree( + tau1, average_degree, max_degree, tol, max_iters + ) # Generate a degree sequence with a power law distribution. low, high = min_degree, max_degree - def condition(seq): return sum(seq) % 2 == 0 + def condition(seq): + return sum(seq) % 2 == 0 + + def length(seq): + return len(seq) >= n - def length(seq): return len(seq) >= n - deg_seq = _powerlaw_sequence(tau1, low, high, condition, - length, max_iters, seed) + deg_seq = _powerlaw_sequence(tau1, low, high, condition, length, max_iters, seed) # Validate parameters for generating the community size sequence. 
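# A minimal sketch of an LFR call once validation passes (parameter values
# are illustrative; each node's community is stored in the "community"
# node attribute at the end of this function):
#
#     >>> import networkx as nx
#     >>> G = nx.LFR_benchmark_graph(250, 3, 1.5, 0.1,
#     ...                            average_degree=5, min_community=20, seed=10)
#     >>> communities = {frozenset(G.nodes[v]["community"]) for v in G}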
if min_community is None: @@ -999,11 +1034,13 @@ def LFR_benchmark_graph(n, tau1, tau2, mu, average_degree=None, # generate a valid community size sequence. low, high = min_community, max_community - def condition(seq): return sum(seq) == n + def condition(seq): + return sum(seq) == n + + def length(seq): + return sum(seq) >= n - def length(seq): return sum(seq) >= n - comms = _powerlaw_sequence(tau2, low, high, condition, - length, max_iters, seed) + comms = _powerlaw_sequence(tau2, low, high, condition, length, max_iters, seed) # Generate the communities based on the given degree sequence and # community sizes. @@ -1024,5 +1061,5 @@ def LFR_benchmark_graph(n, tau1, tau2, mu, average_degree=None, v = seed.choice(range(n)) if v not in c: G.add_edge(u, v) - G.nodes[u]['community'] = c + G.nodes[u]["community"] = c return G diff --git a/networkx/generators/degree_seq.py b/networkx/generators/degree_seq.py index 08326025..fe7ada03 100644 --- a/networkx/generators/degree_seq.py +++ b/networkx/generators/degree_seq.py @@ -11,13 +11,15 @@ from operator import itemgetter import networkx as nx from networkx.utils import random_weighted_sample, py_random_state -__all__ = ['configuration_model', - 'directed_configuration_model', - 'expected_degree_graph', - 'havel_hakimi_graph', - 'directed_havel_hakimi_graph', - 'degree_sequence_tree', - 'random_degree_sequence_graph'] +__all__ = [ + "configuration_model", + "directed_configuration_model", + "expected_degree_graph", + "havel_hakimi_graph", + "directed_havel_hakimi_graph", + "degree_sequence_tree", + "random_degree_sequence_graph", +] chaini = chain.from_iterable @@ -54,8 +56,9 @@ def _to_stublist(degree_sequence): return list(chaini([n] * d for n, d in enumerate(degree_sequence))) -def _configuration_model(deg_sequence, create_using, directed=False, - in_deg_sequence=None, seed=None): +def _configuration_model( + deg_sequence, create_using, directed=False, in_deg_sequence=None, seed=None +): """Helper function for generating either undirected or directed configuration model graphs. @@ -212,12 +215,12 @@ def configuration_model(deg_sequence, create_using=None, seed=None): """ if sum(deg_sequence) % 2 != 0: - msg = 'Invalid degree sequence: sum of degrees must be even, not odd' + msg = "Invalid degree sequence: sum of degrees must be even, not odd" raise nx.NetworkXError(msg) G = nx.empty_graph(0, create_using, default=nx.MultiGraph) if G.is_directed(): - raise nx.NetworkXNotImplemented('not implemented for directed graphs') + raise nx.NetworkXNotImplemented("not implemented for directed graphs") G = _configuration_model(deg_sequence, G, seed=seed) @@ -225,9 +228,9 @@ def configuration_model(deg_sequence, create_using=None, seed=None): @py_random_state(3) -def directed_configuration_model(in_degree_sequence, - out_degree_sequence, - create_using=None, seed=None): +def directed_configuration_model( + in_degree_sequence, out_degree_sequence, create_using=None, seed=None +): """Returns a directed_random graph with the given degree sequences. 
The configuration model generates a random directed pseudograph @@ -308,14 +311,19 @@ def directed_configuration_model(in_degree_sequence, """ if sum(in_degree_sequence) != sum(out_degree_sequence): - msg = 'Invalid degree sequences: sequences must have equal sums' + msg = "Invalid degree sequences: sequences must have equal sums" raise nx.NetworkXError(msg) if create_using is None: create_using = nx.MultiDiGraph - G = _configuration_model(out_degree_sequence, create_using, directed=True, - in_deg_sequence=in_degree_sequence, seed=seed) + G = _configuration_model( + out_degree_sequence, + create_using, + directed=True, + in_deg_sequence=in_degree_sequence, + seed=seed, + ) name = "directed configuration_model {} nodes {} edges" return G @@ -470,7 +478,7 @@ def havel_hakimi_graph(deg_sequence, create_using=None): and Factors Discrete Mathematics, 6(1), pp. 79-88 (1973) """ if not nx.is_graphical(deg_sequence): - raise nx.NetworkXError('Invalid degree sequence') + raise nx.NetworkXError("Invalid degree sequence") p = len(deg_sequence) G = nx.empty_graph(p, create_using) @@ -496,7 +504,7 @@ def havel_hakimi_graph(deg_sequence, create_using=None): # If there are not enough stubs to connect to, then the sequence is # not graphical if dmax > n - 1: - raise nx.NetworkXError('Non-graphical integer sequence') + raise nx.NetworkXError("Non-graphical integer sequence") # Remove largest stub in list source = num_degs[dmax].pop() @@ -522,9 +530,7 @@ def havel_hakimi_graph(deg_sequence, create_using=None): return G -def directed_havel_hakimi_graph(in_deg_sequence, - out_deg_sequence, - create_using=None): +def directed_havel_hakimi_graph(in_deg_sequence, out_deg_sequence, create_using=None): """Returns a directed graph with the given degree sequences. Parameters @@ -583,7 +589,8 @@ def directed_havel_hakimi_graph(in_deg_sequence, in_deg = in_deg_sequence[n] if in_deg < 0 or out_deg < 0: raise nx.NetworkXError( - 'Invalid degree sequences. Sequence values must be positive.') + "Invalid degree sequences. Sequence values must be positive." + ) sumin, sumout, maxin = sumin + in_deg, sumout + out_deg, max(maxin, in_deg) if in_deg > 0: stubheap.append((-1 * out_deg, -1 * in_deg, n)) @@ -591,7 +598,8 @@ def directed_havel_hakimi_graph(in_deg_sequence, zeroheap.append((-1 * out_deg, n)) if sumin != sumout: raise nx.NetworkXError( - 'Invalid degree sequences. Sequences must have equal sums.') + "Invalid degree sequences. Sequences must have equal sums." + ) heapq.heapify(stubheap) heapq.heapify(zeroheap) @@ -602,7 +610,7 @@ def directed_havel_hakimi_graph(in_deg_sequence, (freeout, freein, target) = heapq.heappop(stubheap) freein *= -1 if freein > len(stubheap) + len(zeroheap): - raise nx.NetworkXError('Non-digraphical integer sequence') + raise nx.NetworkXError("Non-digraphical integer sequence") # Attach arcs from the nodes with the most stubs mslen = 0 @@ -613,7 +621,7 @@ def directed_havel_hakimi_graph(in_deg_sequence, else: (stubout, stubin, stubsource) = heapq.heappop(stubheap) if stubout == 0: - raise nx.NetworkXError('Non-digraphical integer sequence') + raise nx.NetworkXError("Non-digraphical integer sequence") G.add_edge(stubsource, target) # Check if source is now totally connected if stubout + 1 < 0 or stubin < 0: @@ -643,11 +651,13 @@ def degree_sequence_tree(deg_sequence, create_using=None): # The sum of the degree sequence must be even (for any undirected graph). 
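# A minimal sketch of the even-sum rule checked below, using the undirected
# configuration_model from earlier in this file (degree values illustrative;
# the returned multigraph realizes the sequence exactly):
#
#     >>> import networkx as nx
#     >>> deg = [3, 3, 2, 2, 1, 1]                 # sum is even, so accepted
#     >>> G = nx.configuration_model(deg, seed=42)
#     >>> sorted(d for _, d in G.degree()) == sorted(deg)
#     True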
degree_sum = sum(deg_sequence) if degree_sum % 2 != 0: - msg = 'Invalid degree sequence: sum of degrees must be even, not odd' + msg = "Invalid degree sequence: sum of degrees must be even, not odd" raise nx.NetworkXError(msg) if len(deg_sequence) - degree_sum // 2 != 1: - msg = ('Invalid degree sequence: tree must have number of nodes equal' - ' to one less than the number of edges') + msg = ( + "Invalid degree sequence: tree must have number of nodes equal" + " to one less than the number of edges" + ) raise nx.NetworkXError(msg) G = nx.empty_graph(0, create_using) if G.is_directed(): @@ -744,7 +754,7 @@ class DegreeSequenceRandomGraph: # use random_degree_sequence_graph() def __init__(self, degree, rng): if not nx.is_graphical(degree): - raise nx.NetworkXUnfeasible('degree sequence is not graphical') + raise nx.NetworkXUnfeasible("degree sequence is not graphical") self.rng = rng self.degree = list(degree) # node labels are integers 0,...,n-1 @@ -795,7 +805,7 @@ class DegreeSequenceRandomGraph: def q(self, u, v): # remaining degree probability - norm = float(max(self.remaining_degree.values()))**2 + norm = float(max(self.remaining_degree.values())) ** 2 return self.remaining_degree[u] * self.remaining_degree[v] / norm def suitable_edge(self): @@ -810,7 +820,7 @@ class DegreeSequenceRandomGraph: def phase1(self): # choose node pairs from (degree) weighted distribution rem_deg = self.remaining_degree - while sum(rem_deg.values()) >= 2 * self.dmax**2: + while sum(rem_deg.values()) >= 2 * self.dmax ** 2: u, v = sorted(random_weighted_sample(rem_deg, 2, self.rng)) if self.graph.has_edge(u, v): continue @@ -837,12 +847,13 @@ class DegreeSequenceRandomGraph: # build potential remaining edges and choose with rejection sampling potential_edges = combinations(self.remaining_degree, 2) # build auxiliary graph of potential edges not already in graph - H = nx.Graph([(u, v) for (u, v) in potential_edges - if not self.graph.has_edge(u, v)]) + H = nx.Graph( + [(u, v) for (u, v) in potential_edges if not self.graph.has_edge(u, v)] + ) rng = self.rng while self.remaining_degree: if not self.suitable_edge(): - raise nx.NetworkXUnfeasible('no suitable edges left') + raise nx.NetworkXUnfeasible("no suitable edges left") while True: u, v = sorted(rng.choice(list(H.edges()))) if rng.random() < self.q(u, v): diff --git a/networkx/generators/directed.py b/networkx/generators/directed.py index 2d5ceaac..0e6009b5 100644 --- a/networkx/generators/directed.py +++ b/networkx/generators/directed.py @@ -12,8 +12,13 @@ from networkx.utils import discrete_sequence from networkx.utils import weighted_choice from networkx.utils import py_random_state -__all__ = ['gn_graph', 'gnc_graph', 'gnr_graph', 'random_k_out_graph', - 'scale_free_graph'] +__all__ = [ + "gn_graph", + "gnc_graph", + "gnr_graph", + "random_k_out_graph", + "scale_free_graph", +] @py_random_state(3) @@ -62,7 +67,9 @@ def gn_graph(n, kernel=None, create_using=None, seed=None): raise nx.NetworkXError("create_using must indicate a Directed Graph") if kernel is None: - def kernel(x): return x + + def kernel(x): + return x if n == 1: return G @@ -174,8 +181,16 @@ def gnc_graph(n, create_using=None, seed=None): @py_random_state(7) -def scale_free_graph(n, alpha=0.41, beta=0.54, gamma=0.05, delta_in=0.2, - delta_out=0, create_using=None, seed=None): +def scale_free_graph( + n, + alpha=0.41, + beta=0.54, + gamma=0.05, + delta_in=0.2, + delta_out=0, + create_using=None, + seed=None, +): """Returns a scale-free directed graph. 
Parameters @@ -233,7 +248,7 @@ def scale_free_graph(n, alpha=0.41, beta=0.54, gamma=0.05, delta_in=0.2, break return n - if create_using is None or not hasattr(create_using, '_adj'): + if create_using is None or not hasattr(create_using, "_adj"): # start with 3-cycle G = nx.empty_graph(3, create_using, default=nx.MultiDiGraph) G.add_edges_from([(0, 1), (1, 2), (2, 0)]) @@ -243,14 +258,14 @@ def scale_free_graph(n, alpha=0.41, beta=0.54, gamma=0.05, delta_in=0.2, raise nx.NetworkXError("MultiDiGraph required in create_using") if alpha <= 0: - raise ValueError('alpha must be > 0.') + raise ValueError("alpha must be > 0.") if beta <= 0: - raise ValueError('beta must be > 0.') + raise ValueError("beta must be > 0.") if gamma <= 0: - raise ValueError('gamma must be > 0.') + raise ValueError("gamma must be > 0.") if abs(alpha + beta + gamma - 1.0) >= 1e-9: - raise ValueError('alpha+beta+gamma must equal 1.') + raise ValueError("alpha+beta+gamma must equal 1.") number_of_edges = G.number_of_edges() while len(G) < n: @@ -282,8 +297,7 @@ def scale_free_graph(n, alpha=0.41, beta=0.54, gamma=0.05, delta_in=0.2, @py_random_state(4) -def random_uniform_k_out_graph(n, k, self_loops=True, with_replacement=True, - seed=None): +def random_uniform_k_out_graph(n, k, self_loops=True, with_replacement=True, seed=None): """Returns a random `k`-out graph with uniform attachment. A random `k`-out graph with uniform attachment is a multidigraph @@ -431,7 +445,7 @@ def random_k_out_graph(n, k, alpha, self_loops=True, seed=None): """ if alpha < 0: - raise ValueError('alpha must be positive') + raise ValueError("alpha must be positive") G = nx.empty_graph(n, create_using=nx.MultiDiGraph) weights = Counter({v: alpha for v in G}) for i in range(k * n): diff --git a/networkx/generators/duplication.py b/networkx/generators/duplication.py index f0af687c..3bc8cbf8 100644 --- a/networkx/generators/duplication.py +++ b/networkx/generators/duplication.py @@ -9,7 +9,7 @@ import networkx as nx from networkx.utils import py_random_state from networkx.exception import NetworkXError -__all__ = ['partial_duplication_graph', 'duplication_divergence_graph'] +__all__ = ["partial_duplication_graph", "duplication_divergence_graph"] @py_random_state(4) @@ -132,7 +132,7 @@ def duplication_divergence_graph(n, p, seed=None): msg = f"NetworkXError p={p} is not in [0,1]." raise nx.NetworkXError(msg) if n < 2: - msg = 'n must be greater than or equal to 2' + msg = "n must be greater than or equal to 2" raise nx.NetworkXError(msg) G = nx.Graph() diff --git a/networkx/generators/ego.py b/networkx/generators/ego.py index c19c5fbd..cca7dfae 100644 --- a/networkx/generators/ego.py +++ b/networkx/generators/ego.py @@ -1,7 +1,7 @@ """ Ego graph. 
""" -__all__ = ['ego_graph'] +__all__ = ["ego_graph"] import networkx as nx @@ -43,17 +43,18 @@ def ego_graph(G, n, radius=1, center=True, undirected=False, distance=None): """ if undirected: if distance is not None: - sp, _ = nx.single_source_dijkstra(G.to_undirected(), - n, cutoff=radius, - weight=distance) + sp, _ = nx.single_source_dijkstra( + G.to_undirected(), n, cutoff=radius, weight=distance + ) else: - sp = dict(nx.single_source_shortest_path_length(G.to_undirected(), - n, cutoff=radius)) + sp = dict( + nx.single_source_shortest_path_length( + G.to_undirected(), n, cutoff=radius + ) + ) else: if distance is not None: - sp, _ = nx.single_source_dijkstra(G, - n, cutoff=radius, - weight=distance) + sp, _ = nx.single_source_dijkstra(G, n, cutoff=radius, weight=distance) else: sp = dict(nx.single_source_shortest_path_length(G, n, cutoff=radius)) diff --git a/networkx/generators/expanders.py b/networkx/generators/expanders.py index 80af4a18..88c49ea3 100644 --- a/networkx/generators/expanders.py +++ b/networkx/generators/expanders.py @@ -4,7 +4,7 @@ import itertools import networkx as nx -__all__ = ['margulis_gabber_galil_graph', 'chordal_cycle_graph', 'paley_graph'] +__all__ = ["margulis_gabber_galil_graph", "chordal_cycle_graph", "paley_graph"] # Other discrete torus expanders can be constructed by using the following edge @@ -70,10 +70,14 @@ def margulis_gabber_galil_graph(n, create_using=None): raise nx.NetworkXError(msg) for (x, y) in itertools.product(range(n), repeat=2): - for (u, v) in (((x + 2 * y) % n, y), ((x + (2 * y + 1)) % n, y), - (x, (y + 2 * x) % n), (x, (y + (2 * x + 1)) % n)): + for (u, v) in ( + ((x + 2 * y) % n, y), + ((x + (2 * y + 1)) % n, y), + (x, (y + 2 * x) % n), + (x, (y + (2 * x + 1)) % n), + ): G.add_edge((x, y), (u, v)) - G.graph['name'] = f"margulis_gabber_galil_graph({n})" + G.graph["name"] = f"margulis_gabber_galil_graph({n})" return G @@ -136,7 +140,7 @@ def chordal_cycle_graph(p, create_using=None): chord = pow(x, p - 2, p) if x > 0 else 0 for y in (left, right, chord): G.add_edge(x, y) - G.graph['name'] = f"chordal_cycle_graph({p})" + G.graph["name"] = f"chordal_cycle_graph({p})" return G @@ -194,5 +198,5 @@ def paley_graph(p, create_using=None): for x in range(p): for x2 in square_set: G.add_edge(x, (x + x2) % p) - G.graph['name'] = f"paley({p})" + G.graph["name"] = f"paley({p})" return G diff --git a/networkx/generators/geometric.py b/networkx/generators/geometric.py index 19ac0bbe..2c50ee0d 100644 --- a/networkx/generators/geometric.py +++ b/networkx/generators/geometric.py @@ -2,13 +2,10 @@ """ from bisect import bisect_left -from itertools import ( - accumulate, - combinations, - product -) +from itertools import accumulate, combinations, product from math import sqrt import math + try: from scipy.spatial import cKDTree as KDTree except ImportError: @@ -19,9 +16,14 @@ else: import networkx as nx from networkx.utils import nodes_or_number, py_random_state -__all__ = ['geographical_threshold_graph', 'waxman_graph', - 'navigable_small_world_graph', 'random_geometric_graph', - 'soft_random_geometric_graph', 'thresholded_random_geometric_graph'] +__all__ = [ + "geographical_threshold_graph", + "waxman_graph", + "navigable_small_world_graph", + "random_geometric_graph", + "soft_random_geometric_graph", + "thresholded_random_geometric_graph", +] def euclidean(x, y): @@ -40,7 +42,7 @@ def _fast_edges(G, radius, p): Requires scipy to be installed. 
""" - pos = nx.get_node_attributes(G, 'pos') + pos = nx.get_node_attributes(G, "pos") nodes, coords = list(zip(*pos.items())) kdtree = KDTree(coords) # Cannot provide generator. edge_indexes = kdtree.query_pairs(radius, p) @@ -56,7 +58,7 @@ def _slow_edges(G, radius, p): """ # TODO This can be parallelized. edges = [] - for (u, pu), (v, pv) in combinations(G.nodes(data='pos'), 2): + for (u, pu), (v, pv) in combinations(G.nodes(data="pos"), 2): if sum(abs(a - b) ** p for a, b in zip(pu, pv)) <= radius ** p: edges.append((u, v)) return edges @@ -148,7 +150,7 @@ def random_geometric_graph(n, radius, dim=2, pos=None, p=2, seed=None): # Euclidean space of the specified dimension. if pos is None: pos = {v: [seed.random() for i in range(dim)] for v in nodes} - nx.set_node_attributes(G, pos, 'pos') + nx.set_node_attributes(G, pos, "pos") if _is_scipy_available: edges = _fast_edges(G, radius, p) @@ -161,8 +163,9 @@ def random_geometric_graph(n, radius, dim=2, pos=None, p=2, seed=None): @py_random_state(6) @nodes_or_number(0) -def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None, - seed=None): +def soft_random_geometric_graph( + n, radius, dim=2, pos=None, p=2, p_dist=None, seed=None +): r"""Returns a soft random geometric graph in the unit cube. The soft random geometric graph [1] model places `n` nodes uniformly at @@ -263,13 +266,13 @@ def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None, """ n_name, nodes = n G = nx.Graph() - G.name = f'soft_random_geometric_graph({n}, {radius}, {dim})' + G.name = f"soft_random_geometric_graph({n}, {radius}, {dim})" G.add_nodes_from(nodes) # If no positions are provided, choose uniformly random vectors in # Euclidean space of the specified dimension. if pos is None: pos = {v: [seed.random() for i in range(dim)] for v in nodes} - nx.set_node_attributes(G, pos, 'pos') + nx.set_node_attributes(G, pos, "pos") # if p_dist function not supplied the default function is an exponential # distribution with rate parameter :math:`\lambda=1`. @@ -281,7 +284,7 @@ def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None, def should_join(pair): u, v = pair u_pos, v_pos = pos[u], pos[v] - dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos)))**(1 / p) + dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos))) ** (1 / p) # Check if dist <= radius parameter. This check is redundant if scipy # is available and _fast_edges routine is used, but provides the # check in case scipy is not available and all edge combinations @@ -302,8 +305,9 @@ def soft_random_geometric_graph(n, radius, dim=2, pos=None, p=2, p_dist=None, @py_random_state(7) @nodes_or_number(0) -def geographical_threshold_graph(n, theta, dim=2, pos=None, weight=None, - metric=None, p_dist=None, seed=None): +def geographical_threshold_graph( + n, theta, dim=2, pos=None, weight=None, metric=None, p_dist=None, seed=None +): r"""Returns a geographical threshold graph. The geographical threshold graph model places $n$ nodes uniformly at @@ -428,13 +432,14 @@ def geographical_threshold_graph(n, theta, dim=2, pos=None, weight=None, # If no distance metric is provided, use Euclidean distance. 
if metric is None: metric = euclidean - nx.set_node_attributes(G, weight, 'weight') - nx.set_node_attributes(G, pos, 'pos') + nx.set_node_attributes(G, weight, "weight") + nx.set_node_attributes(G, pos, "pos") # if p_dist is not supplied, use default r^-2 if p_dist is None: + def p_dist(r): - return r**-2 + return r ** -2 # Returns ``True`` if and only if the nodes whose attributes are # ``du`` and ``dv`` should be joined, according to the threshold @@ -451,8 +456,9 @@ def geographical_threshold_graph(n, theta, dim=2, pos=None, weight=None, @py_random_state(6) @nodes_or_number(0) -def waxman_graph(n, beta=0.4, alpha=0.1, L=None, domain=(0, 0, 1, 1), - metric=None, seed=None): +def waxman_graph( + n, beta=0.4, alpha=0.1, L=None, domain=(0, 0, 1, 1), metric=None, seed=None +): r"""Returns a Waxman random graph. The Waxman random graph model places `n` nodes uniformly at random @@ -543,7 +549,7 @@ def waxman_graph(n, beta=0.4, alpha=0.1, L=None, domain=(0, 0, 1, 1), (xmin, ymin, xmax, ymax) = domain # Each node gets a uniformly random position in the given rectangle. pos = {v: (seed.uniform(xmin, xmax), seed.uniform(ymin, ymax)) for v in G} - nx.set_node_attributes(G, pos, 'pos') + nx.set_node_attributes(G, pos, "pos") # If no distance metric is provided, use Euclidean distance. if metric is None: metric = euclidean @@ -556,9 +562,13 @@ def waxman_graph(n, beta=0.4, alpha=0.1, L=None, domain=(0, 0, 1, 1), if L is None: L = max(metric(x, y) for x, y in combinations(pos.values(), 2)) - def dist(u, v): return metric(pos[u], pos[v]) + def dist(u, v): + return metric(pos[u], pos[v]) + else: - def dist(u, v): return seed.random() * L + + def dist(u, v): + return seed.random() * L # `pair` is the pair of nodes to decide whether to join. def should_join(pair): @@ -615,11 +625,11 @@ def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): .. [1] J. Kleinberg. The small-world phenomenon: An algorithmic perspective. Proc. 32nd ACM Symposium on Theory of Computing, 2000. """ - if (p < 1): + if p < 1: raise nx.NetworkXException("p must be >= 1") - if (q < 0): + if q < 0: raise nx.NetworkXException("q must be >= 0") - if (r < 0): + if r < 0: raise nx.NetworkXException("r must be >= 1") G = nx.DiGraph() @@ -632,7 +642,7 @@ def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): d = sum((abs(b - a) for a, b in zip(p1, p2))) if d <= p: G.add_edge(p1, p2) - probs.append(d**-r) + probs.append(d ** -r) cdf = list(accumulate(probs)) for _ in range(q): target = nodes[bisect_left(cdf, seed.uniform(0, cdf[-1]))] @@ -642,8 +652,9 @@ def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): @py_random_state(7) @nodes_or_number(0) -def thresholded_random_geometric_graph(n, radius, theta, dim=2, - pos=None, weight=None, p=2, seed=None): +def thresholded_random_geometric_graph( + n, radius, theta, dim=2, pos=None, weight=None, p=2, seed=None +): r"""Returns a thresholded random geometric graph in the unit cube. The thresholded random geometric graph [1] model places `n` nodes @@ -755,8 +766,8 @@ def thresholded_random_geometric_graph(n, radius, theta, dim=2, pos = {v: [seed.random() for i in range(dim)] for v in nodes} # If no distance metric is provided, use Euclidean distance. 
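# A minimal sketch of the Waxman and small-world generators above
# (parameters are illustrative; for dim=2 the node set is an n-by-n grid):
#
#     >>> import networkx as nx
#     >>> W = nx.waxman_graph(50, beta=0.4, alpha=0.1, seed=7)
#     >>> S = nx.navigable_small_world_graph(5, p=1, q=1, r=2, dim=2, seed=7)
#     >>> S.number_of_nodes()
#     25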
- nx.set_node_attributes(G, weight, 'weight') - nx.set_node_attributes(G, pos, 'pos') + nx.set_node_attributes(G, weight, "weight") + nx.set_node_attributes(G, pos, "pos") # Returns ``True`` if and only if the nodes whose attributes are # ``du`` and ``dv`` should be joined, according to the threshold @@ -766,7 +777,7 @@ def thresholded_random_geometric_graph(n, radius, theta, dim=2, u, v = pair u_weight, v_weight = weight[u], weight[v] u_pos, v_pos = pos[u], pos[v] - dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos)))**(1 / p) + dist = (sum(abs(a - b) ** p for a, b in zip(u_pos, v_pos))) ** (1 / p) # Check if dist is <= radius parameter. This check is redundant if # scipy is available and _fast_edges routine is used, but provides # the check in case scipy is not available and all edge combinations diff --git a/networkx/generators/harary_graph.py b/networkx/generators/harary_graph.py index fbd83328..7af21b33 100644 --- a/networkx/generators/harary_graph.py +++ b/networkx/generators/harary_graph.py @@ -18,7 +18,7 @@ References import networkx as nx from networkx.exception import NetworkXError -__all__ = ['hnm_harary_graph', 'hkn_harary_graph'] +__all__ = ["hnm_harary_graph", "hkn_harary_graph"] def hnm_harary_graph(n, m, create_using=None): diff --git a/networkx/generators/internet_as_graphs.py b/networkx/generators/internet_as_graphs.py index 2ec3d622..77fdb267 100644 --- a/networkx/generators/internet_as_graphs.py +++ b/networkx/generators/internet_as_graphs.py @@ -3,7 +3,7 @@ import networkx as nx from networkx.utils import py_random_state -__all__ = ['random_internet_as_graph'] +__all__ = ["random_internet_as_graph"] def uniform_int_from_avg(a, m, seed): @@ -21,10 +21,11 @@ def uniform_int_from_avg(a, m, seed): """ from math import floor - assert(m >= a) - b = 2*m - a - p = (b-floor(b))/2 - X1 = int(round(seed.random()*(floor(b)-a) + a)) + + assert m >= a + b = 2 * m - a + p = (b - floor(b)) / 2 + X1 = int(round(seed.random() * (floor(b) - a) + a)) if seed.random() < p: X2 = 1 else: @@ -94,18 +95,18 @@ class AS_graph_generator: """ self.seed = seed - self.n_t = min(n, int(round(self.seed.random()*2+4))) # num of T nodes - self.n_m = int(round(0.15*n)) # number of M nodes - self.n_cp = int(round(0.05*n)) # number of CP nodes - self.n_c = max(0, n-self.n_t-self.n_m-self.n_cp) # number of C nodes + self.n_t = min(n, int(round(self.seed.random() * 2 + 4))) # num of T nodes + self.n_m = int(round(0.15 * n)) # number of M nodes + self.n_cp = int(round(0.05 * n)) # number of CP nodes + self.n_c = max(0, n - self.n_t - self.n_m - self.n_cp) # number of C nodes - self.d_m = 2 + (2.5*n)/10000 # average multihoming degree for M nodes - self.d_cp = 2 + (1.5*n)/10000 # avg multihoming degree for CP nodes - self.d_c = 1 + (5*n)/100000 # average multihoming degree for C nodes + self.d_m = 2 + (2.5 * n) / 10000 # average multihoming degree for M nodes + self.d_cp = 2 + (1.5 * n) / 10000 # avg multihoming degree for CP nodes + self.d_c = 1 + (5 * n) / 100000 # average multihoming degree for C nodes - self.p_m_m = 1 + (2*n)/10000 # avg num of peer edges between M and M - self.p_cp_m = 0.2 + (2*n)/10000 # avg num of peer edges between CP, M - self.p_cp_cp = 0.05 + (2*n)/100000 # avg num of peer edges btwn CP, CP + self.p_m_m = 1 + (2 * n) / 10000 # avg num of peer edges between M and M + self.p_cp_m = 0.2 + (2 * n) / 10000 # avg num of peer edges between CP, M + self.p_cp_cp = 0.05 + (2 * n) / 100000 # avg num of peer edges btwn CP, CP self.t_m = 0.375 # probability M's provider is T self.t_cp = 
0.375 # probability CP's provider is T @@ -127,16 +128,16 @@ class AS_graph_generator: self.regions[r].add(i) for j in self.G.nodes(): if i != j: - self.add_edge(i, j, 'peer') + self.add_edge(i, j, "peer") self.customers[i] = set() self.providers[i] = set() return self.G def add_edge(self, i, j, kind): - if kind == 'transit': + if kind == "transit": customer = str(i) else: - customer = 'none' + customer = "none" self.G.add_edge(i, j, type=kind, customer=customer) def choose_peer_pref_attach(self, node_list): @@ -148,7 +149,7 @@ class AS_graph_generator: d = {} for n in node_list: - d[n] = self.G.nodes[n]['peers'] + d[n] = self.G.nodes[n]["peers"] return choose_pref_attach(d, self.seed) def choose_node_pref_attach(self, node_list): @@ -210,20 +211,21 @@ class AS_graph_generator: edge_num = uniform_int_from_avg(1, avg_deg, self.seed) - t_options = node_options.intersection(self.nodes['T']) - m_options = node_options.intersection(self.nodes['M']) + t_options = node_options.intersection(self.nodes["T"]) + m_options = node_options.intersection(self.nodes["M"]) if i in m_options: m_options.remove(i) d = 0 while d < edge_num and (len(t_options) > 0 or len(m_options) > 0): - if len(m_options) == 0 or (len(t_options) > 0 and - self.seed.random() < t_edge_prob): # add edge to a T node + if len(m_options) == 0 or ( + len(t_options) > 0 and self.seed.random() < t_edge_prob + ): # add edge to a T node j = self.choose_node_pref_attach(t_options) t_options.remove(j) else: j = self.choose_node_pref_attach(m_options) m_options.remove(j) - self.add_edge(i, j, 'transit') + self.add_edge(i, j, "transit") self.add_customer(i, j) d += 1 @@ -248,7 +250,7 @@ class AS_graph_generator: """ # candidates are of type 'M' and are not customers of m - node_options = self.nodes['M'].difference(self.customers[m]) + node_options = self.nodes["M"].difference(self.customers[m]) # candidates are not providers of m node_options = node_options.difference(self.providers[m]) # remove self @@ -262,9 +264,9 @@ class AS_graph_generator: if len(node_options) > 0: j = self.choose_peer_pref_attach(node_options) - self.add_edge(m, j, 'peer') - self.G.nodes[m]['peers'] += 1 - self.G.nodes[j]['peers'] += 1 + self.add_edge(m, j, "peer") + self.G.nodes[m]["peers"] += 1 + self.G.nodes[j]["peers"] += 1 return True else: return False @@ -309,9 +311,9 @@ class AS_graph_generator: if len(node_options) > 0: j = self.seed.sample(node_options, 1)[0] - self.add_edge(cp, j, 'peer') - self.G.nodes[cp]['peers'] += 1 - self.G.nodes[j]['peers'] += 1 + self.add_edge(cp, j, "peer") + self.G.nodes[cp]["peers"] += 1 + self.G.nodes[j]["peers"] += 1 return True else: return False @@ -327,18 +329,18 @@ class AS_graph_generator: self.regions = {} for i in range(rn): - self.regions["REG"+str(i)] = set() + self.regions["REG" + str(i)] = set() def add_peering_links(self, from_kind, to_kind): """ Utility function to add peering links among node groups. 
""" peer_link_method = None - if from_kind == 'M': + if from_kind == "M": peer_link_method = self.add_m_peering_link m = self.p_m_m - if from_kind == 'CP': + if from_kind == "CP": peer_link_method = self.add_cp_peering_link - if to_kind == 'M': + if to_kind == "M": m = self.p_cp_m else: m = self.p_cp_cp @@ -374,26 +376,25 @@ class AS_graph_generator: self.graph_regions(5) self.customers = {} self.providers = {} - self.nodes = {'T': set(), 'M': set(), 'CP': set(), 'C': set()} + self.nodes = {"T": set(), "M": set(), "CP": set(), "C": set()} self.t_graph() - self.nodes['T'] = set(list(self.G.nodes())) + self.nodes["T"] = set(list(self.G.nodes())) - i = len(self.nodes['T']) + i = len(self.nodes["T"]) for _ in range(self.n_m): - self.nodes['M'].add(self.add_node(i, 'M', 0.2, self.d_m, self.t_m)) + self.nodes["M"].add(self.add_node(i, "M", 0.2, self.d_m, self.t_m)) i += 1 for _ in range(self.n_cp): - self.nodes['CP'].add(self.add_node(i, 'CP', 0.05, self.d_cp, - self.t_cp)) + self.nodes["CP"].add(self.add_node(i, "CP", 0.05, self.d_cp, self.t_cp)) i += 1 for _ in range(self.n_c): - self.nodes['C'].add(self.add_node(i, 'C', 0, self.d_c, self.t_c)) + self.nodes["C"].add(self.add_node(i, "C", 0, self.d_c, self.t_c)) i += 1 - self.add_peering_links('M', 'M') - self.add_peering_links('CP', 'M') - self.add_peering_links('CP', 'CP') + self.add_peering_links("M", "M") + self.add_peering_links("CP", "M") + self.add_peering_links("CP", "CP") return self.G diff --git a/networkx/generators/intersection.py b/networkx/generators/intersection.py index 6680482f..ee1e048d 100644 --- a/networkx/generators/intersection.py +++ b/networkx/generators/intersection.py @@ -5,10 +5,11 @@ import networkx as nx from networkx.algorithms import bipartite from networkx.utils import py_random_state -__all__ = ['uniform_random_intersection_graph', - 'k_random_intersection_graph', - 'general_random_intersection_graph', - ] +__all__ = [ + "uniform_random_intersection_graph", + "k_random_intersection_graph", + "general_random_intersection_graph", +] @py_random_state(3) diff --git a/networkx/generators/interval_graph.py b/networkx/generators/interval_graph.py index 2e3e0a82..be15ba98 100644 --- a/networkx/generators/interval_graph.py +++ b/networkx/generators/interval_graph.py @@ -4,7 +4,7 @@ Generators for interval graph. from collections.abc import Sequence import networkx as nx -__all__ = ['interval_graph'] +__all__ = ["interval_graph"] def interval_graph(intervals): @@ -46,11 +46,14 @@ def interval_graph(intervals): intervals = list(intervals) for interval in intervals: if not (isinstance(interval, Sequence) and len(interval) == 2): - raise TypeError("Each interval must have length 2, and be a " - "collections.abc.Sequence such as tuple or list.") + raise TypeError( + "Each interval must have length 2, and be a " + "collections.abc.Sequence such as tuple or list." + ) if interval[0] > interval[1]: - raise ValueError(f"Interval must have lower value first. " - f"Got {interval}") + raise ValueError( + f"Interval must have lower value first. 
" f"Got {interval}" + ) graph = nx.Graph() diff --git a/networkx/generators/joint_degree_seq.py b/networkx/generators/joint_degree_seq.py index 4454e70b..8734dfc3 100644 --- a/networkx/generators/joint_degree_seq.py +++ b/networkx/generators/joint_degree_seq.py @@ -3,10 +3,12 @@ import networkx as nx from networkx.utils import py_random_state -__all__ = ['is_valid_joint_degree', - 'is_valid_directed_joint_degree', - 'joint_degree_graph', - 'directed_joint_degree_graph'] +__all__ = [ + "is_valid_joint_degree", + "is_valid_directed_joint_degree", + "joint_degree_graph", + "directed_joint_degree_graph", +] def is_valid_joint_degree(joint_degrees): @@ -62,12 +64,10 @@ def is_valid_joint_degree(joint_degrees): if not float(joint_degrees[k][l]).is_integer(): return False - if (k != l) and (joint_degrees[k][l] > - degree_count[k] * degree_count[l]): + if (k != l) and (joint_degrees[k][l] > degree_count[k] * degree_count[l]): return False elif k == l: - if (joint_degrees[k][k] > degree_count[k] * - (degree_count[k] - 1)): + if joint_degrees[k][k] > degree_count[k] * (degree_count[k] - 1): return False if joint_degrees[k][k] % 2 != 0: return False @@ -198,12 +198,11 @@ def joint_degree_graph(joint_degrees, seed=None): """ if not is_valid_joint_degree(joint_degrees): - msg = 'Input joint degree dict not realizable as a simple graph' + msg = "Input joint degree dict not realizable as a simple graph" raise nx.NetworkXError(msg) # compute degree count from joint_degrees - degree_count = {k: sum(l.values()) // k for k, l in joint_degrees.items() - if k > 0} + degree_count = {k: sum(l.values()) // k for k, l in joint_degrees.items() if k > 0} # start with empty N-node graph N = sum(degree_count.values()) @@ -268,12 +267,11 @@ def joint_degree_graph(joint_degrees, seed=None): # if node w has no free stubs then do neighbor switch if h_node_residual[w] == 0: if k != l: - _neighbor_switch(G, w, l_unsat, - h_node_residual) + _neighbor_switch(G, w, l_unsat, h_node_residual) else: - _neighbor_switch(G, w, l_unsat, - h_node_residual, - avoid_node_id=v) + _neighbor_switch( + G, w, l_unsat, h_node_residual, avoid_node_id=v + ) # add edge (v, w) and update data structures G.add_edge(v, w) @@ -361,8 +359,9 @@ def is_valid_directed_joint_degree(in_degrees, out_degrees, nkk): return True -def _directed_neighbor_switch(G, w, unsat, h_node_residual_out, chords, - h_partition_in, partition): +def _directed_neighbor_switch( + G, w, unsat, h_node_residual_out, chords, h_partition_in, partition +): """ Releases one free stub for node w, while preserving joint degree in G. Parameters @@ -423,8 +422,9 @@ def _directed_neighbor_switch(G, w, unsat, h_node_residual_out, chords, return w_prime -def _directed_neighbor_switch_rev(G, w, unsat, h_node_residual_in, chords, - h_partition_out, partition): +def _directed_neighbor_switch_rev( + G, w, unsat, h_node_residual_in, chords, h_partition_out, partition +): """ The reverse of directed_neighbor_switch. Parameters @@ -541,7 +541,7 @@ def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed=None): >>> """ if not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk): - msg = 'Input is not realizable as a simple graph' + msg = "Input is not realizable as a simple graph" raise nx.NetworkXError(msg) # start with an empty directed graph. 
@@ -575,8 +575,7 @@ def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed=None): for idx, o in enumerate(out_degrees): o = out_degrees[idx] - non_chords[(o, in_degrees[idx])] = non_chords.get((o, in_degrees[idx]), - 0) + 1 + non_chords[(o, in_degrees[idx])] = non_chords.get((o, in_degrees[idx]), 0) + 1 idx = int(idx) if o > 0: h_degree_nodelist_out.setdefault(o, []) @@ -601,14 +600,15 @@ def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed=None): for l in nkk[k]: n_edges_add = nkk[k][l] - if (n_edges_add > 0): + if n_edges_add > 0: # chords contains a random set of potential edges. chords = set() k_len = nk_out[k] l_len = nk_in[l] - chords_sample = seed.sample(range(k_len * l_len), n_edges_add - + non_chords.get((k, l), 0)) + chords_sample = seed.sample( + range(k_len * l_len), n_edges_add + non_chords.get((k, l), 0) + ) num = 0 while len(chords) < n_edges_add: @@ -630,19 +630,29 @@ def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed=None): # if node v has no free stubs then do neighbor switch. if h_node_residual_out[v] == 0: - _v = _directed_neighbor_switch(G, v, k_unsat, - h_node_residual_out, - chords, h_partition_in, - l) + _v = _directed_neighbor_switch( + G, + v, + k_unsat, + h_node_residual_out, + chords, + h_partition_in, + l, + ) if _v is not None: v = _v # if node w has no free stubs then do neighbor switch. if h_node_residual_in[w] == 0: - _w = _directed_neighbor_switch_rev(G, w, l_unsat, - h_node_residual_in, - chords, - h_partition_out, k) + _w = _directed_neighbor_switch_rev( + G, + w, + l_unsat, + h_node_residual_in, + chords, + h_partition_out, + k, + ) if _w is not None: w = _w diff --git a/networkx/generators/lattice.py b/networkx/generators/lattice.py index b4ac7d7f..72bc925f 100644 --- a/networkx/generators/lattice.py +++ b/networkx/generators/lattice.py @@ -26,8 +26,13 @@ from networkx.generators.classic import empty_graph from networkx.generators.classic import path_graph from itertools import repeat -__all__ = ['grid_2d_graph', 'grid_graph', 'hypercube_graph', - 'triangular_lattice_graph', 'hexagonal_lattice_graph'] +__all__ = [ + "grid_2d_graph", + "grid_graph", + "hypercube_graph", + "triangular_lattice_graph", + "hexagonal_lattice_graph", +] @nodes_or_number([0, 1]) @@ -61,10 +66,8 @@ def grid_2d_graph(m, n, periodic=False, create_using=None): row_name, rows = m col_name, cols = n G.add_nodes_from((i, j) for i in rows for j in cols) - G.add_edges_from(((i, j), (pi, j)) - for pi, i in pairwise(rows) for j in cols) - G.add_edges_from(((i, j), (i, pj)) - for i in rows for pj, j in pairwise(cols)) + G.add_edges_from(((i, j), (pi, j)) for pi, i in pairwise(rows) for j in cols) + G.add_edges_from(((i, j), (i, pj)) for i in rows for pj, j in pairwise(cols)) if iterable(periodic): periodic_r, periodic_c = periodic @@ -165,8 +168,9 @@ def hypercube_graph(n): return G -def triangular_lattice_graph(m, n, periodic=False, with_positions=True, - create_using=None): +def triangular_lattice_graph( + m, n, periodic=False, with_positions=True, create_using=None +): r"""Returns the $m$ by $n$ triangular lattice graph. 
The `triangular lattice graph`_ is a two-dimensional `grid graph`_ in @@ -235,10 +239,8 @@ def triangular_lattice_graph(m, n, periodic=False, with_positions=True, H.add_edges_from(((i, j), (i + 1, j)) for j in rows for i in cols[:N]) H.add_edges_from(((i, j), (i, j + 1)) for j in rows[:m] for i in cols) # add diagonals - H.add_edges_from(((i, j), (i + 1, j + 1)) - for j in rows[1:m:2] for i in cols[:N]) - H.add_edges_from(((i + 1, j), (i, j + 1)) - for j in rows[:m:2] for i in cols[:N]) + H.add_edges_from(((i, j), (i + 1, j + 1)) for j in rows[1:m:2] for i in cols[:N]) + H.add_edges_from(((i + 1, j), (i, j + 1)) for j in rows[:m:2] for i in cols[:N]) # identify boundary nodes if periodic if periodic is True: for i in cols: @@ -256,17 +258,17 @@ def triangular_lattice_graph(m, n, periodic=False, with_positions=True, xx = (0.5 * (j % 2) + i for i in cols for j in rows) h = sqrt(3) / 2 if periodic: - yy = (h * j + .01 * i * i for i in cols for j in rows) + yy = (h * j + 0.01 * i * i for i in cols for j in rows) else: yy = (h * j for i in cols for j in rows) - pos = {(i, j): (x, y) for i, j, x, y in zip(ii, jj, xx, yy) - if (i, j) in H} - set_node_attributes(H, pos, 'pos') + pos = {(i, j): (x, y) for i, j, x, y in zip(ii, jj, xx, yy) if (i, j) in H} + set_node_attributes(H, pos, "pos") return H -def hexagonal_lattice_graph(m, n, periodic=False, with_positions=True, - create_using=None): +def hexagonal_lattice_graph( + m, n, periodic=False, with_positions=True, create_using=None +): """Returns an `m` by `n` hexagonal lattice graph. The *hexagonal lattice graph* is a graph whose nodes and edges are @@ -320,13 +322,12 @@ def hexagonal_lattice_graph(m, n, periodic=False, with_positions=True, msg = "periodic hexagonal lattice needs m > 1, n > 1 and even n" raise NetworkXError(msg) - M = 2 * m # twice as many nodes as hexagons vertically + M = 2 * m # twice as many nodes as hexagons vertically rows = range(M + 2) cols = range(n + 1) # make lattice - col_edges = (((i, j), (i, j + 1)) for i in cols for j in rows[:M + 1]) - row_edges = (((i, j), (i + 1, j)) for i in cols[:n] for j in rows - if i % 2 == j % 2) + col_edges = (((i, j), (i, j + 1)) for i in cols for j in rows[: M + 1]) + row_edges = (((i, j), (i + 1, j)) for i in cols[:n] for j in rows if i % 2 == j % 2) G.add_edges_from(col_edges) G.add_edges_from(row_edges) # Remove corner nodes with one edge @@ -346,14 +347,13 @@ def hexagonal_lattice_graph(m, n, periodic=False, with_positions=True, # calc position in embedded space ii = (i for i in cols for j in rows) jj = (j for i in cols for j in rows) - xx = (0.5 + i + i // 2 + (j % 2) * ((i % 2) - .5) - for i in cols for j in rows) + xx = (0.5 + i + i // 2 + (j % 2) * ((i % 2) - 0.5) for i in cols for j in rows) h = sqrt(3) / 2 if periodic: - yy = (h * j + .01 * i * i for i in cols for j in rows) + yy = (h * j + 0.01 * i * i for i in cols for j in rows) else: yy = (h * j for i in cols for j in rows) # exclude nodes not in G pos = {(i, j): (x, y) for i, j, x, y in zip(ii, jj, xx, yy) if (i, j) in G} - set_node_attributes(G, pos, 'pos') + set_node_attributes(G, pos, "pos") return G diff --git a/networkx/generators/line.py b/networkx/generators/line.py index 9ad0f3b1..e3bd7ffb 100644 --- a/networkx/generators/line.py +++ b/networkx/generators/line.py @@ -6,7 +6,7 @@ import networkx as nx from networkx.utils import arbitrary_element, generate_unique_node from networkx.utils.decorators import not_implemented_for -__all__ = ['line_graph', 'inverse_line_graph'] +__all__ = ["line_graph", 
"inverse_line_graph"] def line_graph(G, create_using=None): @@ -114,11 +114,15 @@ def _node_func(G): """ if G.is_multigraph(): + def sorted_node(u, v, key): return (u, v, key) if u <= v else (v, u, key) + else: + def sorted_node(u, v): return (u, v) if u <= v else (v, u) + return sorted_node @@ -127,11 +131,15 @@ def _edge_func(G): """ if G.is_multigraph(): + def get_edges(nbunch=None): return G.edges(nbunch, keys=True) + else: + def get_edges(nbunch=None): return G.edges(nbunch) + return get_edges @@ -225,14 +233,14 @@ def _lg_undirected(G, selfloops=False, create_using=None): # especially important for multigraphs, we store the edges in # canonical form in a set. for i, a in enumerate(nodes): - edges.update([_sorted_edge(a, b) for b in nodes[i + shift:]]) + edges.update([_sorted_edge(a, b) for b in nodes[i + shift :]]) L.add_edges_from(edges) return L -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def inverse_line_graph(G): """ Returns the inverse line graph of graph G. @@ -309,8 +317,7 @@ def inverse_line_graph(G): P_count[u] += 1 if max(P_count.values()) > 2: - msg = "G is not a line graph (vertex found in more " \ - "than two partition cells)" + msg = "G is not a line graph (vertex found in more " "than two partition cells)" raise nx.NetworkXError(msg) W = tuple([(u,) for u in P_count if P_count[u] == 1]) H = nx.Graph() @@ -417,8 +424,10 @@ def _find_partition(G, starting_cell): for u in new_cell: for v in new_cell: if (u != v) and (v not in G_partition[u]): - msg = "G is not a line graph" \ - "(partition cell not a complete subgraph)" + msg = ( + "G is not a line graph" + "(partition cell not a complete subgraph)" + ) raise nx.NetworkXError(msg) P.append(tuple(new_cell)) G_partition.remove_edges_from(list(combinations(new_cell, 2))) @@ -500,17 +509,23 @@ def _select_starting_cell(G, starting_edge=None): for u in triangle_nodes: for v in triangle_nodes: if u != v and (v not in G[u]): - msg = "G is not a line graph (odd triangles " \ - "do not form complete subgraph)" + msg = ( + "G is not a line graph (odd triangles " + "do not form complete subgraph)" + ) raise nx.NetworkXError(msg) # otherwise then we can use this as the starting cell starting_cell = tuple(triangle_nodes) else: - msg = "G is not a line graph (odd triangles " \ - "do not form complete subgraph)" + msg = ( + "G is not a line graph (odd triangles " + "do not form complete subgraph)" + ) raise nx.NetworkXError(msg) else: - msg = "G is not a line graph (incorrect number of " \ - "odd triangles around starting edge)" + msg = ( + "G is not a line graph (incorrect number of " + "odd triangles around starting edge)" + ) raise nx.NetworkXError(msg) return starting_cell diff --git a/networkx/generators/mycielski.py b/networkx/generators/mycielski.py index dd70907a..e5e7e57e 100644 --- a/networkx/generators/mycielski.py +++ b/networkx/generators/mycielski.py @@ -6,11 +6,11 @@ of graphs. 
import networkx as nx from networkx.utils import not_implemented_for -__all__ = ['mycielskian', 'mycielski_graph'] +__all__ = ["mycielskian", "mycielski_graph"] -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def mycielskian(G, iterations=1): r"""Returns the Mycielskian of a simple, undirected graph G diff --git a/networkx/generators/nonisomorphic_trees.py b/networkx/generators/nonisomorphic_trees.py index 57e9335d..b7655a70 100644 --- a/networkx/generators/nonisomorphic_trees.py +++ b/networkx/generators/nonisomorphic_trees.py @@ -7,8 +7,7 @@ the root. """ -__all__ = ['nonisomorphic_trees', - 'number_of_nonisomorphic_trees'] +__all__ = ["nonisomorphic_trees", "number_of_nonisomorphic_trees"] import networkx as nx @@ -125,7 +124,7 @@ def _next_tree(candidate): new_left, new_rest = _split_tree(new_candidate) new_left_height = max(new_left) suffix = range(1, new_left_height + 2) - new_candidate[-len(suffix):] = suffix + new_candidate[-len(suffix) :] = suffix return new_candidate diff --git a/networkx/generators/random_clustered.py b/networkx/generators/random_clustered.py index 3afbf3ae..622fb911 100644 --- a/networkx/generators/random_clustered.py +++ b/networkx/generators/random_clustered.py @@ -3,7 +3,7 @@ import networkx as nx from networkx.utils import py_random_state -__all__ = ['random_clustered_graph'] +__all__ = ["random_clustered_graph"] @py_random_state(2) @@ -102,7 +102,7 @@ def random_clustered_graph(joint_degree_sequence, create_using=None, seed=None): tlist.append(n) if len(ilist) % 2 != 0 or len(tlist) % 3 != 0: - raise nx.NetworkXError('Invalid degree sequence') + raise nx.NetworkXError("Invalid degree sequence") seed.shuffle(ilist) seed.shuffle(tlist) diff --git a/networkx/generators/random_graphs.py b/networkx/generators/random_graphs.py index 745f64e4..4f4dfbb4 100644 --- a/networkx/generators/random_graphs.py +++ b/networkx/generators/random_graphs.py @@ -12,25 +12,27 @@ from .classic import empty_graph, path_graph, complete_graph from .degree_seq import degree_sequence_tree from collections import defaultdict -__all__ = ['fast_gnp_random_graph', - 'gnp_random_graph', - 'dense_gnm_random_graph', - 'gnm_random_graph', - 'erdos_renyi_graph', - 'binomial_graph', - 'newman_watts_strogatz_graph', - 'watts_strogatz_graph', - 'connected_watts_strogatz_graph', - 'random_regular_graph', - 'barabasi_albert_graph', - 'dual_barabasi_albert_graph', - 'extended_barabasi_albert_graph', - 'powerlaw_cluster_graph', - 'random_lobster', - 'random_shell_graph', - 'random_powerlaw_tree', - 'random_powerlaw_tree_sequence', - 'random_kernel_graph'] +__all__ = [ + "fast_gnp_random_graph", + "gnp_random_graph", + "dense_gnm_random_graph", + "gnm_random_graph", + "erdos_renyi_graph", + "binomial_graph", + "newman_watts_strogatz_graph", + "watts_strogatz_graph", + "connected_watts_strogatz_graph", + "random_regular_graph", + "barabasi_albert_graph", + "dual_barabasi_albert_graph", + "extended_barabasi_albert_graph", + "powerlaw_cluster_graph", + "random_lobster", + "random_shell_graph", + "random_powerlaw_tree", + "random_powerlaw_tree_sequence", + "random_kernel_graph", +] @py_random_state(2) @@ -476,7 +478,7 @@ def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None): G = watts_strogatz_graph(n, k, p, seed) if nx.is_connected(G): return G - raise nx.NetworkXError('Maximum number of tries exceeded') + raise nx.NetworkXError("Maximum number of tries exceeded") @py_random_state(2) @@ -572,8 +574,11 
@@ def random_regular_graph(d, n, seed=None): if not _suitable(edges, potential_edges): return None # failed to find suitable edge set - stubs = [node for node, potential in potential_edges.items() - for _ in range(potential)] + stubs = [ + node + for node, potential in potential_edges.items() + for _ in range(potential) + ] return edges # Even though a suitable edge set exists, @@ -638,7 +643,9 @@ def barabasi_albert_graph(n, m, seed=None): """ if m < 1 or m >= n: - raise nx.NetworkXError(f"Barabási–Albert network must have m >= 1 and m < n, m = {m}, n = {n}") + raise nx.NetworkXError( + f"Barabási–Albert network must have m >= 1 and m < n, m = {m}, n = {n}" + ) # Add m initial nodes (m0 in barabasi-speak) G = empty_graph(m) @@ -700,11 +707,17 @@ def dual_barabasi_albert_graph(n, m1, m2, p, seed=None): """ if m1 < 1 or m1 >= n: - raise nx.NetworkXError(f"Dual Barabási–Albert network must have m1 >= 1 and m1 < n, m1 = {m1}, n = {n}") + raise nx.NetworkXError( + f"Dual Barabási–Albert network must have m1 >= 1 and m1 < n, m1 = {m1}, n = {n}" + ) if m2 < 1 or m2 >= n: - raise nx.NetworkXError(f"Dual Barabási–Albert network must have m2 >= 1 and m2 < n, m2 = {m2}, n = {n}") + raise nx.NetworkXError( + f"Dual Barabási–Albert network must have m2 >= 1 and m2 < n, m2 = {m2}, n = {n}" + ) if p < 0 or p > 1: - raise nx.NetworkXError(f"Dual Barabási–Albert network must have 0 <= p <= 1, p = {p}") + raise nx.NetworkXError( + f"Dual Barabási–Albert network must have 0 <= p <= 1, p = {p}" + ) # For simplicity, if p == 0 or 1, just return BA if p == 1: @@ -823,8 +836,7 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): # Adding m new edges, if there is room to add them if a_probability < p and G.size() <= clique_size - m: # Select the nodes where an edge can be added - elligible_nodes = [nd for nd, deg in G.degree() - if deg < clique_degree] + elligible_nodes = [nd for nd, deg in G.degree() if deg < clique_degree] for i in range(m): # Choosing a random source node from elligible_nodes src_node = seed.choice(elligible_nodes) @@ -834,8 +846,9 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): prohibited_nodes = list(G[src_node]) prohibited_nodes.append(src_node) # This will raise an exception if the sequence is empty - dest_node = seed.choice([nd for nd in attachment_preference - if nd not in prohibited_nodes]) + dest_node = seed.choice( + [nd for nd in attachment_preference if nd not in prohibited_nodes] + ) # Adding the new edge G.add_edge(src_node, dest_node) @@ -846,8 +859,10 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): # Adjusting the elligible nodes. Degree may be saturated. if G.degree(src_node) == clique_degree: elligible_nodes.remove(src_node) - if G.degree(dest_node) == clique_degree \ - and dest_node in elligible_nodes: + if ( + G.degree(dest_node) == clique_degree + and dest_node in elligible_nodes + ): elligible_nodes.remove(dest_node) # Rewiring m edges, if there are enough edges @@ -855,8 +870,7 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): # Selecting nodes that have at least 1 edge but that are not # fully connected to ALL other nodes (center of star). 
# These nodes are the pivot nodes of the edges to rewire - elligible_nodes = [nd for nd, deg in G.degree() - if 0 < deg < clique_degree] + elligible_nodes = [nd for nd, deg in G.degree() if 0 < deg < clique_degree] for i in range(m): # Choosing a random source node node = seed.choice(elligible_nodes) @@ -870,8 +884,9 @@ def extended_barabasi_albert_graph(n, m, p, q, seed=None): # Picking a target node that is not 'node' or # neighbor with 'node', with preferential attachment neighbor_nodes.append(node) - dest_node = seed.choice([nd for nd in attachment_preference - if nd not in neighbor_nodes]) + dest_node = seed.choice( + [nd for nd in attachment_preference if nd not in neighbor_nodes] + ) # Rewire G.remove_edge(node, src_node) G.add_edge(node, dest_node) @@ -962,8 +977,8 @@ def powerlaw_cluster_graph(n, m, p, seed=None): G = empty_graph(m) # add m initial nodes (m0 in barabasi-speak) repeated_nodes = list(G.nodes()) # list of existing nodes to sample from # with nodes repeated once for each adjacent edge - source = m # next node is m - while source < n: # Now add the other n-1 nodes + source = m # next node is m + while source < n: # Now add the other n-1 nodes possible_targets = _random_subset(repeated_nodes, m, seed) # do one preferential attachment for new node target = possible_targets.pop() @@ -972,9 +987,11 @@ def powerlaw_cluster_graph(n, m, p, seed=None): count = 1 while count < m: # add m-1 more new links if seed.random() < p: # clustering step: add triangle - neighborhood = [nbr for nbr in G.neighbors(target) - if not G.has_edge(source, nbr) - and not nbr == source] + neighborhood = [ + nbr + for nbr in G.neighbors(target) + if not G.has_edge(source, nbr) and not nbr == source + ] if neighborhood: # if there is a neighbor without a link nbr = seed.choice(neighborhood) G.add_edge(source, nbr) # add triangle @@ -1076,8 +1093,8 @@ def random_shell_graph(constructor, seed=None): inter_edges = int(m * d) intra_edges.append(m - inter_edges) g = nx.convert_node_labels_to_integers( - gnm_random_graph(n, inter_edges, seed=seed), - first_label=nnodes) + gnm_random_graph(n, inter_edges, seed=seed), first_label=nnodes + ) glist.append(g) nnodes += n G = nx.operators.union(G, g) @@ -1186,7 +1203,9 @@ def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100): index = seed.randint(0, n - 1) zseq[index] = swap.pop() - raise nx.NetworkXError(f"Exceeded max ({tries}) attempts for a valid tree sequence.") + raise nx.NetworkXError( + f"Exceeded max ({tries}) attempts for a valid tree sequence." + ) @py_random_state(3) @@ -1255,7 +1274,9 @@ def random_kernel_graph(n, kernel_integral, kernel_root=None, seed=None): def kernel_root(y, a, r): def my_function(b): return kernel_integral(y, a, b) - r + return optimize.brentq(my_function, a, 1) + graph = nx.Graph() graph.add_nodes_from(range(n)) (i, j) = (1, 1) diff --git a/networkx/generators/small.py b/networkx/generators/small.py index da93aeef..8a93010e 100644 --- a/networkx/generators/small.py +++ b/networkx/generators/small.py @@ -3,33 +3,40 @@ Various small and named graphs, together with some compact generators. 
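# A minimal sketch of the Barabási–Albert generator reformatted above, with
# illustrative parameters (n=100, m=3, seed=42 are assumptions, not from the
# diff); each of the n - m later nodes attaches m edges preferentially, so
# the edge count is m * (n - m):
import networkx as nx

G = nx.barabasi_albert_graph(100, 3, seed=42)
assert G.number_of_edges() == 3 * (100 - 3)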
""" -__all__ = ['make_small_graph', - 'LCF_graph', - 'bull_graph', - 'chvatal_graph', - 'cubical_graph', - 'desargues_graph', - 'diamond_graph', - 'dodecahedral_graph', - 'frucht_graph', - 'heawood_graph', - 'hoffman_singleton_graph', - 'house_graph', - 'house_x_graph', - 'icosahedral_graph', - 'krackhardt_kite_graph', - 'moebius_kantor_graph', - 'octahedral_graph', - 'pappus_graph', - 'petersen_graph', - 'sedgewick_maze_graph', - 'tetrahedral_graph', - 'truncated_cube_graph', - 'truncated_tetrahedron_graph', - 'tutte_graph'] +__all__ = [ + "make_small_graph", + "LCF_graph", + "bull_graph", + "chvatal_graph", + "cubical_graph", + "desargues_graph", + "diamond_graph", + "dodecahedral_graph", + "frucht_graph", + "heawood_graph", + "hoffman_singleton_graph", + "house_graph", + "house_x_graph", + "icosahedral_graph", + "krackhardt_kite_graph", + "moebius_kantor_graph", + "octahedral_graph", + "pappus_graph", + "petersen_graph", + "sedgewick_maze_graph", + "tetrahedral_graph", + "truncated_cube_graph", + "truncated_tetrahedron_graph", + "tutte_graph", +] import networkx as nx -from networkx.generators.classic import empty_graph, cycle_graph, path_graph, complete_graph +from networkx.generators.classic import ( + empty_graph, + cycle_graph, + path_graph, + complete_graph, +) from networkx.exception import NetworkXError @@ -158,7 +165,7 @@ def LCF_graph(n, shift_list, repeats, create_using=None): for i in range(n_extra_edges): shift = shift_list[i % len(shift_list)] # cycle through shift_list - v1 = nodes[i % n] # cycle repeatedly through nodes + v1 = nodes[i % n] # cycle repeatedly through nodes v2 = nodes[(i + shift) % n] G.add_edge(v1, v2) return G @@ -168,13 +175,14 @@ def LCF_graph(n, shift_list, repeats, create_using=None): # Various small and named graphs # ------------------------------------------------------------------------------- + def bull_graph(create_using=None): """Returns the Bull graph. 
""" description = [ "adjacencylist", "Bull Graph", 5, - [[2, 3], [1, 3, 4], [1, 2, 5], [2], [3]] + [[2, 3], [1, 3, 4], [1, 2, 5], [2], [3]], ] G = make_small_undirected_graph(description, create_using) return G @@ -186,9 +194,20 @@ def chvatal_graph(create_using=None): "adjacencylist", "Chvatal Graph", 12, - [[2, 5, 7, 10], [3, 6, 8], [4, 7, 9], [5, 8, 10], - [6, 9], [11, 12], [11, 12], [9, 12], - [11], [11, 12], [], []] + [ + [2, 5, 7, 10], + [3, 6, 8], + [4, 7, 9], + [5, 8, 10], + [6, 9], + [11, 12], + [11, 12], + [9, 12], + [11], + [11, 12], + [], + [], + ], ] G = make_small_undirected_graph(description, create_using) return G @@ -200,8 +219,16 @@ def cubical_graph(create_using=None): "adjacencylist", "Platonic Cubical Graph", 8, - [[2, 4, 5], [1, 3, 8], [2, 4, 7], [1, 3, 6], - [1, 6, 8], [4, 5, 7], [3, 6, 8], [2, 5, 7]] + [ + [2, 4, 5], + [1, 3, 8], + [2, 4, 7], + [1, 3, 6], + [1, 6, 8], + [4, 5, 7], + [3, 6, 8], + [2, 5, 7], + ], ] G = make_small_undirected_graph(description, create_using) return G @@ -220,7 +247,7 @@ def diamond_graph(create_using=None): "adjacencylist", "Diamond Graph", 4, - [[2, 3], [1, 3, 4], [1, 2, 4], [2, 3]] + [[2, 3], [1, 3, 4], [1, 2, 4], [2, 3]], ] G = make_small_undirected_graph(description, create_using) return G @@ -241,8 +268,21 @@ def frucht_graph(create_using=None): """ G = cycle_graph(7, create_using) - G.add_edges_from([[0, 7], [1, 7], [2, 8], [3, 9], [4, 9], [5, 10], [6, 10], - [7, 11], [8, 11], [8, 9], [10, 11]]) + G.add_edges_from( + [ + [0, 7], + [1, 7], + [2, 8], + [3, 9], + [4, 9], + [5, 10], + [6, 10], + [7, 11], + [8, 11], + [8, 9], + [10, 11], + ] + ) G.name = "Frucht Graph" return G @@ -256,19 +296,18 @@ def heawood_graph(create_using=None): def hoffman_singleton_graph(): - '''Return the Hoffman-Singleton Graph.''' + """Return the Hoffman-Singleton Graph.""" G = nx.Graph() for i in range(5): for j in range(5): - G.add_edge(('pentagon', i, j), ('pentagon', i, (j - 1) % 5)) - G.add_edge(('pentagon', i, j), ('pentagon', i, (j + 1) % 5)) - G.add_edge(('pentagram', i, j), ('pentagram', i, (j - 2) % 5)) - G.add_edge(('pentagram', i, j), ('pentagram', i, (j + 2) % 5)) + G.add_edge(("pentagon", i, j), ("pentagon", i, (j - 1) % 5)) + G.add_edge(("pentagon", i, j), ("pentagon", i, (j + 1) % 5)) + G.add_edge(("pentagram", i, j), ("pentagram", i, (j - 2) % 5)) + G.add_edge(("pentagram", i, j), ("pentagram", i, (j + 2) % 5)) for k in range(5): - G.add_edge(('pentagon', i, j), - ('pentagram', k, (i * k + j) % 5)) + G.add_edge(("pentagon", i, j), ("pentagram", k, (i * k + j) % 5)) G = nx.convert_node_labels_to_integers(G) - G.name = 'Hoffman-Singleton Graph' + G.name = "Hoffman-Singleton Graph" return G @@ -278,7 +317,7 @@ def house_graph(create_using=None): "adjacencylist", "House Graph", 5, - [[2, 3], [1, 4], [1, 4, 5], [2, 3, 5], [3, 4]] + [[2, 3], [1, 4], [1, 4, 5], [2, 3, 5], [3, 4]], ] G = make_small_undirected_graph(description, create_using) return G @@ -290,7 +329,7 @@ def house_x_graph(create_using=None): "adjacencylist", "House-with-X-inside Graph", 5, - [[2, 3, 4], [1, 3, 4], [1, 2, 4, 5], [1, 2, 3, 5], [3, 4]] + [[2, 3, 4], [1, 3, 4], [1, 2, 4, 5], [1, 2, 3, 5], [3, 4]], ] G = make_small_undirected_graph(description, create_using) return G @@ -302,9 +341,20 @@ def icosahedral_graph(create_using=None): "adjacencylist", "Platonic Icosahedral Graph", 12, - [[2, 6, 8, 9, 12], [3, 6, 7, 9], [4, 7, 9, 10], [5, 7, 10, 11], - [6, 7, 11, 12], [7, 12], [], [9, 10, 11, 12], - [10], [11], [12], []] + [ + [2, 6, 8, 9, 12], + [3, 6, 7, 9], + [4, 7, 9, 10], 
+ [5, 7, 10, 11], + [6, 7, 11, 12], + [7, 12], + [], + [9, 10, 11, 12], + [10], + [11], + [12], + [], + ], ] G = make_small_undirected_graph(description, create_using) return G @@ -325,8 +375,18 @@ def krackhardt_kite_graph(create_using=None): "adjacencylist", "Krackhardt Kite Social Network", 10, - [[2, 3, 4, 6], [1, 4, 5, 7], [1, 4, 6], [1, 2, 3, 5, 6, 7], [2, 4, 7], - [1, 3, 4, 7, 8], [2, 4, 5, 6, 8], [6, 7, 9], [8, 10], [9]] + [ + [2, 3, 4, 6], + [1, 4, 5, 7], + [1, 4, 6], + [1, 2, 3, 5, 6, 7], + [2, 4, 7], + [1, 3, 4, 7, 8], + [2, 4, 5, 6, 8], + [6, 7, 9], + [8, 10], + [9], + ], ] G = make_small_undirected_graph(description, create_using) return G @@ -345,7 +405,7 @@ def octahedral_graph(create_using=None): "adjacencylist", "Platonic Octahedral Graph", 6, - [[2, 3, 4, 5], [3, 4, 6], [5, 6], [5, 6], [6], []] + [[2, 3, 4, 5], [3, 4, 6], [5, 6], [5, 6], [6], []], ] G = make_small_undirected_graph(description, create_using) return G @@ -364,8 +424,18 @@ def petersen_graph(create_using=None): "adjacencylist", "Petersen Graph", 10, - [[2, 5, 6], [1, 3, 7], [2, 4, 8], [3, 5, 9], [4, 1, 10], [1, 8, 9], [2, 9, 10], - [3, 6, 10], [4, 6, 7], [5, 7, 8]] + [ + [2, 5, 6], + [1, 3, 7], + [2, 4, 8], + [3, 5, 9], + [4, 1, 10], + [1, 8, 9], + [2, 9, 10], + [3, 6, 10], + [4, 6, 7], + [5, 7, 8], + ], ] G = make_small_undirected_graph(description, create_using) return G @@ -402,12 +472,32 @@ def truncated_cube_graph(create_using=None): "adjacencylist", "Truncated Cube Graph", 24, - [[2, 3, 5], [12, 15], [4, 5], [7, 9], - [6], [17, 19], [8, 9], [11, 13], - [10], [18, 21], [12, 13], [15], - [14], [22, 23], [16], [20, 24], - [18, 19], [21], [20], [24], - [22], [23], [24], []] + [ + [2, 3, 5], + [12, 15], + [4, 5], + [7, 9], + [6], + [17, 19], + [8, 9], + [11, 13], + [10], + [18, 21], + [12, 13], + [15], + [14], + [22, 23], + [16], + [20, 24], + [18, 19], + [21], + [20], + [24], + [22], + [23], + [24], + [], + ], ] G = make_small_undirected_graph(description, create_using) return G @@ -416,7 +506,7 @@ def truncated_cube_graph(create_using=None): def truncated_tetrahedron_graph(create_using=None): """Returns the skeleton of the truncated Platonic tetrahedron.""" G = path_graph(12, create_using) -# G.add_edges_from([(1,3),(1,10),(2,7),(4,12),(5,12),(6,8),(9,11)]) + # G.add_edges_from([(1,3),(1,10),(2,7),(4,12),(5,12),(6,8),(9,11)]) G.add_edges_from([(0, 2), (0, 9), (1, 6), (3, 11), (4, 11), (5, 7), (8, 10)]) G.name = "Truncated Tetrahedron Graph" return G @@ -428,15 +518,54 @@ def tutte_graph(create_using=None): "adjacencylist", "Tutte's Graph", 46, - [[2, 3, 4], [5, 27], [11, 12], [19, 20], [6, 34], - [7, 30], [8, 28], [9, 15], [10, 39], [11, 38], - [40], [13, 40], [14, 36], [15, 16], [35], - [17, 23], [18, 45], [19, 44], [46], [21, 46], - [22, 42], [23, 24], [41], [25, 28], [26, 33], - [27, 32], [34], [29], [30, 33], [31], - [32, 34], [33], [], [], [36, 39], - [37], [38, 40], [39], [], [], - [42, 45], [43], [44, 46], [45], [], []] + [ + [2, 3, 4], + [5, 27], + [11, 12], + [19, 20], + [6, 34], + [7, 30], + [8, 28], + [9, 15], + [10, 39], + [11, 38], + [40], + [13, 40], + [14, 36], + [15, 16], + [35], + [17, 23], + [18, 45], + [19, 44], + [46], + [21, 46], + [22, 42], + [23, 24], + [41], + [25, 28], + [26, 33], + [27, 32], + [34], + [29], + [30, 33], + [31], + [32, 34], + [33], + [], + [], + [36, 39], + [37], + [38, 40], + [39], + [], + [], + [42, 45], + [43], + [44, 46], + [45], + [], + [], + ], ] G = make_small_undirected_graph(description, create_using) return G diff --git a/networkx/generators/social.py 
b/networkx/generators/social.py index c0375069..c8013817 100644 --- a/networkx/generators/social.py +++ b/networkx/generators/social.py @@ -3,8 +3,12 @@ Famous social networks. """ import networkx as nx -__all__ = ['karate_club_graph', 'davis_southern_women_graph', - 'florentine_families_graph', 'les_miserables_graph'] +__all__ = [ + "karate_club_graph", + "davis_southern_women_graph", + "florentine_families_graph", + "les_miserables_graph", +] def karate_club_graph(): @@ -79,7 +83,7 @@ def karate_club_graph(): 0 0 1 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 1 0 1 0 1 1 0 0 0 0 0 1 1 1 0 1 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 1 0 0 1 1 1 0 1 1 0 0 1 1 1 1 1 1 1 0""" - for row, line in enumerate(zacharydat.split('\n')): + for row, line in enumerate(zacharydat.split("\n")): thisrow = [int(b) for b in line.split()] for col, entry in enumerate(thisrow): if entry == 1: @@ -87,7 +91,7 @@ def karate_club_graph(): # Add the name of each member's club as a node attribute. for v in G: - G.nodes[v]['club'] = 'Mr. Hi' if v in club1 else 'Officer' + G.nodes[v]["club"] = "Mr. Hi" if v in club1 else "Officer" return G @@ -103,133 +107,141 @@ def davis_southern_women_graph(): """ G = nx.Graph() # Top nodes - women = ["Evelyn Jefferson", - "Laura Mandeville", - "Theresa Anderson", - "Brenda Rogers", - "Charlotte McDowd", - "Frances Anderson", - "Eleanor Nye", - "Pearl Oglethorpe", - "Ruth DeSand", - "Verne Sanderson", - "Myra Liddel", - "Katherina Rogers", - "Sylvia Avondale", - "Nora Fayette", - "Helen Lloyd", - "Dorothy Murchison", - "Olivia Carleton", - "Flora Price"] + women = [ + "Evelyn Jefferson", + "Laura Mandeville", + "Theresa Anderson", + "Brenda Rogers", + "Charlotte McDowd", + "Frances Anderson", + "Eleanor Nye", + "Pearl Oglethorpe", + "Ruth DeSand", + "Verne Sanderson", + "Myra Liddel", + "Katherina Rogers", + "Sylvia Avondale", + "Nora Fayette", + "Helen Lloyd", + "Dorothy Murchison", + "Olivia Carleton", + "Flora Price", + ] G.add_nodes_from(women, bipartite=0) # Bottom nodes - events = ["E1", - "E2", - "E3", - "E4", - "E5", - "E6", - "E7", - "E8", - "E9", - "E10", - "E11", - "E12", - "E13", - "E14"] + events = [ + "E1", + "E2", + "E3", + "E4", + "E5", + "E6", + "E7", + "E8", + "E9", + "E10", + "E11", + "E12", + "E13", + "E14", + ] G.add_nodes_from(events, bipartite=1) - G.add_edges_from([("Evelyn Jefferson", "E1"), - ("Evelyn Jefferson", "E2"), - ("Evelyn Jefferson", "E3"), - ("Evelyn Jefferson", "E4"), - ("Evelyn Jefferson", "E5"), - ("Evelyn Jefferson", "E6"), - ("Evelyn Jefferson", "E8"), - ("Evelyn Jefferson", "E9"), - ("Laura Mandeville", "E1"), - ("Laura Mandeville", "E2"), - ("Laura Mandeville", "E3"), - ("Laura Mandeville", "E5"), - ("Laura Mandeville", "E6"), - ("Laura Mandeville", "E7"), - ("Laura Mandeville", "E8"), - ("Theresa Anderson", "E2"), - ("Theresa Anderson", "E3"), - ("Theresa Anderson", "E4"), - ("Theresa Anderson", "E5"), - ("Theresa Anderson", "E6"), - ("Theresa Anderson", "E7"), - ("Theresa Anderson", "E8"), - ("Theresa Anderson", "E9"), - ("Brenda Rogers", "E1"), - ("Brenda Rogers", "E3"), - ("Brenda Rogers", "E4"), - ("Brenda Rogers", "E5"), - ("Brenda Rogers", "E6"), - ("Brenda Rogers", "E7"), - ("Brenda Rogers", "E8"), - ("Charlotte McDowd", "E3"), - ("Charlotte McDowd", "E4"), - ("Charlotte McDowd", "E5"), - ("Charlotte McDowd", "E7"), - ("Frances Anderson", "E3"), - ("Frances Anderson", "E5"), - ("Frances Anderson", "E6"), - ("Frances Anderson", "E8"), - ("Eleanor Nye", "E5"), - ("Eleanor Nye", "E6"), - ("Eleanor Nye", "E7"), - ("Eleanor Nye", "E8"), - ("Pearl Oglethorpe", 
"E6"), - ("Pearl Oglethorpe", "E8"), - ("Pearl Oglethorpe", "E9"), - ("Ruth DeSand", "E5"), - ("Ruth DeSand", "E7"), - ("Ruth DeSand", "E8"), - ("Ruth DeSand", "E9"), - ("Verne Sanderson", "E7"), - ("Verne Sanderson", "E8"), - ("Verne Sanderson", "E9"), - ("Verne Sanderson", "E12"), - ("Myra Liddel", "E8"), - ("Myra Liddel", "E9"), - ("Myra Liddel", "E10"), - ("Myra Liddel", "E12"), - ("Katherina Rogers", "E8"), - ("Katherina Rogers", "E9"), - ("Katherina Rogers", "E10"), - ("Katherina Rogers", "E12"), - ("Katherina Rogers", "E13"), - ("Katherina Rogers", "E14"), - ("Sylvia Avondale", "E7"), - ("Sylvia Avondale", "E8"), - ("Sylvia Avondale", "E9"), - ("Sylvia Avondale", "E10"), - ("Sylvia Avondale", "E12"), - ("Sylvia Avondale", "E13"), - ("Sylvia Avondale", "E14"), - ("Nora Fayette", "E6"), - ("Nora Fayette", "E7"), - ("Nora Fayette", "E9"), - ("Nora Fayette", "E10"), - ("Nora Fayette", "E11"), - ("Nora Fayette", "E12"), - ("Nora Fayette", "E13"), - ("Nora Fayette", "E14"), - ("Helen Lloyd", "E7"), - ("Helen Lloyd", "E8"), - ("Helen Lloyd", "E10"), - ("Helen Lloyd", "E11"), - ("Helen Lloyd", "E12"), - ("Dorothy Murchison", "E8"), - ("Dorothy Murchison", "E9"), - ("Olivia Carleton", "E9"), - ("Olivia Carleton", "E11"), - ("Flora Price", "E9"), - ("Flora Price", "E11")]) - G.graph['top'] = women - G.graph['bottom'] = events + G.add_edges_from( + [ + ("Evelyn Jefferson", "E1"), + ("Evelyn Jefferson", "E2"), + ("Evelyn Jefferson", "E3"), + ("Evelyn Jefferson", "E4"), + ("Evelyn Jefferson", "E5"), + ("Evelyn Jefferson", "E6"), + ("Evelyn Jefferson", "E8"), + ("Evelyn Jefferson", "E9"), + ("Laura Mandeville", "E1"), + ("Laura Mandeville", "E2"), + ("Laura Mandeville", "E3"), + ("Laura Mandeville", "E5"), + ("Laura Mandeville", "E6"), + ("Laura Mandeville", "E7"), + ("Laura Mandeville", "E8"), + ("Theresa Anderson", "E2"), + ("Theresa Anderson", "E3"), + ("Theresa Anderson", "E4"), + ("Theresa Anderson", "E5"), + ("Theresa Anderson", "E6"), + ("Theresa Anderson", "E7"), + ("Theresa Anderson", "E8"), + ("Theresa Anderson", "E9"), + ("Brenda Rogers", "E1"), + ("Brenda Rogers", "E3"), + ("Brenda Rogers", "E4"), + ("Brenda Rogers", "E5"), + ("Brenda Rogers", "E6"), + ("Brenda Rogers", "E7"), + ("Brenda Rogers", "E8"), + ("Charlotte McDowd", "E3"), + ("Charlotte McDowd", "E4"), + ("Charlotte McDowd", "E5"), + ("Charlotte McDowd", "E7"), + ("Frances Anderson", "E3"), + ("Frances Anderson", "E5"), + ("Frances Anderson", "E6"), + ("Frances Anderson", "E8"), + ("Eleanor Nye", "E5"), + ("Eleanor Nye", "E6"), + ("Eleanor Nye", "E7"), + ("Eleanor Nye", "E8"), + ("Pearl Oglethorpe", "E6"), + ("Pearl Oglethorpe", "E8"), + ("Pearl Oglethorpe", "E9"), + ("Ruth DeSand", "E5"), + ("Ruth DeSand", "E7"), + ("Ruth DeSand", "E8"), + ("Ruth DeSand", "E9"), + ("Verne Sanderson", "E7"), + ("Verne Sanderson", "E8"), + ("Verne Sanderson", "E9"), + ("Verne Sanderson", "E12"), + ("Myra Liddel", "E8"), + ("Myra Liddel", "E9"), + ("Myra Liddel", "E10"), + ("Myra Liddel", "E12"), + ("Katherina Rogers", "E8"), + ("Katherina Rogers", "E9"), + ("Katherina Rogers", "E10"), + ("Katherina Rogers", "E12"), + ("Katherina Rogers", "E13"), + ("Katherina Rogers", "E14"), + ("Sylvia Avondale", "E7"), + ("Sylvia Avondale", "E8"), + ("Sylvia Avondale", "E9"), + ("Sylvia Avondale", "E10"), + ("Sylvia Avondale", "E12"), + ("Sylvia Avondale", "E13"), + ("Sylvia Avondale", "E14"), + ("Nora Fayette", "E6"), + ("Nora Fayette", "E7"), + ("Nora Fayette", "E9"), + ("Nora Fayette", "E10"), + ("Nora Fayette", "E11"), + ("Nora Fayette", "E12"), + 
("Nora Fayette", "E13"), + ("Nora Fayette", "E14"), + ("Helen Lloyd", "E7"), + ("Helen Lloyd", "E8"), + ("Helen Lloyd", "E10"), + ("Helen Lloyd", "E11"), + ("Helen Lloyd", "E12"), + ("Dorothy Murchison", "E8"), + ("Dorothy Murchison", "E9"), + ("Olivia Carleton", "E9"), + ("Olivia Carleton", "E11"), + ("Flora Price", "E9"), + ("Flora Price", "E11"), + ] + ) + G.graph["top"] = women + G.graph["bottom"] = events return G @@ -243,26 +255,26 @@ def florentine_families_graph(): Social Networks, Volume 8, Issue 3, September 1986, Pages 215-256 """ G = nx.Graph() - G.add_edge('Acciaiuoli', 'Medici') - G.add_edge('Castellani', 'Peruzzi') - G.add_edge('Castellani', 'Strozzi') - G.add_edge('Castellani', 'Barbadori') - G.add_edge('Medici', 'Barbadori') - G.add_edge('Medici', 'Ridolfi') - G.add_edge('Medici', 'Tornabuoni') - G.add_edge('Medici', 'Albizzi') - G.add_edge('Medici', 'Salviati') - G.add_edge('Salviati', 'Pazzi') - G.add_edge('Peruzzi', 'Strozzi') - G.add_edge('Peruzzi', 'Bischeri') - G.add_edge('Strozzi', 'Ridolfi') - G.add_edge('Strozzi', 'Bischeri') - G.add_edge('Ridolfi', 'Tornabuoni') - G.add_edge('Tornabuoni', 'Guadagni') - G.add_edge('Albizzi', 'Ginori') - G.add_edge('Albizzi', 'Guadagni') - G.add_edge('Bischeri', 'Guadagni') - G.add_edge('Guadagni', 'Lamberteschi') + G.add_edge("Acciaiuoli", "Medici") + G.add_edge("Castellani", "Peruzzi") + G.add_edge("Castellani", "Strozzi") + G.add_edge("Castellani", "Barbadori") + G.add_edge("Medici", "Barbadori") + G.add_edge("Medici", "Ridolfi") + G.add_edge("Medici", "Tornabuoni") + G.add_edge("Medici", "Albizzi") + G.add_edge("Medici", "Salviati") + G.add_edge("Salviati", "Pazzi") + G.add_edge("Peruzzi", "Strozzi") + G.add_edge("Peruzzi", "Bischeri") + G.add_edge("Strozzi", "Ridolfi") + G.add_edge("Strozzi", "Bischeri") + G.add_edge("Ridolfi", "Tornabuoni") + G.add_edge("Tornabuoni", "Guadagni") + G.add_edge("Albizzi", "Ginori") + G.add_edge("Albizzi", "Guadagni") + G.add_edge("Bischeri", "Guadagni") + G.add_edge("Guadagni", "Lamberteschi") return G @@ -276,258 +288,258 @@ def les_miserables_graph(): pp. 74-87. New York: AcM Press. 
""" G = nx.Graph() - G.add_edge('Napoleon', 'Myriel', weight=1) - G.add_edge('MlleBaptistine', 'Myriel', weight=8) - G.add_edge('MmeMagloire', 'Myriel', weight=10) - G.add_edge('MmeMagloire', 'MlleBaptistine', weight=6) - G.add_edge('CountessDeLo', 'Myriel', weight=1) - G.add_edge('Geborand', 'Myriel', weight=1) - G.add_edge('Champtercier', 'Myriel', weight=1) - G.add_edge('Cravatte', 'Myriel', weight=1) - G.add_edge('Count', 'Myriel', weight=2) - G.add_edge('OldMan', 'Myriel', weight=1) - G.add_edge('Valjean', 'Labarre', weight=1) - G.add_edge('Valjean', 'MmeMagloire', weight=3) - G.add_edge('Valjean', 'MlleBaptistine', weight=3) - G.add_edge('Valjean', 'Myriel', weight=5) - G.add_edge('Marguerite', 'Valjean', weight=1) - G.add_edge('MmeDeR', 'Valjean', weight=1) - G.add_edge('Isabeau', 'Valjean', weight=1) - G.add_edge('Gervais', 'Valjean', weight=1) - G.add_edge('Listolier', 'Tholomyes', weight=4) - G.add_edge('Fameuil', 'Tholomyes', weight=4) - G.add_edge('Fameuil', 'Listolier', weight=4) - G.add_edge('Blacheville', 'Tholomyes', weight=4) - G.add_edge('Blacheville', 'Listolier', weight=4) - G.add_edge('Blacheville', 'Fameuil', weight=4) - G.add_edge('Favourite', 'Tholomyes', weight=3) - G.add_edge('Favourite', 'Listolier', weight=3) - G.add_edge('Favourite', 'Fameuil', weight=3) - G.add_edge('Favourite', 'Blacheville', weight=4) - G.add_edge('Dahlia', 'Tholomyes', weight=3) - G.add_edge('Dahlia', 'Listolier', weight=3) - G.add_edge('Dahlia', 'Fameuil', weight=3) - G.add_edge('Dahlia', 'Blacheville', weight=3) - G.add_edge('Dahlia', 'Favourite', weight=5) - G.add_edge('Zephine', 'Tholomyes', weight=3) - G.add_edge('Zephine', 'Listolier', weight=3) - G.add_edge('Zephine', 'Fameuil', weight=3) - G.add_edge('Zephine', 'Blacheville', weight=3) - G.add_edge('Zephine', 'Favourite', weight=4) - G.add_edge('Zephine', 'Dahlia', weight=4) - G.add_edge('Fantine', 'Tholomyes', weight=3) - G.add_edge('Fantine', 'Listolier', weight=3) - G.add_edge('Fantine', 'Fameuil', weight=3) - G.add_edge('Fantine', 'Blacheville', weight=3) - G.add_edge('Fantine', 'Favourite', weight=4) - G.add_edge('Fantine', 'Dahlia', weight=4) - G.add_edge('Fantine', 'Zephine', weight=4) - G.add_edge('Fantine', 'Marguerite', weight=2) - G.add_edge('Fantine', 'Valjean', weight=9) - G.add_edge('MmeThenardier', 'Fantine', weight=2) - G.add_edge('MmeThenardier', 'Valjean', weight=7) - G.add_edge('Thenardier', 'MmeThenardier', weight=13) - G.add_edge('Thenardier', 'Fantine', weight=1) - G.add_edge('Thenardier', 'Valjean', weight=12) - G.add_edge('Cosette', 'MmeThenardier', weight=4) - G.add_edge('Cosette', 'Valjean', weight=31) - G.add_edge('Cosette', 'Tholomyes', weight=1) - G.add_edge('Cosette', 'Thenardier', weight=1) - G.add_edge('Javert', 'Valjean', weight=17) - G.add_edge('Javert', 'Fantine', weight=5) - G.add_edge('Javert', 'Thenardier', weight=5) - G.add_edge('Javert', 'MmeThenardier', weight=1) - G.add_edge('Javert', 'Cosette', weight=1) - G.add_edge('Fauchelevent', 'Valjean', weight=8) - G.add_edge('Fauchelevent', 'Javert', weight=1) - G.add_edge('Bamatabois', 'Fantine', weight=1) - G.add_edge('Bamatabois', 'Javert', weight=1) - G.add_edge('Bamatabois', 'Valjean', weight=2) - G.add_edge('Perpetue', 'Fantine', weight=1) - G.add_edge('Simplice', 'Perpetue', weight=2) - G.add_edge('Simplice', 'Valjean', weight=3) - G.add_edge('Simplice', 'Fantine', weight=2) - G.add_edge('Simplice', 'Javert', weight=1) - G.add_edge('Scaufflaire', 'Valjean', weight=1) - G.add_edge('Woman1', 'Valjean', weight=2) - G.add_edge('Woman1', 'Javert', 
weight=1) - G.add_edge('Judge', 'Valjean', weight=3) - G.add_edge('Judge', 'Bamatabois', weight=2) - G.add_edge('Champmathieu', 'Valjean', weight=3) - G.add_edge('Champmathieu', 'Judge', weight=3) - G.add_edge('Champmathieu', 'Bamatabois', weight=2) - G.add_edge('Brevet', 'Judge', weight=2) - G.add_edge('Brevet', 'Champmathieu', weight=2) - G.add_edge('Brevet', 'Valjean', weight=2) - G.add_edge('Brevet', 'Bamatabois', weight=1) - G.add_edge('Chenildieu', 'Judge', weight=2) - G.add_edge('Chenildieu', 'Champmathieu', weight=2) - G.add_edge('Chenildieu', 'Brevet', weight=2) - G.add_edge('Chenildieu', 'Valjean', weight=2) - G.add_edge('Chenildieu', 'Bamatabois', weight=1) - G.add_edge('Cochepaille', 'Judge', weight=2) - G.add_edge('Cochepaille', 'Champmathieu', weight=2) - G.add_edge('Cochepaille', 'Brevet', weight=2) - G.add_edge('Cochepaille', 'Chenildieu', weight=2) - G.add_edge('Cochepaille', 'Valjean', weight=2) - G.add_edge('Cochepaille', 'Bamatabois', weight=1) - G.add_edge('Pontmercy', 'Thenardier', weight=1) - G.add_edge('Boulatruelle', 'Thenardier', weight=1) - G.add_edge('Eponine', 'MmeThenardier', weight=2) - G.add_edge('Eponine', 'Thenardier', weight=3) - G.add_edge('Anzelma', 'Eponine', weight=2) - G.add_edge('Anzelma', 'Thenardier', weight=2) - G.add_edge('Anzelma', 'MmeThenardier', weight=1) - G.add_edge('Woman2', 'Valjean', weight=3) - G.add_edge('Woman2', 'Cosette', weight=1) - G.add_edge('Woman2', 'Javert', weight=1) - G.add_edge('MotherInnocent', 'Fauchelevent', weight=3) - G.add_edge('MotherInnocent', 'Valjean', weight=1) - G.add_edge('Gribier', 'Fauchelevent', weight=2) - G.add_edge('MmeBurgon', 'Jondrette', weight=1) - G.add_edge('Gavroche', 'MmeBurgon', weight=2) - G.add_edge('Gavroche', 'Thenardier', weight=1) - G.add_edge('Gavroche', 'Javert', weight=1) - G.add_edge('Gavroche', 'Valjean', weight=1) - G.add_edge('Gillenormand', 'Cosette', weight=3) - G.add_edge('Gillenormand', 'Valjean', weight=2) - G.add_edge('Magnon', 'Gillenormand', weight=1) - G.add_edge('Magnon', 'MmeThenardier', weight=1) - G.add_edge('MlleGillenormand', 'Gillenormand', weight=9) - G.add_edge('MlleGillenormand', 'Cosette', weight=2) - G.add_edge('MlleGillenormand', 'Valjean', weight=2) - G.add_edge('MmePontmercy', 'MlleGillenormand', weight=1) - G.add_edge('MmePontmercy', 'Pontmercy', weight=1) - G.add_edge('MlleVaubois', 'MlleGillenormand', weight=1) - G.add_edge('LtGillenormand', 'MlleGillenormand', weight=2) - G.add_edge('LtGillenormand', 'Gillenormand', weight=1) - G.add_edge('LtGillenormand', 'Cosette', weight=1) - G.add_edge('Marius', 'MlleGillenormand', weight=6) - G.add_edge('Marius', 'Gillenormand', weight=12) - G.add_edge('Marius', 'Pontmercy', weight=1) - G.add_edge('Marius', 'LtGillenormand', weight=1) - G.add_edge('Marius', 'Cosette', weight=21) - G.add_edge('Marius', 'Valjean', weight=19) - G.add_edge('Marius', 'Tholomyes', weight=1) - G.add_edge('Marius', 'Thenardier', weight=2) - G.add_edge('Marius', 'Eponine', weight=5) - G.add_edge('Marius', 'Gavroche', weight=4) - G.add_edge('BaronessT', 'Gillenormand', weight=1) - G.add_edge('BaronessT', 'Marius', weight=1) - G.add_edge('Mabeuf', 'Marius', weight=1) - G.add_edge('Mabeuf', 'Eponine', weight=1) - G.add_edge('Mabeuf', 'Gavroche', weight=1) - G.add_edge('Enjolras', 'Marius', weight=7) - G.add_edge('Enjolras', 'Gavroche', weight=7) - G.add_edge('Enjolras', 'Javert', weight=6) - G.add_edge('Enjolras', 'Mabeuf', weight=1) - G.add_edge('Enjolras', 'Valjean', weight=4) - G.add_edge('Combeferre', 'Enjolras', weight=15) - 
G.add_edge('Combeferre', 'Marius', weight=5) - G.add_edge('Combeferre', 'Gavroche', weight=6) - G.add_edge('Combeferre', 'Mabeuf', weight=2) - G.add_edge('Prouvaire', 'Gavroche', weight=1) - G.add_edge('Prouvaire', 'Enjolras', weight=4) - G.add_edge('Prouvaire', 'Combeferre', weight=2) - G.add_edge('Feuilly', 'Gavroche', weight=2) - G.add_edge('Feuilly', 'Enjolras', weight=6) - G.add_edge('Feuilly', 'Prouvaire', weight=2) - G.add_edge('Feuilly', 'Combeferre', weight=5) - G.add_edge('Feuilly', 'Mabeuf', weight=1) - G.add_edge('Feuilly', 'Marius', weight=1) - G.add_edge('Courfeyrac', 'Marius', weight=9) - G.add_edge('Courfeyrac', 'Enjolras', weight=17) - G.add_edge('Courfeyrac', 'Combeferre', weight=13) - G.add_edge('Courfeyrac', 'Gavroche', weight=7) - G.add_edge('Courfeyrac', 'Mabeuf', weight=2) - G.add_edge('Courfeyrac', 'Eponine', weight=1) - G.add_edge('Courfeyrac', 'Feuilly', weight=6) - G.add_edge('Courfeyrac', 'Prouvaire', weight=3) - G.add_edge('Bahorel', 'Combeferre', weight=5) - G.add_edge('Bahorel', 'Gavroche', weight=5) - G.add_edge('Bahorel', 'Courfeyrac', weight=6) - G.add_edge('Bahorel', 'Mabeuf', weight=2) - G.add_edge('Bahorel', 'Enjolras', weight=4) - G.add_edge('Bahorel', 'Feuilly', weight=3) - G.add_edge('Bahorel', 'Prouvaire', weight=2) - G.add_edge('Bahorel', 'Marius', weight=1) - G.add_edge('Bossuet', 'Marius', weight=5) - G.add_edge('Bossuet', 'Courfeyrac', weight=12) - G.add_edge('Bossuet', 'Gavroche', weight=5) - G.add_edge('Bossuet', 'Bahorel', weight=4) - G.add_edge('Bossuet', 'Enjolras', weight=10) - G.add_edge('Bossuet', 'Feuilly', weight=6) - G.add_edge('Bossuet', 'Prouvaire', weight=2) - G.add_edge('Bossuet', 'Combeferre', weight=9) - G.add_edge('Bossuet', 'Mabeuf', weight=1) - G.add_edge('Bossuet', 'Valjean', weight=1) - G.add_edge('Joly', 'Bahorel', weight=5) - G.add_edge('Joly', 'Bossuet', weight=7) - G.add_edge('Joly', 'Gavroche', weight=3) - G.add_edge('Joly', 'Courfeyrac', weight=5) - G.add_edge('Joly', 'Enjolras', weight=5) - G.add_edge('Joly', 'Feuilly', weight=5) - G.add_edge('Joly', 'Prouvaire', weight=2) - G.add_edge('Joly', 'Combeferre', weight=5) - G.add_edge('Joly', 'Mabeuf', weight=1) - G.add_edge('Joly', 'Marius', weight=2) - G.add_edge('Grantaire', 'Bossuet', weight=3) - G.add_edge('Grantaire', 'Enjolras', weight=3) - G.add_edge('Grantaire', 'Combeferre', weight=1) - G.add_edge('Grantaire', 'Courfeyrac', weight=2) - G.add_edge('Grantaire', 'Joly', weight=2) - G.add_edge('Grantaire', 'Gavroche', weight=1) - G.add_edge('Grantaire', 'Bahorel', weight=1) - G.add_edge('Grantaire', 'Feuilly', weight=1) - G.add_edge('Grantaire', 'Prouvaire', weight=1) - G.add_edge('MotherPlutarch', 'Mabeuf', weight=3) - G.add_edge('Gueulemer', 'Thenardier', weight=5) - G.add_edge('Gueulemer', 'Valjean', weight=1) - G.add_edge('Gueulemer', 'MmeThenardier', weight=1) - G.add_edge('Gueulemer', 'Javert', weight=1) - G.add_edge('Gueulemer', 'Gavroche', weight=1) - G.add_edge('Gueulemer', 'Eponine', weight=1) - G.add_edge('Babet', 'Thenardier', weight=6) - G.add_edge('Babet', 'Gueulemer', weight=6) - G.add_edge('Babet', 'Valjean', weight=1) - G.add_edge('Babet', 'MmeThenardier', weight=1) - G.add_edge('Babet', 'Javert', weight=2) - G.add_edge('Babet', 'Gavroche', weight=1) - G.add_edge('Babet', 'Eponine', weight=1) - G.add_edge('Claquesous', 'Thenardier', weight=4) - G.add_edge('Claquesous', 'Babet', weight=4) - G.add_edge('Claquesous', 'Gueulemer', weight=4) - G.add_edge('Claquesous', 'Valjean', weight=1) - G.add_edge('Claquesous', 'MmeThenardier', weight=1) - 
G.add_edge('Claquesous', 'Javert', weight=1) - G.add_edge('Claquesous', 'Eponine', weight=1) - G.add_edge('Claquesous', 'Enjolras', weight=1) - G.add_edge('Montparnasse', 'Javert', weight=1) - G.add_edge('Montparnasse', 'Babet', weight=2) - G.add_edge('Montparnasse', 'Gueulemer', weight=2) - G.add_edge('Montparnasse', 'Claquesous', weight=2) - G.add_edge('Montparnasse', 'Valjean', weight=1) - G.add_edge('Montparnasse', 'Gavroche', weight=1) - G.add_edge('Montparnasse', 'Eponine', weight=1) - G.add_edge('Montparnasse', 'Thenardier', weight=1) - G.add_edge('Toussaint', 'Cosette', weight=2) - G.add_edge('Toussaint', 'Javert', weight=1) - G.add_edge('Toussaint', 'Valjean', weight=1) - G.add_edge('Child1', 'Gavroche', weight=2) - G.add_edge('Child2', 'Gavroche', weight=2) - G.add_edge('Child2', 'Child1', weight=3) - G.add_edge('Brujon', 'Babet', weight=3) - G.add_edge('Brujon', 'Gueulemer', weight=3) - G.add_edge('Brujon', 'Thenardier', weight=3) - G.add_edge('Brujon', 'Gavroche', weight=1) - G.add_edge('Brujon', 'Eponine', weight=1) - G.add_edge('Brujon', 'Claquesous', weight=1) - G.add_edge('Brujon', 'Montparnasse', weight=1) - G.add_edge('MmeHucheloup', 'Bossuet', weight=1) - G.add_edge('MmeHucheloup', 'Joly', weight=1) - G.add_edge('MmeHucheloup', 'Grantaire', weight=1) - G.add_edge('MmeHucheloup', 'Bahorel', weight=1) - G.add_edge('MmeHucheloup', 'Courfeyrac', weight=1) - G.add_edge('MmeHucheloup', 'Gavroche', weight=1) - G.add_edge('MmeHucheloup', 'Enjolras', weight=1) + G.add_edge("Napoleon", "Myriel", weight=1) + G.add_edge("MlleBaptistine", "Myriel", weight=8) + G.add_edge("MmeMagloire", "Myriel", weight=10) + G.add_edge("MmeMagloire", "MlleBaptistine", weight=6) + G.add_edge("CountessDeLo", "Myriel", weight=1) + G.add_edge("Geborand", "Myriel", weight=1) + G.add_edge("Champtercier", "Myriel", weight=1) + G.add_edge("Cravatte", "Myriel", weight=1) + G.add_edge("Count", "Myriel", weight=2) + G.add_edge("OldMan", "Myriel", weight=1) + G.add_edge("Valjean", "Labarre", weight=1) + G.add_edge("Valjean", "MmeMagloire", weight=3) + G.add_edge("Valjean", "MlleBaptistine", weight=3) + G.add_edge("Valjean", "Myriel", weight=5) + G.add_edge("Marguerite", "Valjean", weight=1) + G.add_edge("MmeDeR", "Valjean", weight=1) + G.add_edge("Isabeau", "Valjean", weight=1) + G.add_edge("Gervais", "Valjean", weight=1) + G.add_edge("Listolier", "Tholomyes", weight=4) + G.add_edge("Fameuil", "Tholomyes", weight=4) + G.add_edge("Fameuil", "Listolier", weight=4) + G.add_edge("Blacheville", "Tholomyes", weight=4) + G.add_edge("Blacheville", "Listolier", weight=4) + G.add_edge("Blacheville", "Fameuil", weight=4) + G.add_edge("Favourite", "Tholomyes", weight=3) + G.add_edge("Favourite", "Listolier", weight=3) + G.add_edge("Favourite", "Fameuil", weight=3) + G.add_edge("Favourite", "Blacheville", weight=4) + G.add_edge("Dahlia", "Tholomyes", weight=3) + G.add_edge("Dahlia", "Listolier", weight=3) + G.add_edge("Dahlia", "Fameuil", weight=3) + G.add_edge("Dahlia", "Blacheville", weight=3) + G.add_edge("Dahlia", "Favourite", weight=5) + G.add_edge("Zephine", "Tholomyes", weight=3) + G.add_edge("Zephine", "Listolier", weight=3) + G.add_edge("Zephine", "Fameuil", weight=3) + G.add_edge("Zephine", "Blacheville", weight=3) + G.add_edge("Zephine", "Favourite", weight=4) + G.add_edge("Zephine", "Dahlia", weight=4) + G.add_edge("Fantine", "Tholomyes", weight=3) + G.add_edge("Fantine", "Listolier", weight=3) + G.add_edge("Fantine", "Fameuil", weight=3) + G.add_edge("Fantine", "Blacheville", weight=3) + G.add_edge("Fantine", 
"Favourite", weight=4) + G.add_edge("Fantine", "Dahlia", weight=4) + G.add_edge("Fantine", "Zephine", weight=4) + G.add_edge("Fantine", "Marguerite", weight=2) + G.add_edge("Fantine", "Valjean", weight=9) + G.add_edge("MmeThenardier", "Fantine", weight=2) + G.add_edge("MmeThenardier", "Valjean", weight=7) + G.add_edge("Thenardier", "MmeThenardier", weight=13) + G.add_edge("Thenardier", "Fantine", weight=1) + G.add_edge("Thenardier", "Valjean", weight=12) + G.add_edge("Cosette", "MmeThenardier", weight=4) + G.add_edge("Cosette", "Valjean", weight=31) + G.add_edge("Cosette", "Tholomyes", weight=1) + G.add_edge("Cosette", "Thenardier", weight=1) + G.add_edge("Javert", "Valjean", weight=17) + G.add_edge("Javert", "Fantine", weight=5) + G.add_edge("Javert", "Thenardier", weight=5) + G.add_edge("Javert", "MmeThenardier", weight=1) + G.add_edge("Javert", "Cosette", weight=1) + G.add_edge("Fauchelevent", "Valjean", weight=8) + G.add_edge("Fauchelevent", "Javert", weight=1) + G.add_edge("Bamatabois", "Fantine", weight=1) + G.add_edge("Bamatabois", "Javert", weight=1) + G.add_edge("Bamatabois", "Valjean", weight=2) + G.add_edge("Perpetue", "Fantine", weight=1) + G.add_edge("Simplice", "Perpetue", weight=2) + G.add_edge("Simplice", "Valjean", weight=3) + G.add_edge("Simplice", "Fantine", weight=2) + G.add_edge("Simplice", "Javert", weight=1) + G.add_edge("Scaufflaire", "Valjean", weight=1) + G.add_edge("Woman1", "Valjean", weight=2) + G.add_edge("Woman1", "Javert", weight=1) + G.add_edge("Judge", "Valjean", weight=3) + G.add_edge("Judge", "Bamatabois", weight=2) + G.add_edge("Champmathieu", "Valjean", weight=3) + G.add_edge("Champmathieu", "Judge", weight=3) + G.add_edge("Champmathieu", "Bamatabois", weight=2) + G.add_edge("Brevet", "Judge", weight=2) + G.add_edge("Brevet", "Champmathieu", weight=2) + G.add_edge("Brevet", "Valjean", weight=2) + G.add_edge("Brevet", "Bamatabois", weight=1) + G.add_edge("Chenildieu", "Judge", weight=2) + G.add_edge("Chenildieu", "Champmathieu", weight=2) + G.add_edge("Chenildieu", "Brevet", weight=2) + G.add_edge("Chenildieu", "Valjean", weight=2) + G.add_edge("Chenildieu", "Bamatabois", weight=1) + G.add_edge("Cochepaille", "Judge", weight=2) + G.add_edge("Cochepaille", "Champmathieu", weight=2) + G.add_edge("Cochepaille", "Brevet", weight=2) + G.add_edge("Cochepaille", "Chenildieu", weight=2) + G.add_edge("Cochepaille", "Valjean", weight=2) + G.add_edge("Cochepaille", "Bamatabois", weight=1) + G.add_edge("Pontmercy", "Thenardier", weight=1) + G.add_edge("Boulatruelle", "Thenardier", weight=1) + G.add_edge("Eponine", "MmeThenardier", weight=2) + G.add_edge("Eponine", "Thenardier", weight=3) + G.add_edge("Anzelma", "Eponine", weight=2) + G.add_edge("Anzelma", "Thenardier", weight=2) + G.add_edge("Anzelma", "MmeThenardier", weight=1) + G.add_edge("Woman2", "Valjean", weight=3) + G.add_edge("Woman2", "Cosette", weight=1) + G.add_edge("Woman2", "Javert", weight=1) + G.add_edge("MotherInnocent", "Fauchelevent", weight=3) + G.add_edge("MotherInnocent", "Valjean", weight=1) + G.add_edge("Gribier", "Fauchelevent", weight=2) + G.add_edge("MmeBurgon", "Jondrette", weight=1) + G.add_edge("Gavroche", "MmeBurgon", weight=2) + G.add_edge("Gavroche", "Thenardier", weight=1) + G.add_edge("Gavroche", "Javert", weight=1) + G.add_edge("Gavroche", "Valjean", weight=1) + G.add_edge("Gillenormand", "Cosette", weight=3) + G.add_edge("Gillenormand", "Valjean", weight=2) + G.add_edge("Magnon", "Gillenormand", weight=1) + G.add_edge("Magnon", "MmeThenardier", weight=1) + 
G.add_edge("MlleGillenormand", "Gillenormand", weight=9) + G.add_edge("MlleGillenormand", "Cosette", weight=2) + G.add_edge("MlleGillenormand", "Valjean", weight=2) + G.add_edge("MmePontmercy", "MlleGillenormand", weight=1) + G.add_edge("MmePontmercy", "Pontmercy", weight=1) + G.add_edge("MlleVaubois", "MlleGillenormand", weight=1) + G.add_edge("LtGillenormand", "MlleGillenormand", weight=2) + G.add_edge("LtGillenormand", "Gillenormand", weight=1) + G.add_edge("LtGillenormand", "Cosette", weight=1) + G.add_edge("Marius", "MlleGillenormand", weight=6) + G.add_edge("Marius", "Gillenormand", weight=12) + G.add_edge("Marius", "Pontmercy", weight=1) + G.add_edge("Marius", "LtGillenormand", weight=1) + G.add_edge("Marius", "Cosette", weight=21) + G.add_edge("Marius", "Valjean", weight=19) + G.add_edge("Marius", "Tholomyes", weight=1) + G.add_edge("Marius", "Thenardier", weight=2) + G.add_edge("Marius", "Eponine", weight=5) + G.add_edge("Marius", "Gavroche", weight=4) + G.add_edge("BaronessT", "Gillenormand", weight=1) + G.add_edge("BaronessT", "Marius", weight=1) + G.add_edge("Mabeuf", "Marius", weight=1) + G.add_edge("Mabeuf", "Eponine", weight=1) + G.add_edge("Mabeuf", "Gavroche", weight=1) + G.add_edge("Enjolras", "Marius", weight=7) + G.add_edge("Enjolras", "Gavroche", weight=7) + G.add_edge("Enjolras", "Javert", weight=6) + G.add_edge("Enjolras", "Mabeuf", weight=1) + G.add_edge("Enjolras", "Valjean", weight=4) + G.add_edge("Combeferre", "Enjolras", weight=15) + G.add_edge("Combeferre", "Marius", weight=5) + G.add_edge("Combeferre", "Gavroche", weight=6) + G.add_edge("Combeferre", "Mabeuf", weight=2) + G.add_edge("Prouvaire", "Gavroche", weight=1) + G.add_edge("Prouvaire", "Enjolras", weight=4) + G.add_edge("Prouvaire", "Combeferre", weight=2) + G.add_edge("Feuilly", "Gavroche", weight=2) + G.add_edge("Feuilly", "Enjolras", weight=6) + G.add_edge("Feuilly", "Prouvaire", weight=2) + G.add_edge("Feuilly", "Combeferre", weight=5) + G.add_edge("Feuilly", "Mabeuf", weight=1) + G.add_edge("Feuilly", "Marius", weight=1) + G.add_edge("Courfeyrac", "Marius", weight=9) + G.add_edge("Courfeyrac", "Enjolras", weight=17) + G.add_edge("Courfeyrac", "Combeferre", weight=13) + G.add_edge("Courfeyrac", "Gavroche", weight=7) + G.add_edge("Courfeyrac", "Mabeuf", weight=2) + G.add_edge("Courfeyrac", "Eponine", weight=1) + G.add_edge("Courfeyrac", "Feuilly", weight=6) + G.add_edge("Courfeyrac", "Prouvaire", weight=3) + G.add_edge("Bahorel", "Combeferre", weight=5) + G.add_edge("Bahorel", "Gavroche", weight=5) + G.add_edge("Bahorel", "Courfeyrac", weight=6) + G.add_edge("Bahorel", "Mabeuf", weight=2) + G.add_edge("Bahorel", "Enjolras", weight=4) + G.add_edge("Bahorel", "Feuilly", weight=3) + G.add_edge("Bahorel", "Prouvaire", weight=2) + G.add_edge("Bahorel", "Marius", weight=1) + G.add_edge("Bossuet", "Marius", weight=5) + G.add_edge("Bossuet", "Courfeyrac", weight=12) + G.add_edge("Bossuet", "Gavroche", weight=5) + G.add_edge("Bossuet", "Bahorel", weight=4) + G.add_edge("Bossuet", "Enjolras", weight=10) + G.add_edge("Bossuet", "Feuilly", weight=6) + G.add_edge("Bossuet", "Prouvaire", weight=2) + G.add_edge("Bossuet", "Combeferre", weight=9) + G.add_edge("Bossuet", "Mabeuf", weight=1) + G.add_edge("Bossuet", "Valjean", weight=1) + G.add_edge("Joly", "Bahorel", weight=5) + G.add_edge("Joly", "Bossuet", weight=7) + G.add_edge("Joly", "Gavroche", weight=3) + G.add_edge("Joly", "Courfeyrac", weight=5) + G.add_edge("Joly", "Enjolras", weight=5) + G.add_edge("Joly", "Feuilly", weight=5) + G.add_edge("Joly", 
"Prouvaire", weight=2) + G.add_edge("Joly", "Combeferre", weight=5) + G.add_edge("Joly", "Mabeuf", weight=1) + G.add_edge("Joly", "Marius", weight=2) + G.add_edge("Grantaire", "Bossuet", weight=3) + G.add_edge("Grantaire", "Enjolras", weight=3) + G.add_edge("Grantaire", "Combeferre", weight=1) + G.add_edge("Grantaire", "Courfeyrac", weight=2) + G.add_edge("Grantaire", "Joly", weight=2) + G.add_edge("Grantaire", "Gavroche", weight=1) + G.add_edge("Grantaire", "Bahorel", weight=1) + G.add_edge("Grantaire", "Feuilly", weight=1) + G.add_edge("Grantaire", "Prouvaire", weight=1) + G.add_edge("MotherPlutarch", "Mabeuf", weight=3) + G.add_edge("Gueulemer", "Thenardier", weight=5) + G.add_edge("Gueulemer", "Valjean", weight=1) + G.add_edge("Gueulemer", "MmeThenardier", weight=1) + G.add_edge("Gueulemer", "Javert", weight=1) + G.add_edge("Gueulemer", "Gavroche", weight=1) + G.add_edge("Gueulemer", "Eponine", weight=1) + G.add_edge("Babet", "Thenardier", weight=6) + G.add_edge("Babet", "Gueulemer", weight=6) + G.add_edge("Babet", "Valjean", weight=1) + G.add_edge("Babet", "MmeThenardier", weight=1) + G.add_edge("Babet", "Javert", weight=2) + G.add_edge("Babet", "Gavroche", weight=1) + G.add_edge("Babet", "Eponine", weight=1) + G.add_edge("Claquesous", "Thenardier", weight=4) + G.add_edge("Claquesous", "Babet", weight=4) + G.add_edge("Claquesous", "Gueulemer", weight=4) + G.add_edge("Claquesous", "Valjean", weight=1) + G.add_edge("Claquesous", "MmeThenardier", weight=1) + G.add_edge("Claquesous", "Javert", weight=1) + G.add_edge("Claquesous", "Eponine", weight=1) + G.add_edge("Claquesous", "Enjolras", weight=1) + G.add_edge("Montparnasse", "Javert", weight=1) + G.add_edge("Montparnasse", "Babet", weight=2) + G.add_edge("Montparnasse", "Gueulemer", weight=2) + G.add_edge("Montparnasse", "Claquesous", weight=2) + G.add_edge("Montparnasse", "Valjean", weight=1) + G.add_edge("Montparnasse", "Gavroche", weight=1) + G.add_edge("Montparnasse", "Eponine", weight=1) + G.add_edge("Montparnasse", "Thenardier", weight=1) + G.add_edge("Toussaint", "Cosette", weight=2) + G.add_edge("Toussaint", "Javert", weight=1) + G.add_edge("Toussaint", "Valjean", weight=1) + G.add_edge("Child1", "Gavroche", weight=2) + G.add_edge("Child2", "Gavroche", weight=2) + G.add_edge("Child2", "Child1", weight=3) + G.add_edge("Brujon", "Babet", weight=3) + G.add_edge("Brujon", "Gueulemer", weight=3) + G.add_edge("Brujon", "Thenardier", weight=3) + G.add_edge("Brujon", "Gavroche", weight=1) + G.add_edge("Brujon", "Eponine", weight=1) + G.add_edge("Brujon", "Claquesous", weight=1) + G.add_edge("Brujon", "Montparnasse", weight=1) + G.add_edge("MmeHucheloup", "Bossuet", weight=1) + G.add_edge("MmeHucheloup", "Joly", weight=1) + G.add_edge("MmeHucheloup", "Grantaire", weight=1) + G.add_edge("MmeHucheloup", "Bahorel", weight=1) + G.add_edge("MmeHucheloup", "Courfeyrac", weight=1) + G.add_edge("MmeHucheloup", "Gavroche", weight=1) + G.add_edge("MmeHucheloup", "Enjolras", weight=1) return G diff --git a/networkx/generators/spectral_graph_forge.py b/networkx/generators/spectral_graph_forge.py index 1dcf9999..77bdb166 100644 --- a/networkx/generators/spectral_graph_forge.py +++ b/networkx/generators/spectral_graph_forge.py @@ -4,7 +4,7 @@ import networkx as nx from networkx.utils import np_random_state -__all__ = ['spectral_graph_forge'] +__all__ = ["spectral_graph_forge"] def _truncate(x): @@ -76,12 +76,12 @@ def _mat_spect_approx(A, level, sorteigs=True, reverse=False, absolute=True): for i in range(level, n): V[:, k[i]] = z - B = 
V*np.diag(d)*np.transpose(V) + B = V * np.diag(d) * np.transpose(V) return B @np_random_state(3) -def spectral_graph_forge(G, alpha, transformation='identity', seed=None): +def spectral_graph_forge(G, alpha, transformation="identity", seed=None): """Returns a random simple graph with spectrum resembling that of `G` This algorithm, called Spectral Graph Forge (SGF), computes the @@ -157,34 +157,34 @@ def spectral_graph_forge(G, alpha, transformation='identity', seed=None): import numpy as np import scipy.stats as stats - available_transformations = ['identity', 'modularity'] + available_transformations = ["identity", "modularity"] alpha = _truncate(alpha) A = nx.to_numpy_matrix(G) n = A.shape[1] - level = int(round(n*alpha)) + level = int(round(n * alpha)) if transformation not in available_transformations: - msg = f'\'{transformation}\' is not a valid transformation. ' - msg += f'Transformations: {available_transformations}' + msg = f"'{transformation}' is not a valid transformation. " + msg += f"Transformations: {available_transformations}" raise nx.NetworkXError(msg) K = np.ones((1, n)) * A B = A - if (transformation == 'modularity'): + if transformation == "modularity": B -= np.transpose(K) * K / float(sum(np.ravel(K))) B = _mat_spect_approx(B, level, sorteigs=True, absolute=True) - if (transformation == 'modularity'): + if transformation == "modularity": B += np.transpose(K) * K / float(sum(np.ravel(K))) B = np.vectorize(_truncate, otypes=[np.float])(B) np.fill_diagonal(B, np.zeros((1, n))) - for i in range(n-1): - B[i, i+1:] = stats.bernoulli.rvs(B[i, i+1:], random_state=seed) - B[i+1:, i] = np.transpose(B[i, i+1:]) + for i in range(n - 1): + B[i, i + 1 :] = stats.bernoulli.rvs(B[i, i + 1 :], random_state=seed) + B[i + 1 :, i] = np.transpose(B[i, i + 1 :]) H = nx.from_numpy_matrix(B) diff --git a/networkx/generators/stochastic.py b/networkx/generators/stochastic.py index 4eb31b99..36276581 100644 --- a/networkx/generators/stochastic.py +++ b/networkx/generators/stochastic.py @@ -7,11 +7,11 @@ from networkx.classes import DiGraph from networkx.classes import MultiDiGraph from networkx.utils import not_implemented_for -__all__ = ['stochastic_graph'] +__all__ = ["stochastic_graph"] -@not_implemented_for('undirected') -def stochastic_graph(G, copy=True, weight='weight'): +@not_implemented_for("undirected") +def stochastic_graph(G, copy=True, weight="weight"): """Returns a right-stochastic representation of directed graph `G`. A right-stochastic graph is a weighted digraph in which for each diff --git a/networkx/generators/sudoku.py b/networkx/generators/sudoku.py index c12e73ee..61f49aff 100644 --- a/networkx/generators/sudoku.py +++ b/networkx/generators/sudoku.py @@ -42,7 +42,7 @@ References import networkx as nx from networkx.exception import NetworkXError -__all__ = ['sudoku_graph'] +__all__ = ["sudoku_graph"] def sudoku_graph(n=3): diff --git a/networkx/generators/tests/test_atlas.py b/networkx/generators/tests/test_atlas.py index 91bb20ef..e9ce00b2 100644 --- a/networkx/generators/tests/test_atlas.py +++ b/networkx/generators/tests/test_atlas.py @@ -66,7 +66,7 @@ class TestAtlasGraphG: # There are three exceptions to this rule in the order given in # the "Atlas of Graphs" book, so we need to manually exclude # those. 
- exceptions = [('G55', 'G56'), ('G1007', 'G1008'), ('G1012', 'G1013')] + exceptions = [("G55", "G56"), ("G1007", "G1008"), ("G1012", "G1013")] for n, group in groupby(self.GAG, key=nx.number_of_nodes): for m, group in groupby(group, key=nx.number_of_edges): for G1, G2 in pairwise(group): diff --git a/networkx/generators/tests/test_classic.py b/networkx/generators/tests/test_classic.py index c34de283..c98eb8f5 100644 --- a/networkx/generators/tests/test_classic.py +++ b/networkx/generators/tests/test_classic.py @@ -16,20 +16,20 @@ from networkx.testing import assert_nodes_equal is_isomorphic = graph_could_be_isomorphic -class TestGeneratorClassic(): +class TestGeneratorClassic: def test_balanced_tree(self): # balanced_tree(r,h) is a tree with (r**(h+1)-1)/(r-1) edges for r, h in [(2, 2), (3, 3), (6, 2)]: t = nx.balanced_tree(r, h) order = t.order() - assert order == (r**(h + 1) - 1) / (r - 1) + assert order == (r ** (h + 1) - 1) / (r - 1) assert nx.is_connected(t) assert t.size() == order - 1 dh = nx.degree_histogram(t) assert dh[0] == 0 # no nodes of 0 - assert dh[1] == r**h # nodes of degree 1 are leaves + assert dh[1] == r ** h # nodes of degree 1 are leaves assert dh[r] == 1 # root is degree r - assert dh[r + 1] == order - r**h - 1 # everyone else is degree r+1 + assert dh[r + 1] == order - r ** h - 1 # everyone else is degree r+1 assert len(dh) == r + 2 def test_balanced_tree_star(self): @@ -130,8 +130,9 @@ class TestGeneratorClassic(): b = nx.barbell_graph(m1, m2) assert is_isomorphic(b, nx.path_graph(m2 + 4)) - pytest.raises(nx.NetworkXError, nx.barbell_graph, m1, m2, - create_using=nx.DiGraph()) + pytest.raises( + nx.NetworkXError, nx.barbell_graph, m1, m2, create_using=nx.DiGraph() + ) mb = nx.barbell_graph(m1, m2, create_using=nx.MultiGraph()) assert_edges_equal(mb.edges(), b.edges()) @@ -139,8 +140,8 @@ class TestGeneratorClassic(): def test_binomial_tree(self): for n in range(0, 4): b = nx.binomial_tree(n) - assert nx.number_of_nodes(b) == 2**n - assert nx.number_of_edges(b) == (2**n - 1) + assert nx.number_of_nodes(b) == 2 ** n + assert nx.number_of_edges(b) == (2 ** n - 1) def test_complete_graph(self): # complete_graph(m) is a connected graph with @@ -154,7 +155,7 @@ class TestGeneratorClassic(): assert_edges_equal(mg.edges(), g.edges()) g = nx.complete_graph("abc") - assert_nodes_equal(g.nodes(), ['a', 'b', 'c']) + assert_nodes_equal(g.nodes(), ["a", "b", "c"]) assert g.size() == 3 def test_complete_digraph(self): @@ -172,8 +173,9 @@ class TestGeneratorClassic(): def test_circular_ladder_graph(self): G = nx.circular_ladder_graph(5) - pytest.raises(nx.NetworkXError, nx.circular_ladder_graph, - 5, create_using=nx.DiGraph) + pytest.raises( + nx.NetworkXError, nx.circular_ladder_graph, 5, create_using=nx.DiGraph + ) mG = nx.circular_ladder_graph(5, create_using=nx.MultiGraph) assert_edges_equal(mG.edges(), G.edges()) @@ -226,12 +228,18 @@ class TestGeneratorClassic(): assert G.degree(1) == 1024 assert G.degree(2) == 1024 - pytest.raises(nx.NetworkXError, - nx.dorogovtsev_goltsev_mendes_graph, 7, - create_using=nx.DiGraph) - pytest.raises(nx.NetworkXError, - nx.dorogovtsev_goltsev_mendes_graph, 7, - create_using=nx.MultiGraph) + pytest.raises( + nx.NetworkXError, + nx.dorogovtsev_goltsev_mendes_graph, + 7, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + nx.dorogovtsev_goltsev_mendes_graph, + 7, + create_using=nx.MultiGraph, + ) def test_create_using(self): G = nx.empty_graph() @@ -294,12 +302,15 @@ class TestGeneratorClassic(): assert isinstance(G, 
nx.Graph) def test_ladder_graph(self): - for i, G in [(0, nx.empty_graph(0)), (1, nx.path_graph(2)), - (2, nx.hypercube_graph(2)), (10, nx.grid_graph([2, 10]))]: + for i, G in [ + (0, nx.empty_graph(0)), + (1, nx.path_graph(2)), + (2, nx.hypercube_graph(2)), + (10, nx.grid_graph([2, 10])), + ]: assert is_isomorphic(nx.ladder_graph(i), G) - pytest.raises(nx.NetworkXError, - nx.ladder_graph, 2, create_using=nx.DiGraph) + pytest.raises(nx.NetworkXError, nx.ladder_graph, 2, create_using=nx.DiGraph) g = nx.ladder_graph(2) mg = nx.ladder_graph(2, create_using=nx.MultiGraph) @@ -314,20 +325,19 @@ class TestGeneratorClassic(): assert nx.number_of_edges(b) == m1 * (m1 - 1) / 2 + m2 # Raise NetworkXError if m<2 - pytest.raises(nx.NetworkXError, - nx.lollipop_graph, 1, 20) + pytest.raises(nx.NetworkXError, nx.lollipop_graph, 1, 20) # Raise NetworkXError if n<0 - pytest.raises(nx.NetworkXError, - nx.lollipop_graph, 5, -2) + pytest.raises(nx.NetworkXError, nx.lollipop_graph, 5, -2) # lollipop_graph(2,m) = path_graph(m+2) for m1, m2 in [(2, 5), (2, 10), (2, 20)]: b = nx.lollipop_graph(m1, m2) assert is_isomorphic(b, nx.path_graph(m2 + 2)) - pytest.raises(nx.NetworkXError, - nx.lollipop_graph, m1, m2, create_using=nx.DiGraph) + pytest.raises( + nx.NetworkXError, nx.lollipop_graph, m1, m2, create_using=nx.DiGraph + ) mb = nx.lollipop_graph(m1, m2, create_using=nx.MultiGraph) assert_edges_equal(mb.edges(), b.edges()) @@ -348,8 +358,7 @@ class TestGeneratorClassic(): p = nx.path_graph(10) assert nx.is_connected(p) - assert (sorted(d for n, d in p.degree()) == - [1, 1, 2, 2, 2, 2, 2, 2, 2, 2]) + assert sorted(d for n, d in p.degree()) == [1, 1, 2, 2, 2, 2, 2, 2, 2, 2] assert p.order() - 1 == p.size() dp = nx.path_graph(3, create_using=nx.DiGraph) @@ -375,11 +384,9 @@ class TestGeneratorClassic(): assert is_isomorphic(star_graph(5), nx.complete_bipartite_graph(1, 5)) s = star_graph(10) - assert (sorted(d for n, d in s.degree()) == - [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10]) + assert sorted(d for n, d in s.degree()) == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10] - pytest.raises(nx.NetworkXError, - star_graph, 10, create_using=nx.DiGraph) + pytest.raises(nx.NetworkXError, star_graph, 10, create_using=nx.DiGraph) ms = star_graph(10, create_using=nx.MultiGraph) assert_edges_equal(ms.edges(), s.edges()) @@ -393,22 +400,25 @@ class TestGeneratorClassic(): def test_turan_graph(self): assert nx.number_of_edges(nx.turan_graph(13, 4)) == 63 - assert is_isomorphic(nx.turan_graph(13, 4), - nx.complete_multipartite_graph(3, 4, 3, 3)) + assert is_isomorphic( + nx.turan_graph(13, 4), nx.complete_multipartite_graph(3, 4, 3, 3) + ) def test_wheel_graph(self): - for n, G in [(0, nx.null_graph()), (1, nx.empty_graph(1)), - (2, nx.path_graph(2)), (3, nx.complete_graph(3)), - (4, nx.complete_graph(4))]: + for n, G in [ + (0, nx.null_graph()), + (1, nx.empty_graph(1)), + (2, nx.path_graph(2)), + (3, nx.complete_graph(3)), + (4, nx.complete_graph(4)), + ]: g = nx.wheel_graph(n) assert is_isomorphic(g, G) g = nx.wheel_graph(10) - assert (sorted(d for n, d in g.degree()) == - [3, 3, 3, 3, 3, 3, 3, 3, 3, 9]) + assert sorted(d for n, d in g.degree()) == [3, 3, 3, 3, 3, 3, 3, 3, 3, 9] - pytest.raises(nx.NetworkXError, - nx.wheel_graph, 10, create_using=nx.DiGraph) + pytest.raises(nx.NetworkXError, nx.wheel_graph, 10, create_using=nx.DiGraph) mg = nx.wheel_graph(10, create_using=nx.MultiGraph()) assert_edges_equal(mg.edges(), g.edges()) diff --git a/networkx/generators/tests/test_community.py b/networkx/generators/tests/test_community.py index 
38538d0c..faf476d3 100644 --- a/networkx/generators/tests/test_community.py +++ b/networkx/generators/tests/test_community.py @@ -4,33 +4,32 @@ import pytest def test_random_partition_graph(): G = nx.random_partition_graph([3, 3, 3], 1, 0, seed=42) - C = G.graph['partition'] + C = G.graph["partition"] assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] assert len(G) == 9 assert len(list(G.edges())) == 9 G = nx.random_partition_graph([3, 3, 3], 0, 1) - C = G.graph['partition'] + C = G.graph["partition"] assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] assert len(G) == 9 assert len(list(G.edges())) == 27 G = nx.random_partition_graph([3, 3, 3], 1, 0, directed=True) - C = G.graph['partition'] + C = G.graph["partition"] assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] assert len(G) == 9 assert len(list(G.edges())) == 18 G = nx.random_partition_graph([3, 3, 3], 0, 1, directed=True) - C = G.graph['partition'] + C = G.graph["partition"] assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] assert len(G) == 9 assert len(list(G.edges())) == 54 G = nx.random_partition_graph([1, 2, 3, 4, 5], 0.5, 0.1) - C = G.graph['partition'] - assert C == [{0}, {1, 2}, {3, 4, 5}, - {6, 7, 8, 9}, {10, 11, 12, 13, 14}] + C = G.graph["partition"] + assert C == [{0}, {1, 2}, {3, 4, 5}, {6, 7, 8, 9}, {10, 11, 12, 13, 14}] assert len(G) == 15 rpg = nx.random_partition_graph @@ -42,36 +41,36 @@ def test_random_partition_graph(): def test_planted_partition_graph(): G = nx.planted_partition_graph(4, 3, 1, 0, seed=42) - C = G.graph['partition'] + C = G.graph["partition"] assert len(C) == 4 assert len(G) == 12 assert len(list(G.edges())) == 12 G = nx.planted_partition_graph(4, 3, 0, 1) - C = G.graph['partition'] + C = G.graph["partition"] assert len(C) == 4 assert len(G) == 12 assert len(list(G.edges())) == 54 - G = nx.planted_partition_graph(10, 4, .5, .1, seed=42) - C = G.graph['partition'] + G = nx.planted_partition_graph(10, 4, 0.5, 0.1, seed=42) + C = G.graph["partition"] assert len(C) == 10 assert len(G) == 40 G = nx.planted_partition_graph(4, 3, 1, 0, directed=True) - C = G.graph['partition'] + C = G.graph["partition"] assert len(C) == 4 assert len(G) == 12 assert len(list(G.edges())) == 24 G = nx.planted_partition_graph(4, 3, 0, 1, directed=True) - C = G.graph['partition'] + C = G.graph["partition"] assert len(C) == 4 assert len(G) == 12 assert len(list(G.edges())) == 108 - G = nx.planted_partition_graph(10, 4, .5, .1, seed=42, directed=True) - C = G.graph['partition'] + G = nx.planted_partition_graph(10, 4, 0.5, 0.1, seed=42, directed=True) + C = G.graph["partition"] assert len(C) == 10 assert len(G) == 40 @@ -118,19 +117,21 @@ def test_caveman_graph(): def test_gaussian_random_partition_graph(): G = nx.gaussian_random_partition_graph(100, 10, 10, 0.3, 0.01) assert len(G) == 100 - G = nx.gaussian_random_partition_graph(100, 10, 10, 0.3, 0.01, - directed=True) + G = nx.gaussian_random_partition_graph(100, 10, 10, 0.3, 0.01, directed=True) assert len(G) == 100 - G = nx.gaussian_random_partition_graph(100, 10, 10, 0.3, 0.01, - directed=False, seed=42) + G = nx.gaussian_random_partition_graph( + 100, 10, 10, 0.3, 0.01, directed=False, seed=42 + ) assert len(G) == 100 assert not isinstance(G, nx.DiGraph) - G = nx.gaussian_random_partition_graph(100, 10, 10, 0.3, 0.01, - directed=True, seed=42) + G = nx.gaussian_random_partition_graph( + 100, 10, 10, 0.3, 0.01, directed=True, seed=42 + ) assert len(G) == 100 assert isinstance(G, nx.DiGraph) - pytest.raises(nx.NetworkXError, - nx.gaussian_random_partition_graph, 100, 101, 10, 1, 0) + 
pytest.raises( + nx.NetworkXError, nx.gaussian_random_partition_graph, 100, 101, 10, 1, 0 + ) def test_ring_of_cliques(): @@ -163,11 +164,9 @@ def test_windmill_graph(): def test_stochastic_block_model(): sizes = [75, 75, 300] - probs = [[0.25, 0.05, 0.02], - [0.05, 0.35, 0.07], - [0.02, 0.07, 0.40]] + probs = [[0.25, 0.05, 0.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]] G = nx.stochastic_block_model(sizes, probs, seed=0) - C = G.graph['partition'] + C = G.graph["partition"] assert len(C) == 3 assert len(G) == 450 assert G.size() == 22160 @@ -178,20 +177,11 @@ def test_stochastic_block_model(): # Test Exceptions sbm = nx.stochastic_block_model badnodelist = list(range(400)) # not enough nodes to match sizes - badprobs1 = [[0.25, 0.05, 1.02], - [0.05, 0.35, 0.07], - [0.02, 0.07, 0.40]] - badprobs2 = [[0.25, 0.05, 0.02], - [0.05, -0.35, 0.07], - [0.02, 0.07, 0.40]] - probs_rect1 = [[0.25, 0.05, 0.02], - [0.05, -0.35, 0.07]] - probs_rect2 = [[0.25, 0.05], - [0.05, -0.35], - [0.02, 0.07]] - asymprobs = [[0.25, 0.05, 0.01], - [0.05, -0.35, 0.07], - [0.02, 0.07, 0.40]] + badprobs1 = [[0.25, 0.05, 1.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]] + badprobs2 = [[0.25, 0.05, 0.02], [0.05, -0.35, 0.07], [0.02, 0.07, 0.40]] + probs_rect1 = [[0.25, 0.05, 0.02], [0.05, -0.35, 0.07]] + probs_rect2 = [[0.25, 0.05], [0.05, -0.35], [0.02, 0.07]] + asymprobs = [[0.25, 0.05, 0.01], [0.05, -0.35, 0.07], [0.02, 0.07, 0.40]] pytest.raises(nx.NetworkXException, sbm, sizes, badprobs1) pytest.raises(nx.NetworkXException, sbm, sizes, badprobs2) pytest.raises(nx.NetworkXException, sbm, sizes, probs_rect1, directed=True) @@ -215,10 +205,11 @@ def test_generator(): tau1 = 3 tau2 = 1.5 mu = 0.1 - G = nx.LFR_benchmark_graph(n, tau1, tau2, mu, average_degree=5, - min_community=20, seed=10) + G = nx.LFR_benchmark_graph( + n, tau1, tau2, mu, average_degree=5, min_community=20, seed=10 + ) assert len(G) == 250 - C = {frozenset(G.nodes[v]['community']) for v in G} + C = {frozenset(G.nodes[v]["community"]) for v in G} assert nx.community.is_partition(G.nodes(), C) diff --git a/networkx/generators/tests/test_degree_seq.py b/networkx/generators/tests/test_degree_seq.py index b62aee31..70a63b66 100644 --- a/networkx/generators/tests/test_degree_seq.py +++ b/networkx/generators/tests/test_degree_seq.py @@ -30,11 +30,32 @@ class TestConfigurationModel: """ deg_seq = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] G = nx.configuration_model(deg_seq, seed=12345678) - assert (sorted((d for n, d in G.degree()), reverse=True) == - [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]) - assert (sorted((d for n, d in G.degree(range(len(deg_seq)))), - reverse=True) == - [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]) + assert sorted((d for n, d in G.degree()), reverse=True) == [ + 5, + 3, + 3, + 3, + 3, + 2, + 2, + 2, + 1, + 1, + 1, + ] + assert sorted((d for n, d in G.degree(range(len(deg_seq)))), reverse=True) == [ + 5, + 3, + 3, + 3, + 3, + 2, + 2, + 2, + 1, + 1, + 1, + ] def test_random_seed(self): """Tests that each call with the same random seed generates the @@ -139,8 +160,7 @@ def test_havel_hakimi_construction(): G = nx.havel_hakimi_graph(z) - pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, z, - create_using=nx.DiGraph()) + pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, z, create_using=nx.DiGraph()) def test_directed_havel_hakimi(): @@ -160,8 +180,7 @@ def test_directed_havel_hakimi(): # Test non-graphical sequence dout = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] din = [103, 102, 102, 102, 102, 102, 102, 102, 102, 102] - pytest.raises(nx.exception.NetworkXError, - 
nx.directed_havel_hakimi_graph, din, dout) + pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) # Test valid sequences dout = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4] din = [2, 2, 2, 2, 2, 2, 2, 2, 0, 2] @@ -172,12 +191,10 @@ def test_directed_havel_hakimi(): assert sorted(din) == sorted(din2) # Test unequal sums din = [2, 2, 2, 2, 2, 2, 2, 2, 2, 2] - pytest.raises(nx.exception.NetworkXError, - nx.directed_havel_hakimi_graph, din, dout) + pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) # Test for negative values din = [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, -2] - pytest.raises(nx.exception.NetworkXError, - nx.directed_havel_hakimi_graph, din, dout) + pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) def test_degree_sequence_tree(): @@ -186,8 +203,9 @@ def test_degree_sequence_tree(): assert len(G) == len(z) assert len(list(G.edges())) == sum(z) / 2 - pytest.raises(nx.NetworkXError, nx.degree_sequence_tree, z, - create_using=nx.DiGraph()) + pytest.raises( + nx.NetworkXError, nx.degree_sequence_tree, z, create_using=nx.DiGraph() + ) z = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] pytest.raises(nx.NetworkXError, nx.degree_sequence_tree, z) diff --git a/networkx/generators/tests/test_directed.py b/networkx/generators/tests/test_directed.py index da1bfaa7..6d6a40f6 100644 --- a/networkx/generators/tests/test_directed.py +++ b/networkx/generators/tests/test_directed.py @@ -27,14 +27,10 @@ class TestGeneratorsDirected: scale_free_graph(100, seed=42) def test_create_using_keyword_arguments(self): - pytest.raises(nx.NetworkXError, - gn_graph, 100, create_using=Graph()) - pytest.raises(nx.NetworkXError, - gnr_graph, 100, 0.5, create_using=Graph()) - pytest.raises(nx.NetworkXError, - gnc_graph, 100, create_using=Graph()) - pytest.raises(nx.NetworkXError, - scale_free_graph, 100, create_using=Graph()) + pytest.raises(nx.NetworkXError, gn_graph, 100, create_using=Graph()) + pytest.raises(nx.NetworkXError, gnr_graph, 100, 0.5, create_using=Graph()) + pytest.raises(nx.NetworkXError, gnc_graph, 100, create_using=Graph()) + pytest.raises(nx.NetworkXError, scale_free_graph, 100, create_using=Graph()) G = gn_graph(100, seed=1) MG = gn_graph(100, create_using=MultiDiGraph(), seed=1) assert sorted(G.edges()) == sorted(MG.edges()) @@ -45,9 +41,16 @@ class TestGeneratorsDirected: MG = gnc_graph(100, create_using=MultiDiGraph(), seed=1) assert sorted(G.edges()) == sorted(MG.edges()) - G = scale_free_graph(100, alpha=0.3, beta=0.4, gamma=0.3, - delta_in=0.3, delta_out=0.1, - create_using=MultiDiGraph, seed=1) + G = scale_free_graph( + 100, + alpha=0.3, + beta=0.4, + gamma=0.3, + delta_in=0.3, + delta_out=0.1, + create_using=MultiDiGraph, + seed=1, + ) pytest.raises(ValueError, scale_free_graph, 100, 0.5, 0.4, 0.3) pytest.raises(ValueError, scale_free_graph, 100, alpha=-0.3) pytest.raises(ValueError, scale_free_graph, 100, beta=-0.3) @@ -85,6 +88,7 @@ class TestUniformRandomKOutGraph: function. 
""" + def test_regularity(self): """Tests that the generated graph is `k`-out-regular.""" n = 10 diff --git a/networkx/generators/tests/test_ego.py b/networkx/generators/tests/test_ego.py index d3db0f0e..3b16b0fe 100644 --- a/networkx/generators/tests/test_ego.py +++ b/networkx/generators/tests/test_ego.py @@ -7,7 +7,7 @@ import networkx as nx from networkx.testing.utils import assert_edges_equal, assert_nodes_equal -class TestGeneratorEgo(): +class TestGeneratorEgo: def test_ego(self): G = nx.star_graph(3) H = nx.ego_graph(G, 0) @@ -31,9 +31,9 @@ class TestGeneratorEgo(): G.add_edge(1, 2, weight=2, distance=2) G.add_edge(2, 3, weight=2, distance=1) assert_nodes_equal(nx.ego_graph(G, 0, radius=3).nodes(), [0, 1, 2, 3]) - eg = nx.ego_graph(G, 0, radius=3, distance='weight') + eg = nx.ego_graph(G, 0, radius=3, distance="weight") assert_nodes_equal(eg.nodes(), [0, 1]) - eg = nx.ego_graph(G, 0, radius=3, distance='weight', undirected=True) + eg = nx.ego_graph(G, 0, radius=3, distance="weight", undirected=True) assert_nodes_equal(eg.nodes(), [0, 1]) - eg = nx.ego_graph(G, 0, radius=3, distance='distance') + eg = nx.ego_graph(G, 0, radius=3, distance="distance") assert_nodes_equal(eg.nodes(), [0, 1, 2]) diff --git a/networkx/generators/tests/test_expanders.py b/networkx/generators/tests/test_expanders.py index 1418d452..a822e041 100644 --- a/networkx/generators/tests/test_expanders.py +++ b/networkx/generators/tests/test_expanders.py @@ -67,7 +67,5 @@ def test_paley_graph(): def test_margulis_gabber_galil_graph_badinput(): - pytest.raises(nx.NetworkXError, margulis_gabber_galil_graph, 3, - nx.DiGraph()) - pytest.raises(nx.NetworkXError, margulis_gabber_galil_graph, 3, - nx.Graph()) + pytest.raises(nx.NetworkXError, margulis_gabber_galil_graph, 3, nx.DiGraph()) + pytest.raises(nx.NetworkXError, margulis_gabber_galil_graph, 3, nx.Graph()) diff --git a/networkx/generators/tests/test_geometric.py b/networkx/generators/tests/test_geometric.py index 7d9d07a7..91151ec3 100644 --- a/networkx/generators/tests/test_geometric.py +++ b/networkx/generators/tests/test_geometric.py @@ -35,10 +35,10 @@ class TestRandomGeometricGraph: for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 # Nonadjacent vertices must be at greater distance. else: - assert not dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert not dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_p(self): """Tests for providing an alternate distance metric to the @@ -51,16 +51,17 @@ class TestRandomGeometricGraph: for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 # Nonadjacent vertices must be at greater distance. else: - assert not dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert not dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_node_names(self): """Tests using values other than sequential numbers as node IDs. """ import string + nodes = list(string.ascii_lowercase) G = nx.random_geometric_graph(nodes, 0.25) assert len(G) == len(nodes) @@ -69,10 +70,10 @@ class TestRandomGeometricGraph: for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. 
if v in G[u]: - assert dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 # Nonadjacent vertices must be at greater distance. else: - assert not dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert not dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 class TestSoftRandomGeometricGraph: @@ -94,12 +95,14 @@ class TestSoftRandomGeometricGraph: """ # Use the Euclidean metric, the default according to the # documentation. - def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + def dist(x, y): + return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + G = nx.soft_random_geometric_graph(50, 0.25) for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_p(self): """Tests for providing an alternate distance metric to the @@ -107,27 +110,32 @@ class TestSoftRandomGeometricGraph: """ # Use the L1 metric. - def dist(x, y): return sum(abs(a - b) for a, b in zip(x, y)) + def dist(x, y): + return sum(abs(a - b) for a, b in zip(x, y)) + G = nx.soft_random_geometric_graph(50, 0.25, p=1) for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_node_names(self): """Tests using values other than sequential numbers as node IDs. """ import string + nodes = list(string.ascii_lowercase) G = nx.soft_random_geometric_graph(nodes, 0.25) assert len(G) == len(nodes) - def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + def dist(x, y): + return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_p_dist_default(self): """Tests default p_dict = 0.5 returns graph with edge count <= RGG with @@ -145,6 +153,7 @@ class TestSoftRandomGeometricGraph: """Tests if p_dict = 0 returns disconencted graph with 0 edges """ + def p_dist(dist): return 0 @@ -165,8 +174,8 @@ def join(G, u, v, theta, alpha, metric): """ du, dv = G.nodes[u], G.nodes[v] - u_pos, v_pos = du['pos'], dv['pos'] - u_weight, v_weight = du['weight'], dv['weight'] + u_pos, v_pos = du["pos"], dv["pos"] + u_weight, v_weight = du["weight"], dv["weight"] return (u_weight + v_weight) * metric(u_pos, v_pos) ** alpha >= theta @@ -219,6 +228,7 @@ class TestGeographicalThresholdGraph: """Tests if p_dict = 0 returns disconencted graph with 0 edges """ + def p_dist(dist): return 0 @@ -253,7 +263,6 @@ class TestWaxmanGraph: class TestNavigableSmallWorldGraph: - def test_navigable_small_world(self): G = nx.navigable_small_world_graph(5, p=1, q=0, seed=42) gg = nx.grid_2d_graph(5, 5).to_directed() @@ -287,12 +296,14 @@ class TestThresholdedRandomGeometricGraph: """ # Use the Euclidean metric, the default according to the # documentation. - def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + def dist(x, y): + return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1) for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. 
if v in G[u]: - assert dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_p(self): """Tests for providing an alternate distance metric to the @@ -300,27 +311,32 @@ class TestThresholdedRandomGeometricGraph: """ # Use the L1 metric. - def dist(x, y): return sum(abs(a - b) for a, b in zip(x, y)) - G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1, p=1) + def dist(x, y): + return sum(abs(a - b) for a, b in zip(x, y)) + + G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1, p=1) for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_node_names(self): """Tests using values other than sequential numbers as node IDs. """ import string + nodes = list(string.ascii_lowercase) G = nx.thresholded_random_geometric_graph(nodes, 0.25, 0.1) assert len(G) == len(nodes) - def dist(x, y): return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + def dist(x, y): + return sqrt(sum((a - b) ** 2 for a, b in zip(x, y))) + for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert dist(G.nodes[u]['pos'], G.nodes[v]['pos']) <= 0.25 + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 def test_theta(self): """Tests that pairs of vertices adjacent if and only if their sum @@ -331,4 +347,4 @@ class TestThresholdedRandomGeometricGraph: for u, v in combinations(G, 2): # Adjacent vertices must be within the given distance. if v in G[u]: - assert (G.nodes[u]['weight'] + G.nodes[v]['weight']) >= 0.1 + assert (G.nodes[u]["weight"] + G.nodes[v]["weight"]) >= 0.1 diff --git a/networkx/generators/tests/test_harary_graph.py b/networkx/generators/tests/test_harary_graph.py index 51fbbf80..f1709ccf 100644 --- a/networkx/generators/tests/test_harary_graph.py +++ b/networkx/generators/tests/test_harary_graph.py @@ -13,13 +13,14 @@ class TestHararyGraph: """ Suppose n nodes, m >= n-1 edges, d = 2m // n, r = 2m % n """ + def test_hnm_harary_graph(self): # When d is even and r = 0, the hnm_harary_graph(n,m) is # the circulant_graph(n, list(range(1,d/2+1))) for (n, m) in [(5, 5), (6, 12), (7, 14)]: G1 = hnm_harary_graph(n, m) - d = 2*m // n - G2 = nx.circulant_graph(n, list(range(1, d//2 + 1))) + d = 2 * m // n + G2 = nx.circulant_graph(n, list(range(1, d // 2 + 1))) assert is_isomorphic(G1, G2) # When d is even and r > 0, the hnm_harary_graph(n,m) is @@ -27,8 +28,8 @@ class TestHararyGraph: # with r edges added arbitrarily for (n, m) in [(5, 7), (6, 13), (7, 16)]: G1 = hnm_harary_graph(n, m) - d = 2*m // n - G2 = nx.circulant_graph(n, list(range(1, d//2 + 1))) + d = 2 * m // n + G2 = nx.circulant_graph(n, list(range(1, d // 2 + 1))) assert set(G2.edges) < set(G1.edges) assert G1.number_of_edges() == m @@ -36,9 +37,9 @@ class TestHararyGraph: # is the circulant_graph(n, list(range(1,(d+1)/2) plus [n//2]) for (n, m) in [(6, 9), (8, 12), (10, 15)]: G1 = hnm_harary_graph(n, m) - d = 2*m // n - L = list(range(1, (d+1)//2)) - L.append(n//2) + d = 2 * m // n + L = list(range(1, (d + 1) // 2)) + L.append(n // 2) G2 = nx.circulant_graph(n, L) assert is_isomorphic(G1, G2) @@ -47,9 +48,9 @@ class TestHararyGraph: # with r edges added arbitrarily for (n, m) in [(6, 10), (8, 13), (10, 17)]: G1 = hnm_harary_graph(n, m) - d = 2*m // n - L = list(range(1, (d+1)//2)) - L.append(n//2) + d = 2 * m // n + L = list(range(1, (d + 1) // 2)) + L.append(n 
// 2) G2 = nx.circulant_graph(n, L) assert set(G2.edges) < set(G1.edges) assert G1.number_of_edges() == m @@ -59,8 +60,8 @@ class TestHararyGraph: # with m - n*(d-1)/2 edges added arbitrarily for (n, m) in [(5, 4), (7, 12), (9, 14)]: G1 = hnm_harary_graph(n, m) - d = 2*m // n - L = list(range(1, (d+1)//2)) + d = 2 * m // n + L = list(range(1, (d + 1) // 2)) G2 = nx.circulant_graph(n, L) assert set(G2.edges) < set(G1.edges) assert G1.number_of_edges() == m @@ -83,6 +84,7 @@ class TestHararyGraph: """ Suppose connectivity k, number of nodes n """ + def test_hkn_harary_graph(self): # When k == 1, the hkn_harary_graph(k,n) is # the path_graph(n) @@ -95,15 +97,15 @@ class TestHararyGraph: # the circulant_graph(n, list(range(1,k/2+1))) for (k, n) in [(2, 6), (2, 7), (4, 6), (4, 7)]: G1 = hkn_harary_graph(k, n) - G2 = nx.circulant_graph(n, list(range(1, k//2 + 1))) + G2 = nx.circulant_graph(n, list(range(1, k // 2 + 1))) assert is_isomorphic(G1, G2) # When k is odd and n is even, the hkn_harary_graph(k,n) is # the circulant_graph(n, list(range(1,(k+1)/2)) plus [n/2]) for (k, n) in [(3, 6), (5, 8), (7, 10)]: G1 = hkn_harary_graph(k, n) - L = list(range(1, (k+1)//2)) - L.append(n//2) + L = list(range(1, (k + 1) // 2)) + L.append(n // 2) G2 = nx.circulant_graph(n, L) assert is_isomorphic(G1, G2) @@ -112,14 +114,14 @@ class TestHararyGraph: # n//2+1 edges added between node i and node i+n//2+1 for (k, n) in [(3, 5), (5, 9), (7, 11)]: G1 = hkn_harary_graph(k, n) - G2 = nx.circulant_graph(n, list(range(1, (k+1)//2))) + G2 = nx.circulant_graph(n, list(range(1, (k + 1) // 2))) eSet1 = set(G1.edges) eSet2 = set(G2.edges) eSet3 = set() half = n // 2 - for i in range(0, half+1): + for i in range(0, half + 1): # add half+1 edges between i and i+half - eSet3.add((i, (i+half) % n)) + eSet3.add((i, (i + half) % n)) assert eSet1 == eSet2 | eSet3 # Raise NetworkXError if k<1 diff --git a/networkx/generators/tests/test_internet_as_graphs.py b/networkx/generators/tests/test_internet_as_graphs.py index 74cfb067..5eae15ba 100644 --- a/networkx/generators/tests/test_internet_as_graphs.py +++ b/networkx/generators/tests/test_internet_as_graphs.py @@ -3,8 +3,7 @@ from networkx.generators.internet_as_graphs import random_internet_as_graph from networkx.testing import almost_equal -class TestInternetASTopology(): - +class TestInternetASTopology: @classmethod def setup_class(cls): cls.n = 1000 @@ -18,17 +17,19 @@ class TestInternetASTopology(): cls.providers = {} for i in cls.G.nodes(): - if cls.G.nodes[i]['type'] == 'T': + if cls.G.nodes[i]["type"] == "T": cls.T.append(i) - elif cls.G.nodes[i]['type'] == 'M': + elif cls.G.nodes[i]["type"] == "M": cls.M.append(i) - elif cls.G.nodes[i]['type'] == 'C': + elif cls.G.nodes[i]["type"] == "C": cls.C.append(i) - elif cls.G.nodes[i]['type'] == 'CP': + elif cls.G.nodes[i]["type"] == "CP": cls.CP.append(i) else: - raise ValueError("Inconsistent data in the graph\ - node attributes") + raise ValueError( + "Inconsistent data in the graph\ + node attributes" + ) cls.set_customers(i) cls.set_providers(i) @@ -38,16 +39,17 @@ class TestInternetASTopology(): cls.customers[i] = set() for j in neighbors(cls.G, i): e = cls.G.edges[(i, j)] - if e['type'] == 'transit': - customer = int(e['customer']) + if e["type"] == "transit": + customer = int(e["customer"]) if j == customer: cls.set_customers(j) - cls.customers[i] = cls.customers[i].union( - cls.customers[j]) + cls.customers[i] = cls.customers[i].union(cls.customers[j]) cls.customers[i].add(j) elif i != customer: - raise 
ValueError("Inconsistent data in the graph\ - edge attributes") + raise ValueError( + "Inconsistent data in the graph\ + edge attributes" + ) @classmethod def set_providers(cls, i): @@ -55,16 +57,17 @@ class TestInternetASTopology(): cls.providers[i] = set() for j in neighbors(cls.G, i): e = cls.G.edges[(i, j)] - if e['type'] == 'transit': - customer = int(e['customer']) + if e["type"] == "transit": + customer = int(e["customer"]) if i == customer: cls.set_providers(j) - cls.providers[i] = cls.providers[i].union( - cls.providers[j]) + cls.providers[i] = cls.providers[i].union(cls.providers[j]) cls.providers[i].add(j) elif j != customer: - raise ValueError("Inconsistent data in the graph\ - edge attributes") + raise ValueError( + "Inconsistent data in the graph\ + edge attributes" + ) def test_wrong_input(self): G = random_internet_as_graph(0) @@ -79,8 +82,8 @@ class TestInternetASTopology(): def test_node_numbers(self): assert len(self.G.nodes()) == self.n assert len(self.T) < 7 - assert len(self.M) == int(round(self.n*0.15)) - assert len(self.CP) == int(round(self.n*0.05)) + assert len(self.M) == int(round(self.n * 0.15)) + assert len(self.CP) == int(round(self.n * 0.05)) numb = self.n - len(self.T) - len(self.M) - len(self.CP) assert len(self.C) == numb @@ -102,12 +105,11 @@ class TestInternetASTopology(): # test whether there is a customer-provider loop for i in self.G.nodes(): - assert len(self.customers[i].intersection( - self.providers[i])) == 0 + assert len(self.customers[i].intersection(self.providers[i])) == 0 # test whether there is a peering with a customer or provider for i, j in self.G.edges(): - if self.G.edges[(i, j)]['type'] == 'peer': + if self.G.edges[(i, j)]["type"] == "peer": assert j not in self.customers[i] assert i not in self.customers[j] assert j not in self.providers[i] @@ -126,54 +128,62 @@ class TestInternetASTopology(): for i, j in self.G.edges(): e = self.G.edges[(i, j)] - if e['type'] == 'transit': - cust = int(e['customer']) + if e["type"] == "transit": + cust = int(e["customer"]) if i == cust: prov = j elif j == cust: prov = i else: - raise ValueError("Inconsistent data in the graph edge\ - attributes") + raise ValueError( + "Inconsistent data in the graph edge\ + attributes" + ) if cust in self.M: d_m += 1 - if self.G.nodes[prov]['type'] == 'T': + if self.G.nodes[prov]["type"] == "T": t_m += 1 elif cust in self.C: d_c += 1 - if self.G.nodes[prov]['type'] == 'T': + if self.G.nodes[prov]["type"] == "T": t_c += 1 elif cust in self.CP: d_cp += 1 - if self.G.nodes[prov]['type'] == 'T': + if self.G.nodes[prov]["type"] == "T": t_cp += 1 else: - raise ValueError("Inconsistent data in the graph edge\ - attributes") - elif e['type'] == 'peer': - if self.G.nodes[i]['type'] == 'M' and\ - self.G.nodes[j]['type'] == 'M': + raise ValueError( + "Inconsistent data in the graph edge\ + attributes" + ) + elif e["type"] == "peer": + if self.G.nodes[i]["type"] == "M" and self.G.nodes[j]["type"] == "M": p_m_m += 1 - if self.G.nodes[i]['type'] == 'CP' and\ - self.G.nodes[j]['type'] == 'CP': + if self.G.nodes[i]["type"] == "CP" and self.G.nodes[j]["type"] == "CP": p_cp_cp += 1 - if self.G.nodes[i]['type'] == 'M' and\ - self.G.nodes[j]['type'] == 'CP' or\ - self.G.nodes[i]['type'] == 'CP' and\ - self.G.nodes[j]['type'] == 'M': + if ( + self.G.nodes[i]["type"] == "M" + and self.G.nodes[j]["type"] == "CP" + or self.G.nodes[i]["type"] == "CP" + and self.G.nodes[j]["type"] == "M" + ): p_cp_m += 1 else: - raise ValueError("Unexpected data in the graph edge\ - attributes") - - assert 
almost_equal(d_m/len(self.M), 2 + (2.5*self.n)/10000, places=0) - assert almost_equal(d_cp/len(self.CP), 2 + (1.5*self.n)/10000, places=0) - assert almost_equal(d_c/len(self.C), 1 + (5*self.n)/100000, places=0) - - assert almost_equal(p_m_m/len(self.M), 1 + (2*self.n)/10000, places=0) - assert almost_equal(p_cp_m/len(self.CP), 0.2 + (2*self.n)/10000, places=0) - assert almost_equal(p_cp_cp/len(self.CP), 0.05 + (2*self.n)/100000, places=0) - - assert almost_equal(t_m/d_m, 0.375, places=1) - assert almost_equal(t_cp/d_cp, 0.375, places=1) - assert almost_equal(t_c/d_c, 0.125, places=1) + raise ValueError( + "Unexpected data in the graph edge\ + attributes" + ) + + assert almost_equal(d_m / len(self.M), 2 + (2.5 * self.n) / 10000, places=0) + assert almost_equal(d_cp / len(self.CP), 2 + (1.5 * self.n) / 10000, places=0) + assert almost_equal(d_c / len(self.C), 1 + (5 * self.n) / 100000, places=0) + + assert almost_equal(p_m_m / len(self.M), 1 + (2 * self.n) / 10000, places=0) + assert almost_equal(p_cp_m / len(self.CP), 0.2 + (2 * self.n) / 10000, places=0) + assert almost_equal( + p_cp_cp / len(self.CP), 0.05 + (2 * self.n) / 100000, places=0 + ) + + assert almost_equal(t_m / d_m, 0.375, places=1) + assert almost_equal(t_cp / d_cp, 0.375, places=1) + assert almost_equal(t_c / d_c, 0.125, places=1) diff --git a/networkx/generators/tests/test_intersection.py b/networkx/generators/tests/test_intersection.py index 06bbe5e9..461b3513 100644 --- a/networkx/generators/tests/test_intersection.py +++ b/networkx/generators/tests/test_intersection.py @@ -2,7 +2,7 @@ import pytest import networkx as nx -class TestIntersectionGraph(): +class TestIntersectionGraph: def test_random_intersection_graph(self): G = nx.uniform_random_intersection_graph(10, 5, 0.5) assert len(G) == 10 @@ -18,5 +18,10 @@ class TestIntersectionGraph(): def test_general_random_intersection_graph(self): G = nx.general_random_intersection_graph(10, 5, [0.1, 0.2, 0.2, 0.1, 0.1]) assert len(G) == 10 - pytest.raises(ValueError, nx.general_random_intersection_graph, 10, 5, - [0.1, 0.2, 0.2, 0.1]) + pytest.raises( + ValueError, + nx.general_random_intersection_graph, + 10, + 5, + [0.1, 0.2, 0.2, 0.1], + ) diff --git a/networkx/generators/tests/test_interval_graph.py b/networkx/generators/tests/test_interval_graph.py index 7a149bef..ec442b81 100644 --- a/networkx/generators/tests/test_interval_graph.py +++ b/networkx/generators/tests/test_interval_graph.py @@ -11,6 +11,7 @@ from networkx.testing import assert_edges_equal class TestIntervalGraph: """Unit tests for :func:`networkx.generators.interval_graph.interval_graph`""" + def test_empty(self): """ Tests for trivial case of empty input""" assert len(interval_graph([])) == 0 @@ -92,13 +93,36 @@ class TestIntervalGraph: def test_interval_graph_4(self): """ test all possible overlaps """ - intervals = [(0, 2), (-2, -1), (-2, 0), (-2, 1), (-2, 2), (-2, 3), - (0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3), (3, 4)] + intervals = [ + (0, 2), + (-2, -1), + (-2, 0), + (-2, 1), + (-2, 2), + (-2, 3), + (0, 1), + (0, 2), + (0, 3), + (1, 2), + (1, 3), + (2, 3), + (3, 4), + ] expected_graph = nx.Graph() expected_graph.add_nodes_from(intervals) - expected_nbrs = {(-2, 0), (-2, 1), (-2, 2), (-2, 3), (0, 1), (0, 2), - (0, 3), (1, 2), (1, 3), (2, 3)} + expected_nbrs = { + (-2, 0), + (-2, 1), + (-2, 2), + (-2, 3), + (0, 1), + (0, 2), + (0, 3), + (1, 2), + (1, 3), + (2, 3), + } actual_g = nx.interval_graph(intervals) actual_nbrs = nx.neighbors(actual_g, (0, 2)) diff --git 
a/networkx/generators/tests/test_joint_degree_seq.py b/networkx/generators/tests/test_joint_degree_seq.py index 0d84668e..dd898d31 100644 --- a/networkx/generators/tests/test_joint_degree_seq.py +++ b/networkx/generators/tests/test_joint_degree_seq.py @@ -1,45 +1,55 @@ import time from networkx.algorithms.assortativity import degree_mixing_dict from networkx.generators import powerlaw_cluster_graph, gnm_random_graph -from networkx.generators.joint_degree_seq import is_valid_joint_degree, \ - joint_degree_graph, \ - directed_joint_degree_graph, \ - is_valid_directed_joint_degree +from networkx.generators.joint_degree_seq import ( + is_valid_joint_degree, + joint_degree_graph, + directed_joint_degree_graph, + is_valid_directed_joint_degree, +) def test_is_valid_joint_degree(): - ''' Tests for conditions that invalidate a joint degree dict ''' + """ Tests for conditions that invalidate a joint degree dict """ # valid joint degree that satisfies all five conditions - joint_degrees = {1: {4: 1}, - 2: {2: 2, 3: 2, 4: 2}, - 3: {2: 2, 4: 1}, - 4: {1: 1, 2: 2, 3: 1}} + joint_degrees = { + 1: {4: 1}, + 2: {2: 2, 3: 2, 4: 2}, + 3: {2: 2, 4: 1}, + 4: {1: 1, 2: 2, 3: 1}, + } assert is_valid_joint_degree(joint_degrees) # test condition 1 # joint_degrees_1[1][4] not integer - joint_degrees_1 = {1: {4: 1.5}, - 2: {2: 2, 3: 2, 4: 2}, - 3: {2: 2, 4: 1}, - 4: {1: 1.5, 2: 2, 3: 1}} + joint_degrees_1 = { + 1: {4: 1.5}, + 2: {2: 2, 3: 2, 4: 2}, + 3: {2: 2, 4: 1}, + 4: {1: 1.5, 2: 2, 3: 1}, + } assert not is_valid_joint_degree(joint_degrees_1) # test condition 2 # degree_count[2] = sum(joint_degrees_2[2][j)/2, is not an int # degree_count[4] = sum(joint_degrees_2[4][j)/4, is not an int - joint_degrees_2 = {1: {4: 1}, - 2: {2: 2, 3: 2, 4: 3}, - 3: {2: 2, 4: 1}, - 4: {1: 1, 2: 3, 3: 1}} + joint_degrees_2 = { + 1: {4: 1}, + 2: {2: 2, 3: 2, 4: 3}, + 3: {2: 2, 4: 1}, + 4: {1: 1, 2: 3, 3: 1}, + } assert not is_valid_joint_degree(joint_degrees_2) # test conditions 3 and 4 # joint_degrees_3[1][4]>degree_count[1]*degree_count[4] - joint_degrees_3 = {1: {4: 2}, - 2: {2: 2, 3: 2, 4: 2}, - 3: {2: 2, 4: 1}, - 4: {1: 2, 2: 2, 3: 1}} + joint_degrees_3 = { + 1: {4: 2}, + 2: {2: 2, 3: 2, 4: 2}, + 3: {2: 2, 4: 1}, + 4: {1: 2, 2: 2, 3: 1}, + } assert not is_valid_joint_degree(joint_degrees_3) # test condition 5 diff --git a/networkx/generators/tests/test_lattice.py b/networkx/generators/tests/test_lattice.py index 839b88e8..3f72d095 100644 --- a/networkx/generators/tests/test_lattice.py +++ b/networkx/generators/tests/test_lattice.py @@ -38,12 +38,15 @@ class TestGrid2DGraph: G = nx.grid_2d_graph(0, 0, periodic=True) assert dict(G.degree()) == {} - for m, n, H in [(2, 2, nx.cycle_graph(4)), (1, 7, nx.cycle_graph(7)), - (7, 1, nx.cycle_graph(7)), - (2, 5, nx.circular_ladder_graph(5)), - (5, 2, nx.circular_ladder_graph(5)), - (2, 4, nx.cubical_graph()), - (4, 2, nx.cubical_graph())]: + for m, n, H in [ + (2, 2, nx.cycle_graph(4)), + (1, 7, nx.cycle_graph(7)), + (7, 1, nx.cycle_graph(7)), + (2, 5, nx.circular_ladder_graph(5)), + (5, 2, nx.circular_ladder_graph(5)), + (2, 4, nx.cubical_graph()), + (4, 2, nx.cubical_graph()), + ]: G = nx.grid_2d_graph(m, n, periodic=True) assert nx.could_be_isomorphic(G, H) @@ -89,8 +92,13 @@ class TestGridGraph: dim = [n, m] g = nx.grid_graph(dim) assert len(g) == n * m - assert nx.degree_histogram(g) == [0, 0, 4, 2 * (n + m) - 8, - (n - 2) * (m - 2)] + assert nx.degree_histogram(g) == [ + 0, + 0, + 4, + 2 * (n + m) - 8, + (n - 2) * (m - 2), + ] for n, m in [(1, 5), (5, 1)]: dim = [n, m] @@ -98,8 
+106,8 @@ class TestGridGraph: assert len(g) == n * m assert nx.is_isomorphic(g, nx.path_graph(5)) -# mg = nx.grid_graph([n,m], create_using=MultiGraph()) -# assert_equal(mg.edges(), g.edges()) + # mg = nx.grid_graph([n,m], create_using=MultiGraph()) + # assert_equal(mg.edges(), g.edges()) def test_node_input(self): G = nx.grid_graph([range(7, 9), range(3, 6)]) @@ -110,9 +118,7 @@ class TestGridGraph: m, n, k = 3, 7, 5 for a, b, c in product([0, 1], [0, 1], [0, 1]): G = nx.grid_graph([m, n, k], periodic=(a, b, c)) - num_e = ((m + a - 1) * n * k - + (n + b - 1) * m * k - + (k + c - 1) * m * n) + num_e = (m + a - 1) * n * k + (n + b - 1) * m * k + (k + c - 1) * m * n assert G.number_of_nodes() == m * n * k assert G.number_of_edges() == num_e @@ -121,8 +127,12 @@ class TestHypercubeGraph: """Unit tests for :func:`networkx.generators.lattice.hypercube_graph`""" def test_special_cases(self): - for n, H in [(0, nx.null_graph()), (1, nx.path_graph(2)), - (2, nx.cycle_graph(4)), (3, nx.cubical_graph())]: + for n, H in [ + (0, nx.null_graph()), + (1, nx.path_graph(2)), + (2, nx.cycle_graph(4)), + (3, nx.cubical_graph()), + ]: G = nx.hypercube_graph(n) assert nx.could_be_isomorphic(G, H) @@ -204,7 +214,7 @@ class TestHexagonalLatticeGraph: G = nx.hexagonal_lattice_graph(3, 5, create_using=nx.Graph()) H = nx.hexagonal_lattice_graph(3, 5, create_using=nx.DiGraph()) assert H.is_directed() - pos = nx.get_node_attributes(H, 'pos') + pos = nx.get_node_attributes(H, "pos") for u, v in H.edges(): assert pos[v][1] >= pos[u][1] if pos[v][1] == pos[u][1]: diff --git a/networkx/generators/tests/test_line.py b/networkx/generators/tests/test_line.py index d1659c58..fec8893c 100644 --- a/networkx/generators/tests/test_line.py +++ b/networkx/generators/tests/test_line.py @@ -47,7 +47,7 @@ def test_sorted_edge(): assert (1, 2) == line._sorted_edge(2, 1) -class TestGeneratorLine(): +class TestGeneratorLine: def test_star(self): G = nx.star_graph(5) L = nx.line_graph(G) @@ -89,28 +89,47 @@ class TestGeneratorLine(): assert_edges_equal(L.edges(), [((0, 1), (1, 2)), ((1, 2), (2, 3))]) -class TestGeneratorInverseLine(): +class TestGeneratorInverseLine: def test_example(self): G = nx.Graph() - G_edges = [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 5], [2, 6], - [2, 7], [3, 4], [3, 5], [6, 7], [6, 8], [7, 8]] + G_edges = [ + [1, 2], + [1, 3], + [1, 4], + [1, 5], + [2, 3], + [2, 5], + [2, 6], + [2, 7], + [3, 4], + [3, 5], + [6, 7], + [6, 8], + [7, 8], + ] G.add_edges_from(G_edges) H = nx.inverse_line_graph(G) solution = nx.Graph() - solution_edges = [('a', 'b'), ('a', 'c'), ('a', 'd'), ('a', 'e'), - ('c', 'd'), ('e', 'f'), ('e', 'g'), ('f', 'g')] + solution_edges = [ + ("a", "b"), + ("a", "c"), + ("a", "d"), + ("a", "e"), + ("c", "d"), + ("e", "f"), + ("e", "g"), + ("f", "g"), + ] solution.add_edges_from(solution_edges) assert nx.is_isomorphic(H, solution) def test_example_2(self): G = nx.Graph() - G_edges = [[1, 2], [1, 3], [2, 3], - [3, 4], [3, 5], [4, 5]] + G_edges = [[1, 2], [1, 3], [2, 3], [3, 4], [3, 5], [4, 5]] G.add_edges_from(G_edges) H = nx.inverse_line_graph(G) solution = nx.Graph() - solution_edges = [('a', 'c'), ('b', 'c'), ('c', 'd'), - ('d', 'e'), ('d', 'f')] + solution_edges = [("a", "c"), ("b", "c"), ("c", "d"), ("d", "e"), ("d", "f")] solution.add_edges_from(solution_edges) assert nx.is_isomorphic(H, solution) @@ -133,8 +152,7 @@ class TestGeneratorInverseLine(): alternative_solution.add_edges_from([[0, 1], [0, 2], [0, 3]]) # there are two alternative inverse line graphs for this case # so long as we get 
one of them the test should pass - assert (nx.is_isomorphic(H, G) or - nx.is_isomorphic(H, alternative_solution)) + assert nx.is_isomorphic(H, G) or nx.is_isomorphic(H, alternative_solution) def test_cycle(self): G = nx.cycle_graph(5) @@ -164,8 +182,18 @@ class TestGeneratorInverseLine(): # wheel graph with 6 nodes G = nx.Graph() - G_edges = [[0, 1], [0, 2], [0, 3], [0, 4], [0, 5], [1, 2], - [2, 3], [3, 4], [4, 5], [5, 1]] + G_edges = [ + [0, 1], + [0, 2], + [0, 3], + [0, 4], + [0, 5], + [1, 2], + [2, 3], + [3, 4], + [4, 5], + [5, 1], + ] G.add_edges_from(G_edges) pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) @@ -173,8 +201,17 @@ class TestGeneratorInverseLine(): # / \ / \ / # 0---1---2 G = nx.Graph() - G_edges = [[0, 1], [1, 2], [3, 4], [4, 5], [0, 3], [1, 3], - [1, 4], [2, 4], [2, 5]] + G_edges = [ + [0, 1], + [1, 2], + [3, 4], + [4, 5], + [0, 3], + [1, 3], + [1, 4], + [2, 4], + [2, 5], + ] G.add_edges_from(G_edges) pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) diff --git a/networkx/generators/tests/test_mycielski.py b/networkx/generators/tests/test_mycielski.py index 082d95d2..d71260ed 100644 --- a/networkx/generators/tests/test_mycielski.py +++ b/networkx/generators/tests/test_mycielski.py @@ -4,7 +4,6 @@ import networkx as nx class TestMycielski: - def test_construction(self): G = nx.path_graph(2) M = nx.mycielskian(G) diff --git a/networkx/generators/tests/test_nonisomorphic_trees.py b/networkx/generators/tests/test_nonisomorphic_trees.py index 46f639de..cb6eea86 100644 --- a/networkx/generators/tests/test_nonisomorphic_trees.py +++ b/networkx/generators/tests/test_nonisomorphic_trees.py @@ -9,11 +9,12 @@ import networkx as nx from networkx.testing import assert_edges_equal -class TestGeneratorNonIsomorphicTrees(): - +class TestGeneratorNonIsomorphicTrees: def test_tree_structure(self): # test for tree structure for nx.nonisomorphic_trees() - def f(x): return list(nx.nonisomorphic_trees(x)) + def f(x): + return list(nx.nonisomorphic_trees(x)) + for i in f(6): assert nx.is_tree(i) for i in f(8): @@ -21,7 +22,9 @@ class TestGeneratorNonIsomorphicTrees(): def test_nonisomorphism(self): # test for nonisomorphism of trees for nx.nonisomorphic_trees() - def f(x): return list(nx.nonisomorphic_trees(x)) + def f(x): + return list(nx.nonisomorphic_trees(x)) + trees = f(6) for i in range(len(trees)): for j in range(i + 1, len(trees)): @@ -42,7 +45,9 @@ class TestGeneratorNonIsomorphicTrees(): assert nx.number_of_nonisomorphic_trees(8) == 23 def test_nonisomorphic_trees(self): - def f(x): return list(nx.nonisomorphic_trees(x)) + def f(x): + return list(nx.nonisomorphic_trees(x)) + assert_edges_equal(f(3)[0].edges(), [(0, 1), (0, 2)]) assert_edges_equal(f(4)[0].edges(), [(0, 1), (0, 3), (1, 2)]) assert_edges_equal(f(4)[1].edges(), [(0, 1), (0, 2), (0, 3)]) @@ -52,6 +57,8 @@ class TestGeneratorNonIsomorphicTrees(): assert list(nx.nonisomorphic_trees(2, create="matrix")) == trees_2 trees_3 = [[[0, 1, 1], [1, 0, 0], [1, 0, 0]]] assert list(nx.nonisomorphic_trees(3, create="matrix")) == trees_3 - trees_4 = [[[0, 1, 0, 1], [1, 0, 1, 0], [0, 1, 0, 0], [1, 0, 0, 0]], - [[0, 1, 1, 1], [1, 0, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0]]] + trees_4 = [ + [[0, 1, 0, 1], [1, 0, 1, 0], [0, 1, 0, 0], [1, 0, 0, 0]], + [[0, 1, 1, 1], [1, 0, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0]], + ] assert list(nx.nonisomorphic_trees(4, create="matrix")) == trees_4 diff --git a/networkx/generators/tests/test_random_clustered.py b/networkx/generators/tests/test_random_clustered.py index 916c6f0c..319c1d7f 100644 --- 
a/networkx/generators/tests/test_random_clustered.py +++ b/networkx/generators/tests/test_random_clustered.py @@ -3,7 +3,6 @@ import networkx class TestRandomClusteredGraph: - def test_valid(self): node = [1, 1, 1, 2, 1, 2, 0, 0] tri = [0, 0, 0, 0, 0, 1, 1, 1] @@ -14,14 +13,21 @@ class TestRandomClusteredGraph: def test_valid2(self): G = networkx.random_clustered_graph( - [(1, 2), (2, 1), (1, 1), (1, 1), (1, 1), (2, 0)]) + [(1, 2), (2, 1), (1, 1), (1, 1), (1, 1), (2, 0)] + ) assert G.number_of_nodes() == 6 assert G.number_of_edges() == 10 def test_invalid1(self): - pytest.raises((TypeError, networkx.NetworkXError), - networkx.random_clustered_graph, [[1, 1], [2, 1], [0, 1]]) + pytest.raises( + (TypeError, networkx.NetworkXError), + networkx.random_clustered_graph, + [[1, 1], [2, 1], [0, 1]], + ) def test_invalid2(self): - pytest.raises((TypeError, networkx.NetworkXError), - networkx.random_clustered_graph, [[1, 1], [1, 2], [0, 1]]) + pytest.raises( + (TypeError, networkx.NetworkXError), + networkx.random_clustered_graph, + [[1, 1], [1, 2], [0, 1]], + ) diff --git a/networkx/generators/tests/test_random_graphs.py b/networkx/generators/tests/test_random_graphs.py index 8f2d6841..6835e880 100644 --- a/networkx/generators/tests/test_random_graphs.py +++ b/networkx/generators/tests/test_random_graphs.py @@ -26,7 +26,6 @@ from networkx.generators.random_graphs import watts_strogatz_graph class TestGeneratorsRandom: - def test_random_graph(self): seed = 42 G = gnp_random_graph(100, 0.25, seed) @@ -46,8 +45,9 @@ class TestGeneratorsRandom: G = connected_watts_strogatz_graph(10, 2, 0.1, tries=10, seed=seed) assert len(G) == 10 assert G.number_of_edges() == 10 - pytest.raises(NetworkXError, connected_watts_strogatz_graph, - 10, 2, 0.1, tries=0) + pytest.raises( + NetworkXError, connected_watts_strogatz_graph, 10, 2, 0.1, tries=0 + ) G = watts_strogatz_graph(10, 4, 0.25, seed) assert len(G) == 10 @@ -211,8 +211,12 @@ class TestGeneratorsRandom: assert sum(1 for _ in G.edges()) == 0 def test_gnp(self): - for generator in [gnp_random_graph, binomial_graph, erdos_renyi_graph, - fast_gnp_random_graph]: + for generator in [ + gnp_random_graph, + binomial_graph, + erdos_renyi_graph, + fast_gnp_random_graph, + ]: G = generator(10, -1.1) assert len(G) == 10 assert sum(1 for _ in G.edges()) == 0 @@ -289,6 +293,7 @@ class TestGeneratorsRandom: def root(u, w, r): return r / c + w + c = 1 graph = random_kernel_graph(1000, integral, root) graph = random_kernel_graph(1000, integral, root, seed=42) diff --git a/networkx/generators/tests/test_small.py b/networkx/generators/tests/test_small.py index 9c9b70dd..882ecef7 100644 --- a/networkx/generators/tests/test_small.py +++ b/networkx/generators/tests/test_small.py @@ -1,6 +1,7 @@ import pytest import networkx as nx from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic + is_isomorphic = graph_could_be_isomorphic """Generators - Small @@ -12,17 +13,15 @@ Some small graphs null = nx.null_graph() -class TestGeneratorsSmall(): +class TestGeneratorsSmall: def test_make_small_graph(self): - d = ["adjacencylist", "Bull Graph", 5, [[2, 3], [1, 3, 4], [1, 2, 5], - [2], [3]]] + d = ["adjacencylist", "Bull Graph", 5, [[2, 3], [1, 3, 4], [1, 2, 5], [2], [3]]] G = nx.make_small_graph(d) assert is_isomorphic(G, nx.bull_graph()) # Test small graph creation error with wrong ltype d[0] = "erroneouslist" - pytest.raises(nx.NetworkXError, nx.make_small_graph, - graph_description=d) + pytest.raises(nx.NetworkXError, nx.make_small_graph, graph_description=d) def 
test__LCF_graph(self): # If n<=0, then return the null_graph @@ -121,16 +120,14 @@ class TestGeneratorsSmall(): G = nx.icosahedral_graph() assert G.number_of_nodes() == 12 assert G.number_of_edges() == 30 - assert (list(d for n, d in G.degree()) == - [5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5]) + assert list(d for n, d in G.degree()) == [5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5] assert nx.diameter(G) == 3 assert nx.radius(G) == 3 G = nx.krackhardt_kite_graph() assert G.number_of_nodes() == 10 assert G.number_of_edges() == 18 - assert (sorted(d for n, d in G.degree()) == - [1, 2, 3, 3, 3, 4, 4, 5, 5, 6]) + assert sorted(d for n, d in G.degree()) == [1, 2, 3, 3, 3, 4, 4, 5, 5, 6] G = nx.moebius_kantor_graph() assert G.number_of_nodes() == 16 @@ -186,7 +183,6 @@ class TestGeneratorsSmall(): assert list(d for n, d in G.degree()) == 46 * [3] # Test create_using with directed or multigraphs on small graphs - pytest.raises(nx.NetworkXError, nx.tutte_graph, - create_using=nx.DiGraph) + pytest.raises(nx.NetworkXError, nx.tutte_graph, create_using=nx.DiGraph) MG = nx.tutte_graph(create_using=nx.MultiGraph) assert sorted(MG.edges()) == sorted(G.edges()) diff --git a/networkx/generators/tests/test_spectral_graph_forge.py b/networkx/generators/tests/test_spectral_graph_forge.py index f1da0a0c..3f27a17d 100644 --- a/networkx/generators/tests/test_spectral_graph_forge.py +++ b/networkx/generators/tests/test_spectral_graph_forge.py @@ -8,8 +8,8 @@ from networkx.generators import karate_club_graph def test_spectral_graph_forge(): - numpy = pytest.importorskip('numpy') - scipy = pytest.importorskip('scipy') + numpy = pytest.importorskip("numpy") + scipy = pytest.importorskip("scipy") G = karate_club_graph() @@ -17,32 +17,32 @@ def test_spectral_graph_forge(): # common cases, just checking node number preserving and difference # between identity and modularity cases - H = spectral_graph_forge(G, 0.1, transformation='identity', seed=seed) + H = spectral_graph_forge(G, 0.1, transformation="identity", seed=seed) assert_nodes_equal(G, H) - I = spectral_graph_forge(G, 0.1, transformation='identity', seed=seed) + I = spectral_graph_forge(G, 0.1, transformation="identity", seed=seed) assert_nodes_equal(G, H) assert is_isomorphic(I, H) - I = spectral_graph_forge(G, 0.1, transformation='modularity', seed=seed) + I = spectral_graph_forge(G, 0.1, transformation="modularity", seed=seed) assert_nodes_equal(G, I) assert not is_isomorphic(I, H) # with all the eigenvectors, output graph is identical to the input one - H = spectral_graph_forge(G, 1, transformation='modularity', seed=seed) + H = spectral_graph_forge(G, 1, transformation="modularity", seed=seed) assert_nodes_equal(G, H) assert is_isomorphic(G, H) # invalid alpha input value, it is silently truncated in [0,1] - H = spectral_graph_forge(G, -1, transformation='identity', seed=seed) + H = spectral_graph_forge(G, -1, transformation="identity", seed=seed) assert_nodes_equal(G, H) - H = spectral_graph_forge(G, 10, transformation='identity', seed=seed) + H = spectral_graph_forge(G, 10, transformation="identity", seed=seed) assert_nodes_equal(G, H) assert is_isomorphic(G, H) # invalid transformation mode, checking the error raising - pytest.raises(NetworkXError, - spectral_graph_forge, G, 0.1, transformation='unknown', - seed=seed) + pytest.raises( + NetworkXError, spectral_graph_forge, G, 0.1, transformation="unknown", seed=seed + ) diff --git a/networkx/generators/tests/test_stochastic.py b/networkx/generators/tests/test_stochastic.py index ed335762..1a2a96fa 100644 --- 
a/networkx/generators/tests/test_stochastic.py +++ b/networkx/generators/tests/test_stochastic.py @@ -14,8 +14,10 @@ class TestStochasticGraph: G.add_edge(0, 2) S = nx.stochastic_graph(G) assert nx.is_isomorphic(G, S) - assert (sorted(S.edges(data=True)) == - [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) + assert sorted(S.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] def test_in_place(self): """Tests for an in-place reweighting of the edges of the graph. @@ -25,24 +27,32 @@ class TestStochasticGraph: G.add_edge(0, 1, weight=1) G.add_edge(0, 2, weight=1) nx.stochastic_graph(G, copy=False) - assert (sorted(G.edges(data=True)) == - [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) + assert sorted(G.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] def test_arbitrary_weights(self): G = nx.DiGraph() G.add_edge(0, 1, weight=1) G.add_edge(0, 2, weight=1) S = nx.stochastic_graph(G) - assert (sorted(S.edges(data=True)) == - [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) + assert sorted(S.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] def test_multidigraph(self): G = nx.MultiDiGraph() G.add_edges_from([(0, 1), (0, 1), (0, 2), (0, 2)]) S = nx.stochastic_graph(G) d = dict(weight=0.25) - assert (sorted(S.edges(data=True)) == - [(0, 1, d), (0, 1, d), (0, 2, d), (0, 2, d)]) + assert sorted(S.edges(data=True)) == [ + (0, 1, d), + (0, 1, d), + (0, 2, d), + (0, 2, d), + ] def test_graph_disallowed(self): with pytest.raises(nx.NetworkXNotImplemented): diff --git a/networkx/generators/tests/test_sudoku.py b/networkx/generators/tests/test_sudoku.py index 4faf5870..366701d3 100644 --- a/networkx/generators/tests/test_sudoku.py +++ b/networkx/generators/tests/test_sudoku.py @@ -9,7 +9,7 @@ def test_sudoku_negative(): pytest.raises(nx.NetworkXError, nx.sudoku_graph, n=-1) -@pytest.mark.parametrize('n', [0, 1, 2, 3, 4]) +@pytest.mark.parametrize("n", [0, 1, 2, 3, 4]) def test_sudoku_generator(n): """Generate Sudoku graphs of various sizes and verify their properties.""" G = nx.sudoku_graph(n) @@ -26,12 +26,66 @@ def test_sudoku_generator(n): assert sorted(G.neighbors(6)) == [2, 3, 4, 5, 7, 10, 14] elif n == 3: assert sorted(G.neighbors(42)) == [ - 6, 15, 24, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 43, 44, 51, 52, 53, 60, 69, 78 + 6, + 15, + 24, + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40, + 41, + 43, + 44, + 51, + 52, + 53, + 60, + 69, + 78, ] elif n == 4: assert sorted(G.neighbors(0)) == [ - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - 16, 17, 18, 19, 32, 33, 34, 35, 48, 49, 50, 51, - 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240 + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 32, + 33, + 34, + 35, + 48, + 49, + 50, + 51, + 64, + 80, + 96, + 112, + 128, + 144, + 160, + 176, + 192, + 208, + 224, + 240, ] diff --git a/networkx/generators/tests/test_trees.py b/networkx/generators/tests/test_trees.py index 26fa531a..d7a76d96 100644 --- a/networkx/generators/tests/test_trees.py +++ b/networkx/generators/tests/test_trees.py @@ -9,10 +9,11 @@ class TestPrefixTree: def test_basic(self): # This example is from the Wikipedia article "Trie" # <https://en.wikipedia.org/wiki/Trie>. 
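The sudoku_graph assertions above spell out neighbor lists by hand; the counts they rely on follow directly from the board structure. A quick sanity sketch for the standard n=3 board, with the expected figures derived from those tests:

    import networkx as nx

    G = nx.sudoku_graph(3)  # 9x9 board, one node per cell
    # Each cell conflicts with 8 cells in its row, 8 in its column,
    # and 4 more in its 3x3 box, so the graph is 20-regular.
    assert all(deg == 20 for _, deg in G.degree())
    assert G.number_of_edges() == 81 * 20 // 2  # 810
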
- strings = ['a', 'to', 'tea', 'ted', 'ten', 'i', 'in', 'inn'] + strings = ["a", "to", "tea", "ted", "ten", "i", "in", "inn"] T, root = nx.prefix_tree(strings) - def source_label(v): return T.nodes[v]['source'] + def source_label(v): + return T.nodes[v]["source"] # First, we check that the tree has the expected # structure. Recall that each node that corresponds to one of @@ -54,16 +55,16 @@ class TestPrefixTree: # rightmost letter in the string corresponding to the path to # that node. assert source_label(root) is None - assert source_label(a) == 'a' - assert source_label(i) == 'i' - assert source_label(t) == 't' - assert source_label(in_) == 'n' - assert source_label(inn) == 'n' - assert source_label(to) == 'o' - assert source_label(te) == 'e' - assert source_label(tea) == 'a' - assert source_label(ted) == 'd' - assert source_label(ten) == 'n' + assert source_label(a) == "a" + assert source_label(i) == "i" + assert source_label(t) == "t" + assert source_label(in_) == "n" + assert source_label(inn) == "n" + assert source_label(to) == "o" + assert source_label(te) == "e" + assert source_label(tea) == "a" + assert source_label(ted) == "d" + assert source_label(ten) == "n" assert source_label(NIL) == NIL diff --git a/networkx/generators/tests/test_triads.py b/networkx/generators/tests/test_triads.py index a750e6ef..6fc51ae1 100644 --- a/networkx/generators/tests/test_triads.py +++ b/networkx/generators/tests/test_triads.py @@ -5,10 +5,10 @@ from networkx import triad_graph def test_triad_graph(): - G = triad_graph('030T') - assert [tuple(e) for e in ('ab', 'ac', 'cb')] == sorted(G.edges()) + G = triad_graph("030T") + assert [tuple(e) for e in ("ab", "ac", "cb")] == sorted(G.edges()) def test_invalid_name(): with pytest.raises(ValueError): - triad_graph('bogus') + triad_graph("bogus") diff --git a/networkx/generators/trees.py b/networkx/generators/trees.py index e1b6280b..91ffae5d 100644 --- a/networkx/generators/trees.py +++ b/networkx/generators/trees.py @@ -5,13 +5,13 @@ import networkx as nx from networkx.utils import generate_unique_node from networkx.utils import py_random_state -__all__ = ['prefix_tree', 'random_tree'] +__all__ = ["prefix_tree", "random_tree"] #: The nil node, the only leaf node in a prefix tree. #: #: Each predecessor of the nil node corresponds to the end of a path #: used to generate the prefix tree. -NIL = 'NIL' +NIL = "NIL" def prefix_tree(paths): @@ -92,6 +92,7 @@ def prefix_tree(paths): ['ab', 'abs', 'ad'] """ + def _helper(paths, root, B): """Recursively create a trie from the given list of paths. @@ -185,7 +186,7 @@ def random_tree(n, seed=None): """ if n == 0: - raise nx.NetworkXPointlessConcept('the null graph is not a tree') + raise nx.NetworkXPointlessConcept("the null graph is not a tree") # Cannot create a Prüfer sequence unless `n` is at least two. if n == 1: return nx.empty_graph(1) diff --git a/networkx/generators/triads.py b/networkx/generators/triads.py index 2946a932..7557d42e 100644 --- a/networkx/generators/triads.py +++ b/networkx/generators/triads.py @@ -8,27 +8,28 @@ digraphs on three nodes. """ from networkx.classes import DiGraph -__all__ = ['triad_graph'] +__all__ = ["triad_graph"] #: Dictionary mapping triad name to list of directed edges in the #: digraph representation of that triad (with nodes 'a', 'b', and 'c'). 
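The TRIAD_EDGES table reformatted just below maps each MAN triad-census name to its directed edges over the fixed nodes 'a', 'b', and 'c'. Mirroring test_triad_graph above, a one-line check of the transitive triad (the edge list is taken from the table itself):

    import networkx as nx

    G = nx.triad_graph("030T")  # edges ab, cb, ac
    assert sorted(G.edges()) == [("a", "b"), ("a", "c"), ("c", "b")]
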
-TRIAD_EDGES = {'003': [], - '012': ['ab'], - '102': ['ab', 'ba'], - '021D': ['ba', 'bc'], - '021U': ['ab', 'cb'], - '021C': ['ab', 'bc'], - '111D': ['ac', 'ca', 'bc'], - '111U': ['ac', 'ca', 'cb'], - '030T': ['ab', 'cb', 'ac'], - '030C': ['ba', 'cb', 'ac'], - '201': ['ab', 'ba', 'ac', 'ca'], - '120D': ['bc', 'ba', 'ac', 'ca'], - '120U': ['ab', 'cb', 'ac', 'ca'], - '120C': ['ab', 'bc', 'ac', 'ca'], - '210': ['ab', 'bc', 'cb', 'ac', 'ca'], - '300': ['ab', 'ba', 'bc', 'cb', 'ac', 'ca'] - } +TRIAD_EDGES = { + "003": [], + "012": ["ab"], + "102": ["ab", "ba"], + "021D": ["ba", "bc"], + "021U": ["ab", "cb"], + "021C": ["ab", "bc"], + "111D": ["ac", "ca", "bc"], + "111U": ["ac", "ca", "cb"], + "030T": ["ab", "cb", "ac"], + "030C": ["ba", "cb", "ac"], + "201": ["ab", "ba", "ac", "ca"], + "120D": ["bc", "ba", "ac", "ca"], + "120U": ["ab", "cb", "ac", "ca"], + "120C": ["ab", "bc", "ac", "ca"], + "210": ["ab", "bc", "cb", "ac", "ca"], + "300": ["ab", "ba", "bc", "cb", "ac", "ca"], +} def triad_graph(triad_name): @@ -64,9 +65,11 @@ def triad_graph(triad_name): """ if triad_name not in TRIAD_EDGES: - raise ValueError(f'unknown triad name "{triad_name}"; use one of the triad names' - ' in the TRIAD_NAMES constant') + raise ValueError( + f'unknown triad name "{triad_name}"; use one of the triad names' + " in the TRIAD_NAMES constant" + ) G = DiGraph() - G.add_nodes_from('abc') + G.add_nodes_from("abc") G.add_edges_from(TRIAD_EDGES[triad_name]) return G diff --git a/networkx/linalg/algebraicconnectivity.py b/networkx/linalg/algebraicconnectivity.py index 728c5876..e4b3aabb 100644 --- a/networkx/linalg/algebraicconnectivity.py +++ b/networkx/linalg/algebraicconnectivity.py @@ -8,9 +8,7 @@ from networkx.utils import reverse_cuthill_mckee_ordering from networkx.utils import random_state try: - from numpy import ( - array, asarray, dot, ndarray, ones, sqrt, zeros, atleast_2d - ) + from numpy import array, asarray, dot, ndarray, ones, sqrt, zeros, atleast_2d from numpy.linalg import norm, qr from scipy.linalg import eigh, inv from scipy.sparse import csc_matrix, spdiags diff --git a/networkx/linalg/attrmatrix.py b/networkx/linalg/attrmatrix.py index 35fbe548..ae528abc 100644 --- a/networkx/linalg/attrmatrix.py +++ b/networkx/linalg/attrmatrix.py @@ -434,8 +434,9 @@ def attr_sparse_matrix( import numpy as np from scipy import sparse except ImportError as e: - raise ImportError("attr_sparse_matrix() requires scipy: " - "http://scipy.org/ ") from e + raise ImportError( + "attr_sparse_matrix() requires scipy: " "http://scipy.org/ " + ) from e edge_value = _edge_value(G, edge_attr) node_value = _node_value(G, node_attr) diff --git a/networkx/linalg/bethehessianmatrix.py b/networkx/linalg/bethehessianmatrix.py index a772594f..ea999c9b 100644 --- a/networkx/linalg/bethehessianmatrix.py +++ b/networkx/linalg/bethehessianmatrix.py @@ -68,7 +68,8 @@ def bethe_hessian_matrix(G, r=None, nodelist=None): nodelist = list(G) if r is None: r = ( - sum([d ** 2 for v, d in nx.degree(G)]) / sum([d for v, d in nx.degree(G)]) - 1 + sum([d ** 2 for v, d in nx.degree(G)]) / sum([d for v, d in nx.degree(G)]) + - 1 ) A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, format="csr") n, m = A.shape diff --git a/networkx/linalg/graphmatrix.py b/networkx/linalg/graphmatrix.py index c3d6ef92..c1a83e99 100644 --- a/networkx/linalg/graphmatrix.py +++ b/networkx/linalg/graphmatrix.py @@ -75,8 +75,9 @@ def incidence_matrix(G, nodelist=None, edgelist=None, oriented=False, weight=Non ui = node_index[u] vi = node_index[v] except KeyError as e: - 
raise nx.NetworkXError(f"node {u} or {v} in edgelist " - f"but not in nodelist") from e + raise nx.NetworkXError( + f"node {u} or {v} in edgelist " f"but not in nodelist" + ) from e if weight is None: wt = 1 else: diff --git a/networkx/linalg/tests/test_algebraic_connectivity.py b/networkx/linalg/tests/test_algebraic_connectivity.py index 500b88f9..f9197e89 100644 --- a/networkx/linalg/tests/test_algebraic_connectivity.py +++ b/networkx/linalg/tests/test_algebraic_connectivity.py @@ -37,18 +37,15 @@ def check_eigenvector(A, l, x): class TestAlgebraicConnectivity: - - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_directed(self, method): G = nx.DiGraph() pytest.raises( nx.NetworkXNotImplemented, nx.algebraic_connectivity, G, method=method ) - pytest.raises( - nx.NetworkXNotImplemented, nx.fiedler_vector, G, method=method - ) + pytest.raises(nx.NetworkXNotImplemented, nx.fiedler_vector, G, method=method) - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_null_and_singleton(self, method): G = nx.Graph() pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method=method) @@ -57,7 +54,7 @@ class TestAlgebraicConnectivity: pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method=method) pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method) - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_disconnected(self, method): G = nx.Graph() G.add_nodes_from(range(2)) @@ -72,18 +69,16 @@ class TestAlgebraicConnectivity: pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method="unknown") pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method="unknown") - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_two_nodes(self, method): G = nx.Graph() G.add_edge(0, 1, weight=1) A = nx.laplacian_matrix(G) - assert almost_equal( - nx.algebraic_connectivity(G, tol=1e-12, method=method), 2 - ) + assert almost_equal(nx.algebraic_connectivity(G, tol=1e-12, method=method), 2) x = nx.fiedler_vector(G, tol=1e-12, method=method) check_eigenvector(A, 2, x) - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_two_nodes_multigraph(self, method): G = nx.MultiGraph() G.add_edge(0, 0, spam=1e8) @@ -105,7 +100,7 @@ class TestAlgebraicConnectivity: x = nx.fiedler_vector(G, tol=1e-12, method="tracemin") check_eigenvector(A, sigma, x) - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_path(self, method): G = nx.path_graph(8) A = nx.laplacian_matrix(G) @@ -115,7 +110,7 @@ class TestAlgebraicConnectivity: x = nx.fiedler_vector(G, tol=1e-12, method=method) check_eigenvector(A, sigma, x) - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_problematic_graph_issue_2381(self, method): G = nx.path_graph(4) G.add_edges_from([(4, 2), (5, 1)]) @@ -126,7 +121,7 @@ class TestAlgebraicConnectivity: x = nx.fiedler_vector(G, tol=1e-12, method=method) check_eigenvector(A, sigma, x) - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_cycle(self, method): G = nx.cycle_graph(8) A = nx.laplacian_matrix(G) @@ -136,7 +131,7 @@ class TestAlgebraicConnectivity: x = nx.fiedler_vector(G, tol=1e-12, method=method) check_eigenvector(A, sigma, x) - @pytest.mark.parametrize('method', methods) + 
@pytest.mark.parametrize("method", methods) def test_seed_argument(self, method): G = nx.cycle_graph(8) A = nx.laplacian_matrix(G) @@ -146,14 +141,15 @@ class TestAlgebraicConnectivity: x = nx.fiedler_vector(G, tol=1e-12, method=method, seed=1) check_eigenvector(A, sigma, x) - @pytest.mark.parametrize(('normalized', 'sigma', 'laplacian_fn'), ( - (False, 0.2434017461399311, nx.laplacian_matrix), - (True, 0.08113391537997749, nx.normalized_laplacian_matrix) - )) - @pytest.mark.parametrize('method', methods) - def test_buckminsterfullerene( - self, normalized, sigma, laplacian_fn, method - ): + @pytest.mark.parametrize( + ("normalized", "sigma", "laplacian_fn"), + ( + (False, 0.2434017461399311, nx.laplacian_matrix), + (True, 0.08113391537997749, nx.normalized_laplacian_matrix), + ), + ) + @pytest.mark.parametrize("method", methods) + def test_buckminsterfullerene(self, normalized, sigma, laplacian_fn, method): G = nx.Graph( [ (1, 10), @@ -256,9 +252,7 @@ class TestAlgebraicConnectivity: ), sigma, ) - x = nx.fiedler_vector( - G, normalized=normalized, tol=1e-12, method=method - ) + x = nx.fiedler_vector(G, normalized=normalized, tol=1e-12, method=method) check_eigenvector(A, sigma, x) except nx.NetworkXError as e: if e.args not in ( @@ -271,12 +265,12 @@ class TestAlgebraicConnectivity: class TestSpectralOrdering: _graphs = (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph) - @pytest.mark.parametrize('graph', _graphs) + @pytest.mark.parametrize("graph", _graphs) def test_nullgraph(self, graph): G = graph() pytest.raises(nx.NetworkXError, nx.spectral_ordering, G) - @pytest.mark.parametrize('graph', _graphs) + @pytest.mark.parametrize("graph", _graphs) def test_singleton(self, graph): G = graph() G.add_node("x") @@ -289,7 +283,7 @@ class TestSpectralOrdering: G = nx.path_graph(4) pytest.raises(nx.NetworkXError, nx.spectral_ordering, G, method="unknown") - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_three_nodes(self, method): G = nx.Graph() G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1)], weight="spam") @@ -297,7 +291,7 @@ class TestSpectralOrdering: assert set(order) == set(G) assert {1, 3} in (set(order[:-1]), set(order[1:])) - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_three_nodes_multigraph(self, method): G = nx.MultiDiGraph() G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1), (2, 3, 2)]) @@ -305,7 +299,7 @@ class TestSpectralOrdering: assert set(order) == set(G) assert {2, 3} in (set(order[:-1]), set(order[1:])) - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_path(self, method): # based on setup_class numpy is installed if we get here from numpy.random import shuffle @@ -317,7 +311,7 @@ class TestSpectralOrdering: order = nx.spectral_ordering(G, method=method) assert order in [path, list(reversed(path))] - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_seed_argument(self, method): # based on setup_class numpy is installed if we get here from numpy.random import shuffle @@ -329,7 +323,7 @@ class TestSpectralOrdering: order = nx.spectral_ordering(G, method=method, seed=1) assert order in [path, list(reversed(path))] - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize("method", methods) def test_disconnected(self, method): G = nx.Graph() nx.add_path(G, range(0, 10, 2)) @@ -345,11 +339,14 @@ class TestSpectralOrdering: assert order[:5] 
in seqs assert order[5:] in seqs - @pytest.mark.parametrize(('normalized', 'expected_order'), ( - (False, [[1, 2, 0, 3, 4, 5, 6, 9, 7, 8], [8, 7, 9, 6, 5, 4, 3, 0, 2, 1]]), - (True, [[1, 2, 3, 0, 4, 5, 9, 6, 7, 8], [8, 7, 6, 9, 5, 4, 0, 3, 2, 1]]), - )) - @pytest.mark.parametrize('method', methods) + @pytest.mark.parametrize( + ("normalized", "expected_order"), + ( + (False, [[1, 2, 0, 3, 4, 5, 6, 9, 7, 8], [8, 7, 9, 6, 5, 4, 3, 0, 2, 1]]), + (True, [[1, 2, 3, 0, 4, 5, 9, 6, 7, 8], [8, 7, 6, 9, 5, 4, 0, 3, 2, 1]]), + ), + ) + @pytest.mark.parametrize("method", methods) def test_cycle(self, normalized, expected_order, method): path = list(range(10)) G = nx.Graph() @@ -357,9 +354,7 @@ class TestSpectralOrdering: G.add_edge(path[-1], path[0], weight=1) A = nx.laplacian_matrix(G).todense() try: - order = nx.spectral_ordering( - G, normalized=normalized, method=method - ) + order = nx.spectral_ordering(G, normalized=normalized, method=method) except nx.NetworkXError as e: if e.args not in ( ("Cholesky solver unavailable.",), @@ -368,4 +363,3 @@ class TestSpectralOrdering: raise else: assert order in expected_order - diff --git a/networkx/linalg/tests/test_attrmatrix.py b/networkx/linalg/tests/test_attrmatrix.py index 1e87f2cb..fb1ea479 100644 --- a/networkx/linalg/tests/test_attrmatrix.py +++ b/networkx/linalg/tests/test_attrmatrix.py @@ -14,13 +14,13 @@ def test_attr_matrix(): G.add_edge(1, 2, thickness=3) def node_attr(u): - return G.nodes[u].get("size", .5) * 3 + return G.nodes[u].get("size", 0.5) * 3 def edge_attr(u, v): - return G[u][v].get("thickness", .5) + return G[u][v].get("thickness", 0.5) M = nx.attr_matrix(G, edge_attr=edge_attr, node_attr=node_attr) - npt.assert_equal(M[0], np.array([[6.]])) + npt.assert_equal(M[0], np.array([[6.0]])) assert M[1] == [1.5] diff --git a/networkx/linalg/tests/test_laplacian.py b/networkx/linalg/tests/test_laplacian.py index 16dac8c6..d7638b83 100644 --- a/networkx/linalg/tests/test_laplacian.py +++ b/networkx/linalg/tests/test_laplacian.py @@ -10,7 +10,6 @@ from networkx.generators.expanders import margulis_gabber_galil_graph class TestLaplacian: - @classmethod def setup_class(cls): deg = [3, 2, 2, 1, 0] @@ -41,7 +40,7 @@ class TestLaplacian: npt.assert_equal(nx.laplacian_matrix(self.MG).todense(), NL) npt.assert_equal( nx.laplacian_matrix(self.G, nodelist=[0, 1]).todense(), - np.array([[1, -1], [-1, 1]]) + np.array([[1, -1], [-1, 1]]), ) npt.assert_equal(nx.laplacian_matrix(self.WG).todense(), WL) npt.assert_equal(nx.laplacian_matrix(self.WG, weight=None).todense(), NL) @@ -68,7 +67,9 @@ class TestLaplacian: # fmt: on npt.assert_almost_equal( - nx.normalized_laplacian_matrix(self.G, nodelist=range(5)).todense(), G, decimal=3 + nx.normalized_laplacian_matrix(self.G, nodelist=range(5)).todense(), + G, + decimal=3, ) npt.assert_almost_equal( nx.normalized_laplacian_matrix(self.G).todense(), GL, decimal=3 @@ -80,7 +81,9 @@ class TestLaplacian: nx.normalized_laplacian_matrix(self.WG).todense(), GL, decimal=3 ) npt.assert_almost_equal( - nx.normalized_laplacian_matrix(self.WG, weight="other").todense(), GL, decimal=3 + nx.normalized_laplacian_matrix(self.WG, weight="other").todense(), + GL, + decimal=3, ) npt.assert_almost_equal( nx.normalized_laplacian_matrix(self.Gsl).todense(), Lsl, decimal=3 @@ -127,7 +130,9 @@ class TestLaplacian: [0., -0.3162, -0.0913, -0.5, 1., -0.25], [-0.3227, 0., 0., -0.5, -0.25, 1.]]) # fmt: on - L = nx.directed_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G), walk_type="random") + L = nx.directed_laplacian_matrix( + G, 
alpha=0.9, nodelist=sorted(G), walk_type="random" + ) npt.assert_almost_equal(L, GL, decimal=3) # fmt: off @@ -138,7 +143,9 @@ class TestLaplacian: [0., -0.1581, -0.0456, -0.25, 0.5, -0.125], [-0.1614, 0., 0., -0.25, -0.125, 0.5]]) # fmt: on - L = nx.directed_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G), walk_type="lazy") + L = nx.directed_laplacian_matrix( + G, alpha=0.9, nodelist=sorted(G), walk_type="lazy" + ) npt.assert_almost_equal(L, GL, decimal=3) def test_directed_combinatorial_laplacian(self): diff --git a/networkx/linalg/tests/test_modularity.py b/networkx/linalg/tests/test_modularity.py index 02cf1d64..ccf730fe 100644 --- a/networkx/linalg/tests/test_modularity.py +++ b/networkx/linalg/tests/test_modularity.py @@ -1,4 +1,5 @@ import pytest + np = pytest.importorskip("numpy") npt = pytest.importorskip("numpy.testing") scipy = pytest.importorskip("scipy") @@ -8,7 +9,6 @@ from networkx.generators.degree_seq import havel_hakimi_graph class TestModularity: - @classmethod def setup_class(cls): deg = [3, 2, 2, 1, 0] diff --git a/networkx/readwrite/adjlist.py b/networkx/readwrite/adjlist.py index 911d4bfd..0c9b1083 100644 --- a/networkx/readwrite/adjlist.py +++ b/networkx/readwrite/adjlist.py @@ -22,16 +22,13 @@ adjacency list (anything following the # in a line is a comment):: d e """ -__all__ = ['generate_adjlist', - 'write_adjlist', - 'parse_adjlist', - 'read_adjlist'] +__all__ = ["generate_adjlist", "write_adjlist", "parse_adjlist", "read_adjlist"] from networkx.utils import open_file import networkx as nx -def generate_adjlist(G, delimiter=' '): +def generate_adjlist(G, delimiter=" "): """Generate a single line of the graph G in adjacency list format. Parameters @@ -78,11 +75,11 @@ def generate_adjlist(G, delimiter=' '): line += str(t) + delimiter if not directed: seen.add(s) - yield line[:-len(delimiter)] + yield line[: -len(delimiter)] -@open_file(1, mode='wb') -def write_adjlist(G, path, comments="#", delimiter=' ', encoding='utf-8'): +@open_file(1, mode="wb") +def write_adjlist(G, path, comments="#", delimiter=" ", encoding="utf-8"): """Write graph G in single-line adjacency-list format to path. @@ -124,19 +121,25 @@ def write_adjlist(G, path, comments="#", delimiter=' ', encoding='utf-8'): """ import sys import time - pargs = comments + " ".join(sys.argv) + '\n' - header = (pargs - + comments + f" GMT {time.asctime(time.gmtime())}\n" - + comments + f" {G.name}\n") + + pargs = comments + " ".join(sys.argv) + "\n" + header = ( + pargs + + comments + + f" GMT {time.asctime(time.gmtime())}\n" + + comments + + f" {G.name}\n" + ) path.write(header.encode(encoding)) for line in generate_adjlist(G, delimiter): - line += '\n' + line += "\n" path.write(line.encode(encoding)) -def parse_adjlist(lines, comments='#', delimiter=None, - create_using=None, nodetype=None): +def parse_adjlist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None +): """Parse lines of a graph adjacency list representation. 
Parameters @@ -195,22 +198,31 @@ def parse_adjlist(lines, comments='#', delimiter=None, try: u = nodetype(u) except BaseException as e: - raise TypeError(f"Failed to convert node ({u}) to type " - f"{nodetype}") from e + raise TypeError( + f"Failed to convert node ({u}) to type " f"{nodetype}" + ) from e G.add_node(u) if nodetype is not None: try: vlist = list(map(nodetype, vlist)) except BaseException as e: - raise TypeError(f"Failed to convert nodes ({','.join(vlist)}) " - f"to type {nodetype}") from e + raise TypeError( + f"Failed to convert nodes ({','.join(vlist)}) " + f"to type {nodetype}" + ) from e G.add_edges_from([(u, v) for v in vlist]) return G -@open_file(0, mode='rb') -def read_adjlist(path, comments="#", delimiter=None, create_using=None, - nodetype=None, encoding='utf-8'): +@open_file(0, mode="rb") +def read_adjlist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + encoding="utf-8", +): """Read graph in adjacency list format from path. Parameters @@ -279,8 +291,10 @@ def read_adjlist(path, comments="#", delimiter=None, create_using=None, write_adjlist """ lines = (line.decode(encoding) for line in path) - return parse_adjlist(lines, - comments=comments, - delimiter=delimiter, - create_using=create_using, - nodetype=nodetype) + return parse_adjlist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + ) diff --git a/networkx/readwrite/edgelist.py b/networkx/readwrite/edgelist.py index bdb8b7ff..72afe1a7 100644 --- a/networkx/readwrite/edgelist.py +++ b/networkx/readwrite/edgelist.py @@ -27,18 +27,20 @@ Arbitrary data:: 1 2 7 green """ -__all__ = ['generate_edgelist', - 'write_edgelist', - 'parse_edgelist', - 'read_edgelist', - 'read_weighted_edgelist', - 'write_weighted_edgelist'] +__all__ = [ + "generate_edgelist", + "write_edgelist", + "parse_edgelist", + "read_edgelist", + "read_weighted_edgelist", + "write_weighted_edgelist", +] from networkx.utils import open_file import networkx as nx -def generate_edgelist(G, delimiter=' ', data=True): +def generate_edgelist(G, delimiter=" ", data=True): """Generate a single line of the graph G in edge list format. Parameters @@ -121,9 +123,8 @@ def generate_edgelist(G, delimiter=' ', data=True): yield delimiter.join(map(str, e)) -@open_file(1, mode='wb') -def write_edgelist(G, path, comments="#", delimiter=' ', data=True, - encoding='utf-8'): +@open_file(1, mode="wb") +def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"): """Write graph as a list of edges. Parameters @@ -168,12 +169,13 @@ def write_edgelist(G, path, comments="#", delimiter=' ', data=True, """ for line in generate_edgelist(G, delimiter, data): - line += '\n' + line += "\n" path.write(line.encode(encoding)) -def parse_edgelist(lines, comments='#', delimiter=None, - create_using=None, nodetype=None, data=True): +def parse_edgelist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True +): """Parse lines of an edge list representation of a graph. 
Parameters @@ -238,6 +240,7 @@ def parse_edgelist(lines, comments='#', delimiter=None, read_weighted_edgelist """ from ast import literal_eval + G = nx.empty_graph(0, create_using) for line in lines: p = line.find(comments) @@ -257,8 +260,9 @@ def parse_edgelist(lines, comments='#', delimiter=None, u = nodetype(u) v = nodetype(v) except BaseException as e: - raise TypeError(f"Failed to convert nodes {u},{v} " - f"to type {nodetype}.") from e + raise TypeError( + f"Failed to convert nodes {u},{v} " f"to type {nodetype}." + ) from e if len(d) == 0 or data is False: # no data or data type specified @@ -266,29 +270,42 @@ def parse_edgelist(lines, comments='#', delimiter=None, elif data is True: # no edge types specified try: # try to evaluate as dictionary - edgedata = dict(literal_eval(' '.join(d))) + edgedata = dict(literal_eval(" ".join(d))) except BaseException as e: - raise TypeError(f"Failed to convert edge data ({d}) " - f"to dictionary.") from e + raise TypeError( + f"Failed to convert edge data ({d}) " f"to dictionary." + ) from e else: # convert edge data to dictionary with specified keys and type if len(d) != len(data): - raise IndexError(f"Edge data {d} and data_keys {data} are not the same length") + raise IndexError( + f"Edge data {d} and data_keys {data} are not the same length" + ) edgedata = {} for (edge_key, edge_type), edge_value in zip(data, d): try: edge_value = edge_type(edge_value) except BaseException as e: - raise TypeError(f"Failed to convert {edge_key} data {edge_value} " - f"to type {edge_type}.") from e + raise TypeError( + f"Failed to convert {edge_key} data {edge_value} " + f"to type {edge_type}." + ) from e edgedata.update({edge_key: edge_value}) G.add_edge(u, v, **edgedata) return G -@open_file(0, mode='rb') -def read_edgelist(path, comments="#", delimiter=None, create_using=None, - nodetype=None, data=True, edgetype=None, encoding='utf-8'): +@open_file(0, mode="rb") +def read_edgelist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + data=True, + edgetype=None, + encoding="utf-8", +): """Read a graph from a list of edges. Parameters @@ -354,13 +371,17 @@ def read_edgelist(path, comments="#", delimiter=None, create_using=None, types (e.g. int, float, str, frozenset - or tuples of those, etc.) """ lines = (line if isinstance(line, str) else line.decode(encoding) for line in path) - return parse_edgelist(lines, comments=comments, delimiter=delimiter, - create_using=create_using, nodetype=nodetype, - data=data) + return parse_edgelist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + data=data, + ) -def write_weighted_edgelist(G, path, comments="#", - delimiter=' ', encoding='utf-8'): +def write_weighted_edgelist(G, path, comments="#", delimiter=" ", encoding="utf-8"): """Write graph G as a list of edges with numeric weights. 
Parameters @@ -390,12 +411,24 @@ def write_weighted_edgelist(G, path, comments="#", write_edgelist read_weighted_edgelist """ - write_edgelist(G, path, comments=comments, delimiter=delimiter, - data=('weight',), encoding=encoding) - - -def read_weighted_edgelist(path, comments="#", delimiter=None, - create_using=None, nodetype=None, encoding='utf-8'): + write_edgelist( + G, + path, + comments=comments, + delimiter=delimiter, + data=("weight",), + encoding=encoding, + ) + + +def read_weighted_edgelist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + encoding="utf-8", +): """Read a graph as list of edges with numeric weights. Parameters @@ -440,11 +473,12 @@ def read_weighted_edgelist(path, comments="#", delimiter=None, -------- write_weighted_edgelist """ - return read_edgelist(path, - comments=comments, - delimiter=delimiter, - create_using=create_using, - nodetype=nodetype, - data=(('weight', float),), - encoding=encoding - ) + return read_edgelist( + path, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + data=(("weight", float),), + encoding=encoding, + ) diff --git a/networkx/readwrite/gml.py b/networkx/readwrite/gml.py index 1c3eab12..833665c5 100644 --- a/networkx/readwrite/gml.py +++ b/networkx/readwrite/gml.py @@ -261,6 +261,7 @@ def parse_gml(lines, label="label", destringizer=None): class Pattern(Enum): """ encodes the index of each token-matching pattern in `tokenize`. """ + KEYS = 0 REALS = 1 INTS = 2 @@ -283,6 +284,7 @@ LIST_START_VALUE = "_networkx_list_start" def parse_gml_lines(lines, label, destringizer): """Parse GML `lines` into a graph. """ + def tokenize(): patterns = [ r"[A-Za-z][0-9A-Za-z_]*\b", # keys @@ -315,7 +317,7 @@ def parse_gml_lines(lines, label, destringizer): value = int(group) else: value = group - if i != 6: # comments and whitespaces + if i != 6: # comments and whitespaces yield Token(Pattern(i), value, lineno + 1, pos + 1) pos += len(group) break @@ -673,7 +675,7 @@ def generate_gml(G, stringizer=None): elif value is False: yield indent + key + " 0" # GML only supports signed 32-bit integers - elif value < -2 ** 31 or value >= 2 ** 31: + elif value < -(2 ** 31) or value >= 2 ** 31: yield indent + key + ' "' + str(value) + '"' else: yield indent + key + " " + str(value) @@ -694,11 +696,15 @@ def generate_gml(G, stringizer=None): next_indent = indent + " " for key, value in value.items(): yield from stringize(key, value, (), next_indent) - yield indent + ']' - elif isinstance(value, (list, tuple)) and key != 'label' \ - and value and not in_list: + yield indent + "]" + elif ( + isinstance(value, (list, tuple)) + and key != "label" + and value + and not in_list + ): if len(value) == 1: - yield indent + key + ' ' + f'"{LIST_START_VALUE}"' + yield indent + key + " " + f'"{LIST_START_VALUE}"' for val in value: yield from stringize(key, val, (), indent, True) else: diff --git a/networkx/readwrite/gpickle.py b/networkx/readwrite/gpickle.py index 6d7fa6d2..9f8cd1d5 100644 --- a/networkx/readwrite/gpickle.py +++ b/networkx/readwrite/gpickle.py @@ -21,14 +21,14 @@ Format See https://docs.python.org/3/library/pickle.html """ -__all__ = ['read_gpickle', 'write_gpickle'] +__all__ = ["read_gpickle", "write_gpickle"] from networkx.utils import open_file import pickle -@open_file(1, mode='wb') +@open_file(1, mode="wb") def write_gpickle(G, path, protocol=pickle.HIGHEST_PROTOCOL): """Write graph in Python pickle format. 
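As an aside on the API reformatted in these hunks, a minimal pickle round-trip looks like the sketch below (the file name "test.gpickle" is only illustrative):

    import networkx as nx

    G = nx.path_graph(4)
    nx.write_gpickle(G, "test.gpickle")  # pickle.dump(G, file, protocol) under the hood
    H = nx.read_gpickle("test.gpickle")  # pickle.load(file)
    assert nx.is_isomorphic(G, H)
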
@@ -59,7 +59,7 @@ def write_gpickle(G, path, protocol=pickle.HIGHEST_PROTOCOL): pickle.dump(G, path, protocol) -@open_file(0, mode='rb') +@open_file(0, mode="rb") def read_gpickle(path): """Read graph object in Python pickle format. diff --git a/networkx/readwrite/graph6.py b/networkx/readwrite/graph6.py index 92fc84a8..8111d965 100644 --- a/networkx/readwrite/graph6.py +++ b/networkx/readwrite/graph6.py @@ -16,8 +16,7 @@ import networkx as nx from networkx.exception import NetworkXError from networkx.utils import open_file, not_implemented_for -__all__ = ['from_graph6_bytes', 'read_graph6', 'to_graph6_bytes', - 'write_graph6'] +__all__ = ["from_graph6_bytes", "read_graph6", "to_graph6_bytes", "write_graph6"] def _generate_graph6_bytes(G, nodes, header): @@ -43,10 +42,11 @@ def _generate_graph6_bytes(G, nodes, header): """ n = len(G) if n >= 2 ** 36: - raise ValueError('graph6 is only defined if number of nodes is less ' - 'than 2 ** 36') + raise ValueError( + "graph6 is only defined if number of nodes is less " "than 2 ** 36" + ) if header: - yield b'>>graph6<<' + yield b">>graph6<<" for d in n_to_data(n): yield str.encode(chr(d + 63)) # This generates the same as `(v in G[u] for u, v in combinations(G, 2))`, @@ -57,7 +57,7 @@ def _generate_graph6_bytes(G, nodes, header): d = sum(b << 5 - i for i, b in enumerate(chunk)) yield str.encode(chr(d + 63)) chunk = list(islice(bits, 6)) - yield b'\n' + yield b"\n" def from_graph6_bytes(bytes_in): @@ -97,6 +97,7 @@ def from_graph6_bytes(bytes_in): <http://users.cecs.anu.edu.au/~bdm/data/formats.html> """ + def bits(): """Returns sequence of individual bits from 6-bit-per-value list of data values.""" @@ -104,18 +105,19 @@ def from_graph6_bytes(bytes_in): for i in [5, 4, 3, 2, 1, 0]: yield (d >> i) & 1 - if bytes_in.startswith(b'>>graph6<<'): + if bytes_in.startswith(b">>graph6<<"): bytes_in = bytes_in[10:] data = [c - 63 for c in bytes_in] if any(c > 63 for c in data): - raise ValueError('each input character must be in range(63, 127)') + raise ValueError("each input character must be in range(63, 127)") n, data = data_to_n(data) nd = (n * (n - 1) // 2 + 5) // 6 if len(data) != nd: raise NetworkXError( - f'Expected {n * (n - 1) // 2} bits but got {len(data) * 6} in graph6') + f"Expected {n * (n - 1) // 2} bits but got {len(data) * 6} in graph6" + ) G = nx.Graph() G.add_nodes_from(range(n)) @@ -175,10 +177,10 @@ def to_graph6_bytes(G, nodes=None, header=True): G = G.subgraph(nodes) H = nx.convert_node_labels_to_integers(G) nodes = sorted(H.nodes()) - return b''.join(_generate_graph6_bytes(H, nodes, header)) + return b"".join(_generate_graph6_bytes(H, nodes, header)) -@open_file(0, mode='rb') +@open_file(0, mode="rb") def read_graph6(path): """Read simple undirected graphs in graph6 format from path. @@ -241,9 +243,9 @@ def read_graph6(path): return glist -@not_implemented_for('directed') -@not_implemented_for('multigraph') -@open_file(1, mode='wb') +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@open_file(1, mode="wb") def write_graph6(G, path, nodes=None, header=True): """Write a simple undirected graph to a path in graph6 format. 
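A quick sketch of the encoding this module implements, following the code above: the node count is written as chr(n + 63), then each character packs 6 upper-triangle adjacency bits, also offset by 63. The expected bytes here are derived from that scheme rather than quoted from the commit:

    import networkx as nx

    data = nx.to_graph6_bytes(nx.path_graph(2))
    assert data == b">>graph6<<A_\n"  # 'A' = chr(2 + 63); '_' = chr(0b100000 + 63)
    G = nx.from_graph6_bytes(b"A_")   # the ">>graph6<<" header is optional on input
    assert sorted(G.edges()) == [(0, 1)]
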
@@ -302,8 +304,8 @@ def write_graph6(G, path, nodes=None, header=True): return write_graph6_file(G, path, nodes=nodes, header=header) -@not_implemented_for('directed') -@not_implemented_for('multigraph') +@not_implemented_for("directed") +@not_implemented_for("multigraph") def write_graph6_file(G, f, nodes=None, header=True): """Write a simple undirected graph to a file-like object in graph6 format. @@ -376,8 +378,15 @@ def data_to_n(data): return data[0], data[1:] if data[1] <= 62: return (data[1] << 12) + (data[2] << 6) + data[3], data[4:] - return ((data[2] << 30) + (data[3] << 24) + (data[4] << 18) + - (data[5] << 12) + (data[6] << 6) + data[7], data[8:]) + return ( + (data[2] << 30) + + (data[3] << 24) + + (data[4] << 18) + + (data[5] << 12) + + (data[6] << 6) + + data[7], + data[8:], + ) def n_to_data(n): @@ -389,8 +398,15 @@ def n_to_data(n): if n <= 62: return [n] elif n <= 258047: - return [63, (n >> 12) & 0x3f, (n >> 6) & 0x3f, n & 0x3f] + return [63, (n >> 12) & 0x3F, (n >> 6) & 0x3F, n & 0x3F] else: # if n <= 68719476735: - return [63, 63, - (n >> 30) & 0x3f, (n >> 24) & 0x3f, (n >> 18) & 0x3f, - (n >> 12) & 0x3f, (n >> 6) & 0x3f, n & 0x3f] + return [ + 63, + 63, + (n >> 30) & 0x3F, + (n >> 24) & 0x3F, + (n >> 18) & 0x3F, + (n >> 12) & 0x3F, + (n >> 6) & 0x3F, + n & 0x3F, + ] diff --git a/networkx/readwrite/graphml.py b/networkx/readwrite/graphml.py index ec2c4c08..df4982c4 100644 --- a/networkx/readwrite/graphml.py +++ b/networkx/readwrite/graphml.py @@ -59,9 +59,15 @@ __all__ = [ ] -@open_file(1, mode='wb') -def write_graphml_xml(G, path, encoding='utf-8', prettyprint=True, - infer_numeric_types=False, named_key_ids=False): +@open_file(1, mode="wb") +def write_graphml_xml( + G, + path, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, +): """Write G in GraphML XML format to path Parameters @@ -92,16 +98,25 @@ def write_graphml_xml(G, path, encoding='utf-8', prettyprint=True, This implementation does not support mixed graphs (directed and unidirected edges together) hyperedges, nested graphs, or ports. """ - writer = GraphMLWriter(encoding=encoding, prettyprint=prettyprint, - infer_numeric_types=infer_numeric_types, - named_key_ids=named_key_ids) + writer = GraphMLWriter( + encoding=encoding, + prettyprint=prettyprint, + infer_numeric_types=infer_numeric_types, + named_key_ids=named_key_ids, + ) writer.add_graph_element(G) writer.dump(path) -@open_file(1, mode='wb') -def write_graphml_lxml(G, path, encoding='utf-8', prettyprint=True, - infer_numeric_types=False, named_key_ids=False): +@open_file(1, mode="wb") +def write_graphml_lxml( + G, + path, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, +): """Write G in GraphML XML format to path This function uses the LXML framework and should be faster than @@ -135,15 +150,18 @@ def write_graphml_lxml(G, path, encoding='utf-8', prettyprint=True, This implementation does not support mixed graphs (directed and unidirected edges together) hyperedges, nested graphs, or ports. 
""" - writer = GraphMLWriterLxml(path, graph=G, encoding=encoding, - prettyprint=prettyprint, - infer_numeric_types=infer_numeric_types, - named_key_ids=named_key_ids) + writer = GraphMLWriterLxml( + path, + graph=G, + encoding=encoding, + prettyprint=prettyprint, + infer_numeric_types=infer_numeric_types, + named_key_ids=named_key_ids, + ) writer.dump() -def generate_graphml(G, encoding='utf-8', prettyprint=True, - named_key_ids=False): +def generate_graphml(G, encoding="utf-8", prettyprint=True, named_key_ids=False): """Generate GraphML lines for G Parameters @@ -170,13 +188,14 @@ def generate_graphml(G, encoding='utf-8', prettyprint=True, This implementation does not support mixed graphs (directed and unidirected edges together) hyperedges, nested graphs, or ports. """ - writer = GraphMLWriter(encoding=encoding, prettyprint=prettyprint, - named_key_ids=named_key_ids) + writer = GraphMLWriter( + encoding=encoding, prettyprint=prettyprint, named_key_ids=named_key_ids + ) writer.add_graph_element(G) yield from str(writer).splitlines() -@open_file(0, mode='rb') +@open_file(0, mode="rb") def read_graphml(path, node_type=str, edge_key_type=int): """Read graph in GraphML format from path. @@ -237,10 +256,10 @@ def read_graphml(path, node_type=str, edge_key_type=int): header = b'<graphml xmlns="http://graphml.graphdrawing.org/xmlns">' path.seek(0) old_bytes = path.read() - new_bytes = old_bytes.replace(b'<graphml>', header) + new_bytes = old_bytes.replace(b"<graphml>", header) glist = list(reader(string=new_bytes)) if len(glist) == 0: - raise nx.NetworkXError('file not successfully read as graphml') + raise nx.NetworkXError("file not successfully read as graphml") return glist[0] @@ -299,10 +318,10 @@ def parse_graphml(graphml_string, node_type=str): if len(glist) == 0: # If no graph comes back, try looking for an incomplete header header = '<graphml xmlns="http://graphml.graphdrawing.org/xmlns">' - new_string = graphml_string.replace('<graphml>', header) + new_string = graphml_string.replace("<graphml>", header) glist = list(reader(string=new_string)) if len(glist) == 0: - raise nx.NetworkXError('file not successfully read as graphml') + raise nx.NetworkXError("file not successfully read as graphml") return glist[0] @@ -311,15 +330,22 @@ class GraphML: NS_XSI = "http://www.w3.org/2001/XMLSchema-instance" # xmlns:y="http://www.yworks.com/xml/graphml" NS_Y = "http://www.yworks.com/xml/graphml" - SCHEMALOCATION = \ - ' '.join(['http://graphml.graphdrawing.org/xmlns', - 'http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd']) - - types = [(int, "integer"), # for Gephi GraphML bug - (str, "yfiles"), (str, "string"), - (int, "int"), - (float, "float"), (float, "double"), - (bool, "boolean")] + SCHEMALOCATION = " ".join( + [ + "http://graphml.graphdrawing.org/xmlns", + "http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd", + ] + ) + + types = [ + (int, "integer"), # for Gephi GraphML bug + (str, "yfiles"), + (str, "string"), + (int, "int"), + (float, "float"), + (float, "double"), + (bool, "boolean"), + ] # These additions to types allow writing numpy types try: @@ -328,15 +354,24 @@ class GraphML: pass else: # prepend so that python types are created upon read (last entry wins) - types = [(np.float64, "float"), (np.float32, "float"), - (np.float16, "float"), (np.float_, "float"), - (np.int_, "int"), (np.int8, "int"), - (np.int16, "int"), (np.int32, "int"), - (np.int64, "int"), (np.uint8, "int"), - (np.uint16, "int"), (np.uint32, "int"), - (np.uint64, "int"), (np.int_, "int"), - (np.intc, "int"), 
(np.intp, "int"), - ] + types + types = [ + (np.float64, "float"), + (np.float32, "float"), + (np.float16, "float"), + (np.float_, "float"), + (np.int_, "int"), + (np.int8, "int"), + (np.int16, "int"), + (np.int32, "int"), + (np.int64, "int"), + (np.uint8, "int"), + (np.uint16, "int"), + (np.uint32, "int"), + (np.uint64, "int"), + (np.int_, "int"), + (np.intc, "int"), + (np.intp, "int"), + ] + types xml_type = dict(types) python_type = dict(reversed(a) for a in types) @@ -347,26 +382,39 @@ class GraphML: # http://en.wikibooks.org/wiki/Java_Programming/Literals#Boolean_Literals convert_bool = { # We use data.lower() in actual use. - 'true': True, 'false': False, + "true": True, + "false": False, # Include integer strings for convenience. - '0': False, 0: False, - '1': True, 1: True + "0": False, + 0: False, + "1": True, + 1: True, } class GraphMLWriter(GraphML): - def __init__(self, graph=None, encoding="utf-8", prettyprint=True, - infer_numeric_types=False, named_key_ids=False): + def __init__( + self, + graph=None, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + ): self.myElement = Element self.infer_numeric_types = infer_numeric_types self.prettyprint = prettyprint self.named_key_ids = named_key_ids self.encoding = encoding - self.xml = self.myElement("graphml", - {'xmlns': self.NS_GRAPHML, - 'xmlns:xsi': self.NS_XSI, - 'xsi:schemaLocation': self.SCHEMALOCATION}) + self.xml = self.myElement( + "graphml", + { + "xmlns": self.NS_GRAPHML, + "xmlns:xsi": self.NS_XSI, + "xsi:schemaLocation": self.SCHEMALOCATION, + }, + ) self.keys = {} self.attributes = defaultdict(list) self.attribute_types = defaultdict(set) @@ -416,10 +464,12 @@ class GraphMLWriter(GraphML): new_id = f"d{len(list(self.keys))}" self.keys[keys_key] = new_id - key_kwargs = {"id": new_id, - "for": scope, - "attr.name": name, - "attr.type": attr_type} + key_kwargs = { + "id": new_id, + "for": scope, + "attr.name": name, + "attr.type": attr_type, + } key_element = self.myElement("key", **key_kwargs) # add subelement for data default value if present if default is not None: @@ -429,9 +479,7 @@ class GraphMLWriter(GraphML): self.xml.insert(0, key_element) return new_id - def add_data(self, name, element_type, value, - scope="all", - default=None): + def add_data(self, name, element_type, value, scope="all", default=None): """ Make a data element for an edge or a node. Keep a log of the type in the keys table. 
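The `types` table above is what keeps GraphML round-trips type-stable: `xml_type` chooses the `attr.type` written out, and `python_type` maps it back on read. A minimal sketch, assuming the usual `write_graphml` alias and an illustrative file name:

    import networkx as nx

    G = nx.Graph()
    G.add_node(0, size=1.5)      # float is written as attr.type="double"
    G.add_edge(0, 1, weight=7)   # int is written as attr.type="int"
    nx.write_graphml(G, "test.graphml")
    H = nx.read_graphml("test.graphml", node_type=int)
    assert H.nodes[0]["size"] == 1.5 and H[0][1]["weight"] == 7
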
@@ -453,7 +501,7 @@ class GraphMLWriter(GraphML): self.attributes[xml_obj].append([k, v, scope, default.get(k)]) def add_nodes(self, G, graph_element): - default = G.graph.get('node_default', {}) + default = G.graph.get("node_default", {}) for node, data in G.nodes(data=True): node_element = self.myElement("node", id=str(node)) self.add_attributes("node", node_element, data, default) @@ -462,17 +510,16 @@ class GraphMLWriter(GraphML): def add_edges(self, G, graph_element): if G.is_multigraph(): for u, v, key, data in G.edges(data=True, keys=True): - edge_element = self.myElement("edge", source=str(u), - target=str(v), - id=str(key)) - default = G.graph.get('edge_default', {}) + edge_element = self.myElement( + "edge", source=str(u), target=str(v), id=str(key) + ) + default = G.graph.get("edge_default", {}) self.add_attributes("edge", edge_element, data, default) graph_element.append(edge_element) else: for u, v, data in G.edges(data=True): - edge_element = self.myElement("edge", source=str(u), - target=str(v)) - default = G.graph.get('edge_default', {}) + edge_element = self.myElement("edge", source=str(u), target=str(v)) + default = G.graph.get("edge_default", {}) self.add_attributes("edge", edge_element, data, default) graph_element.append(edge_element) @@ -481,21 +528,23 @@ class GraphMLWriter(GraphML): Serialize graph G in GraphML to the stream. """ if G.is_directed(): - default_edge_type = 'directed' + default_edge_type = "directed" else: - default_edge_type = 'undirected' + default_edge_type = "undirected" - graphid = G.graph.pop('id', None) + graphid = G.graph.pop("id", None) if graphid is None: - graph_element = self.myElement("graph", - edgedefault=default_edge_type) + graph_element = self.myElement("graph", edgedefault=default_edge_type) else: - graph_element = self.myElement("graph", - edgedefault=default_edge_type, - id=graphid) + graph_element = self.myElement( + "graph", edgedefault=default_edge_type, id=graphid + ) default = {} - data = {k: v for (k, v) in G.graph.items() - if k not in ['node_default', 'edge_default']} + data = { + k: v + for (k, v) in G.graph.items() + if k not in ["node_default", "edge_default"] + } self.add_attributes("graph", graph_element, data, default) self.add_nodes(G, graph_element) self.add_edges(G, graph_element) @@ -506,9 +555,11 @@ class GraphMLWriter(GraphML): # See self.attr_type for (xml_obj, data) in self.attributes.items(): for (k, v, scope, default) in data: - xml_obj.append(self.add_data(str(k), - self.attr_type(k, scope, v), - str(v), scope, default)) + xml_obj.append( + self.add_data( + str(k), self.attr_type(k, scope, v), str(v), scope, default + ) + ) self.xml.append(graph_element) def add_graphs(self, graph_list): @@ -555,8 +606,15 @@ class IncrementalElement: class GraphMLWriterLxml(GraphMLWriter): - def __init__(self, path, graph=None, encoding='utf-8', prettyprint=True, - infer_numeric_types=False, named_key_ids=False): + def __init__( + self, + path, + graph=None, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + ): self.myElement = lxmletree.Element self._encoding = encoding @@ -575,12 +633,13 @@ class GraphMLWriterLxml(GraphMLWriter): self.xml = [] self._keys = self.xml self._graphml = self._xml.element( - 'graphml', + "graphml", { - 'xmlns': self.NS_GRAPHML, - 'xmlns:xsi': self.NS_XSI, - 'xsi:schemaLocation': self.SCHEMALOCATION - }) + "xmlns": self.NS_GRAPHML, + "xmlns:xsi": self.NS_XSI, + "xsi:schemaLocation": self.SCHEMALOCATION, + }, + ) self._graphml.__enter__() self.keys = {} 
self.attribute_types = defaultdict(set) @@ -593,26 +652,28 @@ class GraphMLWriterLxml(GraphMLWriter): Serialize graph G in GraphML to the stream. """ if G.is_directed(): - default_edge_type = 'directed' + default_edge_type = "directed" else: - default_edge_type = 'undirected' + default_edge_type = "undirected" - graphid = G.graph.pop('id', None) + graphid = G.graph.pop("id", None) if graphid is None: - graph_element = self._xml.element('graph', - edgedefault=default_edge_type) + graph_element = self._xml.element("graph", edgedefault=default_edge_type) else: - graph_element = self._xml.element('graph', - edgedefault=default_edge_type, - id=graphid) + graph_element = self._xml.element( + "graph", edgedefault=default_edge_type, id=graphid + ) # gather attributes types for the whole graph # to find the most general numeric format needed. # Then pass through attributes to create key_id for each. - graphdata = {k: v for k, v in G.graph.items() - if k not in ('node_default', 'edge_default')} - node_default = G.graph.get('node_default', {}) - edge_default = G.graph.get('edge_default', {}) + graphdata = { + k: v + for k, v in G.graph.items() + if k not in ("node_default", "edge_default") + } + node_default = G.graph.get("node_default", {}) + edge_default = G.graph.get("edge_default", {}) # Graph attributes for k, v in graphdata.items(): self.attribute_types[(str(k), "graph")].add(type(v)) @@ -652,16 +713,16 @@ class GraphMLWriterLxml(GraphMLWriter): # The incremental_writer writes each node/edge as it is created incremental_writer = IncrementalElement(self._xml, self._prettyprint) with graph_element: - self.add_attributes('graph', incremental_writer, graphdata, {}) + self.add_attributes("graph", incremental_writer, graphdata, {}) self.add_nodes(G, incremental_writer) # adds attributes too self.add_edges(G, incremental_writer) # adds attributes too def add_attributes(self, scope, xml_obj, data, default): """Appends attribute data.""" for k, v in data.items(): - data_element = self.add_data(str(k), - self.attr_type(str(k), scope, v), - str(v), scope, default.get(k)) + data_element = self.add_data( + str(k), self.attr_type(str(k), scope, v), str(v), scope, default.get(k) + ) xml_obj.append(data_element) def __str__(self): @@ -703,21 +764,21 @@ class GraphMLReader(GraphML): # set default graph type edgedefault = graph_xml.get("edgedefault", None) if G is None: - if edgedefault == 'directed': + if edgedefault == "directed": G = nx.MultiDiGraph() else: G = nx.MultiGraph() # set defaults for graph attributes - G.graph['node_default'] = {} - G.graph['edge_default'] = {} + G.graph["node_default"] = {} + G.graph["edge_default"] = {} for key_id, value in defaults.items(): - key_for = graphml_keys[key_id]['for'] - name = graphml_keys[key_id]['name'] - python_type = graphml_keys[key_id]['type'] - if key_for == 'node': - G.graph['node_default'].update({name: python_type(value)}) - if key_for == 'edge': - G.graph['edge_default'].update({name: python_type(value)}) + key_for = graphml_keys[key_id]["for"] + name = graphml_keys[key_id]["name"] + python_type = graphml_keys[key_id]["type"] + if key_for == "node": + G.graph["node_default"].update({name: python_type(value)}) + if key_for == "edge": + G.graph["edge_default"].update({name: python_type(value)}) # hyperedges are not supported hyperedge = graph_xml.find(f"{{{self.NS_GRAPHML}}}hyperedge") if hyperedge is not None: @@ -738,7 +799,7 @@ class GraphMLReader(GraphML): G = nx.DiGraph(G) else: G = nx.Graph(G) - nx.set_edge_attributes(G, values=self.edge_ids, 
name='id') + nx.set_edge_attributes(G, values=self.edge_ids, name="id") return G @@ -755,7 +816,7 @@ class GraphMLReader(GraphML): data = self.decode_data_elements(graphml_keys, node_xml) G.add_node(node_id, **data) # get child nodes - if node_xml.attrib.get('yfiles.foldertype') == 'group': + if node_xml.attrib.get("yfiles.foldertype") == "group": graph_xml = node_xml.find(f"{{{self.NS_GRAPHML}}}graph") self.make_graph(graph_xml, graphml_keys, defaults, G) @@ -769,10 +830,10 @@ class GraphMLReader(GraphML): # raise error if we find mixed directed and undirected edges directed = edge_element.get("directed") - if G.is_directed() and directed == 'false': + if G.is_directed() and directed == "false": msg = "directed=false edge found in directed graph." raise nx.NetworkXError(msg) - if (not G.is_directed()) and directed == 'true': + if (not G.is_directed()) and directed == "true": msg = "directed=true edge found in undirected graph." raise nx.NetworkXError(msg) @@ -791,7 +852,7 @@ class GraphMLReader(GraphML): except ValueError: # Could not convert. pass else: - edge_id = data.get('key') + edge_id = data.get("key") if G.has_edge(source, target): # mark this as a multigraph @@ -806,8 +867,8 @@ class GraphMLReader(GraphML): for data_element in obj_xml.findall(f"{{{self.NS_GRAPHML}}}data"): key = data_element.get("key") try: - data_name = graphml_keys[key]['name'] - data_type = graphml_keys[key]['type'] + data_name = graphml_keys[key]["name"] + data_type = graphml_keys[key]["type"] except KeyError as e: raise nx.NetworkXError(f"Bad GraphML data: no key {key}") from e text = data_element.text @@ -823,27 +884,32 @@ class GraphMLReader(GraphML): elif len(list(data_element)) > 0: # Assume yfiles as subelements, try to extract node_label node_label = None - for node_type in ['ShapeNode', 'SVGNode', 'ImageNode']: + for node_type in ["ShapeNode", "SVGNode", "ImageNode"]: pref = f"{{{self.NS_Y}}}{node_type}/{{{self.NS_Y}}}" geometry = data_element.find(f"{pref}Geometry") if geometry is not None: - data['x'] = geometry.get('x') - data['y'] = geometry.get('y') + data["x"] = geometry.get("x") + data["y"] = geometry.get("y") if node_label is None: node_label = data_element.find(f"{pref}NodeLabel") if node_label is not None: - data['label'] = node_label.text + data["label"] = node_label.text # check all the different types of edges avaivable in yEd. - for e in ['PolyLineEdge', 'SplineEdge', 'QuadCurveEdge', - 'BezierEdge', 'ArcEdge']: + for e in [ + "PolyLineEdge", + "SplineEdge", + "QuadCurveEdge", + "BezierEdge", + "ArcEdge", + ]: pref = f"{{{self.NS_Y}}}{e}/{{{self.NS_Y}}}" edge_label = data_element.find(f"{pref}EdgeLabel") if edge_label is not None: break if edge_label is not None: - data['label'] = edge_label.text + data["label"] = edge_label.text return data def find_graphml_keys(self, graph_element): @@ -853,20 +919,22 @@ class GraphMLReader(GraphML): graphml_key_defaults = {} for k in graph_element.findall(f"{{{self.NS_GRAPHML}}}key"): attr_id = k.get("id") - attr_type = k.get('attr.type') + attr_type = k.get("attr.type") attr_name = k.get("attr.name") yfiles_type = k.get("yfiles.type") if yfiles_type is not None: attr_name = yfiles_type - attr_type = 'yfiles' + attr_type = "yfiles" if attr_type is None: attr_type = "string" warnings.warn(f"No key type for id {attr_id}. 
Using string") if attr_name is None: raise nx.NetworkXError(f"Unknown key for id {attr_id}.") - graphml_keys[attr_id] = {"name": attr_name, - "type": self.python_type[attr_type], - "for": k.get("for")} + graphml_keys[attr_id] = { + "name": attr_name, + "type": self.python_type[attr_type], + "for": k.get("for"), + } # check for "default" subelement of key element default = k.find(f"{{{self.NS_GRAPHML}}}default") if default is not None: diff --git a/networkx/readwrite/json_graph/adjacency.py b/networkx/readwrite/json_graph/adjacency.py index f4a42d2a..ec6d21a4 100644 --- a/networkx/readwrite/json_graph/adjacency.py +++ b/networkx/readwrite/json_graph/adjacency.py @@ -1,9 +1,9 @@ from itertools import chain import networkx as nx -__all__ = ['adjacency_data', 'adjacency_graph'] +__all__ = ["adjacency_data", "adjacency_graph"] -_attrs = dict(id='id', key='key') +_attrs = dict(id="id", key="key") def adjacency_data(G, attrs=_attrs): @@ -57,19 +57,19 @@ def adjacency_data(G, attrs=_attrs): adjacency_graph, node_link_data, tree_data """ multigraph = G.is_multigraph() - id_ = attrs['id'] + id_ = attrs["id"] # Allow 'key' to be omitted from attrs if the graph is not a multigraph. - key = None if not multigraph else attrs['key'] + key = None if not multigraph else attrs["key"] if id_ == key: - raise nx.NetworkXError('Attribute names are not unique.') + raise nx.NetworkXError("Attribute names are not unique.") data = {} - data['directed'] = G.is_directed() - data['multigraph'] = multigraph - data['graph'] = list(G.graph.items()) - data['nodes'] = [] - data['adjacency'] = [] + data["directed"] = G.is_directed() + data["multigraph"] = multigraph + data["graph"] = list(G.graph.items()) + data["nodes"] = [] + data["adjacency"] = [] for n, nbrdict in G.adjacency(): - data['nodes'].append(dict(chain(G.nodes[n].items(), [(id_, n)]))) + data["nodes"].append(dict(chain(G.nodes[n].items(), [(id_, n)]))) adj = [] if multigraph: for nbr, keys in nbrdict.items(): @@ -78,7 +78,7 @@ def adjacency_data(G, attrs=_attrs): else: for nbr, d in nbrdict.items(): adj.append(dict(chain(d.items(), [(id_, nbr)]))) - data['adjacency'].append(adj) + data["adjacency"].append(adj) return data @@ -122,26 +122,26 @@ def adjacency_graph(data, directed=False, multigraph=True, attrs=_attrs): -------- adjacency_graph, node_link_data, tree_data """ - multigraph = data.get('multigraph', multigraph) - directed = data.get('directed', directed) + multigraph = data.get("multigraph", multigraph) + directed = data.get("directed", directed) if multigraph: graph = nx.MultiGraph() else: graph = nx.Graph() if directed: graph = graph.to_directed() - id_ = attrs['id'] + id_ = attrs["id"] # Allow 'key' to be omitted from attrs if the graph is not a multigraph. 
- key = None if not multigraph else attrs['key'] - graph.graph = dict(data.get('graph', [])) + key = None if not multigraph else attrs["key"] + graph.graph = dict(data.get("graph", [])) mapping = [] - for d in data['nodes']: + for d in data["nodes"]: node_data = d.copy() node = node_data.pop(id_) mapping.append(node) graph.add_node(node) graph.nodes[node].update(node_data) - for i, d in enumerate(data['adjacency']): + for i, d in enumerate(data["adjacency"]): source = mapping[i] for tdata in d: target_data = tdata.copy() diff --git a/networkx/readwrite/json_graph/cytoscape.py b/networkx/readwrite/json_graph/cytoscape.py index 202d6a03..1a6f0c06 100644 --- a/networkx/readwrite/json_graph/cytoscape.py +++ b/networkx/readwrite/json_graph/cytoscape.py @@ -1,8 +1,8 @@ import networkx as nx -__all__ = ['cytoscape_data', 'cytoscape_graph'] +__all__ = ["cytoscape_data", "cytoscape_graph"] -_attrs = dict(name='name', ident='id') +_attrs = dict(name="name", ident="id") def cytoscape_data(G, attrs=None): @@ -31,11 +31,11 @@ def cytoscape_data(G, attrs=None): ident = attrs["ident"] if len({name, ident}) < 2: - raise nx.NetworkXError('Attribute names are not unique.') + raise nx.NetworkXError("Attribute names are not unique.") jsondata = {"data": list(G.graph.items())} - jsondata['directed'] = G.is_directed() - jsondata['multigraph'] = G.is_multigraph() + jsondata["directed"] = G.is_directed() + jsondata["multigraph"] = G.is_multigraph() jsondata["elements"] = {"nodes": [], "edges": []} nodes = jsondata["elements"]["nodes"] edges = jsondata["elements"]["edges"] @@ -73,17 +73,17 @@ def cytoscape_graph(data, attrs=None): ident = attrs["ident"] if len({ident, name}) < 2: - raise nx.NetworkXError('Attribute names are not unique.') + raise nx.NetworkXError("Attribute names are not unique.") - multigraph = data.get('multigraph') - directed = data.get('directed') + multigraph = data.get("multigraph") + directed = data.get("directed") if multigraph: graph = nx.MultiGraph() else: graph = nx.Graph() if directed: graph = graph.to_directed() - graph.graph = dict(data.get('data')) + graph.graph = dict(data.get("data")) for d in data["elements"]["nodes"]: node_data = d["data"].copy() node = d["data"]["value"] diff --git a/networkx/readwrite/json_graph/jit.py b/networkx/readwrite/json_graph/jit.py index f404003c..ccef18b6 100644 --- a/networkx/readwrite/json_graph/jit.py +++ b/networkx/readwrite/json_graph/jit.py @@ -30,7 +30,7 @@ import json import networkx as nx from networkx.utils.decorators import not_implemented_for -__all__ = ['jit_graph', 'jit_data'] +__all__ = ["jit_graph", "jit_data"] def jit_graph(data, create_using=None): @@ -57,14 +57,14 @@ def jit_graph(data, create_using=None): data = json.loads(data) for node in data: - G.add_node(node['id'], **node['data']) - if node.get('adjacencies') is not None: - for adj in node['adjacencies']: - G.add_edge(node['id'], adj['nodeTo'], **adj['data']) + G.add_node(node["id"], **node["data"]) + if node.get("adjacencies") is not None: + for adj in node["adjacencies"]: + G.add_edge(node["id"], adj["nodeTo"], **adj["data"]) return G -@not_implemented_for('multigraph') +@not_implemented_for("multigraph") def jit_data(G, indent=None, default=None): """Returns data in JIT JSON format. 
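For orientation, the JIT format handled by the functions above is a JSON list of node objects, each with optional adjacencies; a round-trip sketch modeled on the module's own test:

    import json
    import networkx as nx
    from networkx.readwrite.json_graph import jit_data, jit_graph

    G = nx.Graph()
    G.add_edge("Node1", "Node2", weight=9)
    d = jit_data(G)               # a JSON string of [{"id": ..., "name": ..., "data": ..., "adjacencies": ...}]
    H = jit_graph(json.loads(d))
    assert nx.is_isomorphic(G, H)
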
@@ -88,10 +88,7 @@ def jit_data(G, indent=None, default=None): """ json_graph = [] for node in G.nodes(): - json_node = { - "id": node, - "name": node - } + json_node = {"id": node, "name": node} # node data json_node["data"] = G.nodes[node] # adjacencies diff --git a/networkx/readwrite/json_graph/node_link.py b/networkx/readwrite/json_graph/node_link.py index 721f0034..42c4d07f 100644 --- a/networkx/readwrite/json_graph/node_link.py +++ b/networkx/readwrite/json_graph/node_link.py @@ -1,11 +1,11 @@ from itertools import chain, count import networkx as nx from networkx.utils import to_tuple -__all__ = ['node_link_data', 'node_link_graph'] +__all__ = ["node_link_data", "node_link_graph"] -_attrs = dict(source='source', target='target', name='id', - key='key', link='links') + +_attrs = dict(source="source", target="target", name="id", key="key", link="links") def node_link_data(G, attrs=None): @@ -69,26 +69,30 @@ def node_link_data(G, attrs=None): attrs = _attrs else: attrs.update({k: v for (k, v) in _attrs.items() if k not in attrs}) - name = attrs['name'] - source = attrs['source'] - target = attrs['target'] - links = attrs['link'] + name = attrs["name"] + source = attrs["source"] + target = attrs["target"] + links = attrs["link"] # Allow 'key' to be omitted from attrs if the graph is not a multigraph. - key = None if not multigraph else attrs['key'] + key = None if not multigraph else attrs["key"] if len({source, target, key}) < 3: - raise nx.NetworkXError('Attribute names are not unique.') - data = {'directed': G.is_directed(), 'multigraph': multigraph, 'graph': G.graph, - 'nodes': [dict(chain(G.nodes[n].items(), [(name, n)])) for n in G]} + raise nx.NetworkXError("Attribute names are not unique.") + data = { + "directed": G.is_directed(), + "multigraph": multigraph, + "graph": G.graph, + "nodes": [dict(chain(G.nodes[n].items(), [(name, n)])) for n in G], + } if multigraph: data[links] = [ - dict(chain(d.items(), - [(source, u), (target, v), (key, k)])) - for u, v, k, d in G.edges(keys=True, data=True)] + dict(chain(d.items(), [(source, u), (target, v), (key, k)])) + for u, v, k, d in G.edges(keys=True, data=True) + ] else: data[links] = [ - dict(chain(d.items(), - [(source, u), (target, v)])) - for u, v, d in G.edges(data=True)] + dict(chain(d.items(), [(source, u), (target, v)])) + for u, v, d in G.edges(data=True) + ] return data @@ -139,23 +143,23 @@ def node_link_graph(data, directed=False, multigraph=True, attrs=None): attrs = _attrs else: attrs.update({k: v for k, v in _attrs.items() if k not in attrs}) - multigraph = data.get('multigraph', multigraph) - directed = data.get('directed', directed) + multigraph = data.get("multigraph", multigraph) + directed = data.get("directed", directed) if multigraph: graph = nx.MultiGraph() else: graph = nx.Graph() if directed: graph = graph.to_directed() - name = attrs['name'] - source = attrs['source'] - target = attrs['target'] - links = attrs['link'] + name = attrs["name"] + source = attrs["source"] + target = attrs["target"] + links = attrs["link"] # Allow 'key' to be omitted from attrs if the graph is not a multigraph. 
- key = None if not multigraph else attrs['key'] - graph.graph = data.get('graph', {}) + key = None if not multigraph else attrs["key"] + graph.graph = data.get("graph", {}) c = count() - for d in data['nodes']: + for d in data["nodes"]: node = to_tuple(d.get(name, next(c))) nodedata = {str(k): v for k, v in d.items() if k != name} graph.add_node(node, **nodedata) @@ -163,12 +167,14 @@ def node_link_graph(data, directed=False, multigraph=True, attrs=None): src = tuple(d[source]) if isinstance(d[source], list) else d[source] tgt = tuple(d[target]) if isinstance(d[target], list) else d[target] if not multigraph: - edgedata = {str(k): v for k, v in d.items() - if k != source and k != target} + edgedata = {str(k): v for k, v in d.items() if k != source and k != target} graph.add_edge(src, tgt, **edgedata) else: ky = d.get(key, None) - edgedata = {str(k): v for k, v in d.items() - if k != source and k != target and k != key} + edgedata = { + str(k): v + for k, v in d.items() + if k != source and k != target and k != key + } graph.add_edge(src, tgt, ky, **edgedata) return graph diff --git a/networkx/readwrite/json_graph/tests/test_adjacency.py b/networkx/readwrite/json_graph/tests/test_adjacency.py index 08bbb5fe..57a2a6b1 100644 --- a/networkx/readwrite/json_graph/tests/test_adjacency.py +++ b/networkx/readwrite/json_graph/tests/test_adjacency.py @@ -5,7 +5,6 @@ from networkx.readwrite.json_graph import adjacency_data, adjacency_graph class TestAdjacency: - def test_graph(self): G = nx.path_graph(4) H = adjacency_graph(adjacency_data(G)) @@ -13,22 +12,22 @@ class TestAdjacency: def test_graph_attributes(self): G = nx.path_graph(4) - G.add_node(1, color='red') + G.add_node(1, color="red") G.add_edge(1, 2, width=7) - G.graph['foo'] = 'bar' - G.graph[1] = 'one' + G.graph["foo"] = "bar" + G.graph[1] = "one" H = adjacency_graph(adjacency_data(G)) - assert H.graph['foo'] == 'bar' - assert H.nodes[1]['color'] == 'red' - assert H[1][2]['width'] == 7 + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 d = json.dumps(adjacency_data(G)) H = adjacency_graph(json.loads(d)) - assert H.graph['foo'] == 'bar' - assert H.graph[1] == 'one' - assert H.nodes[1]['color'] == 'red' - assert H[1][2]['width'] == 7 + assert H.graph["foo"] == "bar" + assert H.graph[1] == "one" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 def test_digraph(self): G = nx.DiGraph() @@ -46,14 +45,14 @@ class TestAdjacency: def test_multigraph(self): G = nx.MultiGraph() - G.add_edge(1, 2, key='first') - G.add_edge(1, 2, key='second', color='blue') + G.add_edge(1, 2, key="first") + G.add_edge(1, 2, key="second", color="blue") H = adjacency_graph(adjacency_data(G)) nx.is_isomorphic(G, H) - assert H[1][2]['second']['color'] == 'blue' + assert H[1][2]["second"]["color"] == "blue" def test_exception(self): with pytest.raises(nx.NetworkXError): G = nx.MultiDiGraph() - attrs = dict(id='node', key='node') + attrs = dict(id="node", key="node") adjacency_data(G, attrs) diff --git a/networkx/readwrite/json_graph/tests/test_cytoscape.py b/networkx/readwrite/json_graph/tests/test_cytoscape.py index 16c917d9..ee4799fb 100644 --- a/networkx/readwrite/json_graph/tests/test_cytoscape.py +++ b/networkx/readwrite/json_graph/tests/test_cytoscape.py @@ -5,7 +5,6 @@ from networkx.readwrite.json_graph import cytoscape_data, cytoscape_graph class TestCytoscape: - def test_graph(self): G = nx.path_graph(4) H = cytoscape_graph(cytoscape_data(G)) @@ -13,27 +12,27 @@ class TestCytoscape: def 
test_graph_attributes(self): G = nx.path_graph(4) - G.add_node(1, color='red') + G.add_node(1, color="red") G.add_edge(1, 2, width=7) - G.graph['foo'] = 'bar' - G.graph[1] = 'one' + G.graph["foo"] = "bar" + G.graph[1] = "one" G.add_node(3, name="node", id="123") H = cytoscape_graph(cytoscape_data(G)) - assert H.graph['foo'] == 'bar' - assert H.nodes[1]['color'] == 'red' - assert H[1][2]['width'] == 7 - assert H.nodes[3]['name'] == 'node' - assert H.nodes[3]['id'] == '123' + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + assert H.nodes[3]["name"] == "node" + assert H.nodes[3]["id"] == "123" d = json.dumps(cytoscape_data(G)) H = cytoscape_graph(json.loads(d)) - assert H.graph['foo'] == 'bar' - assert H.graph[1] == 'one' - assert H.nodes[1]['color'] == 'red' - assert H[1][2]['width'] == 7 - assert H.nodes[3]['name'] == 'node' - assert H.nodes[3]['id'] == '123' + assert H.graph["foo"] == "bar" + assert H.graph[1] == "one" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + assert H.nodes[3]["name"] == "node" + assert H.nodes[3]["id"] == "123" def test_digraph(self): G = nx.DiGraph() @@ -51,14 +50,14 @@ class TestCytoscape: def test_multigraph(self): G = nx.MultiGraph() - G.add_edge(1, 2, key='first') - G.add_edge(1, 2, key='second', color='blue') + G.add_edge(1, 2, key="first") + G.add_edge(1, 2, key="second", color="blue") H = cytoscape_graph(cytoscape_data(G)) assert nx.is_isomorphic(G, H) - assert H[1][2]['second']['color'] == 'blue' + assert H[1][2]["second"]["color"] == "blue" def test_exception(self): with pytest.raises(nx.NetworkXError): G = nx.MultiDiGraph() - attrs = dict(name='node', ident='node') + attrs = dict(name="node", ident="node") cytoscape_data(G, attrs) diff --git a/networkx/readwrite/json_graph/tests/test_jit.py b/networkx/readwrite/json_graph/tests/test_jit.py index a251242f..9a2ef682 100644 --- a/networkx/readwrite/json_graph/tests/test_jit.py +++ b/networkx/readwrite/json_graph/tests/test_jit.py @@ -7,12 +7,12 @@ from networkx.readwrite.json_graph import jit_data, jit_graph class TestJIT: def test_jit(self): G = nx.Graph() - G.add_node('Node1', node_data='foobar') - G.add_node('Node3', node_data='bar') - G.add_node('Node4') - G.add_edge('Node1', 'Node2', weight=9, something='isSomething') - G.add_edge('Node2', 'Node3', weight=4, something='isNotSomething') - G.add_edge('Node1', 'Node2') + G.add_node("Node1", node_data="foobar") + G.add_node("Node3", node_data="bar") + G.add_node("Node4") + G.add_edge("Node1", "Node2", weight=9, something="isSomething") + G.add_edge("Node2", "Node3", weight=4, something="isNotSomething") + G.add_edge("Node1", "Node2") d = jit_data(G) K = jit_graph(json.loads(d)) assert nx.is_isomorphic(G, K) diff --git a/networkx/readwrite/json_graph/tests/test_node_link.py b/networkx/readwrite/json_graph/tests/test_node_link.py index 75fe142e..e5773d26 100644 --- a/networkx/readwrite/json_graph/tests/test_node_link.py +++ b/networkx/readwrite/json_graph/tests/test_node_link.py @@ -5,7 +5,6 @@ from networkx.readwrite.json_graph import node_link_data, node_link_graph class TestNodeLink: - def test_graph(self): G = nx.path_graph(4) H = node_link_graph(node_link_data(G)) @@ -13,22 +12,22 @@ class TestNodeLink: def test_graph_attributes(self): G = nx.path_graph(4) - G.add_node(1, color='red') + G.add_node(1, color="red") G.add_edge(1, 2, width=7) - G.graph[1] = 'one' - G.graph['foo'] = 'bar' + G.graph[1] = "one" + G.graph["foo"] = "bar" H = node_link_graph(node_link_data(G)) - assert 
H.graph['foo'] == 'bar' - assert H.nodes[1]['color'] == 'red' - assert H[1][2]['width'] == 7 + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 d = json.dumps(node_link_data(G)) H = node_link_graph(json.loads(d)) - assert H.graph['foo'] == 'bar' - assert H.graph['1'] == 'one' - assert H.nodes[1]['color'] == 'red' - assert H[1][2]['width'] == 7 + assert H.graph["foo"] == "bar" + assert H.graph["1"] == "one" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 def test_digraph(self): G = nx.DiGraph() @@ -37,11 +36,11 @@ class TestNodeLink: def test_multigraph(self): G = nx.MultiGraph() - G.add_edge(1, 2, key='first') - G.add_edge(1, 2, key='second', color='blue') + G.add_edge(1, 2, key="first") + G.add_edge(1, 2, key="second", color="blue") H = node_link_graph(node_link_data(G)) nx.is_isomorphic(G, H) - assert H[1][2]['second']['color'] == 'blue' + assert H[1][2]["second"]["color"] == "blue" def test_graph_with_tuple_nodes(self): G = nx.Graph() @@ -51,7 +50,7 @@ class TestNodeLink: dd = json.loads(dumped_d) H = node_link_graph(dd) assert H.nodes[(0, 0)] == G.nodes[(0, 0)] - assert H[(0, 0)][(1, 0)]['color'] == [255, 255, 0] + assert H[(0, 0)][(1, 0)]["color"] == [255, 255, 0] def test_unicode_keys(self): q = "qualité" @@ -66,32 +65,40 @@ class TestNodeLink: def test_exception(self): with pytest.raises(nx.NetworkXError): G = nx.MultiDiGraph() - attrs = dict(name='node', source='node', target='node', key='node') + attrs = dict(name="node", source="node", target="node", key="node") node_link_data(G, attrs) def test_string_ids(self): q = "qualité" G = nx.DiGraph() - G.add_node('A') + G.add_node("A") G.add_node(q) - G.add_edge('A', q) + G.add_edge("A", q) data = node_link_data(G) - assert data['links'][0]['source'] == 'A' - assert data['links'][0]['target'] == q + assert data["links"][0]["source"] == "A" + assert data["links"][0]["target"] == q H = node_link_graph(data) assert nx.is_isomorphic(G, H) def test_custom_attrs(self): G = nx.path_graph(4) - G.add_node(1, color='red') + G.add_node(1, color="red") G.add_edge(1, 2, width=7) - G.graph[1] = 'one' - G.graph['foo'] = 'bar' + G.graph[1] = "one" + G.graph["foo"] = "bar" - attrs = dict(source='c_source', target='c_target', name='c_id', key='c_key', link='c_links') + attrs = dict( + source="c_source", + target="c_target", + name="c_id", + key="c_key", + link="c_links", + ) - H = node_link_graph(node_link_data(G, attrs=attrs), multigraph=False, attrs=attrs) + H = node_link_graph( + node_link_data(G, attrs=attrs), multigraph=False, attrs=attrs + ) assert nx.is_isomorphic(G, H) - assert H.graph['foo'] == 'bar' - assert H.nodes[1]['color'] == 'red' - assert H[1][2]['width'] == 7 + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 diff --git a/networkx/readwrite/json_graph/tests/test_tree.py b/networkx/readwrite/json_graph/tests/test_tree.py index 5decfaf3..8deda52b 100644 --- a/networkx/readwrite/json_graph/tests/test_tree.py +++ b/networkx/readwrite/json_graph/tests/test_tree.py @@ -5,10 +5,9 @@ from networkx.readwrite.json_graph import tree_data, tree_graph class TestTree: - def test_graph(self): G = nx.DiGraph() - G.add_nodes_from([1, 2, 3], color='red') + G.add_nodes_from([1, 2, 3], color="red") G.add_edge(1, 2, foo=7) G.add_edge(1, 3, foo=10) G.add_edge(3, 4, foo=10) @@ -17,20 +16,20 @@ class TestTree: def test_graph_attributes(self): G = nx.DiGraph() - G.add_nodes_from([1, 2, 3], color='red') + G.add_nodes_from([1, 2, 3], 
color="red") G.add_edge(1, 2, foo=7) G.add_edge(1, 3, foo=10) G.add_edge(3, 4, foo=10) H = tree_graph(tree_data(G, 1)) - assert H.nodes[1]['color'] == 'red' + assert H.nodes[1]["color"] == "red" d = json.dumps(tree_data(G, 1)) H = tree_graph(json.loads(d)) - assert H.nodes[1]['color'] == 'red' + assert H.nodes[1]["color"] == "red" def test_exception(self): with pytest.raises(nx.NetworkXError): G = nx.MultiDiGraph() G.add_node(0) - attrs = dict(id='node', children='node') + attrs = dict(id="node", children="node") tree_data(G, 0, attrs) diff --git a/networkx/readwrite/json_graph/tree.py b/networkx/readwrite/json_graph/tree.py index 70ca186f..30ef1b53 100644 --- a/networkx/readwrite/json_graph/tree.py +++ b/networkx/readwrite/json_graph/tree.py @@ -1,9 +1,9 @@ from itertools import chain import networkx as nx -__all__ = ['tree_data', 'tree_graph'] +__all__ = ["tree_data", "tree_graph"] -_attrs = dict(id='id', children='children') +_attrs = dict(id="id", children="children") def tree_data(G, root, attrs=_attrs): @@ -66,10 +66,10 @@ def tree_data(G, root, attrs=_attrs): if not G.is_directed(): raise TypeError("G is not directed.") - id_ = attrs['id'] - children = attrs['children'] + id_ = attrs["id"] + children = attrs["children"] if id_ == children: - raise nx.NetworkXError('Attribute names are not unique.') + raise nx.NetworkXError("Attribute names are not unique.") def add_children(n, G): nbrs = G[n] @@ -123,8 +123,8 @@ def tree_graph(data, attrs=_attrs): tree_graph, node_link_data, adjacency_data """ graph = nx.DiGraph() - id_ = attrs['id'] - children = attrs['children'] + id_ = attrs["id"] + children = attrs["children"] def add_children(parent, children_): for data in children_: @@ -133,14 +133,14 @@ def tree_graph(data, attrs=_attrs): grandchildren = data.get(children, []) if grandchildren: add_children(child, grandchildren) - nodedata = {str(k): v for k, v in data.items() - if k != id_ and k != children} + nodedata = { + str(k): v for k, v in data.items() if k != id_ and k != children + } graph.add_node(child, **nodedata) root = data[id_] children_ = data.get(children, []) - nodedata = {str(k): v for k, v in data.items() - if k != id_ and k != children} + nodedata = {str(k): v for k, v in data.items() if k != id_ and k != children} graph.add_node(root, **nodedata) add_children(root, children_) return graph diff --git a/networkx/readwrite/leda.py b/networkx/readwrite/leda.py index 9a261c82..a9b353c5 100644 --- a/networkx/readwrite/leda.py +++ b/networkx/readwrite/leda.py @@ -11,15 +11,15 @@ See http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fi # Original author: D. Eppstein, UC Irvine, August 12, 2003. # The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain. -__all__ = ['read_leda', 'parse_leda'] +__all__ = ["read_leda", "parse_leda"] import networkx as nx from networkx.exception import NetworkXError from networkx.utils import open_file -@open_file(0, mode='rb') -def read_leda(path, encoding='UTF-8'): +@open_file(0, mode="rb") +def read_leda(path, encoding="UTF-8"): """Read graph in LEDA format from path. Parameters @@ -66,9 +66,14 @@ def parse_leda(lines): .. 
[1] http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html """ if isinstance(lines, str): - lines = iter(lines.split('\n')) - lines = iter([line.rstrip('\n') for line in lines - if not (line.startswith('#') or line.startswith('\n') or line == '')]) + lines = iter(lines.split("\n")) + lines = iter( + [ + line.rstrip("\n") + for line in lines + if not (line.startswith("#") or line.startswith("\n") or line == "") + ] + ) for i in range(3): next(lines) # Graph @@ -82,7 +87,7 @@ def parse_leda(lines): n = int(next(lines)) # number of nodes node = {} for i in range(1, n + 1): # LEDA counts from 1 to n - symbol = next(lines).rstrip().strip('|{}| ') + symbol = next(lines).rstrip().strip("|{}| ") if symbol == "": symbol = str(i) # use int if no label - could be trouble node[i] = symbol @@ -95,7 +100,7 @@ def parse_leda(lines): try: s, t, reversal, label = next(lines).split() except BaseException as e: - raise NetworkXError(f'Too few fields in LEDA.GRAPH edge {i+1}') from e + raise NetworkXError(f"Too few fields in LEDA.GRAPH edge {i+1}") from e # BEWARE: no handling of reversal edges G.add_edge(node[int(s)], node[int(t)], label=label[2:-2]) return G diff --git a/networkx/readwrite/multiline_adjlist.py b/networkx/readwrite/multiline_adjlist.py index 702ec832..fb558c01 100644 --- a/networkx/readwrite/multiline_adjlist.py +++ b/networkx/readwrite/multiline_adjlist.py @@ -25,16 +25,18 @@ adjacency list (anything following the # in a line is a comment):: e """ -__all__ = ['generate_multiline_adjlist', - 'write_multiline_adjlist', - 'parse_multiline_adjlist', - 'read_multiline_adjlist'] +__all__ = [ + "generate_multiline_adjlist", + "write_multiline_adjlist", + "parse_multiline_adjlist", + "read_multiline_adjlist", +] from networkx.utils import open_file import networkx as nx -def generate_multiline_adjlist(G, delimiter=' '): +def generate_multiline_adjlist(G, delimiter=" "): """Generate a single line of the graph G in multiline adjacency list format. 
Parameters @@ -78,9 +80,11 @@ def generate_multiline_adjlist(G, delimiter=' '): if G.is_directed(): if G.is_multigraph(): for s, nbrs in G.adjacency(): - nbr_edges = [(u, data) - for u, datadict in nbrs.items() - for key, data in datadict.items()] + nbr_edges = [ + (u, data) + for u, datadict in nbrs.items() + for key, data in datadict.items() + ] deg = len(nbr_edges) yield str(s) + delimiter + str(deg) for u, d in nbr_edges: @@ -101,10 +105,12 @@ def generate_multiline_adjlist(G, delimiter=' '): if G.is_multigraph(): seen = set() # helper dict used to avoid duplicate edges for s, nbrs in G.adjacency(): - nbr_edges = [(u, data) - for u, datadict in nbrs.items() - if u not in seen - for key, data in datadict.items()] + nbr_edges = [ + (u, data) + for u, datadict in nbrs.items() + if u not in seen + for key, data in datadict.items() + ] deg = len(nbr_edges) yield str(s) + delimiter + str(deg) for u, d in nbr_edges: @@ -127,9 +133,8 @@ def generate_multiline_adjlist(G, delimiter=' '): seen.add(s) -@open_file(1, mode='wb') -def write_multiline_adjlist(G, path, delimiter=' ', - comments='#', encoding='utf-8'): +@open_file(1, mode="wb") +def write_multiline_adjlist(G, path, delimiter=" ", comments="#", encoding="utf-8"): """ Write the graph G in multiline adjacency list format to path Parameters @@ -168,19 +173,23 @@ def write_multiline_adjlist(G, path, delimiter=' ', import time pargs = comments + " ".join(sys.argv) - header = (f"{pargs}\n" - + comments + f" GMT {time.asctime(time.gmtime())}\n" - + comments + f" {G.name}\n") + header = ( + f"{pargs}\n" + + comments + + f" GMT {time.asctime(time.gmtime())}\n" + + comments + + f" {G.name}\n" + ) path.write(header.encode(encoding)) for multiline in generate_multiline_adjlist(G, delimiter): - multiline += '\n' + multiline += "\n" path.write(multiline.encode(encoding)) -def parse_multiline_adjlist(lines, comments='#', delimiter=None, - create_using=None, nodetype=None, - edgetype=None): +def parse_multiline_adjlist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None, edgetype=None +): """Parse lines of a multiline adjacency list representation of a graph. 
Parameters @@ -218,6 +227,7 @@ def parse_multiline_adjlist(lines, comments='#', delimiter=None, """ from ast import literal_eval + G = nx.empty_graph(0, create_using) for line in lines: p = line.find(comments) @@ -234,8 +244,9 @@ def parse_multiline_adjlist(lines, comments='#', delimiter=None, try: u = nodetype(u) except BaseException as e: - raise TypeError(f"Failed to convert node ({u}) to " - f"type {nodetype}") from e + raise TypeError( + f"Failed to convert node ({u}) to " f"type {nodetype}" + ) from e G.add_node(u) for i in range(deg): while True: @@ -254,19 +265,21 @@ def parse_multiline_adjlist(lines, comments='#', delimiter=None, if numb < 1: continue # isolated node v = vlist.pop(0) - data = ''.join(vlist) + data = "".join(vlist) if nodetype is not None: try: v = nodetype(v) except BaseException as e: - raise TypeError(f"Failed to convert node ({v}) " - f"to type {nodetype}") from e + raise TypeError( + f"Failed to convert node ({v}) " f"to type {nodetype}" + ) from e if edgetype is not None: try: - edgedata = {'weight': edgetype(data)} + edgedata = {"weight": edgetype(data)} except BaseException as e: - raise TypeError(f"Failed to convert edge data ({data}) " - f"to type {edgetype}") from e + raise TypeError( + f"Failed to convert edge data ({data}) " f"to type {edgetype}" + ) from e else: try: # try to evaluate edgedata = literal_eval(data) @@ -277,11 +290,16 @@ def parse_multiline_adjlist(lines, comments='#', delimiter=None, return G -@open_file(0, mode='rb') -def read_multiline_adjlist(path, comments="#", delimiter=None, - create_using=None, - nodetype=None, edgetype=None, - encoding='utf-8'): +@open_file(0, mode="rb") +def read_multiline_adjlist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + edgetype=None, + encoding="utf-8", +): """Read graph in multi-line adjacency list format from path. Parameters @@ -354,9 +372,11 @@ def read_multiline_adjlist(path, comments="#", delimiter=None, write_multiline_adjlist """ lines = (line.decode(encoding) for line in path) - return parse_multiline_adjlist(lines, - comments=comments, - delimiter=delimiter, - create_using=create_using, - nodetype=nodetype, - edgetype=edgetype) + return parse_multiline_adjlist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + edgetype=edgetype, + ) diff --git a/networkx/readwrite/nx_shp.py b/networkx/readwrite/nx_shp.py index 15a74c61..3ff68192 100644 --- a/networkx/readwrite/nx_shp.py +++ b/networkx/readwrite/nx_shp.py @@ -13,7 +13,7 @@ See https://en.wikipedia.org/wiki/Shapefile for additional information. """ import networkx as nx -__all__ = ['read_shp', 'write_shp'] +__all__ = ["read_shp", "write_shp"] def read_shp(path, simplify=True, geom_attrs=True, strict=True): @@ -102,17 +102,16 @@ def read_shp(path, simplify=True, geom_attrs=True, strict=True): # Note: Using layer level geometry type if g.GetGeometryType() == ogr.wkbPoint: net.add_node((g.GetPoint_2D(0)), **attributes) - elif g.GetGeometryType() in (ogr.wkbLineString, - ogr.wkbMultiLineString): - for edge in edges_from_line(g, attributes, simplify, - geom_attrs): + elif g.GetGeometryType() in (ogr.wkbLineString, ogr.wkbMultiLineString): + for edge in edges_from_line(g, attributes, simplify, geom_attrs): e1, e2, attr = edge net.add_edge(e1, e2) net[e1][e2].update(attr) else: if strict: - raise nx.NetworkXError("GeometryType {} not supported". 
- format(g.GetGeometryType())) + raise nx.NetworkXError( + "GeometryType {} not supported".format(g.GetGeometryType()) + ) return net @@ -148,8 +147,9 @@ def edges_from_line(geom, attrs, simplify=True, geom_attrs=True): try: from osgeo import ogr except ImportError as e: - raise ImportError("edges_from_line requires OGR: " - "http://www.gdal.org/") from e + raise ImportError( + "edges_from_line requires OGR: " "http://www.gdal.org/" + ) from e if geom.GetGeometryType() == ogr.wkbLineString: if simplify: @@ -215,11 +215,11 @@ def write_shp(G, outdir): ogr.UseExceptions() def netgeometry(key, data): - if 'Wkb' in data: - geom = ogr.CreateGeometryFromWkb(data['Wkb']) - elif 'Wkt' in data: - geom = ogr.CreateGeometryFromWkt(data['Wkt']) - elif type(key[0]).__name__ == 'tuple': # edge keys are packed tuples + if "Wkb" in data: + geom = ogr.CreateGeometryFromWkb(data["Wkb"]) + elif "Wkt" in data: + geom = ogr.CreateGeometryFromWkt(data["Wkt"]) + elif type(key[0]).__name__ == "tuple": # edge keys are packed tuples geom = ogr.Geometry(ogr.wkbLineString) _from, _to = key[0], key[1] try: @@ -284,8 +284,7 @@ def write_shp(G, outdir): attributes = {} # storage for attribute data (indexed by field names) for key, value in data.items(): # Reject spatial data not required for attribute table - if (key != 'Json' and key != 'Wkt' and key != 'Wkb' - and key != 'ShpName'): + if key != "Json" and key != "Wkt" and key != "Wkb" and key != "ShpName": # Check/add field and data type to fields dict if key not in fields: add_fields_to_layer(key, value, fields, layer) @@ -306,7 +305,7 @@ def write_shp(G, outdir): edges = shpdir.CreateLayer("edges", None, ogr.wkbLineString) # New edge attribute write support merged into edge loop - edge_fields = {} # storage for field names and their data types + edge_fields = {} # storage for field names and their data types for e in G.edges(data=True): data = G.get_edge_data(*e) diff --git a/networkx/readwrite/nx_yaml.py b/networkx/readwrite/nx_yaml.py index 05dafe96..9d6719f3 100644 --- a/networkx/readwrite/nx_yaml.py +++ b/networkx/readwrite/nx_yaml.py @@ -14,12 +14,12 @@ http://pyyaml.org/wiki/PyYAML """ -__all__ = ['read_yaml', 'write_yaml'] +__all__ = ["read_yaml", "write_yaml"] from networkx.utils import open_file -@open_file(1, mode='w') +@open_file(1, mode="w") def write_yaml(G_to_be_yaml, path_for_yaml_output, **kwds): """Write graph G in YAML format to path. @@ -55,7 +55,7 @@ def write_yaml(G_to_be_yaml, path_for_yaml_output, **kwds): yaml.dump(G_to_be_yaml, path_for_yaml_output, **kwds) -@open_file(0, mode='r') +@open_file(0, mode="r") def read_yaml(path): """Read graph in YAML format from path. diff --git a/networkx/readwrite/p2g.py b/networkx/readwrite/p2g.py index 524d9031..6e23812b 100644 --- a/networkx/readwrite/p2g.py +++ b/networkx/readwrite/p2g.py @@ -35,8 +35,8 @@ import networkx from networkx.utils import open_file -@open_file(1, mode='w') -def write_p2g(G, path, encoding='utf-8'): +@open_file(1, mode="w") +def write_p2g(G, path, encoding="utf-8"): """Write NetworkX graph in p2g format. Notes @@ -56,8 +56,8 @@ def write_p2g(G, path, encoding='utf-8'): path.write("\n".encode(encoding)) -@open_file(0, mode='r') -def read_p2g(path, encoding='utf-8'): +@open_file(0, mode="r") +def read_p2g(path, encoding="utf-8"): """Read graph in p2g format from path. 
Returns diff --git a/networkx/readwrite/pajek.py b/networkx/readwrite/pajek.py index 0cec872d..762ad642 100644 --- a/networkx/readwrite/pajek.py +++ b/networkx/readwrite/pajek.py @@ -19,7 +19,7 @@ import warnings import networkx as nx from networkx.utils import open_file -__all__ = ['read_pajek', 'parse_pajek', 'generate_pajek', 'write_pajek'] +__all__ = ["read_pajek", "parse_pajek", "generate_pajek", "write_pajek"] def generate_pajek(G): @@ -35,8 +35,8 @@ def generate_pajek(G): See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm for format information. """ - if G.name == '': - name = 'NetworkX' + if G.name == "": + name = "NetworkX" else: name = G.name # Apparently many Pajek format readers can't process this line @@ -52,44 +52,52 @@ def generate_pajek(G): # copy node attributes and pop mandatory attributes # to avoid duplication. na = G.nodes.get(n, {}).copy() - x = na.pop('x', 0.0) - y = na.pop('y', 0.0) + x = na.pop("x", 0.0) + y = na.pop("y", 0.0) try: - id = int(na.pop('id', nodenumber[n])) + id = int(na.pop("id", nodenumber[n])) except ValueError as e: - e.args += (("Pajek format requires 'id' to be an int()." - " Refer to the 'Relabeling nodes' section."),) + e.args += ( + ( + "Pajek format requires 'id' to be an int()." + " Refer to the 'Relabeling nodes' section." + ), + ) raise nodenumber[n] = id - shape = na.pop('shape', 'ellipse') - s = ' '.join(map(make_qstr, (id, n, x, y, shape))) + shape = na.pop("shape", "ellipse") + s = " ".join(map(make_qstr, (id, n, x, y, shape))) # only optional attributes are left in na. for k, v in na.items(): - if isinstance(v, str) and v.strip() != '': + if isinstance(v, str) and v.strip() != "": s += f" {make_qstr(k)} {make_qstr(v)}" else: - warnings.warn(f"Node attribute {k} is not processed. {('Empty attribute' if isinstance(v, str) else 'Non-string attribute')}.") + warnings.warn( + f"Node attribute {k} is not processed. {('Empty attribute' if isinstance(v, str) else 'Non-string attribute')}." + ) yield s # write edges with attributes if G.is_directed(): - yield '*arcs' + yield "*arcs" else: - yield '*edges' + yield "*edges" for u, v, edgedata in G.edges(data=True): d = edgedata.copy() - value = d.pop('weight', 1.0) # use 1 as default edge value - s = ' '.join(map(make_qstr, (nodenumber[u], nodenumber[v], value))) + value = d.pop("weight", 1.0) # use 1 as default edge value + s = " ".join(map(make_qstr, (nodenumber[u], nodenumber[v], value))) for k, v in d.items(): - if isinstance(v, str) and v.strip() != '': + if isinstance(v, str) and v.strip() != "": s += f" {make_qstr(k)} {make_qstr(v)}" else: - warnings.warn(f"Edge attribute {k} is not processed. {('Empty attribute' if isinstance(v, str) else 'Non-string attribute')}.") + warnings.warn( + f"Edge attribute {k} is not processed. {('Empty attribute' if isinstance(v, str) else 'Non-string attribute')}." + ) yield s -@open_file(1, mode='wb') -def write_pajek(G, path, encoding='UTF-8'): +@open_file(1, mode="wb") +def write_pajek(G, path, encoding="UTF-8"): """Write graph in Pajek format to path. Parameters @@ -117,12 +125,12 @@ def write_pajek(G, path, encoding='UTF-8'): for format information. """ for line in generate_pajek(G): - line += '\n' + line += "\n" path.write(line.encode(encoding)) -@open_file(0, mode='rb') -def read_pajek(path, encoding='UTF-8'): +@open_file(0, mode="rb") +def read_pajek(path, encoding="UTF-8"): """Read graph in Pajek format from path. 
Parameters @@ -172,10 +180,11 @@ def parse_pajek(lines): """ import shlex + # multigraph=False if isinstance(lines, str): - lines = iter(lines.split('\n')) - lines = iter([line.rstrip('\n') for line in lines]) + lines = iter(lines.split("\n")) + lines = iter([line.rstrip("\n") for line in lines]) G = nx.MultiDiGraph() # are multiedges allowed in Pajek? assume yes labels = [] # in the order of the file, needed for matrix while lines: @@ -190,27 +199,28 @@ def parse_pajek(lines): # Line was not of the form: *network NAME pass else: - G.graph['name'] = name + G.graph["name"] = name elif l.lower().startswith("*vertices"): nodelabels = {} l, nnodes = l.split() for i in range(int(nnodes)): l = next(lines) try: - splitline = [x.decode('utf-8') for x in - shlex.split(str(l).encode('utf-8'))] + splitline = [ + x.decode("utf-8") for x in shlex.split(str(l).encode("utf-8")) + ] except AttributeError: splitline = shlex.split(str(l)) id, label = splitline[0:2] labels.append(label) G.add_node(label) nodelabels[id] = label - G.nodes[label]['id'] = id + G.nodes[label]["id"] = id try: x, y, shape = splitline[2:5] - G.nodes[label].update({'x': float(x), - 'y': float(y), - 'shape': shape}) + G.nodes[label].update( + {"x": float(x), "y": float(y), "shape": shape} + ) except: pass extra_attr = zip(splitline[5::2], splitline[6::2]) @@ -224,8 +234,9 @@ def parse_pajek(lines): G = G.to_directed() for l in lines: try: - splitline = [x.decode('utf-8') for x in - shlex.split(str(l).encode('utf-8'))] + splitline = [ + x.decode("utf-8") for x in shlex.split(str(l).encode("utf-8")) + ] except AttributeError: splitline = shlex.split(str(l)) @@ -239,11 +250,11 @@ def parse_pajek(lines): try: # there should always be a single value on the edge? w = splitline[2:3] - edge_data.update({'weight': float(w[0])}) + edge_data.update({"weight": float(w[0])}) except: pass # if there isn't, just assign a 1 -# edge_data.update({'value':1}) + # edge_data.update({'value':1}) extra_attr = zip(splitline[3::2], splitline[4::2]) edge_data.update(extra_attr) # if G.has_edge(u,v): @@ -251,10 +262,12 @@ def parse_pajek(lines): G.add_edge(u, v, **edge_data) elif l.lower().startswith("*matrix"): G = nx.DiGraph(G) - adj_list = ((labels[row], labels[col], {'weight': int(data)}) - for (row, line) in enumerate(lines) - for (col, data) in enumerate(line.split()) - if int(data) != 0) + adj_list = ( + (labels[row], labels[col], {"weight": int(data)}) + for (row, line) in enumerate(lines) + for (col, data) in enumerate(line.split()) + if int(data) != 0 + ) G.add_edges_from(adj_list) return G diff --git a/networkx/readwrite/sparse6.py b/networkx/readwrite/sparse6.py index 963a75d0..215d4b5a 100644 --- a/networkx/readwrite/sparse6.py +++ b/networkx/readwrite/sparse6.py @@ -16,8 +16,7 @@ from networkx.exception import NetworkXError from networkx.utils import open_file, not_implemented_for from networkx.readwrite.graph6 import data_to_n, n_to_data -__all__ = ['from_sparse6_bytes', 'read_sparse6', 'to_sparse6_bytes', - 'write_sparse6'] +__all__ = ["from_sparse6_bytes", "read_sparse6", "to_sparse6_bytes", "write_sparse6"] def _generate_sparse6_bytes(G, nodes, header): @@ -43,11 +42,12 @@ def _generate_sparse6_bytes(G, nodes, header): """ n = len(G) if n >= 2 ** 36: - raise ValueError('sparse6 is only defined if number of nodes is less ' - 'than 2 ** 36') + raise ValueError( + "sparse6 is only defined if number of nodes is less " "than 2 ** 36" + ) if header: - yield b'>>sparse6<<' - yield b':' + yield b">>sparse6<<" + yield b":" for d in n_to_data(n): yield 
str.encode(chr(d + 63)) @@ -86,12 +86,19 @@ def _generate_sparse6_bytes(G, nodes, header): else: bits.extend([1] * ((-len(bits)) % 6)) - data = [(bits[i + 0] << 5) + (bits[i + 1] << 4) + (bits[i + 2] << 3) + (bits[i + 3] << 2) + - (bits[i + 4] << 1) + (bits[i + 5] << 0) for i in range(0, len(bits), 6)] + data = [ + (bits[i + 0] << 5) + + (bits[i + 1] << 4) + + (bits[i + 2] << 3) + + (bits[i + 3] << 2) + + (bits[i + 4] << 1) + + (bits[i + 5] << 0) + for i in range(0, len(bits), 6) + ] for d in data: yield str.encode(chr(d + 63)) - yield b'\n' + yield b"\n" def from_sparse6_bytes(string): @@ -127,10 +134,10 @@ def from_sparse6_bytes(string): <http://users.cecs.anu.edu.au/~bdm/data/formats.html> """ - if string.startswith(b'>>sparse6<<'): + if string.startswith(b">>sparse6<<"): string = string[11:] - if not string.startswith(b':'): - raise NetworkXError('Expected leading colon in sparse6') + if not string.startswith(b":"): + raise NetworkXError("Expected leading colon in sparse6") chars = [c - 63 for c in string[1:]] n, data = data_to_n(chars) @@ -155,7 +162,7 @@ def from_sparse6_bytes(string): b = (d >> dLen) & 1 # grab top remaining bit x = d & ((1 << dLen) - 1) # partially built up value of x - xLen = dLen # how many bits included so far in x + xLen = dLen # how many bits included so far in x while xLen < k: # now grab full chunks until we have enough try: d = next(chunks) @@ -164,7 +171,7 @@ def from_sparse6_bytes(string): dLen = 6 x = (x << 6) + d xLen += 6 - x = (x >> (xLen - k)) # shift back the extra bits + x = x >> (xLen - k) # shift back the extra bits dLen = xLen - k yield b, x @@ -237,11 +244,11 @@ def to_sparse6_bytes(G, nodes=None, header=True): """ if nodes is not None: G = G.subgraph(nodes) - G = nx.convert_node_labels_to_integers(G, ordering='sorted') - return b''.join(_generate_sparse6_bytes(G, nodes, header)) + G = nx.convert_node_labels_to_integers(G, ordering="sorted") + return b"".join(_generate_sparse6_bytes(G, nodes, header)) -@open_file(0, mode='rb') +@open_file(0, mode="rb") def read_sparse6(path): """Read an undirected graph in sparse6 format from path. @@ -304,8 +311,8 @@ def read_sparse6(path): return glist -@not_implemented_for('directed') -@open_file(1, mode='wb') +@not_implemented_for("directed") +@open_file(1, mode="wb") def write_sparse6(G, path, nodes=None, header=True): """Write graph G to given path in sparse6 format. 
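The sparse6 helpers touched above encode an undirected graph as printable ASCII bytes (only defined for fewer than 2 ** 36 nodes, per the guard in _generate_sparse6_bytes). A byte-level round-trip sketch, assuming this networkx version; the rstrip is there because _generate_sparse6_bytes emits a trailing b"\n", while from_sparse6_bytes expects the bare b">>sparse6<<:..." payload:

    import networkx as nx
    from networkx.readwrite.sparse6 import to_sparse6_bytes, from_sparse6_bytes

    G = nx.path_graph(4)
    data = to_sparse6_bytes(G)             # b'>>sparse6<<:...' plus a trailing newline
    H = from_sparse6_bytes(data.rstrip())  # strip the newline before decoding
    assert nx.is_isomorphic(G, H)
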
@@ -362,6 +369,6 @@ def write_sparse6(G, path, nodes=None, header=True): """ if nodes is not None: G = G.subgraph(nodes) - G = nx.convert_node_labels_to_integers(G, ordering='sorted') + G = nx.convert_node_labels_to_integers(G, ordering="sorted") for b in _generate_sparse6_bytes(G, nodes, header): path.write(b) diff --git a/networkx/readwrite/tests/test_edgelist.py b/networkx/readwrite/tests/test_edgelist.py index 371216b0..2e1a2424 100644 --- a/networkx/readwrite/tests/test_edgelist.py +++ b/networkx/readwrite/tests/test_edgelist.py @@ -7,18 +7,16 @@ import tempfile import os import networkx as nx -from networkx.testing import (assert_edges_equal, assert_nodes_equal, - assert_graphs_equal) +from networkx.testing import assert_edges_equal, assert_nodes_equal, assert_graphs_equal class TestEdgelist: - @classmethod def setup_class(cls): cls.G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] cls.G.add_edges_from(e) - cls.G.add_node('g') + cls.G.add_node("g") cls.DG = nx.DiGraph(cls.G) cls.XG = nx.MultiGraph() cls.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) @@ -48,8 +46,9 @@ class TestEdgelist: bytesIO = io.BytesIO(s) G = nx.read_weighted_edgelist(bytesIO, nodetype=int) - assert_edges_equal(G.edges(data=True), - [(1, 2, {'weight': 2.0}), (2, 3, {'weight': 3.0})]) + assert_edges_equal( + G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] + ) def test_read_edgelist_3(self): s = b"""\ @@ -64,8 +63,9 @@ class TestEdgelist: bytesIO = io.BytesIO(s) G = nx.read_edgelist(bytesIO, nodetype=int, data=True) - assert_edges_equal(G.edges(data=True), - [(1, 2, {'weight': 2.0}), (2, 3, {'weight': 3.0})]) + assert_edges_equal( + G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] + ) def test_read_edgelist_4(self): s = b"""\ @@ -80,8 +80,9 @@ class TestEdgelist: bytesIO = io.BytesIO(s) G = nx.read_edgelist(bytesIO, nodetype=int, data=True) - assert_edges_equal(G.edges(data=True), - [(1, 2, {'weight': 2.0}), (2, 3, {'weight': 3.0})]) + assert_edges_equal( + G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] + ) s = """\ # comment line @@ -95,8 +96,9 @@ class TestEdgelist: StringIO = io.StringIO(s) G = nx.read_edgelist(StringIO, nodetype=int, data=True) - assert_edges_equal(G.edges(data=True), - [(1, 2, {'weight': 2.0}), (2, 3, {'weight': 3.0})]) + assert_edges_equal( + G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] + ) def test_write_edgelist_1(self): fh = io.BytesIO() @@ -128,7 +130,7 @@ class TestEdgelist: G = nx.OrderedGraph() G.add_edge(1, 2, weight=2.0) G.add_edge(2, 3, weight=3.0) - nx.write_edgelist(G, fh, data=[('weight')]) + nx.write_edgelist(G, fh, data=[("weight")]) fh.seek(0) assert fh.read() == b"1 2 2.0\n2 3 3.0\n" @@ -136,7 +138,7 @@ class TestEdgelist: G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) - G.add_edge(name1, 'Radiohead', **{name2: 3}) + G.add_edge(name1, "Radiohead", **{name2: 3}) fd, fname = tempfile.mkstemp() nx.write_edgelist(G, fname) H = nx.read_edgelist(fname) @@ -148,22 +150,22 @@ class TestEdgelist: G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) - G.add_edge(name1, 'Radiohead', **{name2: 3}) + G.add_edge(name1, "Radiohead", **{name2: 3}) fd, fname = tempfile.mkstemp() - pytest.raises(UnicodeEncodeError, - nx.write_edgelist, - G, fname, 
encoding='latin-1') + pytest.raises( + UnicodeEncodeError, nx.write_edgelist, G, fname, encoding="latin-1" + ) os.close(fd) os.unlink(fname) def test_latin1(self): G = nx.Graph() - name1 = 'Bj' + chr(246) + 'rk' - name2 = chr(220) + 'ber' - G.add_edge(name1, 'Radiohead', **{name2: 3}) + name1 = "Bj" + chr(246) + "rk" + name2 = chr(220) + "ber" + G.add_edge(name1, "Radiohead", **{name2: 3}) fd, fname = tempfile.mkstemp() - nx.write_edgelist(G, fname, encoding='latin-1') - H = nx.read_edgelist(fname, encoding='latin-1') + nx.write_edgelist(G, fname, encoding="latin-1") + H = nx.read_edgelist(fname, encoding="latin-1") assert_graphs_equal(G, H) os.close(fd) os.unlink(fname) @@ -175,7 +177,7 @@ class TestEdgelist: H = nx.read_edgelist(fname) H2 = nx.read_edgelist(fname) assert H != H2 # they should be different graphs - G.remove_node('g') # isolated nodes are not written in edgelist + G.remove_node("g") # isolated nodes are not written in edgelist assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) @@ -188,7 +190,7 @@ class TestEdgelist: H = nx.read_edgelist(fname, create_using=nx.DiGraph()) H2 = nx.read_edgelist(fname, create_using=nx.DiGraph()) assert H != H2 # they should be different graphs - G.remove_node('g') # isolated nodes are not written in edgelist + G.remove_node("g") # isolated nodes are not written in edgelist assert_nodes_equal(list(H), list(G)) assert_edges_equal(list(H.edges()), list(G.edges())) os.close(fd) diff --git a/networkx/readwrite/tests/test_gexf.py b/networkx/readwrite/tests/test_gexf.py index 6885f851..4a266321 100644 --- a/networkx/readwrite/tests/test_gexf.py +++ b/networkx/readwrite/tests/test_gexf.py @@ -25,12 +25,11 @@ class TestGEXF: </gexf> """ cls.simple_directed_graph = nx.DiGraph() - cls.simple_directed_graph.add_node('0', label='Hello') - cls.simple_directed_graph.add_node('1', label='World') - cls.simple_directed_graph.add_edge('0', '1', id='0') + cls.simple_directed_graph.add_node("0", label="Hello") + cls.simple_directed_graph.add_node("1", label="World") + cls.simple_directed_graph.add_edge("0", "1", id="0") - cls.simple_directed_fh = \ - io.BytesIO(cls.simple_directed_data.encode('UTF-8')) + cls.simple_directed_fh = io.BytesIO(cls.simple_directed_data.encode("UTF-8")) cls.attribute_data = """<?xml version="1.0" encoding="UTF-8"?>\ <gexf xmlns="http://www.gexf.net/1.2draft" xmlns:xsi="http://www.w3.\ @@ -89,29 +88,29 @@ org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.gexf.net/\ </gexf> """ cls.attribute_graph = nx.DiGraph() - cls.attribute_graph.graph['node_default'] = {'frog': True} - cls.attribute_graph.add_node('0', - label='Gephi', - url='https://gephi.org', - indegree=1, frog=False) - cls.attribute_graph.add_node('1', - label='Webatlas', - url='http://webatlas.fr', - indegree=2, frog=False) - cls.attribute_graph.add_node('2', - label='RTGI', - url='http://rtgi.fr', - indegree=1, frog=True) - cls.attribute_graph.add_node('3', - label='BarabasiLab', - url='http://barabasilab.com', - indegree=1, frog=True) - cls.attribute_graph.add_edge('0', '1', id='0') - cls.attribute_graph.add_edge('0', '2', id='1') - cls.attribute_graph.add_edge('1', '0', id='2') - cls.attribute_graph.add_edge('2', '1', id='3') - cls.attribute_graph.add_edge('0', '3', id='4') - cls.attribute_fh = io.BytesIO(cls.attribute_data.encode('UTF-8')) + cls.attribute_graph.graph["node_default"] = {"frog": True} + cls.attribute_graph.add_node( + "0", label="Gephi", url="https://gephi.org", indegree=1, frog=False + ) + 
cls.attribute_graph.add_node( + "1", label="Webatlas", url="http://webatlas.fr", indegree=2, frog=False + ) + cls.attribute_graph.add_node( + "2", label="RTGI", url="http://rtgi.fr", indegree=1, frog=True + ) + cls.attribute_graph.add_node( + "3", + label="BarabasiLab", + url="http://barabasilab.com", + indegree=1, + frog=True, + ) + cls.attribute_graph.add_edge("0", "1", id="0") + cls.attribute_graph.add_edge("0", "2", id="1") + cls.attribute_graph.add_edge("1", "0", id="2") + cls.attribute_graph.add_edge("2", "1", id="3") + cls.attribute_graph.add_edge("0", "3", id="4") + cls.attribute_fh = io.BytesIO(cls.attribute_data.encode("UTF-8")) cls.simple_undirected_data = """<?xml version="1.0" encoding="UTF-8"?> <gexf xmlns="http://www.gexf.net/1.2draft" version="1.2"> @@ -127,12 +126,13 @@ org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.gexf.net/\ </gexf> """ cls.simple_undirected_graph = nx.Graph() - cls.simple_undirected_graph.add_node('0', label='Hello') - cls.simple_undirected_graph.add_node('1', label='World') - cls.simple_undirected_graph.add_edge('0', '1', id='0') + cls.simple_undirected_graph.add_node("0", label="Hello") + cls.simple_undirected_graph.add_node("1", label="World") + cls.simple_undirected_graph.add_edge("0", "1", id="0") - cls.simple_undirected_fh = io.BytesIO(cls.simple_undirected_data - .encode('UTF-8')) + cls.simple_undirected_fh = io.BytesIO( + cls.simple_undirected_data.encode("UTF-8") + ) def test_read_simple_directed_graphml(self): G = self.simple_directed_graph @@ -157,8 +157,9 @@ org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.gexf.net/\ G = self.simple_undirected_graph H = nx.read_gexf(self.simple_undirected_fh) assert sorted(G.nodes()) == sorted(H.nodes()) - assert (sorted(sorted(e) for e in G.edges()) - == sorted(sorted(e) for e in H.edges())) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) self.simple_undirected_fh.seek(0) def test_read_attribute_graphml(self): @@ -185,7 +186,7 @@ org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.gexf.net/\ </graph> </gexf> """ - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) pytest.raises(nx.NetworkXError, nx.read_gexf, fh) def test_undirected_edge_in_directed(self): @@ -202,7 +203,7 @@ org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.gexf.net/\ </graph> </gexf> """ - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) pytest.raises(nx.NetworkXError, nx.read_gexf, fh) def test_key_raises(self): @@ -223,7 +224,7 @@ org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.gexf.net/\ </graph> </gexf> """ - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) pytest.raises(nx.NetworkXError, nx.read_gexf, fh) def test_relabel(self): @@ -240,28 +241,29 @@ org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.gexf.net/\ </graph> </gexf> """ - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) G = nx.read_gexf(fh, relabel=True) assert sorted(G.nodes()) == ["Hello", "Word"] def test_default_attribute(self): G = nx.Graph() - G.add_node(1, label='1', color='green') + G.add_node(1, label="1", color="green") nx.add_path(G, [0, 1, 2, 3]) G.add_edge(1, 2, foo=3) - G.graph['node_default'] = {'color': 'yellow'} - G.graph['edge_default'] = {'foo': 7} + G.graph["node_default"] = {"color": "yellow"} + G.graph["edge_default"] = {"foo": 7} fh = io.BytesIO() nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) assert sorted(G.nodes()) == 
sorted(H.nodes()) - assert (sorted(sorted(e) for e in G.edges()) - == sorted(sorted(e) for e in H.edges())) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) # Reading a gexf graph always sets mode attribute to either # 'static' or 'dynamic'. Remove the mode attribute from the # read graph for the sake of comparing remaining attributes. - del H.graph['mode'] + del H.graph["mode"] assert G.graph == H.graph def test_serialize_ints_to_strings(self): @@ -272,30 +274,30 @@ org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.gexf.net/\ fh.seek(0) H = nx.read_gexf(fh, node_type=int) assert list(H) == [7] - assert H.nodes[7]['label'] == '77' - -# FIXME: We should test xml without caring about their order This is causing a -# problem b/c of a change in Python 3.8 -# -# "Prior to Python 3.8, the serialisation order of the XML attributes of -# elements was artificially made predictable by sorting the attributes by their -# name. Based on the now guaranteed ordering of dicts, this arbitrary -# reordering was removed in Python 3.8 to preserve the order in which -# attributes were originally parsed or created by user code." -# -# https://docs.python.org/3.8/library/xml.etree.elementtree.html -# https://bugs.python.org/issue34160 + assert H.nodes[7]["label"] == "77" + + # FIXME: We should test xml without caring about their order This is causing a + # problem b/c of a change in Python 3.8 + # + # "Prior to Python 3.8, the serialisation order of the XML attributes of + # elements was artificially made predictable by sorting the attributes by their + # name. Based on the now guaranteed ordering of dicts, this arbitrary + # reordering was removed in Python 3.8 to preserve the order in which + # attributes were originally parsed or created by user code." + # + # https://docs.python.org/3.8/library/xml.etree.elementtree.html + # https://bugs.python.org/issue34160 def test_write_with_node_attributes(self): # Addresses #673. 
G = nx.OrderedGraph() G.add_edges_from([(0, 1), (1, 2), (2, 3)]) for i in range(4): - G.nodes[i]['id'] = i - G.nodes[i]['label'] = i - G.nodes[i]['pid'] = i - G.nodes[i]['start'] = i - G.nodes[i]['end'] = i + 1 + G.nodes[i]["id"] = i + G.nodes[i]["label"] = i + G.nodes[i]["pid"] = i + G.nodes[i]["start"] = i + G.nodes[i]["end"] = i + 1 if sys.version_info < (3, 8): expected = f"""<gexf version="1.2" xmlns="http://www.gexf.net/1.2\ @@ -341,12 +343,12 @@ gexf.xsd" version="1.2"> </edges> </graph> </gexf>""" - obtained = '\n'.join(nx.generate_gexf(G)) + obtained = "\n".join(nx.generate_gexf(G)) assert expected == obtained def test_edge_id_construct(self): G = nx.Graph() - G.add_edges_from([(0, 1, {'id': 0}), (1, 2, {'id': 2}), (2, 3)]) + G.add_edges_from([(0, 1, {"id": 0}), (1, 2, {"id": 2}), (2, 3)]) if sys.version_info < (3, 8): expected = f"""<gexf version="1.2" xmlns="http://www.gexf.net/\ @@ -392,7 +394,7 @@ gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd" version="1.2"> </graph> </gexf>""" - obtained = '\n'.join(nx.generate_gexf(G)) + obtained = "\n".join(nx.generate_gexf(G)) assert expected == obtained def test_numpy_type(self): @@ -401,8 +403,8 @@ gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd" version="1.2"> import numpy except ImportError: return - nx.set_node_attributes(G, {n: n for n in numpy.arange(4)}, 'number') - G[0][1]['edge-number'] = numpy.float64(1.1) + nx.set_node_attributes(G, {n: n for n in numpy.arange(4)}, "number") + G[0][1]["edge-number"] = numpy.float64(1.1) if sys.version_info < (3, 8): expected = f"""<gexf version="1.2" xmlns="http://www.gexf.net/1.2draft"\ @@ -499,7 +501,7 @@ gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd" version="1.2"> </edges> </graph> </gexf>""" - obtained = '\n'.join(nx.generate_gexf(G)) + obtained = "\n".join(nx.generate_gexf(G)) assert expected == obtained def test_bool(self): @@ -509,16 +511,17 @@ gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd" version="1.2"> nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) - assert H.nodes[1]['testattr'] + assert H.nodes[1]["testattr"] # Test for NaN, INF and -INF def test_specials(self): from math import isnan - inf, nan = float('inf'), float('nan') + + inf, nan = float("inf"), float("nan") G = nx.Graph() - G.add_node(1, testattr=inf, strdata='inf', key='a') - G.add_node(2, testattr=nan, strdata='nan', key='b') - G.add_node(3, testattr=-inf, strdata='-inf', key='c') + G.add_node(1, testattr=inf, strdata="inf", key="a") + G.add_node(2, testattr=nan, strdata="nan", key="b") + G.add_node(3, testattr=-inf, strdata="-inf", key="c") fh = io.BytesIO() nx.write_gexf(G, fh) @@ -527,21 +530,21 @@ gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd" version="1.2"> fh.seek(0) H = nx.read_gexf(fh, node_type=int) - assert b'INF' in filetext - assert b'NaN' in filetext - assert b'-INF' in filetext + assert b"INF" in filetext + assert b"NaN" in filetext + assert b"-INF" in filetext - assert H.nodes[1]['testattr'] == inf - assert isnan(H.nodes[2]['testattr']) - assert H.nodes[3]['testattr'] == -inf + assert H.nodes[1]["testattr"] == inf + assert isnan(H.nodes[2]["testattr"]) + assert H.nodes[3]["testattr"] == -inf - assert H.nodes[1]['strdata'] == 'inf' - assert H.nodes[2]['strdata'] == 'nan' - assert H.nodes[3]['strdata'] == '-inf' + assert H.nodes[1]["strdata"] == "inf" + assert H.nodes[2]["strdata"] == "nan" + assert H.nodes[3]["strdata"] == "-inf" - assert H.nodes[1]['networkx_key'] == 'a' - assert H.nodes[2]['networkx_key'] == 'b' - assert 
H.nodes[3]['networkx_key'] == 'c' + assert H.nodes[1]["networkx_key"] == "a" + assert H.nodes[2]["networkx_key"] == "b" + assert H.nodes[3]["networkx_key"] == "c" def test_simple_list(self): G = nx.Graph() @@ -551,97 +554,104 @@ gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd" version="1.2"> nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) - assert H.nodes[1]['networkx_key'] == list_value + assert H.nodes[1]["networkx_key"] == list_value def test_dynamic_mode(self): G = nx.Graph() - G.add_node(1, label='1', color='green') - G.graph['mode'] = 'dynamic' + G.add_node(1, label="1", color="green") + G.graph["mode"] = "dynamic" fh = io.BytesIO() nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) assert sorted(G.nodes()) == sorted(H.nodes()) - assert (sorted(sorted(e) for e in G.edges()) - == sorted(sorted(e) for e in H.edges())) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) def test_multigraph_with_missing_attributes(self): G = nx.MultiGraph() - G.add_node(0, label='1', color='green') - G.add_node(1, label='2', color='green') - G.add_edge(0, 1, id='0', weight=3, type='undirected', start=0, end=1) + G.add_node(0, label="1", color="green") + G.add_node(1, label="2", color="green") + G.add_edge(0, 1, id="0", weight=3, type="undirected", start=0, end=1) G.add_edge(0, 1) fh = io.BytesIO() nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) assert sorted(G.nodes()) == sorted(H.nodes()) - assert (sorted(sorted(e) for e in G.edges()) - == sorted(sorted(e) for e in H.edges())) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) def test_missing_viz_attributes(self): G = nx.Graph() - G.add_node(0, label='1', color='green') - G.nodes[0]['viz'] = {'size': 54} - G.nodes[0]['viz']['position'] = {'x': 0, 'y': 1, 'z': 0} - G.nodes[0]['viz']['color'] = {'r': 0, 'g': 0, 'b': 256} - G.nodes[0]['viz']['shape'] = 'http://random.url' - G.nodes[0]['viz']['thickness'] = 2 + G.add_node(0, label="1", color="green") + G.nodes[0]["viz"] = {"size": 54} + G.nodes[0]["viz"]["position"] = {"x": 0, "y": 1, "z": 0} + G.nodes[0]["viz"]["color"] = {"r": 0, "g": 0, "b": 256} + G.nodes[0]["viz"]["shape"] = "http://random.url" + G.nodes[0]["viz"]["thickness"] = 2 fh = io.BytesIO() - nx.write_gexf(G, fh, version='1.1draft') + nx.write_gexf(G, fh, version="1.1draft") fh.seek(0) H = nx.read_gexf(fh, node_type=int) assert sorted(G.nodes()) == sorted(H.nodes()) - assert (sorted(sorted(e) for e in G.edges()) - == sorted(sorted(e) for e in H.edges())) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) # Second graph for the other branch G = nx.Graph() - G.add_node(0, label='1', color='green') - G.nodes[0]['viz'] = {'size': 54} - G.nodes[0]['viz']['position'] = {'x': 0, 'y': 1, 'z': 0} - G.nodes[0]['viz']['color'] = {'r': 0, 'g': 0, 'b': 256, 'a': 0.5} - G.nodes[0]['viz']['shape'] = 'ftp://random.url' - G.nodes[0]['viz']['thickness'] = 2 + G.add_node(0, label="1", color="green") + G.nodes[0]["viz"] = {"size": 54} + G.nodes[0]["viz"]["position"] = {"x": 0, "y": 1, "z": 0} + G.nodes[0]["viz"]["color"] = {"r": 0, "g": 0, "b": 256, "a": 0.5} + G.nodes[0]["viz"]["shape"] = "ftp://random.url" + G.nodes[0]["viz"]["thickness"] = 2 fh = io.BytesIO() nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) assert sorted(G.nodes()) == sorted(H.nodes()) - assert (sorted(sorted(e) for e in G.edges()) - == sorted(sorted(e) for e in H.edges())) + assert 
sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) def test_slice_and_spell(self): # Test spell first, so version = 1.2 G = nx.Graph() - G.add_node(0, label='1', color='green') - G.nodes[0]['spells'] = [(1, 2)] + G.add_node(0, label="1", color="green") + G.nodes[0]["spells"] = [(1, 2)] fh = io.BytesIO() nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) assert sorted(G.nodes()) == sorted(H.nodes()) - assert (sorted(sorted(e) for e in G.edges()) - == sorted(sorted(e) for e in H.edges())) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) G = nx.Graph() - G.add_node(0, label='1', color='green') - G.nodes[0]['slices'] = [(1, 2)] + G.add_node(0, label="1", color="green") + G.nodes[0]["slices"] = [(1, 2)] fh = io.BytesIO() - nx.write_gexf(G, fh, version='1.1draft') + nx.write_gexf(G, fh, version="1.1draft") fh.seek(0) H = nx.read_gexf(fh, node_type=int) assert sorted(G.nodes()) == sorted(H.nodes()) - assert (sorted(sorted(e) for e in G.edges()) - == sorted(sorted(e) for e in H.edges())) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) def test_add_parent(self): G = nx.Graph() - G.add_node(0, label='1', color='green', parents=[1, 2]) + G.add_node(0, label="1", color="green", parents=[1, 2]) fh = io.BytesIO() nx.write_gexf(G, fh) fh.seek(0) H = nx.read_gexf(fh, node_type=int) assert sorted(G.nodes()) == sorted(H.nodes()) - assert (sorted(sorted(e) for e in G.edges()) - == sorted(sorted(e) for e in H.edges())) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) diff --git a/networkx/readwrite/tests/test_gml.py b/networkx/readwrite/tests/test_gml.py index dffcf148..9ef3dd36 100644 --- a/networkx/readwrite/tests/test_gml.py +++ b/networkx/readwrite/tests/test_gml.py @@ -150,7 +150,9 @@ graph [ ] assert [e for e in sorted(G.edges(data=True))] == [ - ("Node 1", "Node 2", + ( + "Node 1", + "Node 2", { "color": {"line": "blue", "thickness": 3}, "label": "Edge from node 1 to node 2", @@ -490,8 +492,8 @@ graph # Test export for numbers that barely fit or don't fit into 32 bits, # and 3 numbers in the middle numbers = { - "toosmall": (-2 ** 31) - 1, - "small": -2 ** 31, + "toosmall": (-(2 ** 31)) - 1, + "small": -(2 ** 31), "med1": -4, "med2": 0, "med3": 17, @@ -530,7 +532,8 @@ class TestPropertyLists: nx.write_gml(g, f) result = f.read().decode() - assert result == dedent("""\ + assert result == dedent( + """\ graph [ node [ id 0 @@ -543,7 +546,8 @@ class TestPropertyLists: properties 0 ] ] - """) + """ + ) def test_writing_graph_with_one_element_property_list(self): g = nx.Graph() @@ -552,7 +556,8 @@ class TestPropertyLists: nx.write_gml(g, f) result = f.read().decode() - assert result == dedent("""\ + assert result == dedent( + """\ graph [ node [ id 0 @@ -561,11 +566,14 @@ class TestPropertyLists: properties "element" ] ] - """) + """ + ) def test_reading_graph_with_list_property(self): with byte_file() as f: - f.write(dedent(""" + f.write( + dedent( + """ graph [ node [ id 0 @@ -576,21 +584,18 @@ class TestPropertyLists: properties 2.5 ] ] - """).encode("ascii")) + """ + ).encode("ascii") + ) f.seek(0) graph = nx.read_gml(f) - assert graph.nodes(data=True)["n1"] == { - 'properties': [ - 'element', - 0, - 1, - 2.5, - ] - } + assert graph.nodes(data=True)["n1"] == {"properties": ["element", 0, 1, 2.5,]} def test_reading_graph_with_single_element_list_property(self): with byte_file() as f: - f.write(dedent(""" + f.write( + dedent( + 
""" graph [ node [ id 0 @@ -599,11 +604,9 @@ class TestPropertyLists: properties "element" ] ] - """).encode("ascii")) + """ + ).encode("ascii") + ) f.seek(0) graph = nx.read_gml(f) - assert graph.nodes(data=True)["n1"] == { - 'properties': [ - 'element', - ] - } + assert graph.nodes(data=True)["n1"] == {"properties": ["element",]} diff --git a/networkx/readwrite/tests/test_gpickle.py b/networkx/readwrite/tests/test_gpickle.py index cf7e6622..e92991db 100644 --- a/networkx/readwrite/tests/test_gpickle.py +++ b/networkx/readwrite/tests/test_gpickle.py @@ -5,7 +5,7 @@ import networkx as nx from networkx.testing.utils import ( assert_graphs_equal, assert_edges_equal, - assert_nodes_equal + assert_nodes_equal, ) @@ -13,15 +13,15 @@ class TestGpickle: @classmethod def setup_class(cls): G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] G.add_edges_from(e, width=10) - G.add_node('g', color='green') - G.graph['number'] = 1 + G.add_node("g", color="green") + G.graph["number"] = 1 DG = nx.DiGraph(G) MG = nx.MultiGraph(G) - MG.add_edge('a', 'a') + MG.add_edge("a", "a") MDG = nx.MultiDiGraph(G) - MDG.add_edge('a', 'a') + MDG.add_edge("a", "a") fG = G.copy() fDG = DG.copy() fMG = MG.copy() @@ -40,28 +40,40 @@ class TestGpickle: cls.fMDG = fMDG def test_gpickle(self): - for G in [self.G, self.DG, self.MG, self.MDG, - self.fG, self.fDG, self.fMG, self.fMDG]: + for G in [ + self.G, + self.DG, + self.MG, + self.MDG, + self.fG, + self.fDG, + self.fMG, + self.fMDG, + ]: (fd, fname) = tempfile.mkstemp() nx.write_gpickle(G, fname) Gin = nx.read_gpickle(fname) - assert_nodes_equal(list(G.nodes(data=True)), - list(Gin.nodes(data=True))) - assert_edges_equal(list(G.edges(data=True)), - list(Gin.edges(data=True))) + assert_nodes_equal(list(G.nodes(data=True)), list(Gin.nodes(data=True))) + assert_edges_equal(list(G.edges(data=True)), list(Gin.edges(data=True))) assert_graphs_equal(G, Gin) os.close(fd) os.unlink(fname) def test_protocol(self): - for G in [self.G, self.DG, self.MG, self.MDG, - self.fG, self.fDG, self.fMG, self.fMDG]: + for G in [ + self.G, + self.DG, + self.MG, + self.MDG, + self.fG, + self.fDG, + self.fMG, + self.fMDG, + ]: with tempfile.TemporaryFile() as f: nx.write_gpickle(G, f, 0) f.seek(0) Gin = nx.read_gpickle(f) - assert_nodes_equal(list(G.nodes(data=True)), - list(Gin.nodes(data=True))) - assert_edges_equal(list(G.edges(data=True)), - list(Gin.edges(data=True))) + assert_nodes_equal(list(G.nodes(data=True)), list(Gin.nodes(data=True))) + assert_edges_equal(list(G.edges(data=True)), list(Gin.edges(data=True))) assert_graphs_equal(G, Gin) diff --git a/networkx/readwrite/tests/test_graph6.py b/networkx/readwrite/tests/test_graph6.py index 729b01b7..74d7cea1 100644 --- a/networkx/readwrite/tests/test_graph6.py +++ b/networkx/readwrite/tests/test_graph6.py @@ -9,26 +9,24 @@ from networkx.testing.utils import assert_nodes_equal class TestGraph6Utils: - def test_n_data_n_conversion(self): for i in [0, 1, 42, 62, 63, 64, 258047, 258048, 7744773, 68719476735]: assert g6.data_to_n(g6.n_to_data(i))[0] == i assert g6.data_to_n(g6.n_to_data(i))[1] == [] - assert (g6.data_to_n(g6.n_to_data(i) + [42, 43])[1] == - [42, 43]) + assert g6.data_to_n(g6.n_to_data(i) + [42, 43])[1] == [42, 43] class TestFromGraph6Bytes: - def test_from_graph6_bytes(self): - data = b'DF{' + data = b"DF{" G = nx.from_graph6_bytes(data) assert_nodes_equal(G.nodes(), [0, 1, 2, 3, 4]) - 
assert_edges_equal(G.edges(), - [(0, 3), (0, 4), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]) + assert_edges_equal( + G.edges(), [(0, 3), (0, 4), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + ) def test_read_equals_from_bytes(self): - data = b'DF{' + data = b"DF{" G = nx.from_graph6_bytes(data) fh = BytesIO(data) Gin = nx.read_graph6(fh) @@ -37,10 +35,9 @@ class TestFromGraph6Bytes: class TestReadGraph6: - def test_read_many_graph6(self): """Test for reading many graphs from a file into a list.""" - data = b'DF{\nD`{\nDqK\nD~{\n' + data = b"DF{\nD`{\nDqK\nD~{\n" fh = BytesIO(data) glist = nx.read_graph6(fh) assert len(glist) == 4 @@ -54,34 +51,34 @@ class TestWriteGraph6: def test_null_graph(self): result = BytesIO() nx.write_graph6(nx.null_graph(), result) - assert result.getvalue() == b'>>graph6<<?\n' + assert result.getvalue() == b">>graph6<<?\n" def test_trivial_graph(self): result = BytesIO() nx.write_graph6(nx.trivial_graph(), result) - assert result.getvalue() == b'>>graph6<<@\n' + assert result.getvalue() == b">>graph6<<@\n" def test_complete_graph(self): result = BytesIO() nx.write_graph6(nx.complete_graph(4), result) - assert result.getvalue() == b'>>graph6<<C~\n' + assert result.getvalue() == b">>graph6<<C~\n" def test_large_complete_graph(self): result = BytesIO() nx.write_graph6(nx.complete_graph(67), result, header=False) - assert result.getvalue() == b'~?@B' + b'~' * 368 + b'w\n' + assert result.getvalue() == b"~?@B" + b"~" * 368 + b"w\n" def test_no_header(self): result = BytesIO() nx.write_graph6(nx.complete_graph(4), result, header=False) - assert result.getvalue() == b'C~\n' + assert result.getvalue() == b"C~\n" def test_complete_bipartite_graph(self): result = BytesIO() G = nx.complete_bipartite_graph(6, 9) nx.write_graph6(G, result, header=False) # The expected encoding here was verified by Sage. - assert result.getvalue() == b'N??F~z{~Fw^_~?~?^_?\n' + assert result.getvalue() == b"N??F~z{~Fw^_~?~?^_?\n" def test_no_directed_graphs(self): with pytest.raises(nx.NetworkXNotImplemented): @@ -94,8 +91,7 @@ class TestWriteGraph6: nx.write_graph6(g, gstr, header=False) # Strip the trailing newline. 
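        # The expected length asserted just below follows from the graph6
        # format: the i*(i-1)/2 upper-triangle adjacency bits are packed six
        # bits per printable byte, after a size field of one byte for i < 63
        # and four bytes otherwise.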
gstr = gstr.getvalue().rstrip() - assert (len(gstr) == - ((i - 1) * i // 2 + 5) // 6 + (1 if i < 63 else 4)) + assert len(gstr) == ((i - 1) * i // 2 + 5) // 6 + (1 if i < 63 else 4) def test_roundtrip(self): for i in list(range(13)) + [31, 47, 62, 63, 64, 72]: @@ -111,12 +107,12 @@ class TestWriteGraph6: with tempfile.NamedTemporaryFile() as f: g6.write_graph6_file(nx.null_graph(), f) f.seek(0) - assert f.read() == b'>>graph6<<?\n' + assert f.read() == b">>graph6<<?\n" def test_relabeling(self): G = nx.Graph([(0, 1)]) - assert g6.to_graph6_bytes(G) == b'>>graph6<<A_\n' + assert g6.to_graph6_bytes(G) == b">>graph6<<A_\n" G = nx.Graph([(1, 2)]) - assert g6.to_graph6_bytes(G) == b'>>graph6<<A_\n' + assert g6.to_graph6_bytes(G) == b">>graph6<<A_\n" G = nx.Graph([(1, 42)]) - assert g6.to_graph6_bytes(G) == b'>>graph6<<A_\n' + assert g6.to_graph6_bytes(G) == b">>graph6<<A_\n" diff --git a/networkx/readwrite/tests/test_graphml.py b/networkx/readwrite/tests/test_graphml.py index 79507c0b..f59f2bd5 100644 --- a/networkx/readwrite/tests/test_graphml.py +++ b/networkx/readwrite/tests/test_graphml.py @@ -42,21 +42,23 @@ class BaseGraphML: </graph> </graphml>""" cls.simple_directed_graph = nx.DiGraph() - cls.simple_directed_graph.add_node('n10') - cls.simple_directed_graph.add_edge('n0', 'n2', id='foo') - cls.simple_directed_graph.add_edges_from([('n1', 'n2'), - ('n2', 'n3'), - ('n3', 'n5'), - ('n3', 'n4'), - ('n4', 'n6'), - ('n6', 'n5'), - ('n5', 'n7'), - ('n6', 'n8'), - ('n8', 'n7'), - ('n8', 'n9'), - ]) - cls.simple_directed_fh = \ - io.BytesIO(cls.simple_directed_data.encode('UTF-8')) + cls.simple_directed_graph.add_node("n10") + cls.simple_directed_graph.add_edge("n0", "n2", id="foo") + cls.simple_directed_graph.add_edges_from( + [ + ("n1", "n2"), + ("n2", "n3"), + ("n3", "n5"), + ("n3", "n4"), + ("n4", "n6"), + ("n6", "n5"), + ("n5", "n7"), + ("n6", "n8"), + ("n8", "n7"), + ("n8", "n9"), + ] + ) + cls.simple_directed_fh = io.BytesIO(cls.simple_directed_data.encode("UTF-8")) cls.attribute_data = """<?xml version="1.0" encoding="UTF-8"?> <graphml xmlns="http://graphml.graphdrawing.org/xmlns" @@ -100,21 +102,21 @@ class BaseGraphML: </graph> </graphml> """ - cls.attribute_graph = nx.DiGraph(id='G') - cls.attribute_graph.graph['node_default'] = {'color': 'yellow'} - cls.attribute_graph.add_node('n0', color='green') - cls.attribute_graph.add_node('n2', color='blue') - cls.attribute_graph.add_node('n3', color='red') - cls.attribute_graph.add_node('n4') - cls.attribute_graph.add_node('n5', color='turquoise') - cls.attribute_graph.add_edge('n0', 'n2', id='e0', weight=1.0) - cls.attribute_graph.add_edge('n0', 'n1', id='e1', weight=1.0) - cls.attribute_graph.add_edge('n1', 'n3', id='e2', weight=2.0) - cls.attribute_graph.add_edge('n3', 'n2', id='e3') - cls.attribute_graph.add_edge('n2', 'n4', id='e4') - cls.attribute_graph.add_edge('n3', 'n5', id='e5') - cls.attribute_graph.add_edge('n5', 'n4', id='e6', weight=1.1) - cls.attribute_fh = io.BytesIO(cls.attribute_data.encode('UTF-8')) + cls.attribute_graph = nx.DiGraph(id="G") + cls.attribute_graph.graph["node_default"] = {"color": "yellow"} + cls.attribute_graph.add_node("n0", color="green") + cls.attribute_graph.add_node("n2", color="blue") + cls.attribute_graph.add_node("n3", color="red") + cls.attribute_graph.add_node("n4") + cls.attribute_graph.add_node("n5", color="turquoise") + cls.attribute_graph.add_edge("n0", "n2", id="e0", weight=1.0) + cls.attribute_graph.add_edge("n0", "n1", id="e1", weight=1.0) + cls.attribute_graph.add_edge("n1", "n3", id="e2", 
weight=2.0) + cls.attribute_graph.add_edge("n3", "n2", id="e3") + cls.attribute_graph.add_edge("n2", "n4", id="e4") + cls.attribute_graph.add_edge("n3", "n5", id="e5") + cls.attribute_graph.add_edge("n5", "n4", id="e6", weight=1.1) + cls.attribute_fh = io.BytesIO(cls.attribute_data.encode("UTF-8")) cls.attribute_named_key_ids_data = """<?xml version='1.0' encoding='utf-8'?> <graphml xmlns="http://graphml.graphdrawing.org/xmlns" @@ -143,7 +145,7 @@ class BaseGraphML: cls.attribute_named_key_ids_graph.add_node("0", prop1="val1", prop2="val2") cls.attribute_named_key_ids_graph.add_node("1", prop1="val_one", prop2="val2") cls.attribute_named_key_ids_graph.add_edge("0", "1", edge_prop="edge_value") - fh = io.BytesIO(cls.attribute_named_key_ids_data.encode('UTF-8')) + fh = io.BytesIO(cls.attribute_named_key_ids_data.encode("UTF-8")) cls.attribute_named_key_ids_fh = fh cls.attribute_numeric_type_data = """<?xml version='1.0' encoding='utf-8'?> @@ -173,11 +175,11 @@ class BaseGraphML: </graphml> """ cls.attribute_numeric_type_graph = nx.DiGraph() - cls.attribute_numeric_type_graph.add_node('n0', weight=1) - cls.attribute_numeric_type_graph.add_node('n1', weight=2.0) - cls.attribute_numeric_type_graph.add_edge('n0', 'n1', weight=1) - cls.attribute_numeric_type_graph.add_edge('n1', 'n1', weight=1.0) - fh = io.BytesIO(cls.attribute_numeric_type_data.encode('UTF-8')) + cls.attribute_numeric_type_graph.add_node("n0", weight=1) + cls.attribute_numeric_type_graph.add_node("n1", weight=2.0) + cls.attribute_numeric_type_graph.add_edge("n0", "n1", weight=1) + cls.attribute_numeric_type_graph.add_edge("n1", "n1", weight=1.0) + fh = io.BytesIO(cls.attribute_numeric_type_data.encode("UTF-8")) cls.attribute_numeric_type_fh = fh cls.simple_undirected_data = """<?xml version="1.0" encoding="UTF-8"?> @@ -195,14 +197,14 @@ class BaseGraphML: <edge source="n2" target="n3"/> </graph> </graphml>""" -# <edge source="n8" target="n10" directed="false"/> + # <edge source="n8" target="n10" directed="false"/> cls.simple_undirected_graph = nx.Graph() - cls.simple_undirected_graph.add_node('n10') - cls.simple_undirected_graph.add_edge('n0', 'n2', id='foo') - cls.simple_undirected_graph.add_edges_from([('n1', 'n2'), - ('n2', 'n3'), - ]) - fh = io.BytesIO(cls.simple_undirected_data.encode('UTF-8')) + cls.simple_undirected_graph.add_node("n10") + cls.simple_undirected_graph.add_edge("n0", "n2", id="foo") + cls.simple_undirected_graph.add_edges_from( + [("n1", "n2"), ("n2", "n3"),] + ) + fh = io.BytesIO(cls.simple_undirected_data.encode("UTF-8")) cls.simple_undirected_fh = fh @@ -212,15 +214,13 @@ class TestReadGraphML(BaseGraphML): H = nx.read_graphml(self.simple_directed_fh) assert sorted(G.nodes()) == sorted(H.nodes()) assert sorted(G.edges()) == sorted(H.edges()) - assert (sorted(G.edges(data=True)) == - sorted(H.edges(data=True))) + assert sorted(G.edges(data=True)) == sorted(H.edges(data=True)) self.simple_directed_fh.seek(0) I = nx.parse_graphml(self.simple_directed_data) assert sorted(G.nodes()) == sorted(I.nodes()) assert sorted(G.edges()) == sorted(I.edges()) - assert (sorted(G.edges(data=True)) == - sorted(I.edges(data=True))) + assert sorted(G.edges(data=True)) == sorted(I.edges(data=True)) def test_read_simple_undirected_graphml(self): G = self.simple_undirected_graph @@ -264,7 +264,7 @@ class TestReadGraphML(BaseGraphML): <edge source="n1" target="n2" directed='true'/> </graph> </graphml>""" - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) pytest.raises(nx.NetworkXError, nx.read_graphml, fh) 
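        # Both entry points are exercised: nx.read_graphml on the file handle
        # above and nx.parse_graphml on the raw string below; each must reject
        # an edge declared directed='true' inside an undirected <graph>.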
pytest.raises(nx.NetworkXError, nx.parse_graphml, s) @@ -282,7 +282,7 @@ class TestReadGraphML(BaseGraphML): <edge source="n1" target="n2" directed='false'/> </graph> </graphml>""" - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) pytest.raises(nx.NetworkXError, nx.read_graphml, fh) pytest.raises(nx.NetworkXError, nx.parse_graphml, s) @@ -310,7 +310,7 @@ class TestReadGraphML(BaseGraphML): </graph> </graphml> """ - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) pytest.raises(nx.NetworkXError, nx.read_graphml, fh) pytest.raises(nx.NetworkXError, nx.parse_graphml, s) @@ -340,7 +340,7 @@ class TestReadGraphML(BaseGraphML): </graph> </graphml> """ - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) pytest.raises(nx.NetworkXError, nx.read_graphml, fh) pytest.raises(nx.NetworkXError, nx.parse_graphml, s) @@ -359,7 +359,7 @@ class TestReadGraphML(BaseGraphML): </graph> </graphml> """ - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) G = nx.read_graphml(fh) expected = [("n0", "n1", "e0"), ("n0", "n1", "e1")] assert sorted(G.edges(keys=True)) == expected @@ -375,25 +375,25 @@ class TestReadGraphML(BaseGraphML): G = nx.MultiGraph() G.add_node(1) G.add_node(2) - G.add_edges_from([ - # edges with no data, no keys: - (1, 2), - # edges with only data: - (1, 2, dict(key='data_key1')), - (1, 2, dict(id='data_id2')), - (1, 2, dict(key='data_key3', id='data_id3')), - # edges with both data and keys: - (1, 2, 103, dict(key='data_key4')), - (1, 2, 104, dict(id='data_id5')), - (1, 2, 105, dict(key='data_key6', id='data_id7')), - ]) + G.add_edges_from( + [ + # edges with no data, no keys: + (1, 2), + # edges with only data: + (1, 2, dict(key="data_key1")), + (1, 2, dict(id="data_id2")), + (1, 2, dict(key="data_key3", id="data_id3")), + # edges with both data and keys: + (1, 2, 103, dict(key="data_key4")), + (1, 2, 104, dict(id="data_id5")), + (1, 2, 105, dict(key="data_key6", id="data_id7")), + ] + ) fh = io.BytesIO() nx.write_graphml(G, fh) fh.seek(0) H = nx.read_graphml(fh, node_type=int) - assert_edges_equal( - G.edges(data=True, keys=True), H.edges(data=True, keys=True) - ) + assert_edges_equal(G.edges(data=True, keys=True), H.edges(data=True, keys=True)) assert G._adj == H._adj def test_yfiles_extension(self): @@ -464,18 +464,18 @@ class TestReadGraphML(BaseGraphML): </data> </graphml> """ - fh = io.BytesIO(data.encode('UTF-8')) + fh = io.BytesIO(data.encode("UTF-8")) G = nx.read_graphml(fh) - assert list(G.edges()) == [('n0', 'n1')] - assert G['n0']['n1']['id'] == 'e0' - assert G.nodes['n0']['label'] == '1' - assert G.nodes['n1']['label'] == '2' + assert list(G.edges()) == [("n0", "n1")] + assert G["n0"]["n1"]["id"] == "e0" + assert G.nodes["n0"]["label"] == "1" + assert G.nodes["n1"]["label"] == "2" H = nx.parse_graphml(data) - assert list(H.edges()) == [('n0', 'n1')] - assert H['n0']['n1']['id'] == 'e0' - assert H.nodes['n0']['label'] == '1' - assert H.nodes['n1']['label'] == '2' + assert list(H.edges()) == [("n0", "n1")] + assert H["n0"]["n1"]["id"] == "e0" + assert H.nodes["n0"]["label"] == "1" + assert H.nodes["n1"]["label"] == "2" def test_bool(self): s = """<?xml version="1.0" encoding="UTF-8"?> @@ -509,16 +509,16 @@ class TestReadGraphML(BaseGraphML): </graph> </graphml> """ - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) G = nx.read_graphml(fh) H = nx.parse_graphml(s) for graph in [G, H]: - assert graph.nodes['n0']['test'] - assert not graph.nodes['n2']['test'] - assert not 
graph.nodes['n3']['test'] - assert graph.nodes['n4']['test'] - assert not graph.nodes['n5']['test'] - assert graph.nodes['n6']['test'] + assert graph.nodes["n0"]["test"] + assert not graph.nodes["n2"]["test"] + assert not graph.nodes["n3"]["test"] + assert graph.nodes["n4"]["test"] + assert not graph.nodes["n5"]["test"] + assert graph.nodes["n6"]["test"] def test_graphml_header_line(self): good = """<?xml version="1.0" encoding="UTF-8" standalone="no"?> @@ -561,13 +561,13 @@ class TestReadGraphML(BaseGraphML): </graphml> """ for s in (good, bad): - fh = io.BytesIO(s.encode('UTF-8')) + fh = io.BytesIO(s.encode("UTF-8")) G = nx.read_graphml(fh) H = nx.parse_graphml(s) for graph in [G, H]: - assert graph.nodes['n0']['test'] + assert graph.nodes["n0"]["test"] - fh = io.BytesIO(ugly.encode('UTF-8')) + fh = io.BytesIO(ugly.encode("UTF-8")) pytest.raises(nx.NetworkXError, nx.read_graphml, fh) pytest.raises(nx.NetworkXError, nx.parse_graphml, ugly) @@ -856,12 +856,12 @@ class TestReadGraphML(BaseGraphML): </graphml> """ # verify that nodes / attributes are correctly read when part of a group - fh = io.BytesIO(data.encode('UTF-8')) + fh = io.BytesIO(data.encode("UTF-8")) G = nx.read_graphml(fh) data = [x for _, x in G.nodes(data=True)] assert len(data) == 9 for node_data in data: - assert node_data['CustomProperty'] != '' + assert node_data["CustomProperty"] != "" class TestWriteGraphML(BaseGraphML): @@ -875,13 +875,14 @@ class TestWriteGraphML(BaseGraphML): def test_write_interface(self): try: import lxml.etree + assert nx.write_graphml == nx.write_graphml_lxml except ImportError: assert nx.write_graphml == nx.write_graphml_xml def test_write_read_simple_directed_graphml(self): G = self.simple_directed_graph - G.graph['hi'] = 'there' + G.graph["hi"] = "there" fh = io.BytesIO() self.writer(G, fh) fh.seek(0) @@ -914,12 +915,12 @@ class TestWriteGraphML(BaseGraphML): keys = [child.items() for child in children[:3]] assert len(keys) == 3 - assert ('id', 'edge_prop') in keys[0] - assert ('attr.name', 'edge_prop') in keys[0] - assert ('id', 'prop2') in keys[1] - assert ('attr.name', 'prop2') in keys[1] - assert ('id', 'prop1') in keys[2] - assert ('attr.name', 'prop1') in keys[2] + assert ("id", "edge_prop") in keys[0] + assert ("attr.name", "edge_prop") in keys[0] + assert ("id", "prop2") in keys[1] + assert ("attr.name", "prop2") in keys[1] + assert ("id", "prop1") in keys[2] + assert ("attr.name", "prop1") in keys[2] # Confirm the read graph nodes/edge are identical when compared to # default writing behavior. @@ -933,8 +934,8 @@ class TestWriteGraphML(BaseGraphML): named_key_ids_behavior_fh.seek(0) J = nx.read_graphml(named_key_ids_behavior_fh) - assert(all(n1 == n2 for (n1, n2) in zip(H.nodes, J.nodes))) - assert(all(e1 == e2 for (e1, e2) in zip(H.edges, J.edges))) + assert all(n1 == n2 for (n1, n2) in zip(H.nodes, J.nodes)) + assert all(e1 == e2 for (e1, e2) in zip(H.edges, J.edges)) def test_write_read_attribute_numeric_type_graphml(self): from xml.etree.ElementTree import parse @@ -959,8 +960,8 @@ class TestWriteGraphML(BaseGraphML): keys = [child.items() for child in children[:2]] assert len(keys) == 2 - assert ('attr.type', 'double') in keys[0] - assert ('attr.type', 'double') in keys[1] + assert ("attr.type", "double") in keys[0] + assert ("attr.type", "double") in keys[1] def test_more_multigraph_keys(self): """Writing keys as edge id attributes means keys become strings. @@ -969,7 +970,7 @@ class TestWriteGraphML(BaseGraphML): This allows the adjacency to remain the same. 
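        For example, an edge added with the integer key 2 is written with
        edge id "2" and read back under the string key "2".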
""" G = nx.MultiGraph() - G.add_edges_from([('a', 'b', 2), ('a', 'b', 3)]) + G.add_edges_from([("a", "b", 2), ("a", "b", 3)]) fd, fname = tempfile.mkstemp() self.writer(G, fname) H = nx.read_graphml(fname) @@ -981,11 +982,11 @@ class TestWriteGraphML(BaseGraphML): def test_default_attribute(self): G = nx.Graph(name="Fred") - G.add_node(1, label=1, color='green') + G.add_node(1, label=1, color="green") nx.add_path(G, [0, 1, 2, 3]) G.add_edge(1, 2, weight=3) - G.graph['node_default'] = {'color': 'yellow'} - G.graph['edge_default'] = {'weight': 7} + G.graph["node_default"] = {"color": "yellow"} + G.graph["edge_default"] = {"weight": 7} fh = io.BytesIO() self.writer(G, fh) fh.seek(0) @@ -996,23 +997,23 @@ class TestWriteGraphML(BaseGraphML): def test_mixed_type_attributes(self): G = nx.MultiGraph() - G.add_node('n0', special=False) - G.add_node('n1', special=0) - G.add_edge('n0', 'n1', special=False) - G.add_edge('n0', 'n1', special=0) + G.add_node("n0", special=False) + G.add_node("n1", special=0) + G.add_edge("n0", "n1", special=False) + G.add_edge("n0", "n1", special=0) fh = io.BytesIO() self.writer(G, fh) fh.seek(0) H = nx.read_graphml(fh) - assert not H.nodes['n0']['special'] - assert H.nodes['n1']['special'] == 0 - assert not H.edges['n0', 'n1', 0]['special'] - assert H.edges['n0', 'n1', 1]['special'] == 0 + assert not H.nodes["n0"]["special"] + assert H.nodes["n1"]["special"] == 0 + assert not H.edges["n0", "n1", 0]["special"] + assert H.edges["n0", "n1", 1]["special"] == 0 def test_multigraph_to_graph(self): # test converting multigraph to graph if no parallel edges found G = nx.MultiGraph() - G.add_edges_from([('a', 'b', 2), ('b', 'c', 3)]) # no multiedges + G.add_edges_from([("a", "b", 2), ("b", "c", 3)]) # no multiedges fd, fname = tempfile.mkstemp() self.writer(G, fname) H = nx.read_graphml(fname) @@ -1021,9 +1022,9 @@ class TestWriteGraphML(BaseGraphML): os.unlink(fname) def test_numpy_float(self): - np = pytest.importorskip('numpy') + np = pytest.importorskip("numpy") wt = np.float(3.4) - G = nx.Graph([(1, 2, {'weight': wt})]) + G = nx.Graph([(1, 2, {"weight": wt})]) fd, fname = tempfile.mkstemp() self.writer(G, fname) H = nx.read_graphml(fname, node_type=int) @@ -1032,15 +1033,15 @@ class TestWriteGraphML(BaseGraphML): os.unlink(fname) def test_numpy_float64(self): - np = pytest.importorskip('numpy') + np = pytest.importorskip("numpy") wt = np.float64(3.4) - G = nx.Graph([(1, 2, {'weight': wt})]) + G = nx.Graph([(1, 2, {"weight": wt})]) fd, fname = tempfile.mkstemp() self.writer(G, fname) H = nx.read_graphml(fname, node_type=int) assert G.edges == H.edges - wtG = G[1][2]['weight'] - wtH = H[1][2]['weight'] + wtG = G[1][2]["weight"] + wtH = H[1][2]["weight"] assert almost_equal(wtG, wtH, places=6) assert type(wtG) == np.float64 assert type(wtH) == float @@ -1048,15 +1049,15 @@ class TestWriteGraphML(BaseGraphML): os.unlink(fname) def test_numpy_float32(self): - np = pytest.importorskip('numpy') + np = pytest.importorskip("numpy") wt = np.float32(3.4) - G = nx.Graph([(1, 2, {'weight': wt})]) + G = nx.Graph([(1, 2, {"weight": wt})]) fd, fname = tempfile.mkstemp() self.writer(G, fname) H = nx.read_graphml(fname, node_type=int) assert G.edges == H.edges - wtG = G[1][2]['weight'] - wtH = H[1][2]['weight'] + wtG = G[1][2]["weight"] + wtH = H[1][2]["weight"] assert almost_equal(wtG, wtH, places=6) assert type(wtG) == np.float32 assert type(wtH) == float @@ -1064,9 +1065,9 @@ class TestWriteGraphML(BaseGraphML): os.unlink(fname) def test_numpy_float64_inference(self): - np = 
pytest.importorskip('numpy') + np = pytest.importorskip("numpy") G = self.attribute_numeric_type_graph - G.edges[('n1', 'n1')]['weight'] = np.float64(1.1) + G.edges[("n1", "n1")]["weight"] = np.float64(1.1) fd, fname = tempfile.mkstemp() self.writer(G, fname, infer_numeric_types=True) H = nx.read_graphml(fname) @@ -1079,7 +1080,7 @@ class TestWriteGraphML(BaseGraphML): name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) node_type = str - G.add_edge(name1, 'Radiohead', foo=name2) + G.add_edge(name1, "Radiohead", foo=name2) fd, fname = tempfile.mkstemp() self.writer(G, fname) H = nx.read_graphml(fname, node_type=node_type) @@ -1090,15 +1091,16 @@ class TestWriteGraphML(BaseGraphML): def test_unicode_escape(self): # test for handling json escaped stings in python 2 Issue #1880 import json + a = dict(a='{"a": "123"}') # an object with many chars to escape sa = json.dumps(a) G = nx.Graph() - G.graph['test'] = sa + G.graph["test"] = sa fh = io.BytesIO() self.writer(G, fh) fh.seek(0) H = nx.read_graphml(fh) - assert G.graph['test'] == H.graph['test'] + assert G.graph["test"] == H.graph["test"] class TestXMLGraphML(TestWriteGraphML): diff --git a/networkx/readwrite/tests/test_leda.py b/networkx/readwrite/tests/test_leda.py index 278f770c..03e2b68d 100644 --- a/networkx/readwrite/tests/test_leda.py +++ b/networkx/readwrite/tests/test_leda.py @@ -3,27 +3,26 @@ import io class TestLEDA: - def test_parse_leda(self): data = """#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|""" G = nx.parse_leda(data) - G = nx.parse_leda(data.split('\n')) - assert (sorted(G.nodes()) == - ['v1', 'v2', 'v3', 'v4', 'v5']) - assert (sorted(G.edges(data=True)) == - [('v1', 'v2', {'label': '4'}), - ('v1', 'v3', {'label': '3'}), - ('v2', 'v3', {'label': '2'}), - ('v3', 'v4', {'label': '3'}), - ('v3', 'v5', {'label': '7'}), - ('v4', 'v5', {'label': '6'}), - ('v5', 'v1', {'label': 'foo'})]) + G = nx.parse_leda(data.split("\n")) + assert sorted(G.nodes()) == ["v1", "v2", "v3", "v4", "v5"] + assert sorted(G.edges(data=True)) == [ + ("v1", "v2", {"label": "4"}), + ("v1", "v3", {"label": "3"}), + ("v2", "v3", {"label": "2"}), + ("v3", "v4", {"label": "3"}), + ("v3", "v5", {"label": "7"}), + ("v4", "v5", {"label": "6"}), + ("v5", "v1", {"label": "foo"}), + ] def test_read_LEDA(self): fh = io.BytesIO() data = """#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|""" G = nx.parse_leda(data) - fh.write(data.encode('UTF-8')) + fh.write(data.encode("UTF-8")) fh.seek(0) Gin = nx.read_leda(fh) assert sorted(G.nodes()) == sorted(Gin.nodes()) diff --git a/networkx/readwrite/tests/test_p2g.py b/networkx/readwrite/tests/test_p2g.py index 66557e3e..0b1a910f 100644 --- a/networkx/readwrite/tests/test_p2g.py +++ b/networkx/readwrite/tests/test_p2g.py @@ -5,13 +5,12 @@ from networkx.testing import assert_edges_equal class TestP2G: - @classmethod def setup_class(cls): cls.G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] cls.G.add_edges_from(e) - cls.G.add_node('g') + cls.G.add_node("g") cls.DG = nx.DiGraph(cls.G) def 
test_read_p2g(self): @@ -27,10 +26,10 @@ c """ bytesIO = io.BytesIO(s) G = read_p2g(bytesIO) - assert G.name == 'name' - assert sorted(G) == ['a', 'b', 'c'] + assert G.name == "name" + assert sorted(G) == ["a", "b", "c"] edges = [(str(u), str(v)) for u, v in G.edges()] - assert_edges_equal(G.edges(), [('a', 'c'), ('a', 'b'), ('c', 'a'), ('c', 'c')]) + assert_edges_equal(G.edges(), [("a", "c"), ("a", "b"), ("c", "a"), ("c", "c")]) def test_write_p2g(self): s = b"""foo @@ -44,7 +43,7 @@ c """ fh = io.BytesIO() G = nx.OrderedDiGraph() - G.name = 'foo' + G.name = "foo" G.add_edges_from([(1, 2), (2, 3)]) write_p2g(G, fh) fh.seek(0) @@ -54,8 +53,8 @@ c def test_write_read_p2g(self): fh = io.BytesIO() G = nx.DiGraph() - G.name = 'foo' - G.add_edges_from([('a', 'b'), ('b', 'c')]) + G.name = "foo" + G.add_edges_from([("a", "b"), ("b", "c")]) write_p2g(G, fh) fh.seek(0) H = read_p2g(fh) diff --git a/networkx/readwrite/tests/test_pajek.py b/networkx/readwrite/tests/test_pajek.py index 27e4c21b..7d7e4c48 100644 --- a/networkx/readwrite/tests/test_pajek.py +++ b/networkx/readwrite/tests/test_pajek.py @@ -12,15 +12,23 @@ class TestPajek: def setup_class(cls): cls.data = """*network Tralala\n*vertices 4\n 1 "A1" 0.0938 0.0896 ellipse x_fact 1 y_fact 1\n 2 "Bb" 0.8188 0.2458 ellipse x_fact 1 y_fact 1\n 3 "C" 0.3688 0.7792 ellipse x_fact 1\n 4 "D2" 0.9583 0.8563 ellipse x_fact 1\n*arcs\n1 1 1 h2 0 w 3 c Blue s 3 a1 -130 k1 0.6 a2 -130 k2 0.6 ap 0.5 l "Bezier loop" lc BlueViolet fos 20 lr 58 lp 0.3 la 360\n2 1 1 h2 0 a1 120 k1 1.3 a2 -120 k2 0.3 ap 25 l "Bezier arc" lphi 270 la 180 lr 19 lp 0.5\n1 2 1 h2 0 a1 40 k1 2.8 a2 30 k2 0.8 ap 25 l "Bezier arc" lphi 90 la 0 lp 0.65\n4 2 -1 h2 0 w 1 k1 -2 k2 250 ap 25 l "Circular arc" c Red lc OrangeRed\n3 4 1 p Dashed h2 0 w 2 c OliveGreen ap 25 l "Straight arc" lc PineGreen\n1 3 1 p Dashed h2 0 w 5 k1 -1 k2 -20 ap 25 l "Oval arc" c Brown lc Black\n3 3 -1 h1 6 w 1 h2 12 k1 -2 k2 -15 ap 0.5 l "Circular loop" c Red lc OrangeRed lphi 270 la 180""" cls.G = nx.MultiDiGraph() - cls.G.add_nodes_from(['A1', 'Bb', 'C', 'D2']) - cls.G.add_edges_from([('A1', 'A1'), ('A1', 'Bb'), ('A1', 'C'), - ('Bb', 'A1'), ('C', 'C'), ('C', 'D2'), - ('D2', 'Bb')]) + cls.G.add_nodes_from(["A1", "Bb", "C", "D2"]) + cls.G.add_edges_from( + [ + ("A1", "A1"), + ("A1", "Bb"), + ("A1", "C"), + ("Bb", "A1"), + ("C", "C"), + ("C", "D2"), + ("D2", "Bb"), + ] + ) - cls.G.graph['name'] = 'Tralala' + cls.G.graph["name"] = "Tralala" (fd, cls.fname) = tempfile.mkstemp() - with os.fdopen(fd, 'wb') as fh: - fh.write(cls.data.encode('UTF-8')) + with os.fdopen(fd, "wb") as fh: + fh.write(cls.data.encode("UTF-8")) @classmethod def teardown_class(cls): @@ -30,22 +38,34 @@ class TestPajek: # Example without node positions or shape data = """*Vertices 2\n1 "1"\n2 "2"\n*Edges\n1 2\n2 1""" G = nx.parse_pajek(data) - assert sorted(G.nodes()) == ['1', '2'] - assert_edges_equal(G.edges(), [('1', '2'), ('1', '2')]) + assert sorted(G.nodes()) == ["1", "2"] + assert_edges_equal(G.edges(), [("1", "2"), ("1", "2")]) def test_parse_pajek(self): G = nx.parse_pajek(self.data) - assert sorted(G.nodes()) == ['A1', 'Bb', 'C', 'D2'] - assert_edges_equal(G.edges(), [('A1', 'A1'), ('A1', 'Bb'), - ('A1', 'C'), ('Bb', 'A1'), - ('C', 'C'), ('C', 'D2'), ('D2', 'Bb')]) + assert sorted(G.nodes()) == ["A1", "Bb", "C", "D2"] + assert_edges_equal( + G.edges(), + [ + ("A1", "A1"), + ("A1", "Bb"), + ("A1", "C"), + ("Bb", "A1"), + ("C", "C"), + ("C", "D2"), + ("D2", "Bb"), + ], + ) def test_parse_pajet_mat(self): data = """*Vertices 3\n1 
"one"\n2 "two"\n3 "three"\n*Matrix\n1 1 0\n0 1 0\n0 1 0\n""" G = nx.parse_pajek(data) - assert set(G.nodes()) == {'one', 'two', 'three'} - assert G.nodes['two'] == {'id': '2'} - assert_edges_equal(set(G.edges()), {('one', 'one'), ('two', 'one'), ('two', 'two'), ('two', 'three')}) + assert set(G.nodes()) == {"one", "two", "three"} + assert G.nodes["two"] == {"id": "2"} + assert_edges_equal( + set(G.edges()), + {("one", "one"), ("two", "one"), ("two", "two"), ("two", "three")}, + ) def test_read_pajek(self): G = nx.parse_pajek(self.data) @@ -58,6 +78,7 @@ class TestPajek: def test_write_pajek(self): import io + G = nx.parse_pajek(self.data) fh = io.BytesIO() nx.write_pajek(G, fh) @@ -70,14 +91,16 @@ class TestPajek: def test_ignored_attribute(self): import io + G = nx.Graph() fh = io.BytesIO() G.add_node(1, int_attr=1) - G.add_node(2, empty_attr=' ') + G.add_node(2, empty_attr=" ") G.add_edge(1, 2, int_attr=2) - G.add_edge(2, 3, empty_attr=' ') + G.add_edge(2, 3, empty_attr=" ") import warnings + with warnings.catch_warnings(record=True) as w: nx.write_pajek(G, fh) assert len(w) == 4 @@ -86,16 +109,17 @@ class TestPajek: # Make sure we can parse a line such as: *network # Issue #952 line = "*network\n" - other_lines = self.data.split('\n')[1:] - data = line + '\n'.join(other_lines) + other_lines = self.data.split("\n")[1:] + data = line + "\n".join(other_lines) G = nx.parse_pajek(data) def test_unicode(self): import io + G = nx.Graph() name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) - G.add_edge(name1, 'Radiohead', foo=name2) + G.add_edge(name1, "Radiohead", foo=name2) fh = io.BytesIO() nx.write_pajek(G, fh) fh.seek(0) diff --git a/networkx/readwrite/tests/test_shp.py b/networkx/readwrite/tests/test_shp.py index 6590e575..25df61d7 100644 --- a/networkx/readwrite/tests/test_shp.py +++ b/networkx/readwrite/tests/test_shp.py @@ -4,14 +4,14 @@ import os import tempfile import pytest -ogr = pytest.importorskip('osgeo.ogr') + +ogr = pytest.importorskip("osgeo.ogr") import networkx as nx class TestShp: def setup_method(self): - def createlayer(driver, layerType=ogr.wkbLineString): lyr = driver.CreateLayer("edges", None, layerType) namedef = ogr.FieldDefn("Name", ogr.OFTString) @@ -21,24 +21,28 @@ class TestShp: drv = ogr.GetDriverByName("ESRI Shapefile") - testdir = os.path.join(tempfile.gettempdir(), 'shpdir') - shppath = os.path.join(tempfile.gettempdir(), 'tmpshp.shp') - multi_shppath = os.path.join(tempfile.gettempdir(), 'tmp_mshp.shp') + testdir = os.path.join(tempfile.gettempdir(), "shpdir") + shppath = os.path.join(tempfile.gettempdir(), "tmpshp.shp") + multi_shppath = os.path.join(tempfile.gettempdir(), "tmp_mshp.shp") self.deletetmp(drv, testdir, shppath, multi_shppath) os.mkdir(testdir) - self.names = ['a', 'b', 'c', 'c'] # edgenames - self.paths = ([(1.0, 1.0), (2.0, 2.0)], - [(2.0, 2.0), (3.0, 3.0)], - [(0.9, 0.9), (4.0, 0.9), (4.0, 2.0)]) + self.names = ["a", "b", "c", "c"] # edgenames + self.paths = ( + [(1.0, 1.0), (2.0, 2.0)], + [(2.0, 2.0), (3.0, 3.0)], + [(0.9, 0.9), (4.0, 0.9), (4.0, 2.0)], + ) - self.simplified_names = ['a', 'b', 'c'] # edgenames - self.simplified_paths = ([(1.0, 1.0), (2.0, 2.0)], - [(2.0, 2.0), (3.0, 3.0)], - [(0.9, 0.9), (4.0, 2.0)]) + self.simplified_names = ["a", "b", "c"] # edgenames + self.simplified_paths = ( + [(1.0, 1.0), (2.0, 2.0)], + [(2.0, 2.0), (3.0, 3.0)], + [(0.9, 0.9), (4.0, 2.0)], + ) - self.multi_names = ['a', 'a', 'a', 'a'] # edgenames + self.multi_names = ["a", "a", "a", "a"] # edgenames shp = 
drv.CreateDataSource(shppath) lyr = createlayer(shp) @@ -67,7 +71,7 @@ class TestShp: multi_feat = ogr.Feature(multi_lyr.GetLayerDefn()) multi_feat.SetGeometry(multi_g) - multi_feat.SetField("Name", 'a') + multi_feat.SetField("Name", "a") multi_lyr.CreateFeature(multi_feat) self.shppath = shppath @@ -81,20 +85,18 @@ class TestShp: drv.DeleteDataSource(p) def testload(self): - def compare_graph_paths_names(g, paths, names): expected = nx.DiGraph() for p in paths: nx.add_path(expected, p) assert sorted(expected.nodes) == sorted(g.nodes) assert sorted(expected.edges()) == sorted(g.edges()) - g_names = [g.get_edge_data(s, e)['Name'] for s, e in g.edges()] + g_names = [g.get_edge_data(s, e)["Name"] for s, e in g.edges()] assert names == sorted(g_names) # simplified G = nx.read_shp(self.shppath) - compare_graph_paths_names(G, self.simplified_paths, - self.simplified_names) + compare_graph_paths_names(G, self.simplified_paths, self.simplified_names) # unsimplified G = nx.read_shp(self.shppath, simplify=False) @@ -118,12 +120,12 @@ class TestShp: "POINT (2 2)", "POINT (3 3)", "POINT (0.9 0.9)", - "POINT (4 2)" + "POINT (4 2)", ) expectedlines_simple = ( "LINESTRING (1 1,2 2)", "LINESTRING (2 2,3 3)", - "LINESTRING (0.9 0.9,4.0 0.9,4 2)" + "LINESTRING (0.9 0.9,4.0 0.9,4 2)", ) expectedpoints = ( "POINT (1 1)", @@ -131,16 +133,16 @@ class TestShp: "POINT (3 3)", "POINT (0.9 0.9)", "POINT (4.0 0.9)", - "POINT (4 2)" + "POINT (4 2)", ) expectedlines = ( "LINESTRING (1 1,2 2)", "LINESTRING (2 2,3 3)", "LINESTRING (0.9 0.9,4.0 0.9)", - "LINESTRING (4.0 0.9,4 2)" + "LINESTRING (4.0 0.9,4 2)", ) - tpath = os.path.join(tempfile.gettempdir(), 'shpdir') + tpath = os.path.join(tempfile.gettempdir(), "shpdir") G = nx.read_shp(self.shppath) nx.write_shp(G, tpath) shpdir = ogr.Open(tpath) @@ -164,11 +166,11 @@ class TestShp: ref = feature.GetGeometryRef() last = ref.GetPointCount() - 1 edge_nodes = (ref.GetPoint_2D(0), ref.GetPoint_2D(last)) - name = feature.GetFieldAsString('Name') - assert graph.get_edge_data(*edge_nodes)['Name'] == name + name = feature.GetFieldAsString("Name") + assert graph.get_edge_data(*edge_nodes)["Name"] == name feature = lyr.GetNextFeature() - tpath = os.path.join(tempfile.gettempdir(), 'shpdir') + tpath = os.path.join(tempfile.gettempdir(), "shpdir") G = nx.read_shp(self.shppath) nx.write_shp(G, tpath) @@ -178,7 +180,7 @@ class TestShp: # Test export of node attributes in nx.write_shp (#2778) def test_nodeattributeexport(self): - tpath = os.path.join(tempfile.gettempdir(), 'shpdir') + tpath = os.path.join(tempfile.gettempdir(), "shpdir") G = nx.DiGraph() A = (0, 0) @@ -186,25 +188,20 @@ class TestShp: C = (2, 2) G.add_edge(A, B) G.add_edge(A, C) - label = 'node_label' + label = "node_label" for n, d in G.nodes(data=True): - d['label'] = label + d["label"] = label nx.write_shp(G, tpath) H = nx.read_shp(tpath) for n, d in H.nodes(data=True): - assert d['label'] == label + assert d["label"] == label def test_wkt_export(self): G = nx.DiGraph() - tpath = os.path.join(tempfile.gettempdir(), 'shpdir') - points = ( - "POINT (0.9 0.9)", - "POINT (4 2)" - ) - line = ( - "LINESTRING (0.9 0.9,4 2)", - ) + tpath = os.path.join(tempfile.gettempdir(), "shpdir") + points = ("POINT (0.9 0.9)", "POINT (4 2)") + line = ("LINESTRING (0.9 0.9,4 2)",) G.add_node(1, Wkt=points[0]) G.add_node(2, Wkt=points[1]) G.add_edge(1, 2, Wkt=line[0]) @@ -235,7 +232,7 @@ class TestMissingGeometry: self.delete_shapedir() def setup_path(self): - self.path = os.path.join(tempfile.gettempdir(), 'missing_geometry') + 
self.path = os.path.join(tempfile.gettempdir(), "missing_geometry") def create_shapedir(self): drv = ogr.GetDriverByName("ESRI Shapefile") @@ -265,7 +262,7 @@ class TestMissingAttrWrite: self.delete_shapedir() def setup_path(self): - self.path = os.path.join(tempfile.gettempdir(), 'missing_attributes') + self.path = os.path.join(tempfile.gettempdir(), "missing_attributes") def delete_shapedir(self): drv = ogr.GetDriverByName("ESRI Shapefile") @@ -285,6 +282,6 @@ class TestMissingAttrWrite: for u, v, d in H.edges(data=True): if u == A and v == B: - assert d['foo'] == 100 + assert d["foo"] == 100 if u == A and v == C: - assert d['foo'] is None + assert d["foo"] is None diff --git a/networkx/readwrite/tests/test_sparse6.py b/networkx/readwrite/tests/test_sparse6.py index 516681b9..141d8230 100644 --- a/networkx/readwrite/tests/test_sparse6.py +++ b/networkx/readwrite/tests/test_sparse6.py @@ -8,30 +8,56 @@ from networkx.testing.utils import assert_nodes_equal class TestSparseGraph6: - def test_from_sparse6_bytes(self): - data = b':Q___eDcdFcDeFcE`GaJ`IaHbKNbLM' + data = b":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM" G = nx.from_sparse6_bytes(data) - assert_nodes_equal(sorted(G.nodes()), - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 10, 11, 12, 13, 14, 15, 16, 17]) - assert_edges_equal(G.edges(), - [(0, 1), (0, 2), (0, 3), (1, 12), (1, 14), (2, 13), - (2, 15), (3, 16), (3, 17), (4, 7), (4, 9), (4, 11), - (5, 6), (5, 8), (5, 9), (6, 10), (6, 11), (7, 8), - (7, 10), (8, 12), (9, 15), (10, 14), (11, 13), - (12, 16), (13, 17), (14, 17), (15, 16)]) + assert_nodes_equal( + sorted(G.nodes()), + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17], + ) + assert_edges_equal( + G.edges(), + [ + (0, 1), + (0, 2), + (0, 3), + (1, 12), + (1, 14), + (2, 13), + (2, 15), + (3, 16), + (3, 17), + (4, 7), + (4, 9), + (4, 11), + (5, 6), + (5, 8), + (5, 9), + (6, 10), + (6, 11), + (7, 8), + (7, 10), + (8, 12), + (9, 15), + (10, 14), + (11, 13), + (12, 16), + (13, 17), + (14, 17), + (15, 16), + ], + ) def test_from_bytes_multigraph_graph(self): - graph_data = b':An' + graph_data = b":An" G = nx.from_sparse6_bytes(graph_data) assert type(G) == nx.Graph - multigraph_data = b':Ab' + multigraph_data = b":Ab" M = nx.from_sparse6_bytes(multigraph_data) assert type(M) == nx.MultiGraph def test_read_sparse6(self): - data = b':Q___eDcdFcDeFcE`GaJ`IaHbKNbLM' + data = b":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM" G = nx.from_sparse6_bytes(data) fh = BytesIO(data) Gin = nx.read_sparse6(fh) @@ -40,15 +66,15 @@ class TestSparseGraph6: def test_read_many_graph6(self): # Read many graphs into list - data = (b':Q___eDcdFcDeFcE`GaJ`IaHbKNbLM\n' - b':Q___dCfDEdcEgcbEGbFIaJ`JaHN`IM') + data = b":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM\n" b":Q___dCfDEdcEgcbEGbFIaJ`JaHN`IM" fh = BytesIO(data) glist = nx.read_sparse6(fh) assert len(glist) == 2 for G in glist: - assert_nodes_equal(G.nodes(), - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 10, 11, 12, 13, 14, 15, 16, 17]) + assert_nodes_equal( + G.nodes(), + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17], + ) class TestWriteSparse6: @@ -62,58 +88,58 @@ class TestWriteSparse6: G = nx.null_graph() result = BytesIO() nx.write_sparse6(G, result) - assert result.getvalue() == b'>>sparse6<<:?\n' + assert result.getvalue() == b">>sparse6<<:?\n" def test_trivial_graph(self): G = nx.trivial_graph() result = BytesIO() nx.write_sparse6(G, result) - assert result.getvalue() == b'>>sparse6<<:@\n' + assert result.getvalue() == b">>sparse6<<:@\n" def test_empty_graph(self): G = nx.empty_graph(5) result = BytesIO() nx.write_sparse6(G, 
result) - assert result.getvalue() == b'>>sparse6<<:D\n' + assert result.getvalue() == b">>sparse6<<:D\n" def test_large_empty_graph(self): G = nx.empty_graph(68) result = BytesIO() nx.write_sparse6(G, result) - assert result.getvalue() == b'>>sparse6<<:~?@C\n' + assert result.getvalue() == b">>sparse6<<:~?@C\n" def test_very_large_empty_graph(self): G = nx.empty_graph(258049) result = BytesIO() nx.write_sparse6(G, result) - assert result.getvalue() == b'>>sparse6<<:~~???~?@\n' + assert result.getvalue() == b">>sparse6<<:~~???~?@\n" def test_complete_graph(self): G = nx.complete_graph(4) result = BytesIO() nx.write_sparse6(G, result) - assert result.getvalue() == b'>>sparse6<<:CcKI\n' + assert result.getvalue() == b">>sparse6<<:CcKI\n" def test_no_header(self): G = nx.complete_graph(4) result = BytesIO() nx.write_sparse6(G, result, header=False) - assert result.getvalue() == b':CcKI\n' + assert result.getvalue() == b":CcKI\n" def test_padding(self): - codes = (b':Cdv', b':DaYn', b':EaYnN', b':FaYnL', b':GaYnLz') + codes = (b":Cdv", b":DaYn", b":EaYnN", b":FaYnL", b":GaYnLz") for n, code in enumerate(codes, start=4): G = nx.path_graph(n) result = BytesIO() nx.write_sparse6(G, result, header=False) - assert result.getvalue() == code + b'\n' + assert result.getvalue() == code + b"\n" def test_complete_bipartite(self): G = nx.complete_bipartite_graph(6, 9) result = BytesIO() nx.write_sparse6(G, result) # Compared with sage - expected = b'>>sparse6<<:Nk' + b'?G`cJ' * 9 + b'\n' + expected = b">>sparse6<<:Nk" + b"?G`cJ" * 9 + b"\n" assert result.getvalue() == expected def test_read_write_inverse(self): @@ -139,8 +165,9 @@ class TestWriteSparse6: fullfilename = f.name # file should be closed now, so write_sparse6 can open it nx.write_sparse6(nx.null_graph(), fullfilename) - fh = open(fullfilename, mode='rb') - assert fh.read() == b'>>sparse6<<:?\n' + fh = open(fullfilename, mode="rb") + assert fh.read() == b">>sparse6<<:?\n" fh.close() import os + os.remove(fullfilename) diff --git a/networkx/readwrite/tests/test_yaml.py b/networkx/readwrite/tests/test_yaml.py index 85570646..c1238fa5 100644 --- a/networkx/readwrite/tests/test_yaml.py +++ b/networkx/readwrite/tests/test_yaml.py @@ -5,6 +5,7 @@ import os import tempfile import pytest + yaml = pytest.importorskip("yaml") import networkx as nx @@ -19,9 +20,9 @@ class TestYaml: @classmethod def build_graphs(cls): cls.G = nx.Graph(name="test") - e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')] + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] cls.G.add_edges_from(e) - cls.G.add_node('g') + cls.G.add_node("g") cls.DG = nx.DiGraph(cls.G) diff --git a/networkx/release.py b/networkx/release.py index 695d32e5..bef01295 100644 --- a/networkx/release.py +++ b/networkx/release.py @@ -39,7 +39,7 @@ basedir = os.path.abspath(os.path.split(__file__)[0]) def write_versionfile(): """Creates a static file containing version information.""" - versionfile = os.path.join(basedir, 'version.py') + versionfile = os.path.join(basedir, "version.py") text = '''""" Version information for NetworkX, created during installation. 
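(For reference, the dev version string these release.py hunks assemble looks
like the sketch below; a minimal illustration assuming the constants defined
further down in this file (major="2", minor="5rc1", dev=True) and a timestamp
chosen only for the example.)

    import datetime

    major, minor, dev = "2", "5rc1", True  # values from release.py below
    date_info = datetime.datetime(2020, 7, 10, 9, 44, 54)  # illustrative
    version = ".".join([str(major), str(minor)])
    if dev:
        version += ".dev_" + date_info.strftime("%Y%m%d%H%M%S")
    assert version == "2.5rc1.dev_20200710094454"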
@@ -72,19 +72,19 @@ vcs_info = %(vcs_info)r date, date_info, version, version_info, vcs_info = get_info(dynamic=True) def writefile(): - fh = open(versionfile, 'w') + fh = open(versionfile, "w") subs = { - 'dev': dev, - 'version': version, - 'version_info': version_info, - 'date': date, - 'date_info': date_info, - 'vcs_info': vcs_info + "dev": dev, + "version": version, + "version_info": version_info, + "date": date, + "date_info": date_info, + "vcs_info": vcs_info, } fh.write(text % subs) fh.close() - if vcs_info[0] == 'mercurial': + if vcs_info[0] == "mercurial": # Then, we want to update version.py. writefile() else: @@ -94,6 +94,7 @@ vcs_info = %(vcs_info)r # Grab the version so that setup can use it. # sys.path.insert(0, basedir) from version import version + # del sys.path[0] else: # This is *bad*. It means the user might have a tarball that @@ -113,10 +114,10 @@ def get_revision(): """Returns revision and vcs information, dynamically obtained.""" vcs, revision, tag = None, None, None - gitdir = os.path.join(basedir, '..', '.git') + gitdir = os.path.join(basedir, "..", ".git") if os.path.isdir(gitdir): - vcs = 'git' + vcs = "git" # For now, we are not bothering with revision and tag. vcs_info = (vcs, (revision, tag)) @@ -126,7 +127,9 @@ def get_revision(): def get_info(dynamic=True): # Date information - date_info = datetime.datetime.utcfromtimestamp(int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))) + date_info = datetime.datetime.utcfromtimestamp( + int(os.environ.get("SOURCE_DATE_EPOCH", time.time())) + ) date = time.asctime(date_info.timetuple()) revision, version, version_info, vcs_info = None, None, None, None @@ -157,16 +160,16 @@ def get_info(dynamic=True): # We are here if: # we failed to determine static versioning info, or # we successfully obtained dynamic revision info - version = ''.join([str(major), '.', str(minor)]) + version = "".join([str(major), ".", str(minor)]) if dev: - version += '.dev_' + date_info.strftime("%Y%m%d%H%M%S") + version += ".dev_" + date_info.strftime("%Y%m%d%H%M%S") version_info = (name, major, minor, revision) return date, date_info, version, version_info, vcs_info # Version information -name = 'networkx' +name = "networkx" major = "2" minor = "5rc1" @@ -177,39 +180,49 @@ dev = True description = "Python package for creating and manipulating graphs and networks" -authors = {'Hagberg': ('Aric Hagberg', 'hagberg@lanl.gov'), - 'Schult': ('Dan Schult', 'dschult@colgate.edu'), - 'Swart': ('Pieter Swart', 'swart@lanl.gov')} +authors = { + "Hagberg": ("Aric Hagberg", "hagberg@lanl.gov"), + "Schult": ("Dan Schult", "dschult@colgate.edu"), + "Swart": ("Pieter Swart", "swart@lanl.gov"), +} maintainer = "NetworkX Developers" maintainer_email = "networkx-discuss@googlegroups.com" -url = 'http://networkx.github.io/' +url = "http://networkx.github.io/" project_urls = { "Bug Tracker": "https://github.com/networkx/networkx/issues", "Documentation": "https://networkx.github.io/documentation/stable/", "Source Code": "https://github.com/networkx/networkx", } -platforms = ['Linux', 'Mac OSX', 'Windows', 'Unix'] -keywords = ['Networks', 'Graph Theory', 'Mathematics', - 'network', 'graph', 'discrete mathematics', 'math'] +platforms = ["Linux", "Mac OSX", "Windows", "Unix"] +keywords = [ + "Networks", + "Graph Theory", + "Mathematics", + "network", + "graph", + "discrete mathematics", + "math", +] classifiers = [ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: Science/Research', - 'License :: OSI 
Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3 :: Only', - 'Topic :: Software Development :: Libraries :: Python Modules', - 'Topic :: Scientific/Engineering :: Bio-Informatics', - 'Topic :: Scientific/Engineering :: Information Analysis', - 'Topic :: Scientific/Engineering :: Mathematics', - 'Topic :: Scientific/Engineering :: Physics'] + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3 :: Only", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Scientific/Engineering :: Bio-Informatics", + "Topic :: Scientific/Engineering :: Information Analysis", + "Topic :: Scientific/Engineering :: Mathematics", + "Topic :: Scientific/Engineering :: Physics", +] date, date_info, version, version_info, vcs_info = get_info() -if __name__ == '__main__': +if __name__ == "__main__": # Write versionfile for nightly snapshots. write_versionfile() diff --git a/networkx/testing/test.py b/networkx/testing/test.py index 85849b17..ce2c636b 100644 --- a/networkx/testing/test.py +++ b/networkx/testing/test.py @@ -12,10 +12,10 @@ def run(verbosity=1, doctest=False): import pytest - pytest_args = ['-l'] + pytest_args = ["-l"] if verbosity and int(verbosity) > 1: - pytest_args += ["-" + "v"*(int(verbosity)-1)] + pytest_args += ["-" + "v" * (int(verbosity) - 1)] if doctest: pytest_args += ["--doctest-modules"] @@ -27,7 +27,7 @@ def run(verbosity=1, doctest=False): except SystemExit as exc: code = exc.code - return (code == 0) + return code == 0 if __name__ == "__main__": diff --git a/networkx/testing/tests/test_utils.py b/networkx/testing/tests/test_utils.py index 25c2f7d6..3a7d76db 100644 --- a/networkx/testing/tests/test_utils.py +++ b/networkx/testing/tests/test_utils.py @@ -1,9 +1,5 @@ import networkx as nx -from networkx.testing import ( - assert_graphs_equal, - assert_edges_equal, - assert_nodes_equal -) +from networkx.testing import assert_graphs_equal, assert_edges_equal, assert_nodes_equal # thanks to numpy for this GenericTest class (numpy/testing/test_utils.py) @@ -39,16 +35,16 @@ class TestNodesEqual(_GenericTest): def test_nodes_with_data_equal(self): G = nx.Graph() - G.add_nodes_from([1, 2, 3], color='red') + G.add_nodes_from([1, 2, 3], color="red") H = nx.Graph() - H.add_nodes_from([1, 2, 3], color='red') + H.add_nodes_from([1, 2, 3], color="red") self._test_equal(G.nodes(data=True), H.nodes(data=True)) def test_edges_with_data_not_equal(self): G = nx.Graph() - G.add_nodes_from([1, 2, 3], color='red') + G.add_nodes_from([1, 2, 3], color="red") H = nx.Graph() - H.add_nodes_from([1, 2, 3], color='blue') + H.add_nodes_from([1, 2, 3], color="blue") self._test_not_equal(G.nodes(data=True), H.nodes(data=True)) @@ -70,22 +66,21 @@ class TestEdgesEqual(_GenericTest): nx.add_path(G, [0, 1, 2], weight=1) H = nx.MultiGraph() nx.add_path(H, [0, 1, 2], weight=1) - self._test_equal(G.edges(data=True, keys=True), - H.edges(data=True, keys=True)) + self._test_equal(G.edges(data=True, keys=True), 
H.edges(data=True, keys=True)) def test_edges_with_data_not_equal(self): G = nx.MultiGraph() nx.add_path(G, [0, 1, 2], weight=1) H = nx.MultiGraph() nx.add_path(H, [0, 1, 2], weight=2) - self._test_not_equal(G.edges(data=True, keys=True), - H.edges(data=True, keys=True)) + self._test_not_equal( + G.edges(data=True, keys=True), H.edges(data=True, keys=True) + ) def test_no_edges(self): G = nx.MultiGraph() H = nx.MultiGraph() - self._test_equal(G.edges(data=True, keys=True), - H.edges(data=True, keys=True)) + self._test_equal(G.edges(data=True, keys=True), H.edges(data=True, keys=True)) def test_duplicate_edges(self): a = [(1, 2), (5, 4), (1, 2)] @@ -93,28 +88,28 @@ class TestEdgesEqual(_GenericTest): self._test_not_equal(a, b) def test_duplicate_edges_with_data(self): - a = [(1, 2, {'weight': 10}), (5, 4), (1, 2, {'weight': 1})] - b = [(4, 5), (1, 2), (1, 2, {'weight': 1})] + a = [(1, 2, {"weight": 10}), (5, 4), (1, 2, {"weight": 1})] + b = [(4, 5), (1, 2), (1, 2, {"weight": 1})] self._test_not_equal(a, b) def test_order_of_edges_with_data(self): - a = [(1, 2, {'weight': 10}), (1, 2, {'weight': 1})] - b = [(1, 2, {'weight': 1}), (1, 2, {'weight': 10})] + a = [(1, 2, {"weight": 10}), (1, 2, {"weight": 1})] + b = [(1, 2, {"weight": 1}), (1, 2, {"weight": 10})] self._test_equal(a, b) def test_order_of_multiedges(self): - wt1 = {'weight': 1} - wt2 = {'weight': 2} + wt1 = {"weight": 1} + wt2 = {"weight": 2} a = [(1, 2, wt1), (1, 2, wt1), (1, 2, wt2)] b = [(1, 2, wt1), (1, 2, wt2), (1, 2, wt2)] self._test_not_equal(a, b) def test_order_of_edges_with_keys(self): - a = [(1, 2, 0, {'weight': 10}), (1, 2, 1, {'weight': 1}), (1, 2, 2)] - b = [(1, 2, 1, {'weight': 1}), (1, 2, 2), (1, 2, 0, {'weight': 10})] + a = [(1, 2, 0, {"weight": 10}), (1, 2, 1, {"weight": 1}), (1, 2, 2)] + b = [(1, 2, 1, {"weight": 1}), (1, 2, 2), (1, 2, 0, {"weight": 10})] self._test_equal(a, b) - a = [(1, 2, 1, {'weight': 10}), (1, 2, 0, {'weight': 1}), (1, 2, 2)] - b = [(1, 2, 1, {'weight': 1}), (1, 2, 2), (1, 2, 0, {'weight': 10})] + a = [(1, 2, 1, {"weight": 10}), (1, 2, 0, {"weight": 1}), (1, 2, 2)] + b = [(1, 2, 1, {"weight": 1}), (1, 2, 2), (1, 2, 0, {"weight": 10})] self._test_not_equal(a, b) @@ -161,5 +156,5 @@ class TestGraphsEqual(_GenericTest): G = nx.path_graph(4) H = nx.Graph() nx.add_path(H, range(4)) - H.name = 'path_graph(4)' + H.name = "path_graph(4)" self._test_not_equal(G, H) diff --git a/networkx/testing/utils.py b/networkx/testing/utils.py index 1d767917..795cefad 100644 --- a/networkx/testing/utils.py +++ b/networkx/testing/utils.py @@ -1,5 +1,9 @@ -__all__ = ['assert_nodes_equal', 'assert_edges_equal', 'assert_graphs_equal', - 'almost_equal'] +__all__ = [ + "assert_nodes_equal", + "assert_edges_equal", + "assert_graphs_equal", + "almost_equal", +] def almost_equal(x, y, places=7): @@ -25,6 +29,7 @@ def assert_edges_equal(edges1, edges2): # edge tuples with data dicts (u,v,d), or # edge tuples with keys and data dicts (u,v,k, d) from collections import defaultdict + d1 = defaultdict(dict) d2 = defaultdict(dict) c1 = 0 diff --git a/networkx/tests/test_convert_pandas.py b/networkx/tests/test_convert_pandas.py index 8488c332..42ece720 100644 --- a/networkx/tests/test_convert_pandas.py +++ b/networkx/tests/test_convert_pandas.py @@ -19,8 +19,7 @@ class TestConvertPandas: df["b"] = b # Column label 'b' (str) self.df = df - mdf = pd.DataFrame([[4, 16, "A", "D"]], - columns=["weight", "cost", 0, "b"]) + mdf = pd.DataFrame([[4, 16, "A", "D"]], columns=["weight", "cost", 0, "b"]) self.mdf = df.append(mdf) def 
test_exceptions(self): @@ -28,8 +27,7 @@ class TestConvertPandas: pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G) G = pd.DataFrame(["a", 0.0]) # elist pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G) - df = pd.DataFrame([[1, 1], [1, 0]], - dtype=int, index=[1, 2], columns=["a", "b"]) + df = pd.DataFrame([[1, 1], [1, 0]], dtype=int, index=[1, 2], columns=["a", "b"]) pytest.raises(nx.NetworkXError, nx.from_pandas_adjacency, df) def test_from_edgelist_all_attr(self): @@ -91,14 +89,18 @@ class TestConvertPandas: "St": ["X1", "X2", "X3", "X4", "Y1", "Y2", "X2", "Y3", "Z1", "X3"], "Co": ["zA", "zB", "zB", "zB", "zC", "zC", "zC", "zC", "zD", "zD"], "Mi": [0, 54, 49, 44, 0, 34, 29, 24, 0, 14], - } + } df = pd.DataFrame.from_dict(data) - G1 = nx.from_pandas_edgelist(df, source="O", target="D", - edge_attr=True, - create_using=nx.MultiDiGraph) - G2 = nx.from_pandas_edgelist(df, source="O", target="D", - edge_attr=["St", "Co", "Mi"], - create_using=nx.MultiDiGraph) + G1 = nx.from_pandas_edgelist( + df, source="O", target="D", edge_attr=True, create_using=nx.MultiDiGraph + ) + G2 = nx.from_pandas_edgelist( + df, + source="O", + target="D", + edge_attr=["St", "Co", "Mi"], + create_using=nx.MultiDiGraph, + ) assert_graphs_equal(G1, Gtrue) assert_graphs_equal(G2, Gtrue) @@ -122,18 +124,26 @@ class TestConvertPandas: assert_graphs_equal(G, Gtrue) def test_from_edgelist_invalid_attr(self): - pytest.raises(nx.NetworkXError, nx.from_pandas_edgelist, - self.df, 0, "b", "misspell") - pytest.raises(nx.NetworkXError, nx.from_pandas_edgelist, - self.df, 0, "b", 1) + pytest.raises( + nx.NetworkXError, nx.from_pandas_edgelist, self.df, 0, "b", "misspell" + ) + pytest.raises(nx.NetworkXError, nx.from_pandas_edgelist, self.df, 0, "b", 1) # see Issue #3562 edgeframe = pd.DataFrame([[0, 1], [1, 2], [2, 0]], columns=["s", "t"]) - pytest.raises(nx.NetworkXError, nx.from_pandas_edgelist, - edgeframe, "s", "t", True) - pytest.raises(nx.NetworkXError, nx.from_pandas_edgelist, - edgeframe, "s", "t", "weight") - pytest.raises(nx.NetworkXError, nx.from_pandas_edgelist, - edgeframe, "s", "t", ["weight", "size"]) + pytest.raises( + nx.NetworkXError, nx.from_pandas_edgelist, edgeframe, "s", "t", True + ) + pytest.raises( + nx.NetworkXError, nx.from_pandas_edgelist, edgeframe, "s", "t", "weight" + ) + pytest.raises( + nx.NetworkXError, + nx.from_pandas_edgelist, + edgeframe, + "s", + "t", + ["weight", "size"], + ) def test_from_edgelist_no_attr(self): Gtrue = nx.Graph([("E", "C", {}), ("B", "A", {}), ("A", "D", {})]) @@ -149,8 +159,7 @@ class TestConvertPandas: source = [s for s, t, d in edgelist] target = [t for s, t, d in edgelist] weight = [d["weight"] for s, t, d in edgelist] - edges = pd.DataFrame({"source": source, "target": target, - "weight": weight}) + edges = pd.DataFrame({"source": source, "target": target, "weight": weight}) GG = nx.from_pandas_edgelist(edges, edge_attr="weight") assert_nodes_equal(G.nodes(), GG.nodes()) @@ -178,16 +187,18 @@ class TestConvertPandas: G = nx.path_graph(10) G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges)) nx.set_edge_attributes(G, 0, name="source_col_name") - pytest.raises(nx.NetworkXError, - nx.to_pandas_edgelist, G, source="source_col_name") + pytest.raises( + nx.NetworkXError, nx.to_pandas_edgelist, G, source="source_col_name" + ) # drop source column to test an exception raised for the target column for u, v, d in G.edges(data=True): d.pop("source_col_name", None) nx.set_edge_attributes(G, 0, name="target_col_name") - pytest.raises(nx.NetworkXError, - 
diff --git a/networkx/utils/contextmanagers.py b/networkx/utils/contextmanagers.py
index bd36a36c..4839f618 100644
--- a/networkx/utils/contextmanagers.py
+++ b/networkx/utils/contextmanagers.py
@@ -2,7 +2,7 @@ from contextlib import contextmanager
import warnings

__all__ = [
-    'reversed',
+    "reversed",
]
@@ -24,8 +24,10 @@ def reversed(G):
    same graph without confusion while the context manager does not.
    This context manager is scheduled to be removed in version 3.0.
    """
-    msg = "context manager reversed is deprecated and to be removed in 3.0." \
-          "Use G.reverse(copy=False) if G.is_directed() else G instead."
+    msg = (
+        "context manager reversed is deprecated and to be removed in 3.0."
+        "Use G.reverse(copy=False) if G.is_directed() else G instead."
+    )
    warnings.warn(msg, DeprecationWarning)

    directed = G.is_directed()
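# The deprecation message above points at the reversed-view idiom that replaces
# this context manager. A minimal sketch of that pattern (illustrative, not
# part of this commit), assuming an arbitrary DiGraph:
import networkx as nx

G = nx.DiGraph([("A", "B")])
R = G.reverse(copy=False) if G.is_directed() else G  # reversed *view*, no copy
assert "A" in R["B"]  # the edge A->B is seen as B->A through the view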
diff --git a/networkx/utils/decorators.py b/networkx/utils/decorators.py
index a20c4d07..8c2a12dc 100644
--- a/networkx/utils/decorators.py
+++ b/networkx/utils/decorators.py
@@ -8,13 +8,13 @@ from decorator import decorator
from networkx.utils import create_random_state, create_py_random_state

__all__ = [
-    'not_implemented_for',
-    'open_file',
-    'nodes_or_number',
-    'preserve_random_state',
-    'random_state',
-    'np_random_state',
-    'py_random_state',
+    "not_implemented_for",
+    "open_file",
+    "nodes_or_number",
+    "preserve_random_state",
+    "random_state",
+    "np_random_state",
+    "py_random_state",
]
@@ -53,47 +53,54 @@ def not_implemented_for(*graph_types):
    def sp_np_function(G):
        pass
    """
+
    @decorator
    def _not_implemented_for(not_implement_for_func, *args, **kwargs):
        graph = args[0]
-        terms = {'directed': graph.is_directed(),
-                 'undirected': not graph.is_directed(),
-                 'multigraph': graph.is_multigraph(),
-                 'graph': not graph.is_multigraph()}
+        terms = {
+            "directed": graph.is_directed(),
+            "undirected": not graph.is_directed(),
+            "multigraph": graph.is_multigraph(),
+            "graph": not graph.is_multigraph(),
+        }
        match = True
        try:
            for t in graph_types:
                match = match and terms[t]
        except KeyError as e:
-            raise KeyError('use one or more of '
-                           'directed, undirected, multigraph, graph') from e
+            raise KeyError(
+                "use one or more of " "directed, undirected, multigraph, graph"
+            ) from e
        if match:
            msg = f"not implemented for {' '.join(graph_types)} type"
            raise nx.NetworkXNotImplemented(msg)
        else:
            return not_implement_for_func(*args, **kwargs)
+
    return _not_implemented_for


def _open_gz(path, mode):
    import gzip
+
    return gzip.open(path, mode=mode)


def _open_bz2(path, mode):
    import bz2
+
    return bz2.BZ2File(path, mode=mode)


# To handle new extensions, define a function accepting a `path` and `mode`.
# Then add the extension to _dispatch_dict.
_dispatch_dict = defaultdict(lambda: open)
-_dispatch_dict['.gz'] = _open_gz
-_dispatch_dict['.bz2'] = _open_bz2
-_dispatch_dict['.gzip'] = _open_gz
+_dispatch_dict[".gz"] = _open_gz
+_dispatch_dict[".bz2"] = _open_bz2
+_dispatch_dict[".gzip"] = _open_gz


-def open_file(path_arg, mode='r'):
+def open_file(path_arg, mode="r"):
    """Decorator to ensure clean opening and closing of files.

    Parameters
@@ -205,7 +212,7 @@ def open_file(path_arg, mode='r'):
        ext = splitext(path)[1]
        fobj = _dispatch_dict[ext](path, mode=mode)
        close_fobj = True
-    elif hasattr(path, 'read'):
+    elif hasattr(path, "read"):
        # path is already a file-like object
        fobj = path
        close_fobj = False
@@ -272,6 +279,7 @@ def nodes_or_number(which_args):
        # r is a number. n can be a number of a list of nodes
        pass
    """
+
    @decorator
    def _nodes_or_number(func_to_be_decorated, *args, **kw):
        # form tuple of arg positions to be converted.
@@ -293,6 +301,7 @@ def nodes_or_number(which_args):
                raise nx.NetworkXError(msg)
            new_args[i] = (n, nodes)
        return func_to_be_decorated(*new_args, **kw)
+
    return _nodes_or_number
@@ -337,6 +346,7 @@ def preserve_random_state(func):
            with save_random_state():
                seed(1234567890)
                return func(*args, **kwargs)
+
        wrapper.__name__ = func.__name__
        return wrapper
    except ImportError:
@@ -378,6 +388,7 @@ def random_state(random_state_index):
    --------
    py_random_state
    """
+
    @decorator
    def _random_state(func, *args, **kwargs):
        # Parse the decorator arguments.
@@ -395,6 +406,7 @@ def random_state(random_state_index):
        new_args = list(args)
        new_args[random_state_index] = random_state
        return func(*new_args, **kwargs)
+
    return _random_state
@@ -437,6 +449,7 @@ def py_random_state(random_state_index):
    --------
    np_random_state
    """
+
    @decorator
    def _random_state(func, *args, **kwargs):
        # Parse the decorator arguments.
@@ -454,4 +467,5 @@ def py_random_state(random_state_index):
        new_args = list(args)
        new_args[random_state_index] = random_state
        return func(*new_args, **kwargs)
+
    return _random_state
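# A minimal usage sketch (illustrative, not part of this commit) for the three
# decorator families reformatted above; the function names below are invented:
import networkx as nx
from networkx.utils.decorators import not_implemented_for, open_file, py_random_state


@not_implemented_for("directed")
def undirected_only(G):
    # Calling this with a DiGraph raises nx.NetworkXNotImplemented.
    return G.number_of_nodes()


@open_file(1, mode="wb")
def save_node_list(G, path):
    # `path` may be a filename (".gz"/".bz2"/".gzip" handled via _dispatch_dict)
    # or an already-open file-like object; the decorator opens/closes as needed.
    path.write(" ".join(map(str, G)).encode("ascii"))


@py_random_state(1)
def random_node(G, seed=None):
    # `seed` arrives normalized to a random.Random-compatible generator.
    return seed.choice(list(G))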
""" - __slots__ = ('left', 'next', 'prev', 'parent') + + __slots__ = ("left", "next", "prev", "parent") def __init__(self, key, value): super(PairingHeap._Node, self).__init__(key, value) @@ -173,13 +177,13 @@ class PairingHeap(MinHeap): @_inherit_doc(MinHeap) def min(self): if self._root is None: - raise nx.NetworkXError('heap is empty.') + raise nx.NetworkXError("heap is empty.") return (self._root.key, self._root.value) @_inherit_doc(MinHeap) def pop(self): if self._root is None: - raise nx.NetworkXError('heap is empty.') + raise nx.NetworkXError("heap is empty.") min_node = self._root self._root = self._merge_children(self._root) del self._dict[min_node.key] @@ -311,7 +315,7 @@ class BinaryHeap(MinHeap): def min(self): dict = self._dict if not dict: - raise nx.NetworkXError('heap is empty') + raise nx.NetworkXError("heap is empty") heap = self._heap pop = heappop # Repeatedly remove stale key-value pairs until a up-to-date one is @@ -327,7 +331,7 @@ class BinaryHeap(MinHeap): def pop(self): dict = self._dict if not dict: - raise nx.NetworkXError('heap is empty') + raise nx.NetworkXError("heap is empty") heap = self._heap pop = heappop # Repeatedly remove stale key-value pairs until a up-to-date one is diff --git a/networkx/utils/mapped_queue.py b/networkx/utils/mapped_queue.py index 24f71964..d633c748 100644 --- a/networkx/utils/mapped_queue.py +++ b/networkx/utils/mapped_queue.py @@ -3,7 +3,7 @@ import heapq -__all__ = ['MappedQueue'] +__all__ = ["MappedQueue"] class MappedQueue: diff --git a/networkx/utils/misc.py b/networkx/utils/misc.py index 86a637bb..86e226c6 100644 --- a/networkx/utils/misc.py +++ b/networkx/utils/misc.py @@ -27,8 +27,10 @@ import networkx as nx def is_string_like(obj): # from John Hunter, types-free version """Check if obj is string.""" - msg = "is_string_like is deprecated and will be removed in 3.0." \ - "Use isinstance(obj, str) instead." + msg = ( + "is_string_like is deprecated and will be removed in 3.0." + "Use isinstance(obj, str) instead." + ) warnings.warn(msg, DeprecationWarning) return isinstance(obj, str) @@ -132,10 +134,12 @@ def default_opener(filename): """ from subprocess import call - cmds = {'darwin': ['open'], - 'linux': ['xdg-open'], - 'linux2': ['xdg-open'], - 'win32': ['cmd.exe', '/C', 'start', '']} + cmds = { + "darwin": ["open"], + "linux": ["xdg-open"], + "linux2": ["xdg-open"], + "win32": ["cmd.exe", "/C", "start", ""], + } cmd = cmds[sys.platform] + [filename] call(cmd) @@ -157,6 +161,7 @@ def dict_to_numpy_array2(d, mapping=None): """ import numpy + if mapping is None: s = set(d.keys()) for k, v in d.items(): @@ -179,6 +184,7 @@ def dict_to_numpy_array1(d, mapping=None): """ import numpy + if mapping is None: s = set(d.keys()) mapping = dict(zip(s, range(len(s)))) @@ -195,7 +201,7 @@ def is_iterator(obj): object. """ - has_next_attr = hasattr(obj, '__next__') or hasattr(obj, 'next') + has_next_attr = hasattr(obj, "__next__") or hasattr(obj, "next") return iter(obj) is obj and has_next_attr @@ -222,7 +228,7 @@ def arbitrary_element(iterable): """ if is_iterator(iterable): - raise ValueError('cannot return an arbitrary item from an iterator') + raise ValueError("cannot return an arbitrary item from an iterator") # Another possible implementation is ``for x in iterable: return x``. 
diff --git a/networkx/utils/mapped_queue.py b/networkx/utils/mapped_queue.py
index 24f71964..d633c748 100644
--- a/networkx/utils/mapped_queue.py
+++ b/networkx/utils/mapped_queue.py
@@ -3,7 +3,7 @@

import heapq

-__all__ = ['MappedQueue']
+__all__ = ["MappedQueue"]


class MappedQueue:
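# A minimal sketch of the MappedQueue API as of this NetworkX version
# (elements are stored directly and must be hashable; illustrative, not part
# of this commit):
from networkx.utils.mapped_queue import MappedQueue

q = MappedQueue([916, 50, 4609, 493, 237])
q.push(1310)          # returns False if the element is already queued
q.update(916, 100)    # replace element 916 with 100, restoring heap order
assert q.pop() == 50  # pop() returns the smallest element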
diff --git a/networkx/utils/misc.py b/networkx/utils/misc.py
index 86a637bb..86e226c6 100644
--- a/networkx/utils/misc.py
+++ b/networkx/utils/misc.py
@@ -27,8 +27,10 @@ import networkx as nx

def is_string_like(obj):  # from John Hunter, types-free version
    """Check if obj is string."""
-    msg = "is_string_like is deprecated and will be removed in 3.0." \
-          "Use isinstance(obj, str) instead."
+    msg = (
+        "is_string_like is deprecated and will be removed in 3.0."
+        "Use isinstance(obj, str) instead."
+    )
    warnings.warn(msg, DeprecationWarning)
    return isinstance(obj, str)
@@ -132,10 +134,12 @@ def default_opener(filename):
    """
    from subprocess import call

-    cmds = {'darwin': ['open'],
-            'linux': ['xdg-open'],
-            'linux2': ['xdg-open'],
-            'win32': ['cmd.exe', '/C', 'start', '']}
+    cmds = {
+        "darwin": ["open"],
+        "linux": ["xdg-open"],
+        "linux2": ["xdg-open"],
+        "win32": ["cmd.exe", "/C", "start", ""],
+    }
    cmd = cmds[sys.platform] + [filename]
    call(cmd)
@@ -157,6 +161,7 @@ def dict_to_numpy_array2(d, mapping=None):
    """
    import numpy
+
    if mapping is None:
        s = set(d.keys())
        for k, v in d.items():
@@ -179,6 +184,7 @@ def dict_to_numpy_array1(d, mapping=None):
    """
    import numpy
+
    if mapping is None:
        s = set(d.keys())
        mapping = dict(zip(s, range(len(s))))
@@ -195,7 +201,7 @@ def is_iterator(obj):
    object.

    """
-    has_next_attr = hasattr(obj, '__next__') or hasattr(obj, 'next')
+    has_next_attr = hasattr(obj, "__next__") or hasattr(obj, "next")
    return iter(obj) is obj and has_next_attr
@@ -222,7 +228,7 @@ def arbitrary_element(iterable):

    """
    if is_iterator(iterable):
-        raise ValueError('cannot return an arbitrary item from an iterator')
+        raise ValueError("cannot return an arbitrary item from an iterator")
    # Another possible implementation is ``for x in iterable: return x``.
    return next(iter(iterable))
@@ -302,19 +308,24 @@ def create_random_state(random_state=None):
        return random_state
    if isinstance(random_state, int):
        return np.random.RandomState(random_state)
-    msg = f"{random_state} cannot be used to generate a numpy.random.RandomState instance"
+    msg = (
+        f"{random_state} cannot be used to generate a numpy.random.RandomState instance"
+    )
    raise ValueError(msg)


class PythonRandomInterface:
    try:
+
        def __init__(self, rng=None):
            import numpy
+
            if rng is None:
                self._rng = numpy.random.mtrand._rand
            self._rng = rng
+
    except ImportError:
-        msg = 'numpy not found, only random.random available.'
+        msg = "numpy not found, only random.random available."
        warnings.warn(msg, ImportWarning)

    def random(self):
@@ -335,8 +346,8 @@ class PythonRandomInterface:
    def shuffle(self, seq):
        return self._rng.shuffle(seq)

-# Some methods don't match API for numpy RandomState.
-# Commented out versions are not used by NetworkX
+    # Some methods don't match API for numpy RandomState.
+    # Commented out versions are not used by NetworkX
    def sample(self, seq, k):
        return self._rng.choice(list(seq), size=(k,), replace=False)

@@ -344,14 +355,15 @@ def randint(self, a, b):
        return self._rng.randint(a, b + 1)

-#    exponential as expovariate with 1/argument,
+    #    exponential as expovariate with 1/argument,
    def expovariate(self, scale):
-        return self._rng.exponential(1/scale)
+        return self._rng.exponential(1 / scale)

-#    pareto as paretovariate with 1/argument,
+    #    pareto as paretovariate with 1/argument,
    def paretovariate(self, shape):
        return self._rng.pareto(shape)

+
#    weibull as weibullvariate multiplied by beta,
#    def weibullvariate(self, alpha, beta):
#        return self._rng.weibull(alpha) * beta
@@ -380,8 +392,10 @@ def create_py_random_state(random_state=None):
        if a PythonRandomInterface instance, return it
    """
    import random
+
    try:
        import numpy as np
+
        if random_state is np.random:
            return PythonRandomInterface(np.random.mtrand._rand)
        if isinstance(random_state, np.random.RandomState):
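# A minimal sketch of the seed normalization performed by
# create_py_random_state above (illustrative, not part of this commit):
import random
import numpy as np
from networkx.utils import create_py_random_state, PythonRandomInterface

assert isinstance(create_py_random_state(1), random.Random)  # int seeds
pri = create_py_random_state(np.random.RandomState(42))      # numpy RNGs are wrapped
assert isinstance(pri, PythonRandomInterface)
pri.expovariate(1.5)  # delegates to RandomState.exponential(1 / 1.5)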
", "BLAH ", "BLAH!!!!"] + self.fobj = tempfile.NamedTemporaryFile("wb+", delete=False) self.name = self.fobj.name def teardown_method(self): @@ -51,42 +56,42 @@ class TestOpenFileDecorator: def write(self, path): for text in self.text: - path.write(text.encode('ascii')) + path.write(text.encode("ascii")) - @open_file(1, 'r') + @open_file(1, "r") def read(self, path): return path.readlines()[0] @staticmethod - @open_file(0, 'wb') + @open_file(0, "wb") def writer_arg0(path): - path.write(b'demo') + path.write(b"demo") - @open_file(1, 'wb+') + @open_file(1, "wb+") def writer_arg1(self, path): self.write(path) - @open_file(2, 'wb') + @open_file(2, "wb") def writer_arg2default(self, x, path=None): if path is None: - with tempfile.NamedTemporaryFile('wb+') as fh: + with tempfile.NamedTemporaryFile("wb+") as fh: self.write(fh) else: self.write(path) - @open_file(4, 'wb') - def writer_arg4default(self, x, y, other='hello', path=None, **kwargs): + @open_file(4, "wb") + def writer_arg4default(self, x, y, other="hello", path=None, **kwargs): if path is None: - with tempfile.NamedTemporaryFile('wb+') as fh: + with tempfile.NamedTemporaryFile("wb+") as fh: self.write(fh) else: self.write(path) - @open_file('path', 'wb') + @open_file("path", "wb") def writer_kwarg(self, **kwargs): - path = kwargs.get('path', None) + path = kwargs.get("path", None) if path is None: - with tempfile.NamedTemporaryFile('wb+') as fh: + with tempfile.NamedTemporaryFile("wb+") as fh: self.write(fh) else: self.write(path) @@ -102,41 +107,41 @@ class TestOpenFileDecorator: def test_writer_arg1_str(self): self.writer_arg1(self.name) - assert self.read(self.name) == ''.join(self.text) + assert self.read(self.name) == "".join(self.text) def test_writer_arg1_fobj(self): self.writer_arg1(self.fobj) assert not self.fobj.closed self.fobj.close() - assert self.read(self.name) == ''.join(self.text) + assert self.read(self.name) == "".join(self.text) def test_writer_arg2default_str(self): self.writer_arg2default(0, path=None) self.writer_arg2default(0, path=self.name) - assert self.read(self.name) == ''.join(self.text) + assert self.read(self.name) == "".join(self.text) def test_writer_arg2default_fobj(self): self.writer_arg2default(0, path=self.fobj) assert not self.fobj.closed self.fobj.close() - assert self.read(self.name) == ''.join(self.text) + assert self.read(self.name) == "".join(self.text) def test_writer_arg2default_fobj_path_none(self): self.writer_arg2default(0, path=None) def test_writer_arg4default_fobj(self): - self.writer_arg4default(0, 1, dog='dog', other='other') - self.writer_arg4default(0, 1, dog='dog', other='other', path=self.name) - assert self.read(self.name) == ''.join(self.text) + self.writer_arg4default(0, 1, dog="dog", other="other") + self.writer_arg4default(0, 1, dog="dog", other="other", path=self.name) + assert self.read(self.name) == "".join(self.text) def test_writer_kwarg_str(self): self.writer_kwarg(path=self.name) - assert self.read(self.name) == ''.join(self.text) + assert self.read(self.name) == "".join(self.text) def test_writer_kwarg_fobj(self): self.writer_kwarg(path=self.fobj) self.fobj.close() - assert self.read(self.name) == ''.join(self.text) + assert self.read(self.name) == "".join(self.text) def test_writer_kwarg_path_none(self): self.writer_kwarg(path=None) @@ -146,10 +151,11 @@ class TestOpenFileDecorator: def test_preserve_random_state(): try: import numpy.random + r = numpy.random.random() except ImportError: return - assert(abs(r - 0.61879477158568) < 1e-16) + assert abs(r - 
diff --git a/networkx/utils/rcm.py b/networkx/utils/rcm.py
index e1b7b2fb..6c7094f7 100644
--- a/networkx/utils/rcm.py
+++ b/networkx/utils/rcm.py
@@ -7,8 +7,7 @@ from operator import itemgetter

import networkx as nx
from ..utils import arbitrary_element

-__all__ = ['cuthill_mckee_ordering',
-           'reverse_cuthill_mckee_ordering']
+__all__ = ["cuthill_mckee_ordering", "reverse_cuthill_mckee_ordering"]


def cuthill_mckee_ordering(G, heuristic=None):
@@ -135,8 +134,7 @@ def connected_cuthill_mckee_ordering(G, heuristic=None):
    while queue:
        parent = queue.popleft()
        yield parent
-        nd = sorted(list(G.degree(set(G[parent]) - visited)),
-                    key=itemgetter(1))
+        nd = sorted(list(G.degree(set(G[parent]) - visited)), key=itemgetter(1))
        children = [n for n, d in nd]
        visited.update(children)
        queue.extend(children)
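# A minimal usage sketch for the orderings reformatted above (mirrors
# test_rcm.py further down in this diff; illustrative, not part of this
# commit):
import networkx as nx
from networkx.utils import reverse_cuthill_mckee_ordering

G = nx.grid_2d_graph(4, 4)
rcm = list(reverse_cuthill_mckee_ordering(G))
# Relabeling nodes in this order tends to concentrate the adjacency matrix
# near the diagonal (reduced bandwidth).
assert sorted(rcm) == sorted(G)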
diff --git a/networkx/utils/tests/test_contextmanager.py b/networkx/utils/tests/test_contextmanager.py
index c5acb281..69246837 100644
--- a/networkx/utils/tests/test_contextmanager.py
+++ b/networkx/utils/tests/test_contextmanager.py
@@ -3,16 +3,16 @@ import networkx as nx

def test_reversed():
    G = nx.DiGraph()
-    G.add_edge('A', 'B')
+    G.add_edge("A", "B")

    # no exception
    with nx.utils.reversed(G):
        pass
-    assert 'B' in G['A']
+    assert "B" in G["A"]

    # exception
    try:
        with nx.utils.reversed(G):
            raise Exception
    except:
-        assert 'B' in G['A']
+        assert "B" in G["A"]
diff --git a/networkx/utils/tests/test_decorators.py b/networkx/utils/tests/test_decorators.py
index ef77e97a..c14fa191 100644
--- a/networkx/utils/tests/test_decorators.py
+++ b/networkx/utils/tests/test_decorators.py
@@ -11,38 +11,43 @@ from networkx.utils.decorators import (
    preserve_random_state,
    py_random_state,
    np_random_state,
-    random_state
+    random_state,
)
from networkx.utils.misc import PythonRandomInterface


def test_not_implemented_decorator():
-    @not_implemented_for('directed')
+    @not_implemented_for("directed")
    def test1(G):
        pass
+
    test1(nx.Graph())


def test_not_implemented_decorator_key():
    with pytest.raises(KeyError):
-        @not_implemented_for('foo')
+
+        @not_implemented_for("foo")
        def test1(G):
            pass
+
        test1(nx.Graph())


def test_not_implemented_decorator_raise():
    with pytest.raises(nx.NetworkXNotImplemented):
-        @not_implemented_for('graph')
+
+        @not_implemented_for("graph")
        def test1(G):
            pass
+
        test1(nx.Graph())


class TestOpenFileDecorator:
    def setup_method(self):
-        self.text = ['Blah... ', 'BLAH ', 'BLAH!!!!']
-        self.fobj = tempfile.NamedTemporaryFile('wb+', delete=False)
+        self.text = ["Blah... ", "BLAH ", "BLAH!!!!"]
+        self.fobj = tempfile.NamedTemporaryFile("wb+", delete=False)
        self.name = self.fobj.name

    def teardown_method(self):
@@ -51,42 +56,42 @@ class TestOpenFileDecorator:

    def write(self, path):
        for text in self.text:
-            path.write(text.encode('ascii'))
+            path.write(text.encode("ascii"))

-    @open_file(1, 'r')
+    @open_file(1, "r")
    def read(self, path):
        return path.readlines()[0]

    @staticmethod
-    @open_file(0, 'wb')
+    @open_file(0, "wb")
    def writer_arg0(path):
-        path.write(b'demo')
+        path.write(b"demo")

-    @open_file(1, 'wb+')
+    @open_file(1, "wb+")
    def writer_arg1(self, path):
        self.write(path)

-    @open_file(2, 'wb')
+    @open_file(2, "wb")
    def writer_arg2default(self, x, path=None):
        if path is None:
-            with tempfile.NamedTemporaryFile('wb+') as fh:
+            with tempfile.NamedTemporaryFile("wb+") as fh:
                self.write(fh)
        else:
            self.write(path)

-    @open_file(4, 'wb')
-    def writer_arg4default(self, x, y, other='hello', path=None, **kwargs):
+    @open_file(4, "wb")
+    def writer_arg4default(self, x, y, other="hello", path=None, **kwargs):
        if path is None:
-            with tempfile.NamedTemporaryFile('wb+') as fh:
+            with tempfile.NamedTemporaryFile("wb+") as fh:
                self.write(fh)
        else:
            self.write(path)

-    @open_file('path', 'wb')
+    @open_file("path", "wb")
    def writer_kwarg(self, **kwargs):
-        path = kwargs.get('path', None)
+        path = kwargs.get("path", None)
        if path is None:
-            with tempfile.NamedTemporaryFile('wb+') as fh:
+            with tempfile.NamedTemporaryFile("wb+") as fh:
                self.write(fh)
        else:
            self.write(path)
@@ -102,41 +107,41 @@ class TestOpenFileDecorator:

    def test_writer_arg1_str(self):
        self.writer_arg1(self.name)
-        assert self.read(self.name) == ''.join(self.text)
+        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg1_fobj(self):
        self.writer_arg1(self.fobj)
        assert not self.fobj.closed
        self.fobj.close()
-        assert self.read(self.name) == ''.join(self.text)
+        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg2default_str(self):
        self.writer_arg2default(0, path=None)
        self.writer_arg2default(0, path=self.name)
-        assert self.read(self.name) == ''.join(self.text)
+        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg2default_fobj(self):
        self.writer_arg2default(0, path=self.fobj)
        assert not self.fobj.closed
        self.fobj.close()
-        assert self.read(self.name) == ''.join(self.text)
+        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg2default_fobj_path_none(self):
        self.writer_arg2default(0, path=None)

    def test_writer_arg4default_fobj(self):
-        self.writer_arg4default(0, 1, dog='dog', other='other')
-        self.writer_arg4default(0, 1, dog='dog', other='other', path=self.name)
-        assert self.read(self.name) == ''.join(self.text)
+        self.writer_arg4default(0, 1, dog="dog", other="other")
+        self.writer_arg4default(0, 1, dog="dog", other="other", path=self.name)
+        assert self.read(self.name) == "".join(self.text)

    def test_writer_kwarg_str(self):
        self.writer_kwarg(path=self.name)
-        assert self.read(self.name) == ''.join(self.text)
+        assert self.read(self.name) == "".join(self.text)

    def test_writer_kwarg_fobj(self):
        self.writer_kwarg(path=self.fobj)
        self.fobj.close()
-        assert self.read(self.name) == ''.join(self.text)
+        assert self.read(self.name) == "".join(self.text)

    def test_writer_kwarg_path_none(self):
        self.writer_kwarg(path=None)
@@ -146,10 +151,11 @@ class TestOpenFileDecorator:
def test_preserve_random_state():
    try:
        import numpy.random
+
        r = numpy.random.random()
    except ImportError:
        return
-    assert(abs(r - 0.61879477158568) < 1e-16)
+    assert abs(r - 0.61879477158568) < 1e-16


class TestRandomState:
@@ -170,8 +176,9 @@ class TestRandomState:
    @py_random_state(1)
    def instantiate_py_random_state(self, random_state):
-        assert (isinstance(random_state, random.Random) or
-                isinstance(random_state, PythonRandomInterface))
+        assert isinstance(random_state, random.Random) or isinstance(
+            random_state, PythonRandomInterface
+        )
        return random_state.random()

    def test_random_state_None(self):
@@ -255,31 +262,39 @@ class TestRandomState:

def test_random_state_string_arg_index():
    with pytest.raises(nx.NetworkXError):
-        @random_state('a')
+
+        @random_state("a")
        def make_random_state(rs):
            pass
+
        rstate = make_random_state(1)


def test_py_random_state_string_arg_index():
    with pytest.raises(nx.NetworkXError):
-        @py_random_state('a')
+
+        @py_random_state("a")
        def make_random_state(rs):
            pass
+
        rstate = make_random_state(1)


def test_random_state_invalid_arg_index():
    with pytest.raises(nx.NetworkXError):
+
        @random_state(2)
        def make_random_state(rs):
            pass
+
        rstate = make_random_state(1)


def test_py_random_state_invalid_arg_index():
    with pytest.raises(nx.NetworkXError):
+
        @py_random_state(2)
        def make_random_state(rs):
            pass
+
        rstate = make_random_state(1)
diff --git a/networkx/utils/tests/test_heaps.py b/networkx/utils/tests/test_heaps.py
index 6a7e61a0..29433881 100644
--- a/networkx/utils/tests/test_heaps.py
+++ b/networkx/utils/tests/test_heaps.py
@@ -4,7 +4,6 @@ from networkx.utils import BinaryHeap, PairingHeap


class X:
-
    def __eq__(self, other):
        raise self is other

@@ -12,16 +11,16 @@ class X:
        raise self is not other

    def __lt__(self, other):
-        raise TypeError('cannot compare')
+        raise TypeError("cannot compare")

    def __le__(self, other):
-        raise TypeError('cannot compare')
+        raise TypeError("cannot compare")

    def __ge__(self, other):
-        raise TypeError('cannot compare')
+        raise TypeError("cannot compare")

    def __gt__(self, other):
-        raise TypeError('cannot compare')
+        raise TypeError("cannot compare")

    def __hash__(self):
        return hash(id(self))
@@ -31,60 +30,61 @@ x = X()

data = [
    # min should not invent an element.
-    ('min', nx.NetworkXError),
+    ("min", nx.NetworkXError),
    # Popping an empty heap should fail.
-    ('pop', nx.NetworkXError),
+    ("pop", nx.NetworkXError),
    # Getting nonexisting elements should return None.
-    ('get', 0, None),
-    ('get', x, None),
-    ('get', None, None),
+    ("get", 0, None),
+    ("get", x, None),
+    ("get", None, None),
    # Inserting a new key should succeed.
-    ('insert', x, 1, True),
-    ('get', x, 1),
-    ('min', (x, 1)),
+    ("insert", x, 1, True),
+    ("get", x, 1),
+    ("min", (x, 1)),
    # min should not pop the top element.
-    ('min', (x, 1)),
+    ("min", (x, 1)),
    # Inserting a new key of different type should succeed.
-    ('insert', 1, -2.0, True),
+    ("insert", 1, -2.0, True),
    # int and float values should interop.
-    ('min', (1, -2.0)),
+    ("min", (1, -2.0)),
    # pop removes minimum-valued element.
-    ('insert', 3, -10 ** 100, True),
-    ('insert', 4, 5, True),
-    ('pop', (3, -10 ** 100)),
-    ('pop', (1, -2.0)),
+    ("insert", 3, -(10 ** 100), True),
+    ("insert", 4, 5, True),
+    ("pop", (3, -(10 ** 100))),
+    ("pop", (1, -2.0)),
    # Decrease-insert should succeed.
-    ('insert', 4, -50, True),
-    ('insert', 4, -60, False, True),
+    ("insert", 4, -50, True),
+    ("insert", 4, -60, False, True),
    # Decrease-insert should not create duplicate keys.
-    ('pop', (4, -60)),
-    ('pop', (x, 1)),
+    ("pop", (4, -60)),
+    ("pop", (x, 1)),
    # Popping all elements should empty the heap.
-    ('min', nx.NetworkXError),
-    ('pop', nx.NetworkXError),
+    ("min", nx.NetworkXError),
+    ("pop", nx.NetworkXError),
    # Non-value-changing insert should fail.
-    ('insert', x, 0, True),
-    ('insert', x, 0, False, False),
-    ('min', (x, 0)),
-    ('insert', x, 0, True, False),
-    ('min', (x, 0)),
+    ("insert", x, 0, True),
+    ("insert", x, 0, False, False),
+    ("min", (x, 0)),
+    ("insert", x, 0, True, False),
+    ("min", (x, 0)),
    # Failed insert should not create duplicate keys.
-    ('pop', (x, 0)),
-    ('pop', nx.NetworkXError),
+    ("pop", (x, 0)),
+    ("pop", nx.NetworkXError),
    # Increase-insert should succeed when allowed.
-    ('insert', None, 0, True),
-    ('insert', 2, -1, True),
-    ('min', (2, -1)),
-    ('insert', 2, 1, True, False),
-    ('min', (None, 0)),
+    ("insert", None, 0, True),
+    ("insert", 2, -1, True),
+    ("min", (2, -1)),
+    ("insert", 2, 1, True, False),
+    ("min", (None, 0)),
    # Increase-insert should fail when disallowed.
-    ('insert', None, 2, False, False),
-    ('min', (None, 0)),
+    ("insert", None, 2, False, False),
+    ("min", (None, 0)),
    # Failed increase-insert should not create duplicate keys.
-    ('pop', (None, 0)),
-    ('pop', (2, 1)),
-    ('min', nx.NetworkXError),
-    ('pop', nx.NetworkXError)]
+    ("pop", (None, 0)),
+    ("pop", (2, 1)),
+    ("min", nx.NetworkXError),
+    ("pop", nx.NetworkXError),
+]


def _test_heap_class(cls, *args, **kwargs):
diff --git a/networkx/utils/tests/test_mapped_queue.py b/networkx/utils/tests/test_mapped_queue.py
index a29cb6b6..78ea91ec 100644
--- a/networkx/utils/tests/test_mapped_queue.py
+++ b/networkx/utils/tests/test_mapped_queue.py
@@ -2,7 +2,6 @@ from networkx.utils.mapped_queue import MappedQueue


class TestMappedQueue:
-
    def setup(self):
        pass
diff --git a/networkx/utils/tests/test_misc.py b/networkx/utils/tests/test_misc.py
index 265d0d8a..3ecf2ef0 100644
--- a/networkx/utils/tests/test_misc.py
+++ b/networkx/utils/tests/test_misc.py
@@ -16,7 +16,7 @@ from networkx.utils import (
    pairwise,
    powerlaw_sequence,
    PythonRandomInterface,
-    to_tuple
+    to_tuple,
)
@@ -43,7 +43,7 @@ def test_graph_iterable():


def test_make_list_of_ints():
-    mylist = [1, 2, 3., 42, -2]
+    mylist = [1, 2, 3.0, 42, -2]
    assert make_list_of_ints(mylist) is mylist
    assert make_list_of_ints(mylist) == mylist
    assert type(make_list_of_ints(mylist)[2]) is int
@@ -81,7 +81,7 @@ class TestNumpyArray:

    def test_numpy_to_list_of_ints(self):
        a = numpy.array([1, 2, 3], dtype=numpy.int64)
-        b = numpy.array([1., 2, 3])
+        b = numpy.array([1.0, 2, 3])
        c = numpy.array([1.1, 2, 3])
        assert type(make_list_of_ints(a)) == list
        assert make_list_of_ints(b) == list(b)
@@ -90,20 +90,19 @@ class TestNumpyArray:
        pytest.raises(nx.NetworkXError, make_list_of_ints, c)

    def test_dict_to_numpy_array1(self):
-        d = {'a': 1, 'b': 2}
-        a = dict_to_numpy_array1(d, mapping={'a': 0, 'b': 1})
+        d = {"a": 1, "b": 2}
+        a = dict_to_numpy_array1(d, mapping={"a": 0, "b": 1})
        assert_allclose(a, numpy.array([1, 2]))
-        a = dict_to_numpy_array1(d, mapping={'b': 0, 'a': 1})
+        a = dict_to_numpy_array1(d, mapping={"b": 0, "a": 1})
        assert_allclose(a, numpy.array([2, 1]))

        a = dict_to_numpy_array1(d)
        assert_allclose(a.sum(), 3)

    def test_dict_to_numpy_array2(self):
-        d = {'a': {'a': 1, 'b': 2},
-             'b': {'a': 10, 'b': 20}}
+        d = {"a": {"a": 1, "b": 2}, "b": {"a": 10, "b": 20}}

-        mapping = {'a': 1, 'b': 0}
+        mapping = {"a": 1, "b": 0}
        a = dict_to_numpy_array2(d, mapping=mapping)
        assert_allclose(a, numpy.array([[20, 10], [2, 1]]))

@@ -111,14 +110,13 @@ class TestNumpyArray:
        assert_allclose(a.sum(), 33)

    def test_dict_to_numpy_array_a(self):
-        d = {'a': {'a': 1, 'b': 2},
-             'b': {'a': 10, 'b': 20}}
+        d = {"a": {"a": 1, "b": 2}, "b": {"a": 10, "b": 20}}

-        mapping = {'a': 0, 'b': 1}
+        mapping = {"a": 0, "b": 1}
        a = dict_to_numpy_array(d, mapping=mapping)
        assert_allclose(a, numpy.array([[1, 2], [10, 20]]))

-        mapping = {'a': 1, 'b': 0}
+        mapping = {"a": 1, "b": 0}
        a = dict_to_numpy_array(d, mapping=mapping)
        assert_allclose(a, numpy.array([[20, 10], [2, 1]]))

@@ -126,9 +124,9 @@ class TestNumpyArray:
        assert_allclose(a.sum(), 33)

    def test_dict_to_numpy_array_b(self):
-        d = {'a': 1, 'b': 2}
+        d = {"a": 1, "b": 2}

-        mapping = {'a': 0, 'b': 1}
+        mapping = {"a": 0, "b": 1}
        a = dict_to_numpy_array(d, mapping=mapping)
        assert_allclose(a, numpy.array([1, 2]))

@@ -150,9 +148,9 @@ def test_pairwise():


def test_groups():
-    many_to_one = dict(zip('abcde', [0, 0, 1, 1, 2]))
+    many_to_one = dict(zip("abcde", [0, 0, 1, 1, 2]))
    actual = groups(many_to_one)
-    expected = {0: {'a', 'b'}, 1: {'c', 'd'}, 2: {'e'}}
+    expected = {0: {"a", "b"}, 1: {"c", "d"}, 2: {"e"}}
    assert actual == expected
    assert {} == groups({})

@@ -175,14 +173,14 @@ def test_to_tuple():


def test_create_random_state():
-    np = pytest.importorskip('numpy')
+    np = pytest.importorskip("numpy")
    rs = np.random.RandomState

    assert isinstance(create_random_state(1), rs)
    assert isinstance(create_random_state(None), rs)
    assert isinstance(create_random_state(np.random), rs)
    assert isinstance(create_random_state(rs(1)), rs)
-    pytest.raises(ValueError, create_random_state, 'a')
+    pytest.raises(ValueError, create_random_state, "a")

    assert np.all(rs(1).rand(10) == create_random_state(1).rand(10))

@@ -193,9 +191,9 @@ def test_create_py_random_state():
    assert isinstance(create_py_random_state(1), pyrs)
    assert isinstance(create_py_random_state(None), pyrs)
    assert isinstance(create_py_random_state(pyrs(1)), pyrs)
-    pytest.raises(ValueError, create_py_random_state, 'a')
+    pytest.raises(ValueError, create_py_random_state, "a")

-    np = pytest.importorskip('numpy')
+    np = pytest.importorskip("numpy")
    rs = np.random.RandomState
    nprs = PythonRandomInterface

@@ -206,7 +204,7 @@ def test_create_py_random_state():


def test_PythonRandomInterface():
-    np = pytest.importorskip('numpy')
+    np = pytest.importorskip("numpy")
    rs = np.random.RandomState
    rng = PythonRandomInterface(rs(42))
    rs42 = rs(42)
@@ -215,9 +213,10 @@ def test_PythonRandomInterface():
    assert rng.randrange(3, 5) == rs42.randint(3, 5)
    assert np.all(rng.choice([1, 2, 3]) == rs42.choice([1, 2, 3]))
    assert rng.gauss(0, 1) == rs42.normal(0, 1)
-    assert rng.expovariate(1.5) == rs42.exponential(1/1.5)
+    assert rng.expovariate(1.5) == rs42.exponential(1 / 1.5)
    assert np.all(rng.shuffle([1, 2, 3]) == rs42.shuffle([1, 2, 3]))
-    assert np.all(rng.sample([1, 2, 3], 2) ==
-                  rs42.choice([1, 2, 3], (2,), replace=False))
+    assert np.all(
+        rng.sample([1, 2, 3], 2) == rs42.choice([1, 2, 3], (2,), replace=False)
+    )
    assert rng.randint(3, 5) == rs42.randint(3, 6)
    assert rng.random() == rs42.random_sample()
diff --git a/networkx/utils/tests/test_random_sequence.py b/networkx/utils/tests/test_random_sequence.py
index 9325415a..6a565bc6 100644
--- a/networkx/utils/tests/test_random_sequence.py
+++ b/networkx/utils/tests/test_random_sequence.py
@@ -1,7 +1,10 @@
import pytest
-from networkx.utils import powerlaw_sequence,\
-    zipf_rv, random_weighted_sample,\
-    weighted_choice
+from networkx.utils import (
+    powerlaw_sequence,
+    zipf_rv,
+    random_weighted_sample,
+    weighted_choice,
+)


def test_degree_sequences():
@@ -20,7 +23,7 @@ def test_zipf_rv():


def test_random_weighted_sample():
-    mapping = {'a': 10, 'b': 20}
+    mapping = {"a": 10, "b": 20}
    s = random_weighted_sample(mapping, 2, seed=1)
    s = random_weighted_sample(mapping, 2)
    assert sorted(s) == sorted(mapping.keys())
@@ -28,7 +31,7 @@ def test_random_weighted_sample():


def test_random_weighted_choice():
-    mapping = {'a': 10, 'b': 0}
+    mapping = {"a": 10, "b": 0}
    c = weighted_choice(mapping, seed=1)
    c = weighted_choice(mapping)
-    assert c == 'a'
+    assert c == "a"
diff --git a/networkx/utils/tests/test_rcm.py b/networkx/utils/tests/test_rcm.py
index ce14812a..b53cc8f1 100644
--- a/networkx/utils/tests/test_rcm.py
+++ b/networkx/utils/tests/test_rcm.py
@@ -5,36 +5,59 @@ import networkx as nx

def test_reverse_cuthill_mckee():
    # example graph from
    # http://www.boost.org/doc/libs/1_37_0/libs/graph/example/cuthill_mckee_ordering.cpp
-    G = nx.Graph([(0, 3), (0, 5), (1, 2), (1, 4), (1, 6), (1, 9), (2, 3),
-                  (2, 4), (3, 5), (3, 8), (4, 6), (5, 6), (5, 7), (6, 7)])
+    G = nx.Graph(
+        [
+            (0, 3),
+            (0, 5),
+            (1, 2),
+            (1, 4),
+            (1, 6),
+            (1, 9),
+            (2, 3),
+            (2, 4),
+            (3, 5),
+            (3, 8),
+            (4, 6),
+            (5, 6),
+            (5, 7),
+            (6, 7),
+        ]
+    )
    rcm = list(reverse_cuthill_mckee_ordering(G))
-    assert rcm in [[0, 8, 5, 7, 3, 6, 2, 4, 1, 9],
-                   [0, 8, 5, 7, 3, 6, 4, 2, 1, 9]]
+    assert rcm in [[0, 8, 5, 7, 3, 6, 2, 4, 1, 9], [0, 8, 5, 7, 3, 6, 4, 2, 1, 9]]


def test_rcm_alternate_heuristic():
    # example from
-    G = nx.Graph([(0, 0),
-                  (0, 4),
-                  (1, 1),
-                  (1, 2),
-                  (1, 5),
-                  (1, 7),
-                  (2, 2),
-                  (2, 4),
-                  (3, 3),
-                  (3, 6),
-                  (4, 4),
-                  (5, 5),
-                  (5, 7),
-                  (6, 6),
-                  (7, 7)])
+    G = nx.Graph(
+        [
+            (0, 0),
+            (0, 4),
+            (1, 1),
+            (1, 2),
+            (1, 5),
+            (1, 7),
+            (2, 2),
+            (2, 4),
+            (3, 3),
+            (3, 6),
+            (4, 4),
+            (5, 5),
+            (5, 7),
+            (6, 6),
+            (7, 7),
+        ]
+    )

-    answers = [[6, 3, 5, 7, 1, 2, 4, 0], [6, 3, 7, 5, 1, 2, 4, 0],
-               [7, 5, 1, 2, 4, 0, 6, 3]]
+    answers = [
+        [6, 3, 5, 7, 1, 2, 4, 0],
+        [6, 3, 7, 5, 1, 2, 4, 0],
+        [7, 5, 1, 2, 4, 0, 6, 3],
+    ]

    def smallest_degree(G):
        deg, node = min((d, n) for n, d in G.degree())
        return node
+
    rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))
    assert rcm in answers
diff --git a/networkx/utils/tests/test_unionfind.py b/networkx/utils/tests/test_unionfind.py
index ac9f00e5..75a8faeb 100644
--- a/networkx/utils/tests/test_unionfind.py
+++ b/networkx/utils/tests/test_unionfind.py
@@ -9,7 +9,7 @@ def test_unionfind():
    #
    # Now we just make sure that no exception is raised.
    x = nx.utils.UnionFind()
-    x.union(0, 'a')
+    x.union(0, "a")


def test_subtree_union():
diff --git a/networkx/utils/union_find.py b/networkx/utils/union_find.py
index f3214498..ed221793 100644
--- a/networkx/utils/union_find.py
+++ b/networkx/utils/union_find.py
@@ -89,12 +89,10 @@ class UnionFind:

        yield from groups(self.parents).values()

-
    def union(self, *objects):
        """Find the sets containing the objects and merge them all."""
        # Find the heaviest root according to its weight.
-        roots = iter(sorted({self[x] for x in objects},
-                            key=lambda r: self.weights[r]))
+        roots = iter(sorted({self[x] for x in objects}, key=lambda r: self.weights[r]))
        try:
            root = next(roots)
        except StopIteration:
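# A minimal sketch of the UnionFind structure whose union() method is
# reformatted above; keys of mixed hashable types are allowed, as
# test_unionfind.py exercises with x.union(0, "a"). Illustrative, not part of
# this commit:
import networkx as nx

uf = nx.utils.UnionFind()
uf.union(1, 2)
uf.union(2, 3)
assert uf[1] == uf[3]  # 1 and 3 now share a root
assert uf[4] == 4      # indexing an unseen key creates a new singleton set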
diff --git a/tools/gitwash_dumper.py b/tools/gitwash_dumper.py
index 5ca97c4d..4f972b4c 100644
--- a/tools/gitwash_dumper.py
+++ b/tools/gitwash_dumper.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-''' Checkout gitwash repo into directory and do search replace on name '''
+""" Checkout gitwash repo into directory and do search replace on name """

import os
@@ -20,10 +20,10 @@ def clone_repo(url, branch):
    cwd = os.getcwd()
    tmpdir = tempfile.mkdtemp()
    try:
-        cmd = f'git clone {url} {tmpdir}'
+        cmd = f"git clone {url} {tmpdir}"
        call(cmd, shell=True)
        os.chdir(tmpdir)
-        cmd = 'git checkout %s' % branch
+        cmd = "git checkout %s" % branch
        call(cmd, shell=True)
    except:
        shutil.rmtree(tmpdir)
@@ -52,9 +52,9 @@ def cp_files(in_path, globs, out_path):


def filename_search_replace(sr_pairs, filename, backup=False):
-    ''' Search and replace for expressions in files
+    """ Search and replace for expressions in files

-    '''
+    """
    with open(filename) as in_fh:
        in_txt = in_fh.read(-1)
    out_txt = in_txt[:]
@@ -63,27 +63,24 @@ def filename_search_replace(sr_pairs, filename, backup=False):
        out_txt = in_exp.sub(out_exp, out_txt)
    if in_txt == out_txt:
        return False
-    with open(filename, 'wt') as out_fh:
+    with open(filename, "wt") as out_fh:
        out_fh.write(out_txt)
    if backup:
-        with open(filename + '.bak', 'wt') as bak_fh:
+        with open(filename + ".bak", "wt") as bak_fh:
            bak_fh.write(in_txt)
    return True


-def copy_replace(replace_pairs,
-                 repo_path,
-                 out_path,
-                 cp_globs=('*',),
-                 rep_globs=('*',),
-                 renames = ()):
+def copy_replace(
+    replace_pairs, repo_path, out_path, cp_globs=("*",), rep_globs=("*",), renames=()
+):
    out_fnames = cp_files(repo_path, cp_globs, out_path)
    renames = [(re.compile(in_exp), out_exp) for in_exp, out_exp in renames]
    fnames = []
    for rep_glob in rep_globs:
        fnames += fnmatch.filter(out_fnames, rep_glob)
    if verbose:
-        print('\n'.join(fnames))
+        print("\n".join(fnames))
    for fname in fnames:
        filename_search_replace(replace_pairs, fname, False)
        for in_exp, out_exp in renames:
@@ -93,13 +90,15 @@ def copy_replace(replace_pairs,
            break


-def make_link_targets(proj_name,
-                      user_name,
-                      repo_name,
-                      known_link_fname,
-                      out_link_fname,
-                      url=None,
-                      ml_url=None):
+def make_link_targets(
+    proj_name,
+    user_name,
+    repo_name,
+    known_link_fname,
+    out_link_fname,
+    url=None,
+    ml_url=None,
+):
    """ Check and make link targets

    If url is None or ml_url is None, check if there are links present for these
@@ -125,81 +124,101 @@ def make_link_targets(proj_name,
    have_gh_url = None
    for line in link_contents:
        if not have_url:
-            match = re.match(r'..\s+_`%s`:\s+' % proj_name, line)
+            match = re.match(r"..\s+_`%s`:\s+" % proj_name, line)
            if match:
                have_url = True
        if not have_ml_url:
-            match = re.match(r'..\s+_`%s mailing list`:\s+' % proj_name, line)
+            match = re.match(r"..\s+_`%s mailing list`:\s+" % proj_name, line)
            if match:
                have_ml_url = True
        if not have_gh_url:
-            match = re.match(r'..\s+_`%s github`:\s+' % proj_name, line)
+            match = re.match(r"..\s+_`%s github`:\s+" % proj_name, line)
            if match:
                have_gh_url = True
    if not have_url or not have_ml_url:
-        raise RuntimeError('Need command line or known project '
-                           'and / or mailing list URLs')
+        raise RuntimeError(
+            "Need command line or known project " "and / or mailing list URLs"
+        )
    lines = []
    if not url is None:
-        lines.append(f'.. _`{proj_name}`: {url}\n')
+        lines.append(f".. _`{proj_name}`: {url}\n")
    if not have_gh_url:
-        gh_url = f'https://github.com/{user_name}/{repo_name}\n'
-        lines.append(f'.. _`{proj_name} github`: {gh_url}\n')
+        gh_url = f"https://github.com/{user_name}/{repo_name}\n"
+        lines.append(f".. _`{proj_name} github`: {gh_url}\n")
    if not ml_url is None:
-        lines.append(f'.. _`{proj_name} mailing list`: {ml_url}\n')
+        lines.append(f".. _`{proj_name} mailing list`: {ml_url}\n")
    if len(lines) == 0:
        # Nothing to do
        return
    # A neat little header line
-    lines = ['.. %s\n' % proj_name] + lines
-    with open(out_link_fname, 'wt') as out_links:
+    lines = [".. %s\n" % proj_name] + lines
+    with open(out_link_fname, "wt") as out_links:
        out_links.writelines(lines)


-USAGE = ''' <output_directory> <project_name>
+USAGE = """ <output_directory> <project_name>

If not set with options, the repository name is the same as the <project
name>

If not set with options, the main github user is the same as the
-repository name.'''
+repository name."""


-GITWASH_CENTRAL = 'git://github.com/matthew-brett/gitwash.git'
-GITWASH_BRANCH = 'master'
+GITWASH_CENTRAL = "git://github.com/matthew-brett/gitwash.git"
+GITWASH_BRANCH = "master"
nitime", + metavar="REPO_NAME", + ) + parser.add_option( + "--github-user", + dest="main_gh_user", + help="github username for main repo - e.g fperez", + metavar="MAIN_GH_USER", + ) + parser.add_option( + "--gitwash-url", + dest="gitwash_url", + help="URL to gitwash repository - default %s" % GITWASH_CENTRAL, + default=GITWASH_CENTRAL, + metavar="GITWASH_URL", + ) + parser.add_option( + "--gitwash-branch", + dest="gitwash_branch", + help="branch in gitwash repository - default %s" % GITWASH_BRANCH, + default=GITWASH_BRANCH, + metavar="GITWASH_BRANCH", + ) + parser.add_option( + "--source-suffix", + dest="source_suffix", + help="suffix of ReST source files - default '.rst'", + default=".rst", + metavar="SOURCE_SUFFIX", + ) + parser.add_option( + "--project-url", + dest="project_url", + help="URL for project web pages", + default=None, + metavar="PROJECT_URL", + ) + parser.add_option( + "--project-ml-url", + dest="project_ml_url", + help="URL for project mailing list", + default=None, + metavar="PROJECT_ML_URL", + ) (options, args) = parser.parse_args() if len(args) < 2: parser.print_help() @@ -211,24 +230,30 @@ def main(): options.main_gh_user = options.repo_name repo_path = clone_repo(options.gitwash_url, options.gitwash_branch) try: - copy_replace((('PROJECTNAME', project_name), - ('REPONAME', options.repo_name), - ('MAIN_GH_USER', options.main_gh_user)), - repo_path, - out_path, - cp_globs=(pjoin('gitwash', '*'),), - rep_globs=('*.rst',), - renames=((r'\.rst$', options.source_suffix),)) - make_link_targets(project_name, - options.main_gh_user, - options.repo_name, - pjoin(out_path, 'gitwash', 'known_projects.inc'), - pjoin(out_path, 'gitwash', 'this_project.inc'), - options.project_url, - options.project_ml_url) + copy_replace( + ( + ("PROJECTNAME", project_name), + ("REPONAME", options.repo_name), + ("MAIN_GH_USER", options.main_gh_user), + ), + repo_path, + out_path, + cp_globs=(pjoin("gitwash", "*"),), + rep_globs=("*.rst",), + renames=((r"\.rst$", options.source_suffix),), + ) + make_link_targets( + project_name, + options.main_gh_user, + options.repo_name, + pjoin(out_path, "gitwash", "known_projects.inc"), + pjoin(out_path, "gitwash", "this_project.inc"), + options.project_url, + options.project_ml_url, + ) finally: shutil.rmtree(repo_path) -if __name__ == '__main__': +if __name__ == "__main__": main() |