author     Jarrod Millman <jarrod.millman@gmail.com>  2017-09-11 15:48:32 -0700
committer  GitHub <noreply@github.com>                2017-09-11 15:48:32 -0700
commit     be23fa0e422b51f4526828cb19b8105c89e5dcbb (patch)
tree       c11b94af01c1329f6dbe4f69b68b1179862bb9df
parent     ae5acd8ad4fdab34a6b92a381a6ddc78c63a99c0 (diff)
download   networkx-be23fa0e422b51f4526828cb19b8105c89e5dcbb.tar.gz

Fix links (#2663)

* Fix links

* Comply with pep8
-rw-r--r--  CONTRIBUTING.rst | 2
-rw-r--r--  INSTALL.rst | 6
-rw-r--r--  doc/bibliography.rst | 4
-rw-r--r--  doc/developer/gitwash/development_workflow.rst | 2
-rw-r--r--  doc/developer/gitwash/git_links.inc | 5
-rw-r--r--  doc/developer/gitwash/git_resources.rst | 1
-rw-r--r--  doc/index.rst | 4
-rw-r--r--  doc/reference/drawing.rst | 4
-rw-r--r--  doc/reference/glossary.rst | 4
-rw-r--r--  doc/release/release_dev.rst | 2
-rw-r--r--  examples/drawing/plot_unix_email.py | 6
-rw-r--r--  networkx/algorithms/approximation/clique.py | 6
-rw-r--r--  networkx/algorithms/approximation/independent_set.py | 2
-rw-r--r--  networkx/algorithms/approximation/kcomponents.py | 85
-rw-r--r--  networkx/algorithms/approximation/matching.py | 2
-rw-r--r--  networkx/algorithms/bipartite/matching.py | 5
-rw-r--r--  networkx/algorithms/bipartite/matrix.py | 31
-rw-r--r--  networkx/algorithms/bridges.py | 2
-rw-r--r--  networkx/algorithms/centrality/current_flow_betweenness.py | 6
-rw-r--r--  networkx/algorithms/centrality/current_flow_betweenness_subset.py | 16
-rw-r--r--  networkx/algorithms/centrality/current_flow_closeness.py | 5
-rw-r--r--  networkx/algorithms/centrality/dispersion.py | 2
-rw-r--r--  networkx/algorithms/centrality/subgraph_alg.py | 69
-rw-r--r--  networkx/algorithms/centrality/tests/test_dispersion.py | 24
-rw-r--r--  networkx/algorithms/centrality/tests/test_load_centrality.py | 332
-rw-r--r--  networkx/algorithms/chordal.py | 4
-rw-r--r--  networkx/algorithms/cluster.py | 2
-rw-r--r--  networkx/algorithms/communicability_alg.py | 4
-rw-r--r--  networkx/algorithms/community/quality.py | 4
-rw-r--r--  networkx/algorithms/connectivity/edge_kcomponents.py | 2
-rw-r--r--  networkx/algorithms/connectivity/kcomponents.py | 12
-rw-r--r--  networkx/algorithms/connectivity/tests/test_kcomponents.py | 2
-rw-r--r--  networkx/algorithms/core.py | 10
-rw-r--r--  networkx/algorithms/dominating.py | 4
-rw-r--r--  networkx/algorithms/euler.py | 2
-rw-r--r--  networkx/algorithms/isomorphism/temporalisomorphvf2.py | 115
-rw-r--r--  networkx/algorithms/isomorphism/tests/test_isomorphvf2.py | 137
-rw-r--r--  networkx/algorithms/link_analysis/hits_alg.py | 127
-rw-r--r--  networkx/algorithms/link_prediction.py | 2
-rw-r--r--  networkx/algorithms/richclub.py | 4
-rw-r--r--  networkx/algorithms/smetric.py | 2
-rw-r--r--  networkx/algorithms/tests/test_dominating.py | 2
-rw-r--r--  networkx/algorithms/tree/tests/test_branchings.py | 57
-rw-r--r--  networkx/algorithms/tree/tests/test_mst.py | 2
-rw-r--r--  networkx/classes/digraph.py | 7
-rw-r--r--  networkx/classes/function.py | 4
-rw-r--r--  networkx/classes/graph.py | 6
-rw-r--r--  networkx/classes/multidigraph.py | 6
-rw-r--r--  networkx/classes/multigraph.py | 6
-rw-r--r--  networkx/convert_matrix.py | 2
-rw-r--r--  networkx/generators/community.py | 9
-rw-r--r--  networkx/generators/directed.py | 6
-rw-r--r--  networkx/readwrite/gexf.py | 50
-rw-r--r--  networkx/readwrite/gpickle.py | 6
-rw-r--r--  networkx/readwrite/json_graph/__init__.py | 8
-rw-r--r--  networkx/readwrite/nx_shp.py | 40
-rw-r--r--  networkx/readwrite/tests/test_gexf.py | 4
-rw-r--r--  networkx/testing/tests/test_utils.py | 62
-rw-r--r--  networkx/testing/utils.py | 10
-rw-r--r--  networkx/tests/test_exceptions.py | 8
-rw-r--r--  networkx/utils/contextmanagers.py | 1
-rw-r--r--  networkx/utils/random_sequence.py | 2
-rw-r--r--  networkx/utils/tests/test_contextmanager.py | 1
-rw-r--r--  networkx/utils/tests/test_decorators.py | 20
64 files changed, 719 insertions(+), 660 deletions(-)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index cbe48705..129cc051 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -68,7 +68,7 @@ For a more detailed discussion, read these :doc:`detailed documents
and commit. As soon as those changes are pushed up (to the same branch as
before) the pull request will update automatically.
- * `Travis-CI <http://travis-ci.org/>`_, a continuous integration service,
+ * `Travis-CI <https://travis-ci.org/>`_, a continuous integration service,
is triggered after each Pull Request update to build the code and run unit
tests of your branch. The Travis tests must pass before your PR can be merged.
If Travis fails, you can find out why by clicking on the "failed" icon (red
diff --git a/INSTALL.rst b/INSTALL.rst
index d8284bb0..697e96c9 100644
--- a/INSTALL.rst
+++ b/INSTALL.rst
@@ -9,8 +9,8 @@ instructions for installing the full `scientific Python stack
.. note::
If you are on Windows and want to install optional packages (e.g., `scipy`),
then you will need to install a Python distribution such as
- `Anaconda <https://www.continuum.io/downloads>`_,
- `Enthought Canopy <https://www.enthought.com/products/canopy/>`_,
+ `Anaconda <https://www.anaconda.com/download/>`_,
+ `Enthought Canopy <https://www.enthought.com/product/canopy>`_,
`Python(x,y) <http://python-xy.github.io/>`_,
`WinPython <https://winpython.github.io/>`_, or
`Pyzo <http://www.pyzo.org/>`_.
@@ -45,7 +45,7 @@ install into your user directory using the ``--user`` flag::
Alternatively, you can manually download ``networkx`` from
`GitHub <https://github.com/networkx/networkx/releases>`_ or
-`PyPI <http://pypi.python.org/pypi/networkx>`_.
+`PyPI <https://pypi.python.org/pypi/networkx>`_.
To install one of these versions, unpack it and run the following from the
top-level source directory using the Terminal::
diff --git a/doc/bibliography.rst b/doc/bibliography.rst
index 3510df5d..4867b258 100644
--- a/doc/bibliography.rst
+++ b/doc/bibliography.rst
@@ -5,7 +5,7 @@ Bibliography
.. [BA02] R. Albert and A.-L. Barabási, "Statistical mechanics of complex
networks", Reviews of Modern Physics, 74, pp. 47-97, 2002.
- http://arxiv.org/abs/cond-mat/0106096
+ https://arxiv.org/abs/cond-mat/0106096
.. [Bollobas01] B. Bollobás, "Random Graphs", Second Edition,
Cambridge University Press, 2001.
@@ -61,5 +61,5 @@ Bibliography
2nd ed., 2001.
.. [vanRossum98] Guido van Rossum. Python Patterns - Implementing Graphs, 1998.
- http://www.python.org/doc/essays/graphs
+ https://www.python.org/doc/essays/graphs
diff --git a/doc/developer/gitwash/development_workflow.rst b/doc/developer/gitwash/development_workflow.rst
index 957f0e7d..bedc49fd 100644
--- a/doc/developer/gitwash/development_workflow.rst
+++ b/doc/developer/gitwash/development_workflow.rst
@@ -27,8 +27,6 @@ In what follows we'll refer to the upstream networkx ``master`` branch, as
your feature branch while you are working.
* If you do find yourself merging from trunk, consider :ref:`rebase-on-trunk`
* Ask on the `networkx mailing list`_ if you get stuck.
-* Check that your code meets the requirements as outlined in our
- `wiki <https://github.com/networkx/networkx/wiki>`_.
* Ask for code review!
This way of working helps to keep work well organized, with readable history.
diff --git a/doc/developer/gitwash/git_links.inc b/doc/developer/gitwash/git_links.inc
index d01ad783..59287700 100644
--- a/doc/developer/gitwash/git_links.inc
+++ b/doc/developer/gitwash/git_links.inc
@@ -16,12 +16,11 @@
.. _git-osx-installer: https://git-scm.com/download/mac
.. _subversion: http://subversion.tigris.org/
.. _git cheat sheet: https://help.github.com/git-cheat-sheets/
-.. _pro git book: https://progit.org/
+.. _pro git book: https://git-scm.com/book/en/v2
.. _git svn crash course: https://git-scm.com/course/svn.html
.. _network graph visualizer: https://github.com/blog/39-say-hello-to-the-network-graph-visualizer
.. _git user manual: https://schacon.github.io/git/user-manual.html
.. _git tutorial: https://schacon.github.io/git/gittutorial.html
-.. _git community book: https://git-scm.com/book/en/v2
.. _git ready: http://gitready.com/
.. _Fernando's git page: http://www.fperez.org/py4science/git.html
.. _git magic: http://www-cs-students.stanford.edu/~blynn/gitmagic/index.html
@@ -41,7 +40,7 @@
.. _git config: https://schacon.github.io/git/git-config.html
.. _why the -a flag?: http://gitready.com/beginner/2009/01/18/the-staging-area.html
.. _git staging area: http://gitready.com/beginner/2009/01/18/the-staging-area.html
-.. _tangled working copy problem: http://2ndscale.com/rtomayko/2008/the-thing-about-git
+.. _tangled working copy problem: https://2ndscale.com/rtomayko/2008/the-thing-about-git
.. _git management: https://web.archive.org/web/20090224195437/http://kerneltrap.org/Linux/Git_Management
.. _linux git workflow: https://www.mail-archive.com/dri-devel@lists.sourceforge.net/msg39091.html
.. _git parable: http://tom.preston-werner.com/2009/05/19/the-git-parable.html
diff --git a/doc/developer/gitwash/git_resources.rst b/doc/developer/gitwash/git_resources.rst
index 2787a575..bdf0496c 100644
--- a/doc/developer/gitwash/git_resources.rst
+++ b/doc/developer/gitwash/git_resources.rst
@@ -14,7 +14,6 @@ Tutorials and summaries
* A `git cheat sheet`_ is a page giving summaries of common commands.
* The `git user manual`_
* The `git tutorial`_
-* The `git community book`_
* `git ready`_ |emdash| a nice series of tutorials
* `git magic`_ |emdash| extended introduction with intermediate detail
* The `git parable`_ is an easy read explaining the concepts behind git.
diff --git a/doc/index.rst b/doc/index.rst
index dad1ef31..f086212d 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -46,8 +46,8 @@ algorithms. Python has a vibrant and growing ecosystem of packages that
NetworkX uses to provide more features such as numerical linear algebra and
drawing. In order to make the most out of NetworkX you will want to know how
to write basic programs in Python. Among the many guides to Python, we
-recommend the `Python documentation <http://www.python.org>`_ and the text by
-Alex Martelli [Martelli03]_.
+recommend the `Python documentation <https://docs.python.org/3/>`_ and the text
+by Alex Martelli [Martelli03]_.
Free software
-------------
diff --git a/doc/reference/drawing.rst b/doc/reference/drawing.rst
index 21a6d8a7..bdfb80c1 100644
--- a/doc/reference/drawing.rst
+++ b/doc/reference/drawing.rst
@@ -13,10 +13,10 @@ Proper graph visualization is hard, and we highly recommend that people
visualize their graphs with tools dedicated to that task. Notable examples of
dedicated and fully-featured graph visualization tools are
`Cytoscape <http://www.cytoscape.org/>`_,
-`Gephi <http://gephi.github.io/>`_,
+`Gephi <https://gephi.org/>`_,
`Graphviz <http://www.graphviz.org/>`_ and, for
`LaTeX <http://www.latex-project.org/>`_ typesetting,
-`PGF/TikZ <http://sourceforge.net/projects/pgf/>`_.
+`PGF/TikZ <https://sourceforge.net/projects/pgf/>`_.
To use these and other such tools, you should export your NetworkX graph into
a format that can be read by those tools. For example, Cytoscape can read the
GraphML format, and so, ``networkx.write_graphml(G)`` might be an appropriate
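For orientation, a minimal sketch of the GraphML export step mentioned above, assuming the standard NetworkX writer (the filename is illustrative)::

    import networkx as nx

    G = nx.petersen_graph()
    # Write GraphML that dedicated tools such as Cytoscape or Gephi can import.
    nx.write_graphml(G, "graph.graphml")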
diff --git a/doc/reference/glossary.rst b/doc/reference/glossary.rst
index 09696ae1..798da320 100644
--- a/doc/reference/glossary.rst
+++ b/doc/reference/glossary.rst
@@ -8,7 +8,7 @@ Glossary
dictionary
A Python dictionary maps keys to values. Also known as "hashes",
or "associative arrays" in other programming languages.
- See http://docs.python.org/tutorial/datastructures.html#dictionaries
+ See https://docs.python.org/2/tutorial/datastructures.html#dictionaries
edge
Edges are either two-tuples of nodes `(u, v)` or three tuples of nodes
@@ -40,7 +40,7 @@ Glossary
default; they all compare unequal, and their hash value is their
:func:`id`.
- Definition from http://docs.python.org/glossary.html
+ Definition from https://docs.python.org/2/glossary.html
nbunch
An nbunch is a single node, container of nodes or None (representing
diff --git a/doc/release/release_dev.rst b/doc/release/release_dev.rst
index 1b079af0..feb5a850 100644
--- a/doc/release/release_dev.rst
+++ b/doc/release/release_dev.rst
@@ -172,7 +172,7 @@ API Changes
returned. When ``target`` is not specified the return value is still 2
dicts.
-* [`#2553 <https://github.com/networkx/networkx/issues/2553>`_]
+* [`#2553 <https://github.com/networkx/networkx/pull/2553>`_]
``set_node_attributes()`` and ``set_edge_attributes()`` now accept
dict-of-dict input of shape ``{node/edge: {name: value}}`` in addition to
previous valid inputs: ``{node/edge: value}`` and ``value``. The order of the
diff --git a/examples/drawing/plot_unix_email.py b/examples/drawing/plot_unix_email.py
index 98da9a96..ac7e3b75 100644
--- a/examples/drawing/plot_unix_email.py
+++ b/examples/drawing/plot_unix_email.py
@@ -35,7 +35,8 @@ import matplotlib.pyplot as plt
import networkx as nx
# unix mailbox recipe
-# see http://www.python.org/doc/current/lib/module-mailbox.html
+# see https://docs.python.org/2/library/mailbox.html
+
def mbox_graph():
try:
@@ -52,7 +53,7 @@ def mbox_graph():
for msg in mbox: # msg is python email.Message.Message object
(source_name, source_addr) = parseaddr(msg['From']) # sender
# get all recipients
- # see http://www.python.org/doc/current/lib/module-email.Utils.html
+ # see https://docs.python.org/2/library/email.html
tos = msg.get_all('to', [])
ccs = msg.get_all('cc', [])
resent_tos = msg.get_all('resent-to', [])
@@ -64,6 +65,7 @@ def mbox_graph():
return G
+
if __name__ == '__main__':
G = mbox_graph()
diff --git a/networkx/algorithms/approximation/clique.py b/networkx/algorithms/approximation/clique.py
index b2789e82..0503f34b 100644
--- a/networkx/algorithms/approximation/clique.py
+++ b/networkx/algorithms/approximation/clique.py
@@ -9,7 +9,8 @@ Cliques.
import networkx as nx
from networkx.algorithms.approximation import ramsey
__author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)"""
-__all__ = ["clique_removal","max_clique"]
+__all__ = ["clique_removal", "max_clique"]
+
def max_clique(G):
r"""Find the Maximum Clique
@@ -40,7 +41,7 @@ def max_clique(G):
vertices in a maximum clique in G. The intersection number of
G is the smallest number of cliques that together cover all edges of G.
- http://en.wikipedia.org/wiki/Maximum_clique
+ https://en.wikipedia.org/wiki/Maximum_clique
References
----------
@@ -58,6 +59,7 @@ def max_clique(G):
iset, _ = clique_removal(cgraph)
return iset
+
def clique_removal(G):
""" Repeatedly remove cliques from the graph.
diff --git a/networkx/algorithms/approximation/independent_set.py b/networkx/algorithms/approximation/independent_set.py
index 8f993fcb..f86d2acb 100644
--- a/networkx/algorithms/approximation/independent_set.py
+++ b/networkx/algorithms/approximation/independent_set.py
@@ -14,7 +14,7 @@ the maximum independent set problem and is an NP-hard optimization problem.
As such, it is unlikely that there exists an efficient algorithm for finding
a maximum independent set of a graph.
-`Wikipedia: Independent set <http://en.wikipedia.org/wiki/Independent_set_(graph_theory)>`_
+`Wikipedia: Independent set <https://en.wikipedia.org/wiki/Independent_set_(graph_theory)>`_
Independent set algorithm is based on the following paper:
diff --git a/networkx/algorithms/approximation/kcomponents.py b/networkx/algorithms/approximation/kcomponents.py
index 60ddcfe6..2fbcd271 100644
--- a/networkx/algorithms/approximation/kcomponents.py
+++ b/networkx/algorithms/approximation/kcomponents.py
@@ -1,6 +1,6 @@
""" Fast approximation for k-component structure
"""
-# Copyright (C) 2015 by
+# Copyright (C) 2015 by
# Jordi Torrents <jtorrents@milnou.net>
# All rights reserved.
# BSD license.
@@ -24,21 +24,23 @@ __all__ = ['k_components']
not_implemented_for('directed')
+
+
def k_components(G, min_density=0.95):
r"""Returns the approximate k-component structure of a graph G.
-
- A `k`-component is a maximal subgraph of a graph G that has, at least,
+
+ A `k`-component is a maximal subgraph of a graph G that has, at least,
node connectivity `k`: we need to remove at least `k` nodes to break it
into more components. `k`-components have an inherent hierarchical
- structure because they are nested in terms of connectivity: a connected
- graph can contain several 2-components, each of which can contain
+ structure because they are nested in terms of connectivity: a connected
+ graph can contain several 2-components, each of which can contain
one or more 3-components, and so forth.
This implementation is based on the fast heuristics to approximate
the `k`-component structure of a graph [1]_, which, in turn, is based on
- a fast approximation algorithm for finding good lower bounds of the number
+ a fast approximation algorithm for finding good lower bounds of the number
of node independent paths between two nodes [2]_.
-
+
Parameters
----------
G : NetworkX graph
@@ -52,39 +54,39 @@ def k_components(G, min_density=0.95):
k_components : dict
Dictionary with connectivity level `k` as key and a list of
sets of nodes that form a k-component of level `k` as values.
-
+
Examples
--------
- >>> # Petersen graph has 10 nodes and it is triconnected, thus all
+ >>> # Petersen graph has 10 nodes and it is triconnected, thus all
>>> # nodes are in a single component on all three connectivity levels
>>> from networkx.algorithms import approximation as apxa
>>> G = nx.petersen_graph()
>>> k_components = apxa.k_components(G)
-
+
Notes
-----
- The logic of the approximation algorithm for computing the `k`-component
- structure [1]_ is based on repeatedly applying simple and fast algorithms
- for `k`-cores and biconnected components in order to narrow down the
+ The logic of the approximation algorithm for computing the `k`-component
+ structure [1]_ is based on repeatedly applying simple and fast algorithms
+ for `k`-cores and biconnected components in order to narrow down the
number of pairs of nodes over which we have to compute White and Newman's
approximation algorithm for finding node independent paths [2]_. More
- formally, this algorithm is based on Whitney's theorem, which states
- an inclusion relation among node connectivity, edge connectivity, and
- minimum degree for any graph G. This theorem implies that every
- `k`-component is nested inside a `k`-edge-component, which in turn,
+ formally, this algorithm is based on Whitney's theorem, which states
+ an inclusion relation among node connectivity, edge connectivity, and
+ minimum degree for any graph G. This theorem implies that every
+ `k`-component is nested inside a `k`-edge-component, which in turn,
is contained in a `k`-core. Thus, this algorithm computes node independent
paths among pairs of nodes in each biconnected part of each `k`-core,
- and repeats this procedure for each `k` from 3 to the maximal core number
+ and repeats this procedure for each `k` from 3 to the maximal core number
of a node in the input graph.
- Because, in practice, many nodes of the core of level `k` inside a
- bicomponent actually are part of a component of level k, the auxiliary
- graph needed for the algorithm is likely to be very dense. Thus, we use
- a complement graph data structure (see `AntiGraph`) to save memory.
- AntiGraph only stores information of the edges that are *not* present
- in the actual auxiliary graph. When applying algorithms to this
- complement graph data structure, it behaves as if it were the dense
+ Because, in practice, many nodes of the core of level `k` inside a
+ bicomponent actually are part of a component of level k, the auxiliary
+ graph needed for the algorithm is likely to be very dense. Thus, we use
+ a complement graph data structure (see `AntiGraph`) to save memory.
+ AntiGraph only stores information of the edges that are *not* present
+ in the actual auxiliary graph. When applying algorithms to this
+ complement graph data structure, it behaves as if it were the dense
version.
See also
@@ -93,16 +95,16 @@ def k_components(G, min_density=0.95):
References
----------
- .. [1] Torrents, J. and F. Ferraro (2015) Structural Cohesion:
+ .. [1] Torrents, J. and F. Ferraro (2015) Structural Cohesion:
Visualization and Heuristics for Fast Computation.
- http://arxiv.org/pdf/1503.04476v1
+ https://arxiv.org/pdf/1503.04476v1
- .. [2] White, Douglas R., and Mark Newman (2001) A Fast Algorithm for
+ .. [2] White, Douglas R., and Mark Newman (2001) A Fast Algorithm for
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
http://eclectic.ss.uci.edu/~drwhite/working.pdf
- .. [3] Moody, J. and D. White (2003). Social cohesion and embeddedness:
- A hierarchical conception of social groups.
+ .. [3] Moody, J. and D. White (2003). Social cohesion and embeddedness:
+ A hierarchical conception of social groups.
American Sociological Review 68(1), 103--28.
http://www2.asanet.org/journals/ASRFeb03MoodyWhite.pdf
@@ -120,12 +122,12 @@ def k_components(G, min_density=0.95):
# Exact solution for k = {1,2}
# There is a linear time algorithm for triconnectivity, if we had an
# implementation available we could start from k = 4.
- for component in nx.connected_components(G):
+ for component in nx.connected_components(G):
# isolated nodes have connectivity 0
comp = set(component)
if len(comp) > 1:
k_components[1].append(comp)
- for bicomponent in nx.biconnected_components(G):
+ for bicomponent in nx.biconnected_components(G):
# avoid considering dyads as bicomponents
bicomp = set(bicomponent)
if len(bicomp) > 2:
@@ -145,7 +147,7 @@ def k_components(G, min_density=0.95):
# Build auxiliary graph
H = _AntiGraph()
H.add_nodes_from(SG.nodes())
- for u,v in combinations(SG, 2):
+ for u, v in combinations(SG, 2):
K = node_connectivity(SG, u, v, cutoff=k)
if k > K:
H.add_edge(u, v)
@@ -171,8 +173,8 @@ def _cliques_heuristic(G, H, k, min_density):
overlap = False
else:
overlap = set.intersection(*[
- set(x for x in H[n] if x not in cands)
- for n in cands])
+ set(x for x in H[n] if x not in cands)
+ for n in cands])
if overlap and len(overlap) < k:
SH = H.subgraph(cands | overlap)
else:
@@ -208,13 +210,14 @@ class _AntiGraph(nx.Graph):
a low memory footprint.
In this class you add the edges that *do not exist* in the dense graph,
- the report methods of the class return the neighbors, the edges and
+ the report methods of the class return the neighbors, the edges and
the degree as if it was the dense graph. Thus it's possible to use
an instance of this class with some of NetworkX functions. In this
case we only use k-core, connected_components, and biconnected_components.
"""
all_edge_dict = {'weight': 1}
+
def single_edge_dict(self):
return self.all_edge_dict
edge_attr_dict_factory = single_edge_dict
@@ -234,21 +237,22 @@ class _AntiGraph(nx.Graph):
"""
all_edge_dict = self.all_edge_dict
- return dict((node, all_edge_dict) for node in
+ return dict((node, all_edge_dict) for node in
set(self._adj) - set(self._adj[n]) - set([n]))
def neighbors(self, n):
- """Return an iterator over all neighbors of node n in the
+ """Return an iterator over all neighbors of node n in the
dense graph.
"""
try:
return iter(set(self._adj) - set(self._adj[n]) - set([n]))
except KeyError:
- raise NetworkXError("The node %s is not in the graph."%(n,))
+ raise NetworkXError("The node %s is not in the graph." % (n,))
class AntiAtlasView(Mapping):
"""An adjacency inner dict for AntiGraph"""
+
def __init__(self, graph, node):
self._graph = graph
self._atlas = graph._adj[node]
@@ -268,6 +272,7 @@ class _AntiGraph(nx.Graph):
class AntiAdjacencyView(AntiAtlasView):
"""An adjacency outer dict for AntiGraph"""
+
def __init__(self, graph):
self._graph = graph
self._atlas = graph._adj
@@ -327,7 +332,7 @@ class _AntiGraph(nx.Graph):
through once.
weight : string or None, optional (default=None)
- The edge attribute that holds the numerical value used
+ The edge attribute that holds the numerical value used
as a weight. If None, then each edge has weight 1.
The degree is the sum of the edge weights adjacent to the node.
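A runnable version of the docstring example above; the heuristic takes the graph and an optional ``min_density``::

    import networkx as nx
    from networkx.algorithms import approximation as apxa

    G = nx.petersen_graph()
    result = apxa.k_components(G)
    # The Petersen graph is triconnected, so levels 1 through 3 each hold
    # a single component containing all ten nodes.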
diff --git a/networkx/algorithms/approximation/matching.py b/networkx/algorithms/approximation/matching.py
index 0c9f3131..d7fc4c43 100644
--- a/networkx/algorithms/approximation/matching.py
+++ b/networkx/algorithms/approximation/matching.py
@@ -7,7 +7,7 @@ Graph Matching
Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent
edges; that is, no two edges share a common vertex.
-`Wikipedia: Matching <http://en.wikipedia.org/wiki/Matching_(graph_theory)>`_
+`Wikipedia: Matching <https://en.wikipedia.org/wiki/Matching_(graph_theory)>`_
"""
# Copyright (C) 2011-2012 by
# Nicholas Mancuso <nick.mancuso@gmail.com>
diff --git a/networkx/algorithms/bipartite/matching.py b/networkx/algorithms/bipartite/matching.py
index 8f584394..4457ad99 100644
--- a/networkx/algorithms/bipartite/matching.py
+++ b/networkx/algorithms/bipartite/matching.py
@@ -423,7 +423,7 @@ def to_vertex_cover(G, matching, top_nodes=None):
Container with all nodes in one bipartite node set. If not supplied
it will be computed. But if more than one solution exists an exception
will be raised.
-
+
Returns
-------
@@ -445,7 +445,7 @@ def to_vertex_cover(G, matching, top_nodes=None):
This function is implemented using the procedure guaranteed by `Konig's
theorem
- <http://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29>`_,
+ <https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29>`_,
which proves an equivalence between a maximum matching and a minimum vertex
cover in bipartite graphs.
@@ -478,6 +478,7 @@ def to_vertex_cover(G, matching, top_nodes=None):
# endpoint not in Z. This gives us the vertex cover.
return (L - Z) | (R & Z)
+
#: Returns the maximum cardinality matching in the given bipartite graph.
#:
#: This function is simply an alias for :func:`hopcroft_karp_matching`.
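A short sketch of the Konig's-theorem workflow the docstring describes, using the public bipartite API::

    import networkx as nx
    from networkx.algorithms import bipartite

    G = nx.complete_bipartite_graph(2, 3)
    top = [0, 1]
    matching = bipartite.hopcroft_karp_matching(G, top_nodes=top)
    cover = bipartite.to_vertex_cover(G, matching, top_nodes=top)
    # Konig's theorem: the cover size equals the matching size, which is
    # len(matching) // 2 because the dict records both directions of each edge.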
diff --git a/networkx/algorithms/bipartite/matrix.py b/networkx/algorithms/bipartite/matrix.py
index 81b99ee7..adaa12c5 100644
--- a/networkx/algorithms/bipartite/matrix.py
+++ b/networkx/algorithms/bipartite/matrix.py
@@ -16,7 +16,8 @@ from networkx.convert_matrix import _generate_weighted_edges
import networkx as nx
__author__ = """\n""".join(['Jordi Torrents <jtorrents@milnou.net>',
'Aric Hagberg <aric.hagberg@gmail.com>'])
-__all__ = ['biadjacency_matrix','from_biadjacency_matrix']
+__all__ = ['biadjacency_matrix', 'from_biadjacency_matrix']
+
def biadjacency_matrix(G, row_order, column_order=None,
dtype=None, weight='weight', format='csr'):
@@ -76,9 +77,9 @@ def biadjacency_matrix(G, row_order, column_order=None,
References
----------
- .. [1] http://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
+ .. [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
.. [2] Scipy Dev. References, "Sparse Matrices",
- http://docs.scipy.org/doc/scipy/reference/sparse.html
+ https://docs.scipy.org/doc/scipy/reference/sparse.html
"""
from scipy import sparse
nlen = len(row_order)
@@ -98,17 +99,17 @@ def biadjacency_matrix(G, row_order, column_order=None,
col_index = dict(zip(column_order, itertools.count()))
if G.number_of_edges() == 0:
- row,col,data=[],[],[]
+ row, col, data = [], [], []
else:
- row,col,data = zip(*((row_index[u],col_index[v],d.get(weight,1))
- for u,v,d in G.edges(row_order,data=True)
- if u in row_index and v in col_index))
- M = sparse.coo_matrix((data,(row,col)),
- shape=(nlen,mlen), dtype=dtype)
+ row, col, data = zip(*((row_index[u], col_index[v], d.get(weight, 1))
+ for u, v, d in G.edges(row_order, data=True)
+ if u in row_index and v in col_index))
+ M = sparse.coo_matrix((data, (row, col)),
+ shape=(nlen, mlen), dtype=dtype)
try:
return M.asformat(format)
except AttributeError:
- raise nx.NetworkXError("Unknown sparse matrix format: %s"%format)
+ raise nx.NetworkXError("Unknown sparse matrix format: %s" % format)
def from_biadjacency_matrix(A, create_using=None, edge_attribute='weight'):
@@ -145,16 +146,16 @@ def from_biadjacency_matrix(A, create_using=None, edge_attribute='weight'):
References
----------
- [1] http://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
+ [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
"""
G = _prep_create_using(create_using)
n, m = A.shape
# Make sure we get even the isolated nodes of the graph.
G.add_nodes_from(range(n), bipartite=0)
- G.add_nodes_from(range(n,n+m), bipartite=1)
+ G.add_nodes_from(range(n, n + m), bipartite=1)
# Create an iterable over (u, v, w) triples and for each triple, add an
# edge from u to v with weight w.
- triples = ((u, n+v, d) for (u, v, d) in _generate_weighted_edges(A))
+ triples = ((u, n + v, d) for (u, v, d) in _generate_weighted_edges(A))
# If the entries in the adjacency matrix are integers and the graph is a
# multigraph, then create parallel edges, each with weight 1, for each
# entry in the adjacency matrix. Otherwise, create one edge for each
@@ -167,11 +168,11 @@ def from_biadjacency_matrix(A, create_using=None, edge_attribute='weight'):
return G
# fixture for nose tests
+
+
def setup_module(module):
from nose import SkipTest
try:
import scipy
except:
raise SkipTest("SciPy not available")
-
-
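For reference, a minimal round trip through the two functions changed above (requires SciPy)::

    import networkx as nx
    from networkx.algorithms import bipartite

    G = nx.Graph()
    G.add_nodes_from([0, 1], bipartite=0)
    G.add_nodes_from(['a', 'b'], bipartite=1)
    G.add_edges_from([(0, 'a'), (0, 'b'), (1, 'b')])
    M = bipartite.biadjacency_matrix(G, row_order=[0, 1], column_order=['a', 'b'])
    H = bipartite.from_biadjacency_matrix(M)  # nodes are relabeled 0..n+m-1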
diff --git a/networkx/algorithms/bridges.py b/networkx/algorithms/bridges.py
index 2d6400be..a56a6283 100644
--- a/networkx/algorithms/bridges.py
+++ b/networkx/algorithms/bridges.py
@@ -65,7 +65,7 @@ def bridges(G, root=None):
References
----------
- .. [1] https://en.wikipedia.org/wiki/Bridge_(graph_theory)#Bridge-Finding_with_Chain_Decompositions
+ .. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29#Bridge-Finding_with_Chain_Decompositions
"""
chains = nx.chain_decomposition(G, root=root)
chain_edges = set(chain.from_iterable(chains))
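A quick illustration of the chain-decomposition bridge finder referenced above::

    import networkx as nx

    G = nx.barbell_graph(3, 0)  # two triangles joined by a single edge
    list(nx.bridges(G))         # [(2, 3)]: the joining edge is the only bridge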
diff --git a/networkx/algorithms/centrality/current_flow_betweenness.py b/networkx/algorithms/centrality/current_flow_betweenness.py
index 878012fb..b256c1a1 100644
--- a/networkx/algorithms/centrality/current_flow_betweenness.py
+++ b/networkx/algorithms/centrality/current_flow_betweenness.py
@@ -80,7 +80,7 @@ def approximate_current_flow_betweenness_centrality(G, normalized=True,
Centrality Measures Based on Current Flow.
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
- http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf
+ http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
"""
try:
import numpy as np
@@ -201,7 +201,7 @@ def current_flow_betweenness_centrality(G, normalized=True, weight=None,
Ulrik Brandes and Daniel Fleischer,
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
- http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf
+ http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
.. [2] A measure of betweenness centrality based on random walks,
M. E. J. Newman, Social Networks 27, 39-54 (2005).
@@ -312,7 +312,7 @@ def edge_current_flow_betweenness_centrality(G, normalized=True,
Ulrik Brandes and Daniel Fleischer,
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
- http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf
+ http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
.. [2] A measure of betweenness centrality based on random walks,
M. E. J. Newman, Social Networks 27, 39-54 (2005).
diff --git a/networkx/algorithms/centrality/current_flow_betweenness_subset.py b/networkx/algorithms/centrality/current_flow_betweenness_subset.py
index c6893acf..453b6974 100644
--- a/networkx/algorithms/centrality/current_flow_betweenness_subset.py
+++ b/networkx/algorithms/centrality/current_flow_betweenness_subset.py
@@ -91,7 +91,7 @@ def current_flow_betweenness_centrality_subset(G, sources, targets,
Ulrik Brandes and Daniel Fleischer,
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
- http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf
+ http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
.. [2] A measure of betweenness centrality based on random walks,
M. E. J. Newman, Social Networks 27, 39-54 (2005).
@@ -122,14 +122,14 @@ def current_flow_betweenness_centrality_subset(G, sources, targets,
i = mapping[ss]
for tt in targets:
j = mapping[tt]
- betweenness[s] += 0.5*np.abs(row[i]-row[j])
- betweenness[t] += 0.5*np.abs(row[i]-row[j])
+ betweenness[s] += 0.5 * np.abs(row[i] - row[j])
+ betweenness[t] += 0.5 * np.abs(row[i] - row[j])
if normalized:
- nb = (n-1.0)*(n-2.0) # normalization factor
+ nb = (n - 1.0) * (n - 2.0) # normalization factor
else:
nb = 2.0
for v in H:
- betweenness[v] = betweenness[v]/nb + 1.0/(2-n)
+ betweenness[v] = betweenness[v] / nb + 1.0 / (2 - n)
return dict((ordering[k], v) for k, v in betweenness.items())
@@ -207,7 +207,7 @@ def edge_current_flow_betweenness_centrality_subset(G, sources, targets,
Ulrik Brandes and Daniel Fleischer,
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
- http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf
+ http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
.. [2] A measure of betweenness centrality based on random walks,
M. E. J. Newman, Social Networks 27, 39-54 (2005).
@@ -233,7 +233,7 @@ def edge_current_flow_betweenness_centrality_subset(G, sources, targets,
edges = (tuple(sorted((u, v))) for u, v in H.edges())
betweenness = dict.fromkeys(edges, 0.0)
if normalized:
- nb = (n-1.0)*(n-2.0) # normalization factor
+ nb = (n - 1.0) * (n - 2.0) # normalization factor
else:
nb = 2.0
for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype,
@@ -242,7 +242,7 @@ def edge_current_flow_betweenness_centrality_subset(G, sources, targets,
i = mapping[ss]
for tt in targets:
j = mapping[tt]
- betweenness[e] += 0.5*np.abs(row[i]-row[j])
+ betweenness[e] += 0.5 * np.abs(row[i] - row[j])
betweenness[e] /= nb
return dict(((ordering[s], ordering[t]), v)
for (s, t), v in betweenness.items())
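A minimal usage sketch of the subset variant whose normalization factor ``nb = (n - 1) * (n - 2)`` appears above; it needs NumPy/SciPy installed::

    import networkx as nx

    G = nx.grid_2d_graph(4, 4)
    cfb = nx.current_flow_betweenness_centrality_subset(
        G, sources=[(0, 0)], targets=[(3, 3)], normalized=True)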
diff --git a/networkx/algorithms/centrality/current_flow_closeness.py b/networkx/algorithms/centrality/current_flow_closeness.py
index 9e231300..4606e1c2 100644
--- a/networkx/algorithms/centrality/current_flow_closeness.py
+++ b/networkx/algorithms/centrality/current_flow_closeness.py
@@ -63,7 +63,7 @@ def current_flow_closeness_centrality(G, weight=None,
Centrality Measures Based on Current Flow.
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
- http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf
+ http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
.. [2] Karen Stephenson and Marvin Zelen:
Rethinking centrality: Methods and examples.
@@ -90,12 +90,13 @@ def current_flow_closeness_centrality(G, weight=None,
for v in H:
col = C2.get_row(v)
for w in H:
- betweenness[v] += col[v]-2*col[w]
+ betweenness[v] += col[v] - 2 * col[w]
betweenness[w] += col[v]
for v in H:
betweenness[v] = 1.0 / (betweenness[v])
return dict((ordering[k], float(v)) for k, v in betweenness.items())
+
information_centrality = current_flow_closeness_centrality
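As the alias on the last line above records, information centrality and current-flow closeness are the same measure; a minimal sketch (requires NumPy/SciPy)::

    import networkx as nx

    G = nx.grid_2d_graph(3, 3)
    ic = nx.information_centrality(G)  # same as current_flow_closeness_centrality(G)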
diff --git a/networkx/algorithms/centrality/dispersion.py b/networkx/algorithms/centrality/dispersion.py
index e413330e..f7049196 100644
--- a/networkx/algorithms/centrality/dispersion.py
+++ b/networkx/algorithms/centrality/dispersion.py
@@ -44,7 +44,7 @@ def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0):
.. [1] Romantic Partnerships and the Dispersion of Social Ties:
A Network Analysis of Relationship Status on Facebook.
Lars Backstrom, Jon Kleinberg.
- http://arxiv.org/pdf/1310.6753v1.pdf
+ https://arxiv.org/pdf/1310.6753v1.pdf
"""
diff --git a/networkx/algorithms/centrality/subgraph_alg.py b/networkx/algorithms/centrality/subgraph_alg.py
index 4c21410d..11536bd0 100644
--- a/networkx/algorithms/centrality/subgraph_alg.py
+++ b/networkx/algorithms/centrality/subgraph_alg.py
@@ -18,6 +18,7 @@ __all__ = ['subgraph_centrality_exp',
'estrada_index'
]
+
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def subgraph_centrality_exp(G):
@@ -63,7 +64,7 @@ def subgraph_centrality_exp(G):
.. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez,
"Subgraph centrality in complex networks",
Physical Review E 71, 056103 (2005).
- http://arxiv.org/abs/cond-mat/0504730
+ https://arxiv.org/abs/cond-mat/0504730
Examples
--------
@@ -75,15 +76,16 @@ def subgraph_centrality_exp(G):
"""
# alternative implementation that calculates the matrix exponential
import scipy.linalg
- nodelist = list(G) # ordering of nodes in matrix
- A = nx.to_numpy_matrix(G,nodelist)
+ nodelist = list(G) # ordering of nodes in matrix
+ A = nx.to_numpy_matrix(G, nodelist)
# convert to 0-1 matrix
- A[A!=0.0] = 1
+ A[A != 0.0] = 1
expA = scipy.linalg.expm(A.A)
# convert diagonal to dictionary keyed by node
- sc = dict(zip(nodelist,map(float,expA.diagonal())))
+ sc = dict(zip(nodelist, map(float, expA.diagonal())))
return sc
+
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def subgraph_centrality(G):
@@ -141,23 +143,24 @@ def subgraph_centrality(G):
.. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez,
"Subgraph centrality in complex networks",
Physical Review E 71, 056103 (2005).
- http://arxiv.org/abs/cond-mat/0504730
+ https://arxiv.org/abs/cond-mat/0504730
"""
import numpy
import numpy.linalg
- nodelist = list(G) # ordering of nodes in matrix
- A = nx.to_numpy_matrix(G,nodelist)
+ nodelist = list(G) # ordering of nodes in matrix
+ A = nx.to_numpy_matrix(G, nodelist)
# convert to 0-1 matrix
- A[A!=0.0] = 1
- w,v = numpy.linalg.eigh(A.A)
+ A[A != 0.0] = 1
+ w, v = numpy.linalg.eigh(A.A)
vsquare = numpy.array(v)**2
expw = numpy.exp(w)
- xg = numpy.dot(vsquare,expw)
+ xg = numpy.dot(vsquare, expw)
# convert vector dictionary keyed by node
- sc = dict(zip(nodelist,map(float,xg)))
+ sc = dict(zip(nodelist, map(float, xg)))
return sc
+
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def communicability_betweenness_centrality(G, normalized=True):
@@ -216,7 +219,7 @@ def communicability_betweenness_centrality(G, normalized=True):
.. [1] Ernesto Estrada, Desmond J. Higham, Naomichi Hatano,
"Communicability Betweenness in Complex Networks"
Physica A 388 (2009) 764-774.
- http://arxiv.org/abs/0905.4102
+ https://arxiv.org/abs/0905.4102
Examples
--------
@@ -225,47 +228,49 @@ def communicability_betweenness_centrality(G, normalized=True):
"""
import scipy
import scipy.linalg
- nodelist = list(G) # ordering of nodes in matrix
+ nodelist = list(G) # ordering of nodes in matrix
n = len(nodelist)
- A = nx.to_numpy_matrix(G,nodelist)
+ A = nx.to_numpy_matrix(G, nodelist)
# convert to 0-1 matrix
- A[A!=0.0] = 1
+ A[A != 0.0] = 1
expA = scipy.linalg.expm(A.A)
- mapping = dict(zip(nodelist,range(n)))
+ mapping = dict(zip(nodelist, range(n)))
cbc = {}
for v in G:
# remove row and col of node v
i = mapping[v]
- row = A[i,:].copy()
- col = A[:,i].copy()
- A[i,:] = 0
- A[:,i] = 0
+ row = A[i, :].copy()
+ col = A[:, i].copy()
+ A[i, :] = 0
+ A[:, i] = 0
B = (expA - scipy.linalg.expm(A.A)) / expA
# sum with row/col of node v and diag set to zero
- B[i,:] = 0
- B[:,i] = 0
+ B[i, :] = 0
+ B[:, i] = 0
B -= scipy.diag(scipy.diag(B))
cbc[v] = float(B.sum())
# put row and col back
- A[i,:] = row
- A[:,i] = col
+ A[i, :] = row
+ A[:, i] = col
# rescaling
- cbc = _rescale(cbc,normalized=normalized)
+ cbc = _rescale(cbc, normalized=normalized)
return cbc
-def _rescale(cbc,normalized):
+
+def _rescale(cbc, normalized):
# helper to rescale betweenness centrality
if normalized is True:
- order=len(cbc)
- if order <=2:
- scale=None
+ order = len(cbc)
+ if order <= 2:
+ scale = None
else:
- scale=1.0/((order-1.0)**2-(order-1.0))
+ scale = 1.0 / ((order - 1.0)**2 - (order - 1.0))
if scale is not None:
for v in cbc:
cbc[v] *= scale
return cbc
+
def estrada_index(G):
r"""Return the Estrada index of a the graph G.
@@ -312,6 +317,8 @@ def estrada_index(G):
return sum(subgraph_centrality(G).values())
# fixture for nose tests
+
+
def setup_module(module):
from nose import SkipTest
try:
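For context, the two public variants reformatted above compute the same measure by different linear-algebra routes, and ``estrada_index`` sums either one (all require NumPy/SciPy)::

    import networkx as nx

    G = nx.cycle_graph(4)
    sc = nx.subgraph_centrality(G)       # eigendecomposition route
    sce = nx.subgraph_centrality_exp(G)  # direct matrix-exponential route
    ei = nx.estrada_index(G)             # sum of the subgraph centralities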
diff --git a/networkx/algorithms/centrality/tests/test_dispersion.py b/networkx/algorithms/centrality/tests/test_dispersion.py
index bd4e8e75..f97d6055 100644
--- a/networkx/algorithms/centrality/tests/test_dispersion.py
+++ b/networkx/algorithms/centrality/tests/test_dispersion.py
@@ -1,20 +1,22 @@
import networkx as nx
from nose.tools import *
+
def small_ego_G():
- """The sample network from http://arxiv.org/pdf/1310.6753v1.pdf"""
- edges=[('a','b'), ('a','c'), ('b','c'), ('b','d'),
- ('b', 'e'),('b','f'),('c','d'),('c','f'),('c','h'),('d','f'), ('e','f'),
- ('f','h'),('h','j'), ('h','k'),('i','j'), ('i','k'), ('j','k'), ('u','a'),
- ('u','b'), ('u','c'), ('u','d'), ('u','e'), ('u','f'), ('u','g'), ('u','h'),
- ('u','i'), ('u','j'), ('u','k')]
+ """The sample network from https://arxiv.org/pdf/1310.6753v1.pdf"""
+ edges = [('a', 'b'), ('a', 'c'), ('b', 'c'), ('b', 'd'),
+ ('b', 'e'), ('b', 'f'), ('c', 'd'), ('c', 'f'), ('c', 'h'), ('d', 'f'), ('e', 'f'),
+ ('f', 'h'), ('h', 'j'), ('h', 'k'), ('i', 'j'), ('i', 'k'), ('j', 'k'), ('u', 'a'),
+ ('u', 'b'), ('u', 'c'), ('u', 'd'), ('u', 'e'), ('u', 'f'), ('u', 'g'), ('u', 'h'),
+ ('u', 'i'), ('u', 'j'), ('u', 'k')]
G = nx.Graph()
G.add_edges_from(edges)
return G
+
class TestDispersion(object):
-
+
def test_article(self):
"""our algorithm matches article's"""
G = small_ego_G()
@@ -27,17 +29,15 @@ class TestDispersion(object):
"""there is a result for every node"""
G = small_ego_G()
disp = nx.dispersion(G)
- disp_Gu = nx.dispersion(G, 'u')
- disp_uv = nx.dispersion(G, 'u', 'h')
+ disp_Gu = nx.dispersion(G, 'u')
+ disp_uv = nx.dispersion(G, 'u', 'h')
assert len(disp) == len(G)
assert len(disp_Gu) == len(G) - 1
assert type(disp_uv) is float
def test_impossible_things(self):
- G=nx.karate_club_graph()
+ G = nx.karate_club_graph()
disp = nx.dispersion(G)
for u in disp:
for v in disp[u]:
assert disp[u][v] >= 0
-
-
diff --git a/networkx/algorithms/centrality/tests/test_load_centrality.py b/networkx/algorithms/centrality/tests/test_load_centrality.py
index f603a74d..ef3f4b74 100644
--- a/networkx/algorithms/centrality/tests/test_load_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_load_centrality.py
@@ -7,26 +7,26 @@ class TestLoadCentrality:
def setUp(self):
- G=nx.Graph()
- G.add_edge(0,1,weight=3)
- G.add_edge(0,2,weight=2)
- G.add_edge(0,3,weight=6)
- G.add_edge(0,4,weight=4)
- G.add_edge(1,3,weight=5)
- G.add_edge(1,5,weight=5)
- G.add_edge(2,4,weight=1)
- G.add_edge(3,4,weight=2)
- G.add_edge(3,5,weight=1)
- G.add_edge(4,5,weight=4)
- self.G=G
- self.exact_weighted={0: 4.0, 1: 0.0, 2: 8.0, 3: 6.0, 4: 8.0, 5: 0.0}
+ G = nx.Graph()
+ G.add_edge(0, 1, weight=3)
+ G.add_edge(0, 2, weight=2)
+ G.add_edge(0, 3, weight=6)
+ G.add_edge(0, 4, weight=4)
+ G.add_edge(1, 3, weight=5)
+ G.add_edge(1, 5, weight=5)
+ G.add_edge(2, 4, weight=1)
+ G.add_edge(3, 4, weight=2)
+ G.add_edge(3, 5, weight=1)
+ G.add_edge(4, 5, weight=4)
+ self.G = G
+ self.exact_weighted = {0: 4.0, 1: 0.0, 2: 8.0, 3: 6.0, 4: 8.0, 5: 0.0}
self.K = nx.krackhardt_kite_graph()
self.P3 = nx.path_graph(3)
self.P4 = nx.path_graph(4)
self.K5 = nx.complete_graph(5)
- self.C4=nx.cycle_graph(4)
- self.T=nx.balanced_tree(r=2, h=2)
+ self.C4 = nx.cycle_graph(4)
+ self.T = nx.balanced_tree(r=2, h=2)
self.Gb = nx.Graph()
self.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3),
(2, 4), (4, 5), (3, 5)])
@@ -36,152 +36,147 @@ class TestLoadCentrality:
def test_not_strongly_connected(self):
b = nx.load_centrality(self.D)
- result = {0: 5./12,
- 1: 1./4,
- 2: 1./12,
- 3: 1./4,
+ result = {0: 5. / 12,
+ 1: 1. / 4,
+ 2: 1. / 12,
+ 3: 1. / 4,
4: 0.000}
for n in sorted(self.D):
assert_almost_equal(result[n], b[n], places=3)
assert_almost_equal(result[n], nx.load_centrality(self.D, n), places=3)
def test_weighted_load(self):
- b=nx.load_centrality(self.G,weight='weight',normalized=False)
+ b = nx.load_centrality(self.G, weight='weight', normalized=False)
for n in sorted(self.G):
- assert_equal(b[n],self.exact_weighted[n])
+ assert_equal(b[n], self.exact_weighted[n])
def test_k5_load(self):
- G=self.K5
- c=nx.load_centrality(G)
- d={0: 0.000,
- 1: 0.000,
- 2: 0.000,
- 3: 0.000,
- 4: 0.000}
+ G = self.K5
+ c = nx.load_centrality(G)
+ d = {0: 0.000,
+ 1: 0.000,
+ 2: 0.000,
+ 3: 0.000,
+ 4: 0.000}
for n in sorted(G):
- assert_almost_equal(c[n],d[n],places=3)
+ assert_almost_equal(c[n], d[n], places=3)
def test_p3_load(self):
- G=self.P3
- c=nx.load_centrality(G)
- d={0: 0.000,
- 1: 1.000,
- 2: 0.000}
+ G = self.P3
+ c = nx.load_centrality(G)
+ d = {0: 0.000,
+ 1: 1.000,
+ 2: 0.000}
for n in sorted(G):
- assert_almost_equal(c[n],d[n],places=3)
- c=nx.load_centrality(G,v=1)
- assert_almost_equal(c,1.0)
- c=nx.load_centrality(G,v=1,normalized=True)
- assert_almost_equal(c,1.0)
-
+ assert_almost_equal(c[n], d[n], places=3)
+ c = nx.load_centrality(G, v=1)
+ assert_almost_equal(c, 1.0)
+ c = nx.load_centrality(G, v=1, normalized=True)
+ assert_almost_equal(c, 1.0)
def test_p2_load(self):
- G=nx.path_graph(2)
- c=nx.load_centrality(G)
- d={0: 0.000,
- 1: 0.000}
+ G = nx.path_graph(2)
+ c = nx.load_centrality(G)
+ d = {0: 0.000,
+ 1: 0.000}
for n in sorted(G):
- assert_almost_equal(c[n],d[n],places=3)
-
+ assert_almost_equal(c[n], d[n], places=3)
def test_krackhardt_load(self):
- G=self.K
- c=nx.load_centrality(G)
- d={0: 0.023,
- 1: 0.023,
- 2: 0.000,
- 3: 0.102,
- 4: 0.000,
- 5: 0.231,
- 6: 0.231,
- 7: 0.389,
- 8: 0.222,
- 9: 0.000}
+ G = self.K
+ c = nx.load_centrality(G)
+ d = {0: 0.023,
+ 1: 0.023,
+ 2: 0.000,
+ 3: 0.102,
+ 4: 0.000,
+ 5: 0.231,
+ 6: 0.231,
+ 7: 0.389,
+ 8: 0.222,
+ 9: 0.000}
for n in sorted(G):
- assert_almost_equal(c[n],d[n],places=3)
+ assert_almost_equal(c[n], d[n], places=3)
def test_florentine_families_load(self):
- G=self.F
- c=nx.load_centrality(G)
- d={'Acciaiuoli': 0.000,
- 'Albizzi': 0.211,
- 'Barbadori': 0.093,
- 'Bischeri': 0.104,
- 'Castellani': 0.055,
- 'Ginori': 0.000,
- 'Guadagni': 0.251,
- 'Lamberteschi': 0.000,
- 'Medici': 0.522,
- 'Pazzi': 0.000,
- 'Peruzzi': 0.022,
- 'Ridolfi': 0.117,
- 'Salviati': 0.143,
- 'Strozzi': 0.106,
- 'Tornabuoni': 0.090}
+ G = self.F
+ c = nx.load_centrality(G)
+ d = {'Acciaiuoli': 0.000,
+ 'Albizzi': 0.211,
+ 'Barbadori': 0.093,
+ 'Bischeri': 0.104,
+ 'Castellani': 0.055,
+ 'Ginori': 0.000,
+ 'Guadagni': 0.251,
+ 'Lamberteschi': 0.000,
+ 'Medici': 0.522,
+ 'Pazzi': 0.000,
+ 'Peruzzi': 0.022,
+ 'Ridolfi': 0.117,
+ 'Salviati': 0.143,
+ 'Strozzi': 0.106,
+ 'Tornabuoni': 0.090}
for n in sorted(G):
- assert_almost_equal(c[n],d[n],places=3)
-
+ assert_almost_equal(c[n], d[n], places=3)
def test_unnormalized_k5_load(self):
- G=self.K5
- c=nx.load_centrality(G,normalized=False)
- d={0: 0.000,
- 1: 0.000,
- 2: 0.000,
- 3: 0.000,
- 4: 0.000}
+ G = self.K5
+ c = nx.load_centrality(G, normalized=False)
+ d = {0: 0.000,
+ 1: 0.000,
+ 2: 0.000,
+ 3: 0.000,
+ 4: 0.000}
for n in sorted(G):
- assert_almost_equal(c[n],d[n],places=3)
+ assert_almost_equal(c[n], d[n], places=3)
def test_unnormalized_p3_load(self):
- G=self.P3
- c=nx.load_centrality(G,normalized=False)
- d={0: 0.000,
- 1: 2.000,
- 2: 0.000}
+ G = self.P3
+ c = nx.load_centrality(G, normalized=False)
+ d = {0: 0.000,
+ 1: 2.000,
+ 2: 0.000}
for n in sorted(G):
- assert_almost_equal(c[n],d[n],places=3)
-
+ assert_almost_equal(c[n], d[n], places=3)
def test_unnormalized_krackhardt_load(self):
- G=self.K
- c=nx.load_centrality(G,normalized=False)
- d={0: 1.667,
- 1: 1.667,
- 2: 0.000,
- 3: 7.333,
- 4: 0.000,
- 5: 16.667,
- 6: 16.667,
- 7: 28.000,
- 8: 16.000,
- 9: 0.000}
+ G = self.K
+ c = nx.load_centrality(G, normalized=False)
+ d = {0: 1.667,
+ 1: 1.667,
+ 2: 0.000,
+ 3: 7.333,
+ 4: 0.000,
+ 5: 16.667,
+ 6: 16.667,
+ 7: 28.000,
+ 8: 16.000,
+ 9: 0.000}
for n in sorted(G):
- assert_almost_equal(c[n],d[n],places=3)
+ assert_almost_equal(c[n], d[n], places=3)
def test_unnormalized_florentine_families_load(self):
- G=self.F
- c=nx.load_centrality(G,normalized=False)
-
- d={'Acciaiuoli': 0.000,
- 'Albizzi': 38.333,
- 'Barbadori': 17.000,
- 'Bischeri': 19.000,
- 'Castellani': 10.000,
- 'Ginori': 0.000,
- 'Guadagni': 45.667,
- 'Lamberteschi': 0.000,
- 'Medici': 95.000,
- 'Pazzi': 0.000,
- 'Peruzzi': 4.000,
- 'Ridolfi': 21.333,
- 'Salviati': 26.000,
- 'Strozzi': 19.333,
- 'Tornabuoni': 16.333}
+ G = self.F
+ c = nx.load_centrality(G, normalized=False)
+
+ d = {'Acciaiuoli': 0.000,
+ 'Albizzi': 38.333,
+ 'Barbadori': 17.000,
+ 'Bischeri': 19.000,
+ 'Castellani': 10.000,
+ 'Ginori': 0.000,
+ 'Guadagni': 45.667,
+ 'Lamberteschi': 0.000,
+ 'Medici': 95.000,
+ 'Pazzi': 0.000,
+ 'Peruzzi': 4.000,
+ 'Ridolfi': 21.333,
+ 'Salviati': 26.000,
+ 'Strozzi': 19.333,
+ 'Tornabuoni': 16.333}
for n in sorted(G):
- assert_almost_equal(c[n],d[n],places=3)
-
+ assert_almost_equal(c[n], d[n], places=3)
def test_load_betweenness_difference(self):
# Difference Between Load and Betweenness
@@ -192,7 +187,7 @@ class TestLoadCentrality:
# Graph A and B are from Tao Zhou, Jian-Guo Liu, Bing-Hong
# Wang: Comment on "Scientific collaboration
# networks. II. Shortest paths, weighted networks, and
- # centrality". http://arxiv.org/pdf/physics/0511084
+ # centrality". https://arxiv.org/pdf/physics/0511084
# Notice that unlike here, their calculation adds 1 to the
# betweenness of every node i for every path from i to every
@@ -212,62 +207,61 @@ class TestLoadCentrality:
# A.add_edges_from([(0,1), (1,2), (1,3), (2,4),
# (3,5), (4,6), (4,7), (4,8),
# (5,8), (6,9), (7,9), (8,9)])
- B = nx.Graph() # ladder_graph(3)
- B.add_edges_from([(0,1), (0,2), (1,3), (2,3), (2,4), (4,5), (3,5)])
- c = nx.load_centrality(B,normalized=False)
- d={0: 1.750,
- 1: 1.750,
- 2: 6.500,
- 3: 6.500,
- 4: 1.750,
- 5: 1.750}
+ B = nx.Graph() # ladder_graph(3)
+ B.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
+ c = nx.load_centrality(B, normalized=False)
+ d = {0: 1.750,
+ 1: 1.750,
+ 2: 6.500,
+ 3: 6.500,
+ 4: 1.750,
+ 5: 1.750}
for n in sorted(B):
- assert_almost_equal(c[n],d[n],places=3)
-
+ assert_almost_equal(c[n], d[n], places=3)
def test_c4_edge_load(self):
- G=self.C4
+ G = self.C4
c = nx.edge_load_centrality(G)
- d={(0, 1): 6.000,
- (0, 3): 6.000,
- (1, 2): 6.000,
- (2, 3): 6.000}
+ d = {(0, 1): 6.000,
+ (0, 3): 6.000,
+ (1, 2): 6.000,
+ (2, 3): 6.000}
for n in G.edges():
- assert_almost_equal(c[n],d[n],places=3)
+ assert_almost_equal(c[n], d[n], places=3)
def test_p4_edge_load(self):
- G=self.P4
+ G = self.P4
c = nx.edge_load_centrality(G)
- d={(0, 1): 6.000,
- (1, 2): 8.000,
- (2, 3): 6.000}
+ d = {(0, 1): 6.000,
+ (1, 2): 8.000,
+ (2, 3): 6.000}
for n in G.edges():
- assert_almost_equal(c[n],d[n],places=3)
+ assert_almost_equal(c[n], d[n], places=3)
def test_k5_edge_load(self):
- G=self.K5
+ G = self.K5
c = nx.edge_load_centrality(G)
- d={(0, 1): 5.000,
- (0, 2): 5.000,
- (0, 3): 5.000,
- (0, 4): 5.000,
- (1, 2): 5.000,
- (1, 3): 5.000,
- (1, 4): 5.000,
- (2, 3): 5.000,
- (2, 4): 5.000,
- (3, 4): 5.000}
+ d = {(0, 1): 5.000,
+ (0, 2): 5.000,
+ (0, 3): 5.000,
+ (0, 4): 5.000,
+ (1, 2): 5.000,
+ (1, 3): 5.000,
+ (1, 4): 5.000,
+ (2, 3): 5.000,
+ (2, 4): 5.000,
+ (3, 4): 5.000}
for n in G.edges():
- assert_almost_equal(c[n],d[n],places=3)
+ assert_almost_equal(c[n], d[n], places=3)
def test_tree_edge_load(self):
- G=self.T
+ G = self.T
c = nx.edge_load_centrality(G)
- d={(0, 1): 24.000,
- (0, 2): 24.000,
- (1, 3): 12.000,
- (1, 4): 12.000,
- (2, 5): 12.000,
- (2, 6): 12.000}
+ d = {(0, 1): 24.000,
+ (0, 2): 24.000,
+ (1, 3): 12.000,
+ (1, 4): 12.000,
+ (2, 5): 12.000,
+ (2, 6): 12.000}
for n in G.edges():
- assert_almost_equal(c[n],d[n],places=3)
+ assert_almost_equal(c[n], d[n], places=3)
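The comment block above stresses that load and betweenness centrality can disagree; the ladder-like graph ``B`` from the test makes that concrete::

    import networkx as nx

    B = nx.Graph()
    B.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
    load = nx.load_centrality(B, normalized=False)        # load[0] == 1.75
    btw = nx.betweenness_centrality(B, normalized=False)  # differs from load here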
diff --git a/networkx/algorithms/chordal.py b/networkx/algorithms/chordal.py
index 7f06e614..a561f67e 100644
--- a/networkx/algorithms/chordal.py
+++ b/networkx/algorithms/chordal.py
@@ -4,7 +4,7 @@ Algorithms for chordal graphs.
A graph is chordal if every cycle of length at least 4 has a chord
(an edge joining two nodes not adjacent in the cycle).
-http://en.wikipedia.org/wiki/Chordal_graph
+https://en.wikipedia.org/wiki/Chordal_graph
"""
import networkx as nx
import random
@@ -235,7 +235,7 @@ def chordal_graph_treewidth(G):
References
----------
- .. [1] http://en.wikipedia.org/wiki/Tree_decomposition#Treewidth
+ .. [1] https://en.wikipedia.org/wiki/Tree_decomposition#Treewidth
"""
if not is_chordal(G):
raise nx.NetworkXError("Input graph is not chordal.")
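A minimal sketch of the chordal-graph helpers whose references are updated above::

    import networkx as nx

    G = nx.complete_graph(4)       # complete graphs are chordal
    nx.is_chordal(G)               # True
    nx.chordal_graph_treewidth(G)  # 3: the largest clique size minus one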
diff --git a/networkx/algorithms/cluster.py b/networkx/algorithms/cluster.py
index 78de86bf..f446dc2c 100644
--- a/networkx/algorithms/cluster.py
+++ b/networkx/algorithms/cluster.py
@@ -175,7 +175,7 @@ def average_clustering(G, nodes=None, weight=None, count_zeros=True):
http://jponnela.com/web_documents/a9.pdf
.. [2] Marcus Kaiser, Mean clustering coefficients: the role of isolated
nodes and leafs on clustering measures for small-world networks.
- http://arxiv.org/abs/0802.2512
+ https://arxiv.org/abs/0802.2512
"""
c = clustering(G, nodes, weight=weight).values()
if not count_zeros:
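A short example of the ``count_zeros`` behavior surrounding the hunk above::

    import networkx as nx

    G = nx.Graph([(0, 1), (1, 2), (0, 2), (2, 3)])  # a triangle plus a pendant node
    nx.average_clustering(G)                         # includes node 3's zero coefficient
    nx.average_clustering(G, count_zeros=False)      # averages only the non-zero nodes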
diff --git a/networkx/algorithms/communicability_alg.py b/networkx/algorithms/communicability_alg.py
index 0b684b61..1650d6f5 100644
--- a/networkx/algorithms/communicability_alg.py
+++ b/networkx/algorithms/communicability_alg.py
@@ -69,7 +69,7 @@ def communicability(G):
.. [1] Ernesto Estrada, Naomichi Hatano,
"Communicability in complex networks",
Phys. Rev. E 77, 036111 (2008).
- http://arxiv.org/abs/0707.0756
+ https://arxiv.org/abs/0707.0756
Examples
--------
@@ -147,7 +147,7 @@ def communicability_exp(G):
.. [1] Ernesto Estrada, Naomichi Hatano,
"Communicability in complex networks",
Phys. Rev. E 77, 036111 (2008).
- http://arxiv.org/abs/0707.0756
+ https://arxiv.org/abs/0707.0756
Examples
--------
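A minimal usage sketch of the communicability functions documented above (both need NumPy/SciPy)::

    import networkx as nx

    G = nx.path_graph(3)
    c = nx.communicability(G)       # spectral route; dict of dicts over node pairs
    ce = nx.communicability_exp(G)  # matrix-exponential route
    c[0][2]                         # communicability between the two endpoints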
diff --git a/networkx/algorithms/community/quality.py b/networkx/algorithms/community/quality.py
index 6da462ac..7de690af 100644
--- a/networkx/algorithms/community/quality.py
+++ b/networkx/algorithms/community/quality.py
@@ -181,7 +181,7 @@ def performance(G, partition):
.. [1] Santo Fortunato.
"Community Detection in Graphs".
*Physical Reports*, Volume 486, Issue 3--5 pp. 75--174
- <http://arxiv.org/abs/0906.0612>
+ <https://arxiv.org/abs/0906.0612>
"""
# Compute the number of intra-community edges and inter-community
@@ -236,7 +236,7 @@ def coverage(G, partition):
.. [1] Santo Fortunato.
"Community Detection in Graphs".
*Physical Reports*, Volume 486, Issue 3--5 pp. 75--174
- <http://arxiv.org/abs/0906.0612>
+ <https://arxiv.org/abs/0906.0612>
"""
intra_edges = intra_community_edges(G, partition)
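A small sketch of the two quality measures cited above, on a graph with an obvious two-community split::

    import networkx as nx
    from networkx.algorithms.community import coverage, performance

    G = nx.barbell_graph(3, 0)
    partition = [{0, 1, 2}, {3, 4, 5}]
    coverage(G, partition)     # fraction of edges inside a community: 6/7 here
    performance(G, partition)  # fraction of node pairs classified correctly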
diff --git a/networkx/algorithms/connectivity/edge_kcomponents.py b/networkx/algorithms/connectivity/edge_kcomponents.py
index e6b01b66..b382cb93 100644
--- a/networkx/algorithms/connectivity/edge_kcomponents.py
+++ b/networkx/algorithms/connectivity/edge_kcomponents.py
@@ -90,7 +90,7 @@ def k_edge_components(G, k):
References
----------
- .. [1] https://en.wikipedia.org/wiki/Bridge_(graph_theory)
+ .. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29
.. [2] Wang, Tianhao, et al. (2015) A simple algorithm for finding all
k-edge-connected components.
http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264
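For reference, a minimal sketch of the bridge-based decomposition documented above::

    import networkx as nx

    G = nx.barbell_graph(3, 0)
    sorted(map(sorted, nx.k_edge_components(G, k=2)))
    # [[0, 1, 2], [3, 4, 5]]: the single bridge separates the two triangles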
diff --git a/networkx/algorithms/connectivity/kcomponents.py b/networkx/algorithms/connectivity/kcomponents.py
index 2b5de33d..1aef9808 100644
--- a/networkx/algorithms/connectivity/kcomponents.py
+++ b/networkx/algorithms/connectivity/kcomponents.py
@@ -100,7 +100,7 @@ def k_components(G, flow_func=None):
.. [3] Torrents, J. and F. Ferraro (2015). Structural Cohesion:
Visualization and Heuristics for Fast Computation.
- http://arxiv.org/pdf/1503.04476v1
+ https://arxiv.org/pdf/1503.04476v1
"""
# Dictionary with connectivity level (k) as keys and a list of
@@ -111,7 +111,7 @@ def k_components(G, flow_func=None):
if flow_func is None:
flow_func = default_flow_func
# Bicomponents as a base to check for higher order k-components
- for component in nx.connected_components(G):
+ for component in nx.connected_components(G):
# isolated nodes have connectivity 0
comp = set(component)
if len(comp) > 1:
@@ -194,21 +194,21 @@ def _generate_partition(G, cuts, k):
component.add(node)
if len(component) < G.order():
components.append(component)
- for component in _consolidate(components, k+1):
+ for component in _consolidate(components, k + 1):
yield component
def _reconstruct_k_components(k_comps):
result = dict()
max_k = max(k_comps)
- for k in reversed(range(1, max_k+1)):
+ for k in reversed(range(1, max_k + 1)):
if k == max_k:
result[k] = list(_consolidate(k_comps[k], k))
elif k not in k_comps:
- result[k] = list(_consolidate(result[k+1], k))
+ result[k] = list(_consolidate(result[k + 1], k))
else:
nodes_at_k = set.union(*k_comps[k])
- to_add = [c for c in result[k+1] if any(n not in nodes_at_k for n in c)]
+ to_add = [c for c in result[k + 1] if any(n not in nodes_at_k for n in c)]
if to_add:
result[k] = list(_consolidate(k_comps[k] + to_add, k))
else:
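Unlike the approximation module earlier in this diff, this module computes k-components exactly; a minimal sketch::

    import networkx as nx

    G = nx.petersen_graph()
    nx.k_components(G)  # {1: [...], 2: [...], 3: [set of all ten nodes]}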
diff --git a/networkx/algorithms/connectivity/tests/test_kcomponents.py b/networkx/algorithms/connectivity/tests/test_kcomponents.py
index 764c6419..708d7b83 100644
--- a/networkx/algorithms/connectivity/tests/test_kcomponents.py
+++ b/networkx/algorithms/connectivity/tests/test_kcomponents.py
@@ -12,7 +12,7 @@ from networkx.algorithms.connectivity.kcomponents import (
def torrents_and_ferraro_graph():
- # Graph from http://arxiv.org/pdf/1503.04476v1 p.26
+ # Graph from https://arxiv.org/pdf/1503.04476v1 p.26
G = nx.convert_node_labels_to_integers(
nx.grid_graph([5, 5]),
label_attribute='labels',
diff --git a/networkx/algorithms/core.py b/networkx/algorithms/core.py
index 18300675..73f017f7 100644
--- a/networkx/algorithms/core.py
+++ b/networkx/algorithms/core.py
@@ -17,11 +17,11 @@ See the following references for details:
An O(m) Algorithm for Cores Decomposition of Networks
Vladimir Batagelj and Matjaz Zaversnik, 2003.
-http://arxiv.org/abs/cs.DS/0310049
+https://arxiv.org/abs/cs.DS/0310049
Generalized Cores
Vladimir Batagelj and Matjaz Zaversnik, 2002.
-http://arxiv.org/pdf/cs/0202039
+https://arxiv.org/pdf/cs/0202039
For directed graphs a more general notion is that of D-cores which
looks at (k, l) restrictions on (in, out) degree. The (k, k) D-core
@@ -75,7 +75,7 @@ def core_number(G):
----------
.. [1] An O(m) Algorithm for Cores Decomposition of Networks
Vladimir Batagelj and Matjaz Zaversnik, 2003.
- http://arxiv.org/abs/cs.DS/0310049
+ https://arxiv.org/abs/cs.DS/0310049
"""
if nx.number_of_selfloops(G) > 0:
msg = ('Input graph has self loops which is not permitted; '
@@ -88,7 +88,7 @@ def core_number(G):
curr_degree = 0
for i, v in enumerate(nodes):
if degrees[v] > curr_degree:
- bin_boundaries.extend([i] * (degrees[v]-curr_degree))
+ bin_boundaries.extend([i] * (degrees[v] - curr_degree))
curr_degree = degrees[v]
node_pos = {v: pos for pos, v in enumerate(nodes)}
# The initial guess for the core number of a node is its degree.
@@ -181,7 +181,7 @@ def k_core(G, k=None, core_number=None):
----------
.. [1] An O(m) Algorithm for Cores Decomposition of Networks
Vladimir Batagelj and Matjaz Zaversnik, 2003.
- http://arxiv.org/abs/cs.DS/0310049
+ https://arxiv.org/abs/cs.DS/0310049
"""
def k_filter(v, k, c):
return c[v] >= k
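An illustrative pairing of core_number and k_core, added for reference (assumes the NetworkX 2.0 API): a K4 with one pendant node has core numbers 3 and 1, and the 3-core drops the pendant:

>>> import networkx as nx
>>> G = nx.complete_graph(4)
>>> G.add_edge(3, 4)
>>> nx.core_number(G)[4]    # the pendant node sits in the 1-core only
1
>>> sorted(nx.k_core(G, k=3))
[0, 1, 2, 3]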
diff --git a/networkx/algorithms/dominating.py b/networkx/algorithms/dominating.py
index a10b5652..4289f894 100644
--- a/networkx/algorithms/dominating.py
+++ b/networkx/algorithms/dominating.py
@@ -39,7 +39,7 @@ def dominating_set(G, start_with=None):
References
----------
- .. [1] http://en.wikipedia.org/wiki/Dominating_set
+ .. [1] https://en.wikipedia.org/wiki/Dominating_set
.. [2] Abdol-Hossein Esfahanian. Connectivity Algorithms.
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
@@ -86,7 +86,7 @@ def is_dominating_set(G, nbunch):
References
----------
- .. [1] http://en.wikipedia.org/wiki/Dominating_set
+ .. [1] https://en.wikipedia.org/wiki/Dominating_set
"""
testset = set(n for n in nbunch if n in G)
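A sketch exercising both functions patched here (illustrative, not from the file; assumes the NetworkX 2.0 API): in a star graph the center alone dominates every node:

>>> import networkx as nx
>>> G = nx.star_graph(4)
>>> nx.dominating_set(G, start_with=0)
{0}
>>> nx.is_dominating_set(G, {0})
True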
diff --git a/networkx/algorithms/euler.py b/networkx/algorithms/euler.py
index eb552779..4a49b50c 100644
--- a/networkx/algorithms/euler.py
+++ b/networkx/algorithms/euler.py
@@ -146,7 +146,7 @@ def eulerian_circuit(G, source=None, keys=False):
.. [1] J. Edmonds, E. L. Johnson.
Matching, Euler tours and the Chinese postman.
Mathematical programming, Volume 5, Issue 1 (1973), 111-114.
- .. [2] http://en.wikipedia.org/wiki/Eulerian_path
+ .. [2] https://en.wikipedia.org/wiki/Eulerian_path
Examples
--------
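A minimal eulerian_circuit sketch (an added example, not part of this patch; assumes the NetworkX 2.0 API, and the orientation of the returned walk may vary by version):

>>> import networkx as nx
>>> G = nx.cycle_graph(4)
>>> len(list(nx.eulerian_circuit(G, source=0)))   # closed walk over all 4 edges
4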
diff --git a/networkx/algorithms/isomorphism/temporalisomorphvf2.py b/networkx/algorithms/isomorphism/temporalisomorphvf2.py
index 39ea1e47..23228cb5 100644
--- a/networkx/algorithms/isomorphism/temporalisomorphvf2.py
+++ b/networkx/algorithms/isomorphism/temporalisomorphvf2.py
@@ -4,34 +4,34 @@
Time-respecting VF2 Algorithm
*****************************
-An extension of the VF2 algorithm for time-respecting graph ismorphism
+An extension of the VF2 algorithm for time-respecting graph isomorphism
testing in temporal graphs.
-A temporal graph is one in which edges contain a datetime attribute,
-denoting when interaction occurred between the incident nodes. A
-time-respecting subgraph of a temporal graph is a subgraph such that
-all interactions incident to a node occurred within a time threshold,
-delta, of each other. A directed time-respecting subgraph has the
-added constraint that incoming interactions to a node must precede
-outgoing interactions from the same node - this enforces a sense of
+A temporal graph is one in which edges contain a datetime attribute,
+denoting when interaction occurred between the incident nodes. A
+time-respecting subgraph of a temporal graph is a subgraph such that
+all interactions incident to a node occurred within a time threshold,
+delta, of each other. A directed time-respecting subgraph has the
+added constraint that incoming interactions to a node must precede
+outgoing interactions from the same node - this enforces a sense of
directed flow.
Introduction
------------
-The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher
-extend the GraphMatcher and DiGraphMatcher classes, respectively,
-to include temporal constraints on matches. This is achieved through
+The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher
+extend the GraphMatcher and DiGraphMatcher classes, respectively,
+to include temporal constraints on matches. This is achieved through
a semantic check, via the semantic_feasibility() function.
-As well as including G1 (the graph in which to seek embeddings) and
-G2 (the subgraph structure of interest), the name of the temporal
-attribute on the edges and the time threshold, delta, must be supplied
+As well as including G1 (the graph in which to seek embeddings) and
+G2 (the subgraph structure of interest), the name of the temporal
+attribute on the edges and the time threshold, delta, must be supplied
as arguments to the matching constructors.
-A delta of zero is the strictest temporal constraint on the match -
-only embeddings in which all interactions occur at the same time will
-be returned. A delta of one day will allow embeddings in which
+A delta of zero is the strictest temporal constraint on the match -
+only embeddings in which all interactions occur at the same time will
+be returned. A delta of one day will allow embeddings in which
adjacent interactions occur up to a day apart.
Examples
@@ -43,20 +43,20 @@ Examples will be provided when the datetime type has been incorporated.
Temporal Subgraph Isomorphism
-----------------------------
-A brief discussion of the somewhat diverse current literature will be
+A brief discussion of the somewhat diverse current literature will be
included here.
References
----------
-[1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In:
-The 2013 IEEE/ACM International Conference on Advances in Social
-Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013:
+[1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In:
+The 2013 IEEE/ACM International Conference on Advances in Social
+Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013:
pages 1451 - 1452. [65]
For a discussion of the literature on temporal networks:
-[3] P. Holme and J. Saramaki. Temporal networks. Physics Reports,
+[3] P. Holme and J. Saramaki. Temporal networks. Physics Reports,
519(3):97–125, 2012.
Notes
@@ -84,7 +84,7 @@ class TimeRespectingGraphMatcher(GraphMatcher):
Examples
--------
- To create a TimeRespectingGraphMatcher which checks for
+ To create a TimeRespectingGraphMatcher which checks for
syntactic and semantic feasibility:
>>> from networkx.algorithms import isomorphism
@@ -97,23 +97,23 @@ class TimeRespectingGraphMatcher(GraphMatcher):
self.temporal_attribute_name = temporal_attribute_name
self.delta = delta
super(TimeRespectingGraphMatcher, self).__init__(G1, G2)
-
+
def one_hop(self, Gx, Gx_node, neighbors):
"""
- Edges one hop out from a node in the mapping should be
+ Edges one hop out from a node in the mapping should be
time-respecting with respect to each other.
"""
dates = []
for n in neighbors:
- if type(Gx) == type(nx.Graph()): # Graph G[u][v] returns the data dictionary.
+ if type(Gx) == type(nx.Graph()): # Graph G[u][v] returns the data dictionary.
dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
- else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
- for edge in Gx[Gx_node][n].values(): # Iterates all edges between node pair.
+ else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+ for edge in Gx[Gx_node][n].values(): # Iterates all edges between node pair.
dates.append(edge[self.temporal_attribute_name])
if any(x is None for x in dates):
raise ValueError('Datetime not supplied for at least one edge.')
return not dates or max(dates) - min(dates) <= self.delta
-
+
def two_hop(self, Gx, core_x, Gx_node, neighbors):
"""
Paths of length 2 from Gx_node should be time-respecting.
@@ -121,15 +121,15 @@ class TimeRespectingGraphMatcher(GraphMatcher):
return all(self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node]) for v in neighbors)
def semantic_feasibility(self, G1_node, G2_node):
- """Returns True if adding (G1_node, G2_node) is semantically
+ """Returns True if adding (G1_node, G2_node) is semantically
feasible.
- Any subclass which redefines semantic_feasibility() must
- maintain the self.tests if needed, to keep the match() method
+ Any subclass which redefines semantic_feasibility() must
+ maintain the self.tests if needed, to keep the match() method
functional. Implementations should consider multigraphs.
"""
neighbors = [n for n in self.G1[G1_node] if n in self.core_1]
- if not self.one_hop(self.G1, G1_node, neighbors): # Fail fast on first node.
+ if not self.one_hop(self.G1, G1_node, neighbors): # Fail fast on first node.
return False
if not self.two_hop(self.G1, self.core_1, G1_node, neighbors):
return False
@@ -146,7 +146,7 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher):
Examples
--------
- To create a TimeRespectingDiGraphMatcher which checks for
+ To create a TimeRespectingDiGraphMatcher which checks for
syntactic and semantic feasibility:
>>> from networkx.algorithms import isomorphism
@@ -159,32 +159,32 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher):
self.temporal_attribute_name = temporal_attribute_name
self.delta = delta
super(TimeRespectingDiGraphMatcher, self).__init__(G1, G2)
-
+
def get_pred_dates(self, Gx, Gx_node, core_x, pred):
"""
Get the dates of edges from predecessors.
"""
pred_dates = []
- if type(Gx) == type(nx.DiGraph()): # Graph G[u][v] returns the data dictionary.
+ if type(Gx) == type(nx.DiGraph()): # Graph G[u][v] returns the data dictionary.
for n in pred:
pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name])
- else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+ else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
for n in pred:
- for edge in Gx[n][Gx_node].values(): # Iterates all edge data between node pair.
+ for edge in Gx[n][Gx_node].values(): # Iterates all edge data between node pair.
pred_dates.append(edge[self.temporal_attribute_name])
return pred_dates
-
+
def get_succ_dates(self, Gx, Gx_node, core_x, succ):
"""
Get the dates of edges to successors.
"""
succ_dates = []
- if type(Gx) == type(nx.DiGraph()): # Graph G[u][v] returns the data dictionary.
+ if type(Gx) == type(nx.DiGraph()): # Graph G[u][v] returns the data dictionary.
for n in succ:
succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
- else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+ else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
for n in succ:
- for edge in Gx[Gx_node][n].values(): # Iterates all edge data between node pair.
+ for edge in Gx[Gx_node][n].values(): # Iterates all edge data between node pair.
succ_dates.append(edge[self.temporal_attribute_name])
return succ_dates
@@ -195,7 +195,7 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher):
pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred)
succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ)
return self.test_one(pred_dates, succ_dates) and self.test_two(pred_dates, succ_dates)
-
+
def two_hop_pred(self, Gx, Gx_node, core_x, pred):
"""
The predecessors of the ego node.
@@ -207,39 +207,39 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher):
The successors of the ego node.
"""
return all(self.one_hop(Gx, s, core_x, self.preds(Gx, core_x, s, Gx_node), self.succs(Gx, core_x, s)) for s in succ)
-
+
def preds(self, Gx, core_x, v, Gx_node=None):
pred = [n for n in Gx.predecessors(v) if n in core_x]
if Gx_node:
pred.append(Gx_node)
return pred
-
+
def succs(self, Gx, core_x, v, Gx_node=None):
succ = [n for n in Gx.successors(v) if n in core_x]
if Gx_node:
succ.append(Gx_node)
return succ
-
+
def test_one(self, pred_dates, succ_dates):
"""
- Edges one hop out from Gx_node in the mapping should be
- time-respecting with respect to each other, regardless of
+ Edges one hop out from Gx_node in the mapping should be
+ time-respecting with respect to each other, regardless of
direction.
"""
time_respecting = True
dates = pred_dates + succ_dates
-
+
if any(x is None for x in dates):
raise ValueError('Date or datetime not supplied for at least one edge.')
-
- dates.sort() # Small to large.
+
+ dates.sort() # Small to large.
if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta):
time_respecting = False
return time_respecting
-
+
def test_two(self, pred_dates, succ_dates):
"""
- Edges from a dual Gx_node in the mapping should be ordered in
+ Edges from a dual Gx_node in the mapping should be ordered in
a time-respecting manner.
"""
time_respecting = True
@@ -251,15 +251,15 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher):
return time_respecting
def semantic_feasibility(self, G1_node, G2_node):
- """Returns True if adding (G1_node, G2_node) is semantically
+ """Returns True if adding (G1_node, G2_node) is semantically
feasible.
- Any subclass which redefines semantic_feasibility() must
- maintain the self.tests if needed, to keep the match() method
+ Any subclass which redefines semantic_feasibility() must
+ maintain the self.tests if needed, to keep the match() method
functional. Implementations should consider multigraphs.
"""
pred, succ = [n for n in self.G1.predecessors(G1_node) if n in self.core_1], [n for n in self.G1.successors(G1_node) if n in self.core_1]
- if not self.one_hop(self.G1, G1_node, self.core_1, pred, succ): # Fail fast on first node.
+ if not self.one_hop(self.G1, G1_node, self.core_1, pred, succ): # Fail fast on first node.
return False
if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred):
return False
@@ -267,4 +267,3 @@ class TimeRespectingDiGraphMatcher(DiGraphMatcher):
return False
# Otherwise, this node is semantically feasible!
return True
-
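Putting the matcher together, an end-to-end sketch (illustrative, not part of this patch; it assumes the NetworkX 2.0 API, uses 'date' as the chosen temporal attribute name, and passes delta as a timedelta so that it compares against date differences):

>>> import networkx as nx
>>> from datetime import datetime, timedelta
>>> from networkx.algorithms import isomorphism
>>> G1 = nx.Graph()   # the temporal graph in which to seek embeddings
>>> G1.add_edge('a', 'b', date=datetime(2017, 9, 1, 10, 0))
>>> G1.add_edge('b', 'c', date=datetime(2017, 9, 1, 10, 30))
>>> G2 = nx.path_graph(3)   # the subgraph structure of interest
>>> gm = isomorphism.TimeRespectingGraphMatcher(G1, G2, 'date',
...                                             timedelta(hours=1))
>>> gm.subgraph_is_isomorphic()   # both interactions fall within one hour
True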
diff --git a/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py b/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
index 93c0d3dd..37407c1c 100644
--- a/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
+++ b/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
@@ -11,46 +11,48 @@ from nose import SkipTest
import networkx as nx
from networkx.algorithms import isomorphism as iso
+
class TestWikipediaExample(object):
- # Source: http://en.wikipedia.org/wiki/Graph_isomorphism
+ # Source: https://en.wikipedia.org/wiki/Graph_isomorphism
# Nodes 'a', 'b', 'c' and 'd' form a column.
# Nodes 'g', 'h', 'i' and 'j' form a column.
- g1edges = [['a','g'], ['a','h'], ['a','i'],
- ['b','g'], ['b','h'], ['b','j'],
- ['c','g'], ['c','i'], ['c','j'],
- ['d','h'], ['d','i'], ['d','j']]
+ g1edges = [['a', 'g'], ['a', 'h'], ['a', 'i'],
+ ['b', 'g'], ['b', 'h'], ['b', 'j'],
+ ['c', 'g'], ['c', 'i'], ['c', 'j'],
+ ['d', 'h'], ['d', 'i'], ['d', 'j']]
# Nodes 1,2,3,4 form the clockwise corners of a large square.
- # Nodes 5,6,7,8 form the clockwise corners of a small square
- g2edges = [[1,2], [2,3], [3,4], [4,1],
- [5,6], [6,7], [7,8], [8,5],
- [1,5], [2,6], [3,7], [4,8]]
+ # Nodes 5,6,7,8 form the clockwise corners of a small square
+ g2edges = [[1, 2], [2, 3], [3, 4], [4, 1],
+ [5, 6], [6, 7], [7, 8], [8, 5],
+ [1, 5], [2, 6], [3, 7], [4, 8]]
def test_graph(self):
g1 = nx.Graph()
g2 = nx.Graph()
g1.add_edges_from(self.g1edges)
g2.add_edges_from(self.g2edges)
- gm = iso.GraphMatcher(g1,g2)
+ gm = iso.GraphMatcher(g1, g2)
assert_true(gm.is_isomorphic())
mapping = sorted(gm.mapping.items())
-# this mapping is only one of the possibilies
-# so this test needs to be reconsidered
-# isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8),
+# this mapping is only one of the possibilities
+# so this test needs to be reconsidered
+# isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8),
# ('g', 2), ('h', 5), ('i', 4), ('j', 7)]
# assert_equal(mapping, isomap)
-
+
def test_subgraph(self):
g1 = nx.Graph()
g2 = nx.Graph()
g1.add_edges_from(self.g1edges)
g2.add_edges_from(self.g2edges)
- g3 = g2.subgraph([1,2,3,4])
- gm = iso.GraphMatcher(g1,g3)
+ g3 = g2.subgraph([1, 2, 3, 4])
+ gm = iso.GraphMatcher(g1, g3)
assert_true(gm.subgraph_is_isomorphic())
+
class TestVF2GraphDB(object):
# http://amalfi.dis.unina.it/graph/db/
@@ -86,35 +88,36 @@ class TestVF2GraphDB(object):
return graph
def test_graph(self):
- head,tail = os.path.split(__file__)
- g1 = self.create_graph(os.path.join(head,'iso_r01_s80.A99'))
- g2 = self.create_graph(os.path.join(head,'iso_r01_s80.B99'))
- gm = iso.GraphMatcher(g1,g2)
+ head, tail = os.path.split(__file__)
+ g1 = self.create_graph(os.path.join(head, 'iso_r01_s80.A99'))
+ g2 = self.create_graph(os.path.join(head, 'iso_r01_s80.B99'))
+ gm = iso.GraphMatcher(g1, g2)
assert_true(gm.is_isomorphic())
def test_subgraph(self):
# A is the subgraph
# B is the full graph
- head,tail = os.path.split(__file__)
- subgraph = self.create_graph(os.path.join(head,'si2_b06_m200.A99'))
- graph = self.create_graph(os.path.join(head,'si2_b06_m200.B99'))
+ head, tail = os.path.split(__file__)
+ subgraph = self.create_graph(os.path.join(head, 'si2_b06_m200.A99'))
+ graph = self.create_graph(os.path.join(head, 'si2_b06_m200.B99'))
gm = iso.GraphMatcher(graph, subgraph)
assert_true(gm.subgraph_is_isomorphic())
+
class TestAtlas(object):
@classmethod
def setupClass(cls):
global atlas
import platform
- if platform.python_implementation()=='Jython':
+ if platform.python_implementation() == 'Jython':
raise SkipTest('graph atlas not available under Jython.')
import networkx.generators.atlas as atlas
-
+
def setUp(self):
- self.GAG=atlas.graph_atlas_g()
+ self.GAG = atlas.graph_atlas_g()
def test_graph_atlas(self):
- #Atlas = nx.graph_atlas_g()[0:208] # 208, 6 nodes or less
+ # Atlas = nx.graph_atlas_g()[0:208] # 208, 6 nodes or less
Atlas = self.GAG[0:100]
alphabet = list(range(26))
for graph in Atlas:
@@ -122,20 +125,21 @@ class TestAtlas(object):
labels = alphabet[:len(nlist)]
for s in range(10):
random.shuffle(labels)
- d = dict(zip(nlist,labels))
+ d = dict(zip(nlist, labels))
relabel = nx.relabel_nodes(graph, d)
gm = iso.GraphMatcher(graph, relabel)
assert_true(gm.is_isomorphic())
+
def test_multiedge():
# Simple test for multigraphs
# Need something much more rigorous
- edges = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5),
- (5, 6), (6, 7), (7, 8), (8, 9), (9, 10),
- (10, 11), (10, 11), (11, 12), (11, 12),
- (12, 13), (12, 13), (13, 14), (13, 14),
- (14, 15), (14, 15), (15, 16), (15, 16),
- (16, 17), (16, 17), (17, 18), (17, 18),
+ edges = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5),
+ (5, 6), (6, 7), (7, 8), (8, 9), (9, 10),
+ (10, 11), (10, 11), (11, 12), (11, 12),
+ (12, 13), (12, 13), (13, 14), (13, 14),
+ (14, 15), (14, 15), (15, 16), (15, 16),
+ (16, 17), (16, 17), (17, 18), (17, 18),
(18, 19), (18, 19), (19, 0), (19, 0)]
nodes = list(range(20))
@@ -147,14 +151,15 @@ def test_multiedge():
d = dict(zip(nodes, new_nodes))
g2 = nx.relabel_nodes(g1, d)
if not g1.is_directed():
- gm = iso.GraphMatcher(g1,g2)
+ gm = iso.GraphMatcher(g1, g2)
else:
- gm = iso.DiGraphMatcher(g1,g2)
+ gm = iso.DiGraphMatcher(g1, g2)
assert_true(gm.is_isomorphic())
+
def test_selfloop():
# Simple test for graphs with selfloops
- edges = [(0, 1), (0, 2), (1, 2), (1, 3), (2, 2),
+ edges = [(0, 1), (0, 2), (1, 2), (1, 3), (2, 2),
(2, 4), (3, 1), (3, 2), (4, 2), (4, 5), (5, 4)]
nodes = list(range(6))
@@ -166,59 +171,62 @@ def test_selfloop():
d = dict(zip(nodes, new_nodes))
g2 = nx.relabel_nodes(g1, d)
if not g1.is_directed():
- gm = iso.GraphMatcher(g1,g2)
+ gm = iso.GraphMatcher(g1, g2)
else:
- gm = iso.DiGraphMatcher(g1,g2)
+ gm = iso.DiGraphMatcher(g1, g2)
assert_true(gm.is_isomorphic())
+
def test_isomorphism_iter1():
# As described in:
# http://groups.google.com/group/networkx-discuss/browse_thread/thread/2ff65c67f5e3b99f/d674544ebea359bb?fwc=1
g1 = nx.DiGraph()
g2 = nx.DiGraph()
g3 = nx.DiGraph()
- g1.add_edge('A','B')
- g1.add_edge('B','C')
- g2.add_edge('Y','Z')
- g3.add_edge('Z','Y')
- gm12 = iso.DiGraphMatcher(g1,g2)
- gm13 = iso.DiGraphMatcher(g1,g3)
+ g1.add_edge('A', 'B')
+ g1.add_edge('B', 'C')
+ g2.add_edge('Y', 'Z')
+ g3.add_edge('Z', 'Y')
+ gm12 = iso.DiGraphMatcher(g1, g2)
+ gm13 = iso.DiGraphMatcher(g1, g3)
x = list(gm12.subgraph_isomorphisms_iter())
y = list(gm13.subgraph_isomorphisms_iter())
- assert_true({'A':'Y','B':'Z'} in x)
- assert_true({'B':'Y','C':'Z'} in x)
- assert_true({'A':'Z','B':'Y'} in y)
- assert_true({'B':'Z','C':'Y'} in y)
- assert_equal(len(x),len(y))
- assert_equal(len(x),2)
+ assert_true({'A': 'Y', 'B': 'Z'} in x)
+ assert_true({'B': 'Y', 'C': 'Z'} in x)
+ assert_true({'A': 'Z', 'B': 'Y'} in y)
+ assert_true({'B': 'Z', 'C': 'Y'} in y)
+ assert_equal(len(x), len(y))
+ assert_equal(len(x), 2)
+
def test_isomorphism_iter2():
# Path
- for L in range(2,10):
+ for L in range(2, 10):
g1 = nx.path_graph(L)
- gm = iso.GraphMatcher(g1,g1)
+ gm = iso.GraphMatcher(g1, g1)
s = len(list(gm.isomorphisms_iter()))
- assert_equal(s,2)
+ assert_equal(s, 2)
# Cycle
- for L in range(3,10):
+ for L in range(3, 10):
g1 = nx.cycle_graph(L)
- gm = iso.GraphMatcher(g1,g1)
+ gm = iso.GraphMatcher(g1, g1)
s = len(list(gm.isomorphisms_iter()))
- assert_equal(s, 2*L)
+ assert_equal(s, 2 * L)
+
def test_multiple():
# Verify that we can use the graph matcher multiple times
- edges = [('A','B'),('B','A'),('B','C')]
- for g1,g2 in [(nx.Graph(),nx.Graph()), (nx.DiGraph(),nx.DiGraph())]:
+ edges = [('A', 'B'), ('B', 'A'), ('B', 'C')]
+ for g1, g2 in [(nx.Graph(), nx.Graph()), (nx.DiGraph(), nx.DiGraph())]:
g1.add_edges_from(edges)
g2.add_edges_from(edges)
- g3 = nx.subgraph(g2, ['A','B'])
+ g3 = nx.subgraph(g2, ['A', 'B'])
if not g1.is_directed():
- gmA = iso.GraphMatcher(g1,g2)
- gmB = iso.GraphMatcher(g1,g3)
+ gmA = iso.GraphMatcher(g1, g2)
+ gmB = iso.GraphMatcher(g1, g3)
else:
- gmA = iso.DiGraphMatcher(g1,g2)
- gmB = iso.DiGraphMatcher(g1,g3)
+ gmA = iso.DiGraphMatcher(g1, g2)
+ gmB = iso.DiGraphMatcher(g1, g3)
assert_true(gmA.is_isomorphic())
g2.remove_node('C')
assert_true(gmA.subgraph_is_isomorphic())
@@ -227,4 +235,3 @@ def test_multiple():
# assert_true(m['A'] == 'A')
# assert_true(m['B'] == 'B')
# assert_true('C' not in m)
-
diff --git a/networkx/algorithms/link_analysis/hits_alg.py b/networkx/algorithms/link_analysis/hits_alg.py
index 2ca56bea..ea837234 100644
--- a/networkx/algorithms/link_analysis/hits_alg.py
+++ b/networkx/algorithms/link_analysis/hits_alg.py
@@ -10,9 +10,10 @@
import networkx as nx
from networkx.exception import NetworkXError
__author__ = """Aric Hagberg (hagberg@lanl.gov)"""
-__all__ = ['hits','hits_numpy','hits_scipy','authority_matrix','hub_matrix']
+__all__ = ['hits', 'hits_numpy', 'hits_scipy', 'authority_matrix', 'hub_matrix']
-def hits(G,max_iter=100,tol=1.0e-8,nstart=None,normalized=True):
+
+def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
"""Return HITS hubs and authorities values for nodes.
The HITS algorithm computes two numbers for a node.
@@ -79,61 +80,66 @@ def hits(G,max_iter=100,tol=1.0e-8,nstart=None,normalized=True):
if type(G) == nx.MultiGraph or type(G) == nx.MultiDiGraph:
raise Exception("hits() not defined for graphs with multiedges.")
if len(G) == 0:
- return {},{}
+ return {}, {}
# choose fixed starting vector if not given
if nstart is None:
- h=dict.fromkeys(G,1.0/G.number_of_nodes())
+ h = dict.fromkeys(G, 1.0 / G.number_of_nodes())
else:
- h=nstart
+ h = nstart
# normalize starting vector
- s=1.0/sum(h.values())
+ s = 1.0 / sum(h.values())
for k in h:
- h[k]*=s
+ h[k] *= s
for _ in range(max_iter): # power iteration: make up to max_iter iterations
- hlast=h
- h=dict.fromkeys(hlast.keys(),0)
- a=dict.fromkeys(hlast.keys(),0)
+ hlast = h
+ h = dict.fromkeys(hlast.keys(), 0)
+ a = dict.fromkeys(hlast.keys(), 0)
# this "matrix multiply" looks odd because it is
# doing a left multiply a^T=hlast^T*G
for n in h:
for nbr in G[n]:
- a[nbr]+=hlast[n]*G[n][nbr].get('weight',1)
+ a[nbr] += hlast[n] * G[n][nbr].get('weight', 1)
# now multiply h=Ga
for n in h:
for nbr in G[n]:
- h[n]+=a[nbr]*G[n][nbr].get('weight',1)
+ h[n] += a[nbr] * G[n][nbr].get('weight', 1)
# normalize vector
- s=1.0/max(h.values())
- for n in h: h[n]*=s
+ s = 1.0 / max(h.values())
+ for n in h:
+ h[n] *= s
# normalize vector
- s=1.0/max(a.values())
- for n in a: a[n]*=s
+ s = 1.0 / max(a.values())
+ for n in a:
+ a[n] *= s
# check convergence, l1 norm
- err=sum([abs(h[n]-hlast[n]) for n in h])
+ err = sum([abs(h[n] - hlast[n]) for n in h])
if err < tol:
break
else:
raise nx.PowerIterationFailedConvergence(max_iter)
if normalized:
- s = 1.0/sum(a.values())
+ s = 1.0 / sum(a.values())
for n in a:
a[n] *= s
- s = 1.0/sum(h.values())
+ s = 1.0 / sum(h.values())
for n in h:
h[n] *= s
- return h,a
+ return h, a
+
-def authority_matrix(G,nodelist=None):
+def authority_matrix(G, nodelist=None):
"""Return the HITS authority matrix."""
- M=nx.to_numpy_matrix(G,nodelist=nodelist)
- return M.T*M
+ M = nx.to_numpy_matrix(G, nodelist=nodelist)
+ return M.T * M
-def hub_matrix(G,nodelist=None):
+
+def hub_matrix(G, nodelist=None):
"""Return the HITS hub matrix."""
- M=nx.to_numpy_matrix(G,nodelist=nodelist)
- return M*M.T
+ M = nx.to_numpy_matrix(G, nodelist=nodelist)
+ return M * M.T
+
-def hits_numpy(G,normalized=True):
+def hits_numpy(G, normalized=True):
"""Return HITS hubs and authorities values for nodes.
The HITS algorithm computes two numbers for a node.
@@ -181,29 +187,30 @@ def hits_numpy(G,normalized=True):
try:
import numpy as np
except ImportError:
- raise ImportError(\
+ raise ImportError(
"hits_numpy() requires NumPy: http://scipy.org/")
if len(G) == 0:
- return {},{}
+ return {}, {}
H = nx.hub_matrix(G, list(G))
- e,ev=np.linalg.eig(H)
- m=e.argsort()[-1] # index of maximum eigenvalue
- h=np.array(ev[:,m]).flatten()
- A=nx.authority_matrix(G, list(G))
- e,ev=np.linalg.eig(A)
- m=e.argsort()[-1] # index of maximum eigenvalue
- a=np.array(ev[:,m]).flatten()
+ e, ev = np.linalg.eig(H)
+ m = e.argsort()[-1] # index of maximum eigenvalue
+ h = np.array(ev[:, m]).flatten()
+ A = nx.authority_matrix(G, list(G))
+ e, ev = np.linalg.eig(A)
+ m = e.argsort()[-1] # index of maximum eigenvalue
+ a = np.array(ev[:, m]).flatten()
if normalized:
- h = h/h.sum()
- a = a/a.sum()
+ h = h / h.sum()
+ a = a / a.sum()
else:
- h = h/h.max()
- a = a/a.max()
+ h = h / h.max()
+ a = a / a.max()
hubs = dict(zip(G, map(float, h)))
authorities = dict(zip(G, map(float, a)))
- return hubs,authorities
+ return hubs, authorities
+
-def hits_scipy(G,max_iter=100,tol=1.0e-6,normalized=True):
+def hits_scipy(G, max_iter=100, tol=1.0e-6, normalized=True):
"""Return HITS hubs and authorities values for nodes.
The HITS algorithm computes two numbers for a node.
@@ -273,39 +280,41 @@ def hits_scipy(G,max_iter=100,tol=1.0e-6,normalized=True):
import scipy.sparse
import numpy as np
except ImportError:
- raise ImportError(\
+ raise ImportError(
"hits_scipy() requires SciPy: http://scipy.org/")
if len(G) == 0:
- return {},{}
+ return {}, {}
M = nx.to_scipy_sparse_matrix(G, nodelist=list(G))
- (n,m)=M.shape # should be square
- A=M.T*M # authority matrix
- x=scipy.ones((n,1))/n # initial guess
+ (n, m) = M.shape # should be square
+ A = M.T * M # authority matrix
+ x = scipy.ones((n, 1)) / n # initial guess
# power iteration on authority matrix
- i=0
+ i = 0
while True:
- xlast=x
- x=A*x
- x=x/x.max()
+ xlast = x
+ x = A * x
+ x = x / x.max()
# check convergence, l1 norm
- err=scipy.absolute(x-xlast).sum()
+ err = scipy.absolute(x - xlast).sum()
if err < tol:
break
- if i>max_iter:
+ if i > max_iter:
raise nx.PowerIterationFailedConvergence(max_iter)
- i+=1
+ i += 1
- a=np.asarray(x).flatten()
+ a = np.asarray(x).flatten()
# h=M*a
- h=np.asarray(M*a).flatten()
+ h = np.asarray(M * a).flatten()
if normalized:
- h = h/h.sum()
- a = a/a.sum()
+ h = h / h.sum()
+ a = a / a.sum()
hubs = dict(zip(G, map(float, h)))
authorities = dict(zip(G, map(float, a)))
- return hubs,authorities
+ return hubs, authorities
# fixture for nose tests
+
+
def setup_module(module):
from nose import SkipTest
try:
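A quick sanity sketch of the power iteration reformatted above (added for illustration; assumes the NetworkX 2.0 API): with the default normalized=True, both score vectors sum to one:

>>> import networkx as nx
>>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3)])
>>> hubs, authorities = nx.hits(G)
>>> round(sum(hubs.values()), 6), round(sum(authorities.values()), 6)
(1.0, 1.0)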
diff --git a/networkx/algorithms/link_prediction.py b/networkx/algorithms/link_prediction.py
index 9ab8d82f..b3f89959 100644
--- a/networkx/algorithms/link_prediction.py
+++ b/networkx/algorithms/link_prediction.py
@@ -85,7 +85,7 @@ def resource_allocation_index(G, ebunch=None):
.. [1] T. Zhou, L. Lu, Y.-C. Zhang.
Predicting missing links via local information.
Eur. Phys. J. B 71 (2009) 623.
- http://arxiv.org/pdf/0901.0553.pdf
+ https://arxiv.org/pdf/0901.0553.pdf
"""
def predict(u, v):
return sum(1 / G.degree(w) for w in nx.common_neighbors(G, u, v))
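The predict helper above is the whole algorithm: each common neighbor w contributes 1/degree(w). An illustrative call (not part of this patch; assumes the NetworkX 2.0 API); in K5, nodes 0 and 1 share three neighbors of degree 4:

>>> import networkx as nx
>>> preds = nx.resource_allocation_index(nx.complete_graph(5), [(0, 1)])
>>> list(preds)   # 3 common neighbors, each of degree 4 -> 3/4
[(0, 1, 0.75)]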
diff --git a/networkx/algorithms/richclub.py b/networkx/algorithms/richclub.py
index afc93cd9..7bb219be 100644
--- a/networkx/algorithms/richclub.py
+++ b/networkx/algorithms/richclub.py
@@ -71,10 +71,10 @@ def rich_club_coefficient(G, normalized=True, Q=100):
and Tibério S. Caetano,
"The rich-club phenomenon across complex network hierarchies",
Applied Physics Letters Vol 91 Issue 8, August 2007.
- http://arxiv.org/abs/physics/0701290
+ https://arxiv.org/abs/physics/0701290
.. [2] R. Milo, N. Kashtan, S. Itzkovitz, M. E. J. Newman, U. Alon,
"Uniform generation of random graphs with arbitrary degree
- sequences", 2006. http://arxiv.org/abs/cond-mat/0312028
+ sequences", 2006. https://arxiv.org/abs/cond-mat/0312028
"""
if nx.number_of_selfloops(G) > 0:
raise Exception('rich_club_coefficient is not implemented for '
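An illustrative call, added for reference (assumes the NetworkX 2.0 API); normalized=False keeps the result deterministic, since the normalized variant divides by coefficients from randomized graphs:

>>> import networkx as nx
>>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
>>> rc = nx.rich_club_coefficient(G, normalized=False)
>>> rc[0]   # 2*6 edges over 6*5 ordered node pairs
0.4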
diff --git a/networkx/algorithms/smetric.py b/networkx/algorithms/smetric.py
index 1cdc3287..75bcfbd1 100644
--- a/networkx/algorithms/smetric.py
+++ b/networkx/algorithms/smetric.py
@@ -27,7 +27,7 @@ def s_metric(G, normalized=True):
.. [1] Lun Li, David Alderson, John C. Doyle, and Walter Willinger,
Towards a Theory of Scale-Free Graphs:
Definition, Properties, and Implications (Extended Version), 2005.
- http://arxiv.org/abs/cond-mat/0501169
+ https://arxiv.org/abs/cond-mat/0501169
"""
if normalized:
raise nx.NetworkXError("Normalization not implemented")
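s_metric sums deg(u)*deg(v) over the edges, and the guard above forces normalized=False. An illustrative call (not part of this patch; assumes the NetworkX 2.0 API): a 3-leaf star has three edges, each contributing 3*1:

>>> import networkx as nx
>>> nx.s_metric(nx.star_graph(3), normalized=False)
9.0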
diff --git a/networkx/algorithms/tests/test_dominating.py b/networkx/algorithms/tests/test_dominating.py
index 294dfab5..7a49d162 100644
--- a/networkx/algorithms/tests/test_dominating.py
+++ b/networkx/algorithms/tests/test_dominating.py
@@ -37,7 +37,7 @@ def test_is_dominating_set():
def test_wikipedia_is_dominating_set():
- """Example from http://en.wikipedia.org/wiki/Dominating_set
+ """Example from https://en.wikipedia.org/wiki/Dominating_set
"""
G = nx.cycle_graph(4)
G.add_edges_from([(0, 4), (1, 4), (2, 5)])
diff --git a/networkx/algorithms/tree/tests/test_branchings.py b/networkx/algorithms/tree/tests/test_branchings.py
index b801c154..31215668 100644
--- a/networkx/algorithms/tree/tests/test_branchings.py
+++ b/networkx/algorithms/tree/tests/test_branchings.py
@@ -20,15 +20,15 @@ from networkx.testing import *
#
G_array = np.array([
# 0 1 2 3 4 5 6 7 8
- [ 0, 0, 12, 0, 12, 0, 0, 0, 0], # 0
- [ 4, 0, 0, 0, 0, 13, 0, 0, 0], # 1
- [ 0, 17, 0, 21, 0, 12, 0, 0, 0], # 2
- [ 5, 0, 0, 0, 17, 0, 18, 0, 0], # 3
- [ 0, 0, 0, 0, 0, 0, 0, 12, 0], # 4
- [ 0, 0, 0, 0, 0, 0, 14, 0, 12], # 5
- [ 0, 0, 21, 0, 0, 0, 0, 0, 15], # 6
- [ 0, 0, 0, 19, 0, 0, 15, 0, 0], # 7
- [ 0, 0, 0, 0, 0, 0, 0, 18, 0], # 8
+ [0, 0, 12, 0, 12, 0, 0, 0, 0], # 0
+ [4, 0, 0, 0, 0, 13, 0, 0, 0], # 1
+ [0, 17, 0, 21, 0, 12, 0, 0, 0], # 2
+ [5, 0, 0, 0, 17, 0, 18, 0, 0], # 3
+ [0, 0, 0, 0, 0, 0, 0, 12, 0], # 4
+ [0, 0, 0, 0, 0, 0, 14, 0, 12], # 5
+ [0, 0, 21, 0, 0, 0, 0, 0, 15], # 6
+ [0, 0, 0, 19, 0, 0, 15, 0, 0], # 7
+ [0, 0, 0, 0, 0, 0, 0, 18, 0], # 8
], dtype=int)
# We convert to MultiDiGraph after using from_numpy_matrix
@@ -41,6 +41,7 @@ def G1():
G = nx.MultiDiGraph(G)
return G
+
def G2():
# Now we shift all the weights by -10.
# Should not affect optimal arborescence, but does affect optimal branching.
@@ -51,6 +52,7 @@ def G2():
G = nx.MultiDiGraph(G)
return G
+
# An optimal branching for G1 that is also a spanning arborescence. So it is
# also an optimal spanning arborescence.
#
@@ -93,17 +95,20 @@ greedy_subopt_branching_1b = [
(2, 3, 21), (1, 5, 13), (3, 0, 5), (3, 4, 17),
]
+
def build_branching(edges):
G = nx.DiGraph()
for u, v, weight in edges:
G.add_edge(u, v, weight=weight)
return G
+
def sorted_edges(G, attr='weight', default=1):
- edges = [(u,v,data.get(attr, default)) for (u,v,data) in G.edges(data=True)]
- edges = sorted(edges, key=lambda x: (x[2],x[1],x[0]))
+ edges = [(u, v, data.get(attr, default)) for (u, v, data) in G.edges(data=True)]
+ edges = sorted(edges, key=lambda x: (x[2], x[1], x[0]))
return edges
+
def assert_equal_branchings(G1, G2, attr='weight', default=1):
edges1 = list(G1.edges(data=True))
edges2 = list(G2.edges(data=True))
@@ -123,7 +128,6 @@ def assert_equal_branchings(G1, G2, attr='weight', default=1):
np.testing.assert_almost_equal(a[2], b[2])
-
################
def test_optimal_branching1():
@@ -131,31 +135,37 @@ def test_optimal_branching1():
assert_true(recognition.is_arborescence(G), True)
assert_equal(branchings.branching_weight(G), 131)
+
def test_optimal_branching2a():
G = build_branching(optimal_branching_2a)
assert_true(recognition.is_arborescence(G), True)
assert_equal(branchings.branching_weight(G), 53)
+
def test_optimal_branching2b():
G = build_branching(optimal_branching_2b)
assert_true(recognition.is_arborescence(G), True)
assert_equal(branchings.branching_weight(G), 53)
+
def test_optimal_arborescence2():
G = build_branching(optimal_arborescence_2)
assert_true(recognition.is_arborescence(G), True)
assert_equal(branchings.branching_weight(G), 51)
+
def test_greedy_suboptimal_branching1a():
G = build_branching(greedy_subopt_branching_1a)
assert_true(recognition.is_arborescence(G), True)
assert_equal(branchings.branching_weight(G), 128)
+
def test_greedy_suboptimal_branching1b():
G = build_branching(greedy_subopt_branching_1b)
assert_true(recognition.is_arborescence(G), True)
assert_equal(branchings.branching_weight(G), 127)
+
def test_greedy_max1():
# Standard test.
#
@@ -166,6 +176,7 @@ def test_greedy_max1():
B_ = build_branching(greedy_subopt_branching_1b)
assert_equal_branchings(B, B_)
+
def test_greedy_max2():
# Different default weight.
#
@@ -181,6 +192,7 @@ def test_greedy_max2():
B_ = build_branching(edges)
assert_equal_branchings(B, B_)
+
def test_greedy_max3():
# All equal weights.
#
@@ -195,6 +207,7 @@ def test_greedy_max3():
B_ = build_branching(edges)
assert_equal_branchings(B, B_, default=1)
+
def test_greedy_min():
G = G1()
B = branchings.greedy_branching(G, kind='min')
@@ -206,30 +219,35 @@ def test_greedy_min():
B_ = build_branching(edges)
assert_equal_branchings(B, B_)
+
def test_edmonds1_maxbranch():
G = G1()
x = branchings.maximum_branching(G)
x_ = build_branching(optimal_arborescence_1)
assert_equal_branchings(x, x_)
+
def test_edmonds1_maxarbor():
G = G1()
x = branchings.maximum_spanning_arborescence(G)
x_ = build_branching(optimal_arborescence_1)
assert_equal_branchings(x, x_)
+
def test_edmonds2_maxbranch():
G = G2()
x = branchings.maximum_branching(G)
x_ = build_branching(optimal_branching_2a)
assert_equal_branchings(x, x_)
+
def test_edmonds2_maxarbor():
G = G2()
x = branchings.maximum_spanning_arborescence(G)
x_ = build_branching(optimal_arborescence_2)
assert_equal_branchings(x, x_)
+
def test_edmonds2_minarbor():
G = G1()
x = branchings.minimum_spanning_arborescence(G)
@@ -242,6 +260,7 @@ def test_edmonds2_minarbor():
x_ = build_branching(edges)
assert_equal_branchings(x, x_)
+
def test_edmonds3_minbranch1():
G = G1()
x = branchings.minimum_branching(G)
@@ -249,6 +268,7 @@ def test_edmonds3_minbranch1():
x_ = build_branching(edges)
assert_equal_branchings(x, x_)
+
def test_edmonds3_minbranch2():
G = G1()
G.add_edge(8, 9, weight=-10)
@@ -259,9 +279,10 @@ def test_edmonds3_minbranch2():
# Need more tests
+
def test_mst():
# Make sure we get the same results for undirected graphs.
- # Example from: http://en.wikipedia.org/wiki/Kruskal's_algorithm
+ # Example from: https://en.wikipedia.org/wiki/Kruskal's_algorithm
G = nx.Graph()
edgelist = [(0, 3, [('weight', 5)]),
(0, 1, [('weight', 7)]),
@@ -278,12 +299,13 @@ def test_mst():
G = G.to_directed()
x = branchings.minimum_spanning_arborescence(G)
- edges = [(set([0, 1]), 7), (set([0, 3]), 5), (set([3, 5]), 6),
- (set([1, 4]), 7), (set([4, 2]), 5), (set([4, 6]), 9)]
+ edges = [(set([0, 1]), 7), (set([0, 3]), 5), (set([3, 5]), 6),
+ (set([1, 4]), 7), (set([4, 2]), 5), (set([4, 6]), 9)]
assert_equal(x.number_of_edges(), len(edges))
for u, v, d in x.edges(data=True):
- assert_true( (set([u,v]), d['weight']) in edges )
+ assert_true((set([u, v]), d['weight']) in edges)
+
def test_mixed_nodetypes():
# Smoke test to make sure no TypeError is raised for mixed node types.
@@ -294,10 +316,11 @@ def test_mixed_nodetypes():
G = G.to_directed()
x = branchings.minimum_spanning_arborescence(G)
+
def test_edmonds1_minbranch():
# Using -G_array and min should give the same as optimal_arborescence_1,
# but with all edges negative.
- edges = [ (u, v, -w) for (u, v, w) in optimal_arborescence_1 ]
+ edges = [(u, v, -w) for (u, v, w) in optimal_arborescence_1]
G = nx.DiGraph()
G = nx.from_numpy_matrix(-G_array, create_using=G)
diff --git a/networkx/algorithms/tree/tests/test_mst.py b/networkx/algorithms/tree/tests/test_mst.py
index 509ba1f4..6261b682 100644
--- a/networkx/algorithms/tree/tests/test_mst.py
+++ b/networkx/algorithms/tree/tests/test_mst.py
@@ -43,7 +43,7 @@ class MinimumSpanningTreeTestBase(object):
# This stores the class attribute `algorithm` in an instance attribute.
self.algo = self.algorithm
# This example graph comes from Wikipedia:
- # http://en.wikipedia.org/wiki/Kruskal's_algorithm
+ # https://en.wikipedia.org/wiki/Kruskal's_algorithm
edges = [(0, 1, 7), (0, 3, 5), (1, 2, 8), (1, 3, 9), (1, 4, 7),
(2, 4, 5), (3, 4, 15), (3, 5, 6), (4, 5, 8), (4, 6, 9),
(5, 6, 11)]
diff --git a/networkx/classes/digraph.py b/networkx/classes/digraph.py
index ff26ff8e..5167a8b0 100644
--- a/networkx/classes/digraph.py
+++ b/networkx/classes/digraph.py
@@ -15,7 +15,7 @@ import networkx as nx
from networkx.classes.graph import Graph
from networkx.classes.coreviews import AdjacencyView
from networkx.classes.reportviews import OutEdgeView, InEdgeView, \
- DiDegreeView, InDegreeView, OutDegreeView
+ DiDegreeView, InDegreeView, OutDegreeView
from networkx.exception import NetworkXError
import networkx.convert as convert
@@ -240,6 +240,7 @@ class DiGraph(Graph):
creating graph subclasses by overwriting the base class `dict` with
a dictionary-like object.
"""
+
def __getstate__(self):
attr = self.__dict__.copy()
# remove lazy property attributes
@@ -1129,7 +1130,7 @@ class DiGraph(Graph):
structure without requiring any memory for copying the information.
See the Python copy module for more information on shallow
- and deep copies, http://docs.python.org/library/copy.html.
+ and deep copies, https://docs.python.org/2/library/copy.html.
Parameters
----------
@@ -1203,7 +1204,7 @@ class DiGraph(Graph):
shallow copy of the data.
See the Python copy module for more information on shallow
- and deep copies, http://docs.python.org/library/copy.html.
+ and deep copies, https://docs.python.org/2/library/copy.html.
Warning: If you have subclassed DiGraph to use dict-like objects
in the data structure, those changes do not transfer to the
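The view-versus-copy distinction these docstrings describe can be sketched as follows (an added example, not part of this patch; it assumes the NetworkX 2.0 copy(as_view=...) signature):

>>> import networkx as nx
>>> G = nx.DiGraph([(1, 2)])
>>> H = G.copy()               # independent shallow copy of structure and attrs
>>> V = G.copy(as_view=True)   # read-only view that shares G's data, no copying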
diff --git a/networkx/classes/function.py b/networkx/classes/function.py
index 73dca806..4853d1ed 100644
--- a/networkx/classes/function.py
+++ b/networkx/classes/function.py
@@ -552,9 +552,9 @@ def info(G, n=None):
nnodes = G.number_of_nodes()
if len(G) > 0:
if G.is_directed():
- deg = sum(d for n, d in G.in_degree())/float(nnodes)
+ deg = sum(d for n, d in G.in_degree()) / float(nnodes)
info += "Average in degree: %8.4f\n" % deg
- deg = sum(d for n, d in G.out_degree())/float(nnodes)
+ deg = sum(d for n, d in G.out_degree()) / float(nnodes)
info += "Average out degree: %8.4f" % deg
else:
s = sum(dict(G.degree()).values())
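An illustrative info call (not part of this patch; assumes the NetworkX 2.0 API), confirming the directed branch above emits the in/out-degree lines:

>>> import networkx as nx
>>> G = nx.DiGraph([(1, 2), (2, 3)])
>>> 'Average in degree' in nx.info(G)
True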
diff --git a/networkx/classes/graph.py b/networkx/classes/graph.py
index 926ebfd6..e1be2a61 100644
--- a/networkx/classes/graph.py
+++ b/networkx/classes/graph.py
@@ -1389,7 +1389,7 @@ class Graph(object):
structure without requiring any memory for copying the information.
See the Python copy module for more information on shallow
- and deep copies, http://docs.python.org/library/copy.html.
+ and deep copies, https://docs.python.org/2/library/copy.html.
Parameters
----------
@@ -1442,7 +1442,7 @@ class Graph(object):
shallow copy of the data.
See the Python copy module for more information on shallow
- and deep copies, http://docs.python.org/library/copy.html.
+ and deep copies, https://docs.python.org/2/library/copy.html.
Warning: If you have subclassed Graph to use dict-like objects
in the data structure, those changes do not transfer to the
@@ -1503,7 +1503,7 @@ class Graph(object):
shallow copy of the data.
See the Python copy module for more information on shallow
- and deep copies, http://docs.python.org/library/copy.html.
+ and deep copies, https://docs.python.org/2/library/copy.html.
Warning: If you have subclassed DiGraph to use dict-like objects
in the data structure, those changes do not transfer to the
diff --git a/networkx/classes/multidigraph.py b/networkx/classes/multidigraph.py
index f04829d6..20bf568f 100644
--- a/networkx/classes/multidigraph.py
+++ b/networkx/classes/multidigraph.py
@@ -17,7 +17,7 @@ from networkx.classes.digraph import DiGraph
from networkx.classes.multigraph import MultiGraph
from networkx.classes.coreviews import MultiAdjacencyView
from networkx.classes.reportviews import OutMultiEdgeView, InMultiEdgeView, \
- DiMultiDegreeView, OutMultiDegreeView, InMultiDegreeView
+ DiMultiDegreeView, OutMultiDegreeView, InMultiDegreeView
from networkx.exception import NetworkXError
@@ -824,7 +824,7 @@ class MultiDiGraph(MultiGraph, DiGraph):
structure without requiring any memory for copying the information.
See the Python copy module for more information on shallow
- and deep copies, http://docs.python.org/library/copy.html.
+ and deep copies, https://docs.python.org/2/library/copy.html.
Parameters
----------
@@ -893,7 +893,7 @@ class MultiDiGraph(MultiGraph, DiGraph):
returns a shallow copy of the data.
See the Python copy module for more information on shallow
- and deep copies, http://docs.python.org/library/copy.html.
+ and deep copies, https://docs.python.org/2/library/copy.html.
Warning: If you have subclassed MultiDiGraph to use dict-like
objects in the data structure, those changes do not transfer
diff --git a/networkx/classes/multigraph.py b/networkx/classes/multigraph.py
index 8c8724eb..2d908e57 100644
--- a/networkx/classes/multigraph.py
+++ b/networkx/classes/multigraph.py
@@ -912,7 +912,7 @@ class MultiGraph(Graph):
structure without requiring any memory for copying the information.
See the Python copy module for more information on shallow
- and deep copies, http://docs.python.org/library/copy.html.
+ and deep copies, https://docs.python.org/2/library/copy.html.
Parameters
----------
@@ -966,7 +966,7 @@ class MultiGraph(Graph):
shallow copy of the data.
See the Python copy module for more information on shallow
- and deep copies, http://docs.python.org/library/copy.html.
+ and deep copies, https://docs.python.org/2/library/copy.html.
Warning: If you have subclassed MultiGraph to use dict-like objects
in the data structure, those changes do not transfer to the
@@ -1023,7 +1023,7 @@ class MultiGraph(Graph):
which returns a shallow copy of the data.
See the Python copy module for more information on shallow
- and deep copies, http://docs.python.org/library/copy.html.
+ and deep copies, https://docs.python.org/2/library/copy.html.
Warning: If you have subclassed MultiGraph to use dict-like
objects in the data structure, those changes do not transfer
diff --git a/networkx/convert_matrix.py b/networkx/convert_matrix.py
index ebafaef3..8e6884bc 100644
--- a/networkx/convert_matrix.py
+++ b/networkx/convert_matrix.py
@@ -774,7 +774,7 @@ def to_scipy_sparse_matrix(G, nodelist=None, dtype=None,
References
----------
.. [1] Scipy Dev. References, "Sparse Matrices",
- http://docs.scipy.org/doc/scipy/reference/sparse.html
+ https://docs.scipy.org/doc/scipy/reference/sparse.html
"""
from scipy import sparse
if nodelist is None:
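An illustrative conversion, added for reference (assumes the NetworkX 2.0 API, whose default sparse format is CSR):

>>> import networkx as nx
>>> A = nx.to_scipy_sparse_matrix(nx.path_graph(3))   # CSR by default
>>> A.shape
(3, 3)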
diff --git a/networkx/generators/community.py b/networkx/generators/community.py
index cb568ce0..de4efa85 100644
--- a/networkx/generators/community.py
+++ b/networkx/generators/community.py
@@ -143,7 +143,7 @@ def relaxed_caveman_graph(l, k, p, seed=None):
----------
.. [1] Santo Fortunato, Community Detection in Graphs,
Physics Reports Volume 486, Issues 3-5, February 2010, Pages 75-174.
- http://arxiv.org/abs/0906.0612
+ https://arxiv.org/abs/0906.0612
"""
if seed is not None:
random.seed(seed)
@@ -209,9 +209,8 @@ def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False):
References
----------
.. [1] Santo Fortunato 'Community Detection in Graphs' Physics Reports
- Volume 486, Issue 3-5 p. 75-174. http://arxiv.org/abs/0906.0612
- http://arxiv.org/abs/0906.0612
- """
+ Volume 486, Issue 3-5 p. 75-174. https://arxiv.org/abs/0906.0612
+ """
# Use geometric method for O(n+m) complexity algorithm
# partition = nx.community_sets(nx.get_node_attributes(G, 'affiliation'))
if seed is not None:
@@ -329,7 +328,7 @@ def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False):
Random Struct. Algor. 18 (2001) 116-140.
.. [2] Santo Fortunato 'Community Detection in Graphs' Physics Reports
- Volume 486, Issue 3-5 p. 75-174. http://arxiv.org/abs/0906.0612
+ Volume 486, Issue 3-5 p. 75-174. https://arxiv.org/abs/0906.0612
"""
return random_partition_graph([k] * l, p_in, p_out, seed, directed)
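An illustrative call (not part of this patch; assumes the NetworkX 2.0 API and, as an assumption worth verifying, that the generator records its blocks in G.graph['partition']):

>>> import networkx as nx
>>> G = nx.random_partition_graph([5, 5], 0.8, 0.1, seed=42)
>>> sorted(len(block) for block in G.graph['partition'])
[5, 5]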
diff --git a/networkx/generators/directed.py b/networkx/generators/directed.py
index 90f14337..6145442f 100644
--- a/networkx/generators/directed.py
+++ b/networkx/generators/directed.py
@@ -73,7 +73,7 @@ def gn_graph(n, kernel=None, create_using=None, seed=None):
raise nx.NetworkXError("Directed Graph required in create_using")
if kernel is None:
- kernel = lambda x: x
+ def kernel(x): return x
if seed is not None:
random.seed(seed)
@@ -289,7 +289,7 @@ def scale_free_graph(n, alpha=0.41, beta=0.54, gamma=0.05, delta_in=0.2,
v = len(G)
# choose w according to in-degree and delta_in
w = _choose_node(G, G.in_degree(), delta_in, psum_in)
- elif r < alpha+beta:
+ elif r < alpha + beta:
# beta
# choose v according to out-degree and delta_out
v = _choose_node(G, G.out_degree(), delta_out, psum_out)
@@ -453,7 +453,7 @@ def random_k_out_graph(n, k, alpha, self_loops=True, seed=None):
"Distance between two random `k`-out digraphs, with and without
preferential attachment."
arXiv preprint arXiv:1311.5961 (2013).
- <http://arxiv.org/abs/1311.5961>
+ <https://arxiv.org/abs/1311.5961>
"""
if alpha < 0:
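The first hunk in this file swaps a lambda assignment for a def, per PEP 8 (E731); a caller-supplied kernel works either way. An illustrative call (not part of this patch; assumes the NetworkX 2.0 API):

>>> import networkx as nx
>>> G = nx.gn_graph(10, kernel=lambda x: x ** 1.5, seed=1)  # superlinear attachment
>>> len(G), G.is_directed()
(10, True)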
diff --git a/networkx/readwrite/gexf.py b/networkx/readwrite/gexf.py
index c22f9f67..4cddf890 100644
--- a/networkx/readwrite/gexf.py
+++ b/networkx/readwrite/gexf.py
@@ -16,8 +16,8 @@ undirected edges together).
Format
------
-GEXF is an XML format. See http://gephi.org/gexf/format/schema.html for the
-specification and http://gephi.org/gexf/format/basic.html for examples.
+GEXF is an XML format. See https://gephi.org/gexf/format/schema.html for the
+specification and https://gephi.org/gexf/format/basic.html for examples.
"""
import itertools
import time
@@ -80,8 +80,8 @@ def write_gexf(G, path, encoding='utf-8', prettyprint=True, version='1.2draft'):
References
----------
- .. [1] GEXF File Format, http://gephi.org/gexf/format/
- .. [2] GEXF viz schema 1.1, http://gephi.org/gexf/1.1draft/viz
+ .. [1] GEXF File Format, https://gephi.org/gexf/format/
+ .. [2] GEXF viz schema 1.1, https://gephi.org/gexf/1.1draft/viz
"""
writer = GEXFWriter(encoding=encoding, prettyprint=prettyprint,
version=version)
@@ -104,7 +104,7 @@ def generate_gexf(G, encoding='utf-8', prettyprint=True, version='1.2draft'):
prettyprint : bool (optional, default: True)
If True use line breaks and indenting in output XML.
version : string (default: 1.2draft)
- Version of GEFX File Format (see http://gephi.org/gexf/format/schema.html).
+ Version of GEXF File Format (see https://gephi.org/gexf/format/schema.html).
Supported values: "1.1draft", "1.2draft"
@@ -127,7 +127,7 @@ def generate_gexf(G, encoding='utf-8', prettyprint=True, version='1.2draft'):
References
----------
- .. [1] GEXF File Format, http://gephi.org/gexf/format/
+ .. [1] GEXF File Format, https://gephi.org/gexf/format/
"""
writer = GEXFWriter(encoding=encoding, prettyprint=prettyprint,
version=version)
@@ -154,7 +154,7 @@ def read_gexf(path, node_type=None, relabel=False, version='1.2draft'):
If True relabel the nodes to use the GEXF node "label" attribute
instead of the node "id" attribute as the NetworkX node label.
version : string (default: 1.2draft)
- Version of GEFX File Format (see http://gephi.org/gexf/format/schema.html).
+ Version of GEXF File Format (see https://gephi.org/gexf/format/schema.html).
Supported values: "1.1draft", "1.2draft"
Returns
@@ -170,7 +170,7 @@ def read_gexf(path, node_type=None, relabel=False, version='1.2draft'):
References
----------
- .. [1] GEXF File Format, http://gephi.org/gexf/format/
+ .. [1] GEXF File Format, https://gephi.org/gexf/format/
"""
reader = GEXFReader(node_type=node_type, version=version)
if relabel:
@@ -186,14 +186,14 @@ class GEXF(object):
'NS_VIZ': "http://www.gexf.net/1.1draft/viz",
'NS_XSI': "http://www.w3.org/2001/XMLSchema-instance",
'SCHEMALOCATION': ' '.join(['http://www.gexf.net/1.1draft',
- 'http://www.gexf.net/1.1draft/gexf.xsd']),
+ 'http://www.gexf.net/1.1draft/gexf.xsd']),
'VERSION': '1.1'}
versions['1.1draft'] = d
d = {'NS_GEXF': "http://www.gexf.net/1.2draft",
'NS_VIZ': "http://www.gexf.net/1.2draft/viz",
'NS_XSI': "http://www.w3.org/2001/XMLSchema-instance",
'SCHEMALOCATION': ' '.join(['http://www.gexf.net/1.2draft',
- 'http://www.gexf.net/1.2draft/gexf.xsd']),
+ 'http://www.gexf.net/1.2draft/gexf.xsd']),
'VERSION': '1.2'}
versions['1.2draft'] = d
@@ -207,19 +207,19 @@ class GEXF(object):
try: # Python 3.x
blurb = chr(1245) # just to trigger the exception
types.extend([
- (int, "long"),
- (str, "liststring"),
- (str, "anyURI"),
- (str, "string")])
+ (int, "long"),
+ (str, "liststring"),
+ (str, "anyURI"),
+ (str, "string")])
except ValueError: # Python 2.6+
types.extend([
- (long, "long"),
- (str, "liststring"),
- (str, "anyURI"),
- (str, "string"),
- (unicode, "liststring"),
- (unicode, "anyURI"),
- (unicode, "string")])
+ (long, "long"),
+ (str, "liststring"),
+ (str, "anyURI"),
+ (str, "string"),
+ (unicode, "liststring"),
+ (unicode, "anyURI"),
+ (unicode, "string")])
xml_type = dict(types)
python_type = dict(reversed(a) for a in types)
@@ -571,8 +571,8 @@ class GEXFWriter(GEXF):
elif isinstance(start_or_end, int):
timeformat = 'long'
else:
- raise nx.NetworkXError(\
- 'timeformat should be of the type int, float or str')
+ raise nx.NetworkXError(
+ 'timeformat should be of the type int, float or str')
self.graph_element.set('timeformat', timeformat)
self.graph_element.set('mode', 'dynamic')
@@ -585,14 +585,14 @@ class GEXFWriter(GEXF):
def indent(self, elem, level=0):
# in-place prettyprint formatter
- i = "\n" + " "*level
+ i = "\n" + " " * level
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
- self.indent(elem, level+1)
+ self.indent(elem, level + 1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
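A round-trip sketch for the reader and writer patched here (illustrative; 'test.gexf' is a placeholder path; assumes the NetworkX 2.0 API, where node ids read back as strings unless node_type is given):

>>> import networkx as nx
>>> G = nx.path_graph(4)
>>> nx.write_gexf(G, 'test.gexf')                  # placeholder filename
>>> H = nx.read_gexf('test.gexf', node_type=int)   # ids come back as str otherwise
>>> sorted(H) == sorted(G)
True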
diff --git a/networkx/readwrite/gpickle.py b/networkx/readwrite/gpickle.py
index ab7bd570..a09ab533 100644
--- a/networkx/readwrite/gpickle.py
+++ b/networkx/readwrite/gpickle.py
@@ -18,7 +18,7 @@ pickles to store the graph data can be used.
Format
------
-See http://docs.python.org/library/pickle.html
+See https://docs.python.org/2/library/pickle.html
"""
__author__ = """Aric Hagberg (hagberg@lanl.gov)\nDan Schult (dschult@colgate.edu)"""
# Copyright (C) 2004-2017 by
@@ -65,7 +65,7 @@ def write_gpickle(G, path, protocol=pickle.HIGHEST_PROTOCOL):
References
----------
- .. [1] http://docs.python.org/library/pickle.html
+ .. [1] https://docs.python.org/2/library/pickle.html
"""
pickle.dump(G, path, protocol)
@@ -96,7 +96,7 @@ def read_gpickle(path):
References
----------
- .. [1] http://docs.python.org/library/pickle.html
+ .. [1] https://docs.python.org/2/library/pickle.html
"""
return pickle.load(path)
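A matching round-trip sketch (illustrative; 'test.gpickle' is a placeholder filename; assumes the NetworkX 2.0 API):

>>> import networkx as nx
>>> G = nx.path_graph(4)
>>> nx.write_gpickle(G, 'test.gpickle')
>>> H = nx.read_gpickle('test.gpickle')
>>> sorted(H.edges()) == sorted(G.edges())
True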
diff --git a/networkx/readwrite/json_graph/__init__.py b/networkx/readwrite/json_graph/__init__.py
index b60d3833..7715fbba 100644
--- a/networkx/readwrite/json_graph/__init__.py
+++ b/networkx/readwrite/json_graph/__init__.py
@@ -4,13 +4,13 @@ JSON data
*********
Generate and parse JSON serializable data for NetworkX graphs.
-These formats are suitable for use with the d3.js examples http://d3js.org/
+These formats are suitable for use with the d3.js examples https://d3js.org/
The three formats that you can generate with NetworkX are:
- - node-link like in the d3.js example http://bl.ocks.org/mbostock/4062045
- - tree like in the d3.js example http://bl.ocks.org/mbostock/4063550
- - adjacency like in the d3.js example http://bost.ocks.org/mike/miserables/
+ - node-link like in the d3.js example https://bl.ocks.org/mbostock/4062045
+ - tree like in the d3.js example https://bl.ocks.org/mbostock/4063550
+ - adjacency like in the d3.js example https://bost.ocks.org/mike/miserables/
"""
from networkx.readwrite.json_graph.node_link import *
from networkx.readwrite.json_graph.adjacency import *
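A node-link round-trip sketch, added for reference (assumes the NetworkX 2.0 API):

>>> import networkx as nx
>>> from networkx.readwrite import json_graph
>>> data = json_graph.node_link_data(nx.path_graph(3))   # JSON-serializable dict
>>> H = json_graph.node_link_graph(data)                 # rebuild the graph
>>> sorted(H.edges())
[(0, 1), (1, 2)]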
diff --git a/networkx/readwrite/nx_shp.py b/networkx/readwrite/nx_shp.py
index 864f0cd2..e2b5f7f0 100644
--- a/networkx/readwrite/nx_shp.py
+++ b/networkx/readwrite/nx_shp.py
@@ -9,7 +9,7 @@ Generates a networkx.DiGraph from point and line shapefiles.
data format for geographic information systems software. It is developed
and regulated by Esri as a (mostly) open specification for data
interoperability among Esri and other software products."
-See http://en.wikipedia.org/wiki/Shapefile for additional information.
+See https://en.wikipedia.org/wiki/Shapefile for additional information.
"""
# Copyright (C) 2004-2017 by
# Ben Reilly <benwreilly@gmail.com>
@@ -64,7 +64,7 @@ def read_shp(path, simplify=True, geom_attrs=True):
References
----------
- .. [1] http://en.wikipedia.org/wiki/Shapefile
+ .. [1] https://en.wikipedia.org/wiki/Shapefile
"""
try:
from osgeo import ogr
@@ -188,7 +188,7 @@ def write_shp(G, outdir):
References
----------
- .. [1] http://en.wikipedia.org/wiki/Shapefile
+ .. [1] https://en.wikipedia.org/wiki/Shapefile
"""
try:
from osgeo import ogr
@@ -229,7 +229,7 @@ def write_shp(G, outdir):
def create_feature(geometry, lyr, attributes=None):
feature = ogr.Feature(lyr.GetLayerDefn())
feature.SetGeometry(g)
- if attributes != None:
+ if attributes is not None:
# Loop through attributes, assigning data to each field
for field, data in attributes.items():
feature.SetField(field, data)
@@ -269,23 +269,23 @@ def write_shp(G, outdir):
for key, data in e[2].items():
# Reject spatial data not required for attribute table
if (key != 'Json' and key != 'Wkt' and key != 'Wkb'
- and key != 'ShpName'):
- # For all edges check/add field and data type to fields dict
- if key not in fields:
- # Field not in previous edges so add to dict
- if type(data) in OGRTypes:
- fields[key] = OGRTypes[type(data)]
- else:
- # Data type not supported, default to string (char 80)
- fields[key] = ogr.OFTString
- # Create the new field
- newfield = ogr.FieldDefn(key, fields[key])
- edges.CreateField(newfield)
- # Store the data from new field to dict for CreateLayer()
- attributes[key] = data
+ and key != 'ShpName'):
+ # For all edges check/add field and data type to fields dict
+ if key not in fields:
+ # Field not in previous edges so add to dict
+ if type(data) in OGRTypes:
+ fields[key] = OGRTypes[type(data)]
else:
- # Field already exists, add data to dict for CreateLayer()
- attributes[key] = data
+ # Data type not supported, default to string (char 80)
+ fields[key] = ogr.OFTString
+ # Create the new field
+ newfield = ogr.FieldDefn(key, fields[key])
+ edges.CreateField(newfield)
+ # Store the data from new field to dict for CreateLayer()
+ attributes[key] = data
+ else:
+ # Field already exists, add data to dict for CreateLayer()
+ attributes[key] = data
# Create the feature with, passing new attribute data
create_feature(g, edges, attributes)
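The re-indented block above only moves code; the logic it implements is a first-edge-wins type registry with a string fallback. A stand-alone sketch, with a stand-in `OGRTypes` mapping (the real one in write_shp maps Python types to `ogr` constants such as `ogr.OFTInteger`):

    OGRTypes = {int: 'OFTInteger', float: 'OFTReal', str: 'OFTString'}
    fields = {}

    def field_type_for(key, data):
        # The first edge that carries `key` fixes the field's type;
        # unsupported types default to string, like the OFTString branch.
        if key not in fields:
            fields[key] = OGRTypes.get(type(data), 'OFTString')
        return fields[key]

    assert field_type_for('weight', 3) == 'OFTInteger'
    assert field_type_for('weight', 'heavy') == 'OFTInteger'  # type already fixed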
diff --git a/networkx/readwrite/tests/test_gexf.py b/networkx/readwrite/tests/test_gexf.py
index e06c3880..3f771eee 100644
--- a/networkx/readwrite/tests/test_gexf.py
+++ b/networkx/readwrite/tests/test_gexf.py
@@ -54,7 +54,7 @@ class TestGEXF(object):
<nodes>
<node id="0" label="Gephi">
<attvalues>
- <attvalue for="0" value="http://gephi.org"/>
+ <attvalue for="0" value="https://gephi.org"/>
<attvalue for="1" value="1"/>
<attvalue for="2" value="false"/>
</attvalues>
@@ -95,7 +95,7 @@ class TestGEXF(object):
self.attribute_graph.graph['node_default'] = {'frog': True}
self.attribute_graph.add_node('0',
label='Gephi',
- url='http://gephi.org',
+ url='https://gephi.org',
indegree=1, frog=False)
self.attribute_graph.add_node('1',
label='Webatlas',
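For context, a minimal sketch of producing a fixture like the one above from code, mirroring the `attribute_graph` setup; the output path is a placeholder:

    import networkx as nx

    G = nx.Graph()
    G.graph['node_default'] = {'frog': True}
    G.add_node('0', label='Gephi', url='https://gephi.org', indegree=1, frog=False)
    nx.write_gexf(G, '/tmp/example.gexf')  # hypothetical output path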
diff --git a/networkx/testing/tests/test_utils.py b/networkx/testing/tests/test_utils.py
index 981b741b..d13e35e2 100644
--- a/networkx/testing/tests/test_utils.py
+++ b/networkx/testing/tests/test_utils.py
@@ -3,6 +3,8 @@ import networkx as nx
from networkx.testing import *
# thanks to numpy for this GenericTest class (numpy/testing/test_utils.py)
+
+
class _GenericTest(object):
def _test_equal(self, a, b):
self._assert_func(a, b)
@@ -22,27 +24,27 @@ class TestNodesEqual(_GenericTest):
self._assert_func = assert_nodes_equal
def test_nodes_equal(self):
- a = [1,2,5,4]
- b = [4,5,1,2]
- self._test_equal(a,b)
+ a = [1, 2, 5, 4]
+ b = [4, 5, 1, 2]
+ self._test_equal(a, b)
def test_nodes_not_equal(self):
- a = [1,2,5,4]
- b = [4,5,1,3]
- self._test_not_equal(a,b)
+ a = [1, 2, 5, 4]
+ b = [4, 5, 1, 3]
+ self._test_not_equal(a, b)
def test_nodes_with_data_equal(self):
G = nx.Graph()
- G.add_nodes_from([1,2,3],color='red')
+ G.add_nodes_from([1, 2, 3], color='red')
H = nx.Graph()
- H.add_nodes_from([1,2,3],color='red')
+ H.add_nodes_from([1, 2, 3], color='red')
self._test_equal(G.nodes(data=True), H.nodes(data=True))
def test_edges_with_data_not_equal(self):
G = nx.Graph()
- G.add_nodes_from([1,2,3],color='red')
+ G.add_nodes_from([1, 2, 3], color='red')
H = nx.Graph()
- H.add_nodes_from([1,2,3],color='blue')
+ H.add_nodes_from([1, 2, 3], color='blue')
self._test_not_equal(G.nodes(data=True), H.nodes(data=True))
@@ -51,14 +53,14 @@ class TestEdgesEqual(_GenericTest):
self._assert_func = assert_edges_equal
def test_edges_equal(self):
- a = [(1,2),(5,4)]
- b = [(4,5),(1,2)]
- self._test_equal(a,b)
+ a = [(1, 2), (5, 4)]
+ b = [(4, 5), (1, 2)]
+ self._test_equal(a, b)
def test_edges_not_equal(self):
- a = [(1,2),(5,4)]
- b = [(4,5),(1,3)]
- self._test_not_equal(a,b)
+ a = [(1, 2), (5, 4)]
+ b = [(4, 5), (1, 3)]
+ self._test_not_equal(a, b)
def test_edges_with_data_equal(self):
G = nx.MultiGraph()
@@ -83,34 +85,34 @@ class TestEdgesEqual(_GenericTest):
H.edges(data=True, keys=True))
def test_duplicate_edges(self):
- a = [(1,2),(5,4),(1,2)]
- b = [(4,5),(1,2)]
- self._test_not_equal(a,b)
+ a = [(1, 2), (5, 4), (1, 2)]
+ b = [(4, 5), (1, 2)]
+ self._test_not_equal(a, b)
def test_duplicate_edges_with_data(self):
a = [(1, 2, {'weight': 10}), (5, 4), (1, 2, {'weight': 1})]
b = [(4, 5), (1, 2), (1, 2, {'weight': 1})]
- self._test_not_equal(a,b)
+ self._test_not_equal(a, b)
def test_order_of_edges_with_data(self):
a = [(1, 2, {'weight': 10}), (1, 2, {'weight': 1})]
b = [(1, 2, {'weight': 1}), (1, 2, {'weight': 10})]
- self._test_equal(a,b)
+ self._test_equal(a, b)
def test_order_of_multiedges(self):
wt1 = {'weight': 1}
wt2 = {'weight': 2}
a = [(1, 2, wt1), (1, 2, wt1), (1, 2, wt2)]
b = [(1, 2, wt1), (1, 2, wt2), (1, 2, wt2)]
- self._test_not_equal(a,b)
+ self._test_not_equal(a, b)
def test_order_of_edges_with_keys(self):
a = [(1, 2, 0, {'weight': 10}), (1, 2, 1, {'weight': 1}), (1, 2, 2)]
b = [(1, 2, 1, {'weight': 1}), (1, 2, 2), (1, 2, 0, {'weight': 10})]
- self._test_equal(a,b)
+ self._test_equal(a, b)
a = [(1, 2, 1, {'weight': 10}), (1, 2, 0, {'weight': 1}), (1, 2, 2)]
b = [(1, 2, 1, {'weight': 1}), (1, 2, 2), (1, 2, 0, {'weight': 10})]
- self._test_not_equal(a,b)
+ self._test_not_equal(a, b)
class TestGraphsEqual(_GenericTest):
@@ -121,37 +123,37 @@ class TestGraphsEqual(_GenericTest):
G = nx.path_graph(4)
H = nx.Graph()
nx.add_path(H, range(4))
- self._test_equal(G,H)
+ self._test_equal(G, H)
def test_digraphs_equal(self):
G = nx.path_graph(4, create_using=nx.DiGraph())
H = nx.DiGraph()
nx.add_path(H, range(4))
- self._test_equal(G,H)
+ self._test_equal(G, H)
def test_multigraphs_equal(self):
G = nx.path_graph(4, create_using=nx.MultiGraph())
H = nx.MultiGraph()
nx.add_path(H, range(4))
- self._test_equal(G,H)
+ self._test_equal(G, H)
def test_multidigraphs_equal(self):
G = nx.path_graph(4, create_using=nx.MultiDiGraph())
H = nx.MultiDiGraph()
nx.add_path(H, range(4))
- self._test_equal(G,H)
+ self._test_equal(G, H)
def test_graphs_not_equal(self):
G = nx.path_graph(4)
H = nx.Graph()
nx.add_cycle(H, range(4))
- self._test_not_equal(G,H)
+ self._test_not_equal(G, H)
def test_graphs_not_equal2(self):
G = nx.path_graph(4)
H = nx.Graph()
nx.add_path(H, range(3))
- self._test_not_equal(G,H)
+ self._test_not_equal(G, H)
def test_graphs_not_equal3(self):
G = nx.path_graph(4)
diff --git a/networkx/testing/utils.py b/networkx/testing/utils.py
index df40f6c2..818f6720 100644
--- a/networkx/testing/utils.py
+++ b/networkx/testing/utils.py
@@ -1,6 +1,6 @@
from nose.tools import assert_equal, assert_in
-__all__ = ['assert_nodes_equal', 'assert_edges_equal','assert_graphs_equal']
+__all__ = ['assert_nodes_equal', 'assert_edges_equal', 'assert_graphs_equal']
def assert_nodes_equal(nodes1, nodes2):
@@ -25,16 +25,16 @@ def assert_edges_equal(edges1, edges2):
d1 = defaultdict(dict)
d2 = defaultdict(dict)
c1 = 0
- for c1,e in enumerate(edges1):
- u,v = e[0],e[1]
+ for c1, e in enumerate(edges1):
+ u, v = e[0], e[1]
data = [e[2:]]
if v in d1[u]:
data = d1[u][v] + data
d1[u][v] = data
d1[v][u] = data
c2 = 0
- for c2,e in enumerate(edges2):
- u,v = e[0],e[1]
+ for c2, e in enumerate(edges2):
+ u, v = e[0], e[1]
data = [e[2:]]
if v in d2[u]:
data = d2[u][v] + data
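Usage-wise, `assert_edges_equal` ignores the ordering of edges and of endpoints within an undirected edge while still comparing any attached data, as in the test cases above:

    from networkx.testing import assert_edges_equal

    assert_edges_equal([(1, 2), (5, 4)], [(4, 5), (1, 2)])  # passes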
diff --git a/networkx/tests/test_exceptions.py b/networkx/tests/test_exceptions.py
index 906a56f3..2e63fc47 100644
--- a/networkx/tests/test_exceptions.py
+++ b/networkx/tests/test_exceptions.py
@@ -3,31 +3,37 @@ import networkx as nx
# smoke tests for exceptions
+
@raises(nx.NetworkXException)
def test_raises_networkxexception():
raise nx.NetworkXException
+
@raises(nx.NetworkXError)
def test_raises_networkxerr():
raise nx.NetworkXError
+
@raises(nx.NetworkXPointlessConcept)
def test_raises_networkx_pointless_concept():
raise nx.NetworkXPointlessConcept
+
@raises(nx.NetworkXAlgorithmError)
def test_raises_networkxalgorithmerr():
raise nx.NetworkXAlgorithmError
+
@raises(nx.NetworkXUnfeasible)
def test_raises_networkx_unfeasible():
raise nx.NetworkXUnfeasible
+
@raises(nx.NetworkXNoPath)
def test_raises_networkx_no_path():
raise nx.NetworkXNoPath
+
@raises(nx.NetworkXUnbounded)
def test_raises_networkx_unbounded():
raise nx.NetworkXUnbounded
-
diff --git a/networkx/utils/contextmanagers.py b/networkx/utils/contextmanagers.py
index 7475ba5a..b525a5ae 100644
--- a/networkx/utils/contextmanagers.py
+++ b/networkx/utils/contextmanagers.py
@@ -6,6 +6,7 @@ __all__ = [
'reversed',
]
+
@contextmanager
def reversed(G):
"""A context manager for temporarily reversing a directed graph in place.
diff --git a/networkx/utils/random_sequence.py b/networkx/utils/random_sequence.py
index 2b1ded03..59409184 100644
--- a/networkx/utils/random_sequence.py
+++ b/networkx/utils/random_sequence.py
@@ -20,7 +20,7 @@ import networkx as nx
# The same helpers for choosing random sequences from distributions
# use Python's random module
-# http://www.python.org/doc/current/lib/module-random.html
+# https://docs.python.org/2/library/random.html
def powerlaw_sequence(n, exponent=2.0):
"""
diff --git a/networkx/utils/tests/test_contextmanager.py b/networkx/utils/tests/test_contextmanager.py
index a8de6039..29cc62a4 100644
--- a/networkx/utils/tests/test_contextmanager.py
+++ b/networkx/utils/tests/test_contextmanager.py
@@ -4,6 +4,7 @@ from nose.tools import *
import networkx as nx
+
def test_reversed():
G = nx.DiGraph()
G.add_edge('A', 'B')
diff --git a/networkx/utils/tests/test_decorators.py b/networkx/utils/tests/test_decorators.py
index 0b5ce719..7cc2b1ad 100644
--- a/networkx/utils/tests/test_decorators.py
+++ b/networkx/utils/tests/test_decorators.py
@@ -4,7 +4,8 @@ import os
from nose.tools import *
import networkx as nx
-from networkx.utils.decorators import open_file,not_implemented_for
+from networkx.utils.decorators import open_file, not_implemented_for
+
def test_not_implemented_decorator():
@not_implemented_for('directed')
@@ -12,6 +13,7 @@ def test_not_implemented_decorator():
pass
test1(nx.Graph())
+
@raises(KeyError)
def test_not_implemented_decorator_key():
@not_implemented_for('foo')
@@ -19,6 +21,7 @@ def test_not_implemented_decorator_key():
pass
test1(nx.Graph())
+
@raises(nx.NetworkXNotImplemented)
def test_not_implemented_decorator_raise():
@not_implemented_for('graph')
@@ -27,7 +30,6 @@ def test_not_implemented_decorator_raise():
test1(nx.Graph())
-
class TestOpenFileDecorator(object):
def setUp(self):
self.text = ['Blah... ', 'BLAH ', 'BLAH!!!!']
@@ -84,24 +86,24 @@ class TestOpenFileDecorator(object):
def test_writer_arg1_str(self):
self.writer_arg1(self.name)
- assert_equal( self.read(self.name), ''.join(self.text) )
+ assert_equal(self.read(self.name), ''.join(self.text))
def test_writer_arg1_fobj(self):
self.writer_arg1(self.fobj)
assert_false(self.fobj.closed)
self.fobj.close()
- assert_equal( self.read(self.name), ''.join(self.text) )
+ assert_equal(self.read(self.name), ''.join(self.text))
def test_writer_arg2default_str(self):
self.writer_arg2default(0, path=None)
self.writer_arg2default(0, path=self.name)
- assert_equal( self.read(self.name), ''.join(self.text) )
+ assert_equal(self.read(self.name), ''.join(self.text))
def test_writer_arg2default_fobj(self):
self.writer_arg2default(0, path=self.fobj)
assert_false(self.fobj.closed)
self.fobj.close()
- assert_equal( self.read(self.name), ''.join(self.text) )
+ assert_equal(self.read(self.name), ''.join(self.text))
def test_writer_arg2default_path_none(self):
self.writer_arg2default(0, path=None)
@@ -109,16 +111,16 @@ class TestOpenFileDecorator(object):
def test_writer_arg4default_fobj(self):
self.writer_arg4default(0, 1, dog='dog', other='other2')
self.writer_arg4default(0, 1, dog='dog', other='other2', path=self.name)
- assert_equal( self.read(self.name), ''.join(self.text) )
+ assert_equal(self.read(self.name), ''.join(self.text))
def test_writer_kwarg_str(self):
self.writer_kwarg(path=self.name)
- assert_equal( self.read(self.name), ''.join(self.text) )
+ assert_equal(self.read(self.name), ''.join(self.text))
def test_writer_kwarg_fobj(self):
self.writer_kwarg(path=self.fobj)
self.fobj.close()
- assert_equal( self.read(self.name), ''.join(self.text) )
+ assert_equal(self.read(self.name), ''.join(self.text))
def test_writer_kwarg_path_none(self):
self.writer_kwarg(path=None)