author    pinselimo <s.plakolb@gmail.com>  2021-08-31 15:47:02 +0200
committer GitHub <noreply@github.com>      2021-08-31 08:47:02 -0500
commit    96831f99a01fcfb39ac5d85f7e814afa5f8c4c9a (patch)
tree      be7818e70301e4994e5f40e93d4515b6ecb0cc04
parent    a8b907df38d9d36f77d75c65d9e6a5518b849c6b (diff)
Add multigraph betweenness (#4976)
* Betweenness: Use lowest weight of parallel edges
* Betweenness: Add edge keys for edge betweenness centrality
* betweenness: Test multigraph edge betweenness centrality
* betweenness: Add missing test docstrings
* betweenness: Test distinct weighted shortest paths
* betweenness: Create dict with edge keys for multigraphs
* betweenness: Fix reference in docstring for weight
-rw-r--r--  networkx/algorithms/centrality/betweenness.py                        |  43
-rw-r--r--  networkx/algorithms/centrality/betweenness_subset.py                 |   5
-rw-r--r--  networkx/algorithms/centrality/tests/test_betweenness_centrality.py  | 122
3 files changed, 166 insertions, 4 deletions
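
The net effect of the patch can be sketched with a minimal, hypothetical example (not taken from the test suite): betweenness_centrality no longer rejects multigraphs, and for multigraphs edge_betweenness_centrality now returns values keyed by (u, v, key) tuples instead of plain (u, v) pairs.

    # Minimal sketch of the behaviour this commit adds; the graph is illustrative only.
    import networkx as nx

    G = nx.MultiGraph()
    G.add_weighted_edges_from([(0, 1, 2), (0, 1, 2), (1, 2, 1)])

    # Previously blocked by @not_implemented_for("multigraph"); now supported.
    node_bc = nx.betweenness_centrality(G, weight="weight", normalized=False)

    # For multigraphs the result is keyed by (u, v, key) rather than (u, v).
    edge_bc = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
    print(node_bc)  # e.g. {0: 0.0, 1: 1.0, 2: 0.0}
    print(edge_bc)  # keys such as (0, 1, 0), (0, 1, 1), (1, 2, 0)
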
diff --git a/networkx/algorithms/centrality/betweenness.py b/networkx/algorithms/centrality/betweenness.py
index dd478dcd..aab55367 100644
--- a/networkx/algorithms/centrality/betweenness.py
+++ b/networkx/algorithms/centrality/betweenness.py
@@ -5,12 +5,12 @@ import warnings
from networkx.utils import py_random_state
from networkx.utils.decorators import not_implemented_for
+from networkx.algorithms.shortest_paths.weighted import _weight_function
__all__ = ["betweenness_centrality", "edge_betweenness_centrality", "edge_betweenness"]
@py_random_state(5)
-@not_implemented_for("multigraph")
def betweenness_centrality(
G, k=None, normalized=True, weight=None, endpoints=False, seed=None
):
@@ -236,6 +236,8 @@ def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=No
betweenness = _rescale_e(
betweenness, len(G), normalized=normalized, directed=G.is_directed()
)
+ if G.is_multigraph():
+ betweenness = _add_edge_keys(G, betweenness, weight=weight)
return betweenness
@@ -276,6 +278,7 @@ def _single_source_shortest_path_basic(G, s):
def _single_source_dijkstra_path_basic(G, s, weight):
+ weight = _weight_function(G, weight)
# modified from Eppstein
S = []
P = {}
@@ -298,7 +301,7 @@ def _single_source_dijkstra_path_basic(G, s, weight):
S.append(v)
D[v] = dist
for w, edgedata in G[v].items():
- vw_dist = dist + edgedata.get(weight, 1)
+ vw_dist = dist + weight(v, w, edgedata)
if w not in D and (w not in seen or vw_dist < seen[w]):
seen[w] = vw_dist
push(Q, (vw_dist, next(c), v, w))
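
For context: _weight_function (imported in the hunk above) already copes with multigraphs. When weight is an attribute name it returns the minimum weight over the parallel edges between two nodes, which is why the Dijkstra loop can now pass the whole keyed dict G[v][w] as edgedata. A rough sketch of that behaviour, not the library source:

    # Roughly what the weight callable does for a multigraph: the cheapest
    # parallel edge between v and u decides the tentative distance.
    def multigraph_min_weight(weight_name="weight", default=1):
        def w(v, u, keyed_data):
            # keyed_data is G[v][u] for a multigraph: {edge_key: attr_dict, ...}
            return min(attrs.get(weight_name, default) for attrs in keyed_data.values())
        return w
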
@@ -394,3 +397,39 @@ def _rescale_e(betweenness, n, normalized, directed=False, k=None):
for v in betweenness:
betweenness[v] *= scale
return betweenness
+
+
+@not_implemented_for("graph")
+def _add_edge_keys(G, betweenness, weight=None):
+    r"""Adds the corrected betweenness centrality (BC) values for multigraphs.
+
+    Parameters
+    ----------
+    G : NetworkX graph.
+
+    betweenness : dictionary
+        Dictionary mapping adjacent node tuples to betweenness centrality values.
+
+    weight : string or function
+        See `_weight_function` for details. Defaults to `None`.
+
+    Returns
+    -------
+    edges : dictionary
+        The parameter `betweenness` including edges with keys and their
+        betweenness centrality values.
+
+    The BC value is divided among edges of equal weight.
+    """
+    _weight = _weight_function(G, weight)
+
+    edge_bc = dict.fromkeys(G.edges, 0.0)
+    for u, v in betweenness:
+        d = G[u][v]
+        wt = _weight(u, v, d)
+        keys = [k for k in d if _weight(u, v, {k: d[k]}) == wt]
+        bc = betweenness[(u, v)] / len(keys)
+        for k in keys:
+            edge_bc[(u, v, k)] = bc
+
+    return edge_bc
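
The division rule stated in the docstring ("divided among edges of equal weight") means that parallel edges tied at the minimum weight share the value of their (u, v) pair evenly, while an untied edge keeps the full value. A small hypothetical illustration, not part of the patch:

    import networkx as nx

    G = nx.MultiGraph()
    G.add_edge("a", "b", weight=1)  # key 0
    G.add_edge("a", "b", weight=1)  # key 1, ties with key 0
    G.add_edge("b", "c", weight=1)  # key 0

    bc = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
    # Expected: the a-b value (2.0 on this path) is split over the tied keys,
    # while the single b-c edge keeps its full value:
    # {("a", "b", 0): 1.0, ("a", "b", 1): 1.0, ("b", "c", 0): 2.0}
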
diff --git a/networkx/algorithms/centrality/betweenness_subset.py b/networkx/algorithms/centrality/betweenness_subset.py
index ed073855..473b42ca 100644
--- a/networkx/algorithms/centrality/betweenness_subset.py
+++ b/networkx/algorithms/centrality/betweenness_subset.py
@@ -3,9 +3,8 @@ import warnings
from networkx.algorithms.centrality.betweenness import (
_single_source_dijkstra_path_basic as dijkstra,
-)
-from networkx.algorithms.centrality.betweenness import (
_single_source_shortest_path_basic as shortest_path,
+ _add_edge_keys,
)
__all__ = [
@@ -193,6 +192,8 @@ def edge_betweenness_centrality_subset(
for n in G: # remove nodes to only return edges
del b[n]
b = _rescale_e(b, len(G), normalized=normalized, directed=G.is_directed())
+ if G.is_multigraph():
+ b = _add_edge_keys(G, b, weight=weight)
return b
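
With the same helper wired into the subset variant, a multigraph call like the hypothetical one below (nodes, weights, and the source/target choice are made up for illustration) also comes back keyed by (u, v, key):

    import networkx as nx

    G = nx.MultiGraph()
    G.add_weighted_edges_from([("s", "a", 1), ("s", "a", 3), ("a", "t", 2)])

    b = nx.edge_betweenness_centrality_subset(
        G, sources=["s"], targets=["t"], weight="weight", normalized=False
    )
    # Result keys now carry the edge key, e.g. ("s", "a", 0), ("s", "a", 1), ("a", "t", 0).
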
diff --git a/networkx/algorithms/centrality/tests/test_betweenness_centrality.py b/networkx/algorithms/centrality/tests/test_betweenness_centrality.py
index d563db91..d66d4a31 100644
--- a/networkx/algorithms/centrality/tests/test_betweenness_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_betweenness_centrality.py
@@ -56,6 +56,7 @@ class TestBetweennessCentrality:
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
def test_sample_from_P3(self):
+ """Betweenness centrality: P3 sample"""
G = nx.path_graph(3)
b_answer = {0: 0.0, 1: 1.0, 2: 0.0}
b = nx.betweenness_centrality(G, k=3, weight=None, normalized=False, seed=1)
@@ -513,6 +514,44 @@ class TestWeightedBetweennessCentrality:
for n in sorted(G):
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+ def test_G3(self):
+ """Weighted betweenness centrality: G3"""
+ G = nx.MultiGraph(weighted_G())
+ es = list(G.edges(data=True))[::2] # duplicate every other edge
+ G.add_edges_from(es)
+ b_answer = {0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0}
+ b = nx.betweenness_centrality(G, weight="weight", normalized=False)
+ for n in sorted(G):
+ assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+ def test_G4(self):
+ """Weighted betweenness centrality: G4"""
+ G = nx.MultiDiGraph()
+ G.add_weighted_edges_from(
+ [
+ ("s", "u", 10),
+ ("s", "x", 5),
+ ("s", "x", 6),
+ ("u", "v", 1),
+ ("u", "x", 2),
+ ("v", "y", 1),
+ ("v", "y", 1),
+ ("x", "u", 3),
+ ("x", "v", 5),
+ ("x", "y", 2),
+ ("x", "y", 3),
+ ("y", "s", 7),
+ ("y", "v", 6),
+ ("y", "v", 6),
+ ]
+ )
+
+ b_answer = {"y": 5.0, "x": 5.0, "s": 4.0, "u": 2.0, "v": 2.0}
+
+ b = nx.betweenness_centrality(G, weight="weight", normalized=False)
+ for n in sorted(G):
+ assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
class TestEdgeBetweennessCentrality:
def test_K5(self):
@@ -598,6 +637,7 @@ class TestWeightedEdgeBetweennessCentrality:
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
def test_weighted_graph(self):
+ """Edge betweenness centrality: weighted"""
eList = [
(0, 1, 5),
(0, 2, 4),
@@ -627,6 +667,7 @@ class TestWeightedEdgeBetweennessCentrality:
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
def test_normalized_weighted_graph(self):
+ """Edge betweenness centrality: normalized weighted"""
eList = [
(0, 1, 5),
(0, 2, 4),
@@ -655,3 +696,84 @@ class TestWeightedEdgeBetweennessCentrality:
norm = len(G) * (len(G) - 1) / 2
for n in sorted(G.edges()):
assert b[n] == pytest.approx(b_answer[n] / norm, abs=1e-7)
+
+ def test_weighted_multigraph(self):
+ """Edge betweenness centrality: weighted multigraph"""
+ eList = [
+ (0, 1, 5),
+ (0, 1, 4),
+ (0, 2, 4),
+ (0, 3, 3),
+ (0, 3, 3),
+ (0, 4, 2),
+ (1, 2, 4),
+ (1, 3, 1),
+ (1, 3, 2),
+ (1, 4, 3),
+ (1, 4, 4),
+ (2, 4, 5),
+ (3, 4, 4),
+ (3, 4, 4),
+ ]
+ G = nx.MultiGraph()
+ G.add_weighted_edges_from(eList)
+ b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
+ b_answer = {
+ (0, 1, 0): 0.0,
+ (0, 1, 1): 0.5,
+ (0, 2, 0): 1.0,
+ (0, 3, 0): 0.75,
+ (0, 3, 1): 0.75,
+ (0, 4, 0): 1.0,
+ (1, 2, 0): 2.0,
+ (1, 3, 0): 3.0,
+ (1, 3, 1): 0.0,
+ (1, 4, 0): 1.5,
+ (1, 4, 1): 0.0,
+ (2, 4, 0): 1.0,
+ (3, 4, 0): 0.25,
+ (3, 4, 1): 0.25,
+ }
+ for n in sorted(G.edges(keys=True)):
+ assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+ def test_normalized_weighted_multigraph(self):
+ """Edge betweenness centrality: normalized weighted multigraph"""
+ eList = [
+ (0, 1, 5),
+ (0, 1, 4),
+ (0, 2, 4),
+ (0, 3, 3),
+ (0, 3, 3),
+ (0, 4, 2),
+ (1, 2, 4),
+ (1, 3, 1),
+ (1, 3, 2),
+ (1, 4, 3),
+ (1, 4, 4),
+ (2, 4, 5),
+ (3, 4, 4),
+ (3, 4, 4),
+ ]
+ G = nx.MultiGraph()
+ G.add_weighted_edges_from(eList)
+ b = nx.edge_betweenness_centrality(G, weight="weight", normalized=True)
+ b_answer = {
+ (0, 1, 0): 0.0,
+ (0, 1, 1): 0.5,
+ (0, 2, 0): 1.0,
+ (0, 3, 0): 0.75,
+ (0, 3, 1): 0.75,
+ (0, 4, 0): 1.0,
+ (1, 2, 0): 2.0,
+ (1, 3, 0): 3.0,
+ (1, 3, 1): 0.0,
+ (1, 4, 0): 1.5,
+ (1, 4, 1): 0.0,
+ (2, 4, 0): 1.0,
+ (3, 4, 0): 0.25,
+ (3, 4, 1): 0.25,
+ }
+ norm = len(G) * (len(G) - 1) / 2
+ for n in sorted(G.edges(keys=True)):
+ assert b[n] == pytest.approx(b_answer[n] / norm, abs=1e-7)