author    Mridul Seth <seth.mridul@gmail.com>  2022-05-31 22:24:18 +0400
committer GitHub <noreply@github.com>  2022-05-31 11:24:18 -0700
commit    2a05ccdb07cff88e56661dee8a9271859354027f (patch)
tree      545f85bbbd99987845fa5fab08cb61fccbf510df
parent    9c29872a6358cb24b80765a25891dc75ca4379aa (diff)
download  networkx-2a05ccdb07cff88e56661dee8a9271859354027f.tar.gz
Remove redundant py2 numeric conversions (#5661)

* Remove redundant float conversion
* Remove redundant int conversion
* Use integer division

Co-authored-by: Miroslav Šedivý <6774676+eumiro@users.noreply.github.com>
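Background (illustrative sketch, not part of the diff): in Python 3 the
removed conversions are either no-ops or have direct operator equivalents.

    # Python 3 semantics this cleanup relies on (illustrative only).

    # "/" is always true division, so float() on an operand is redundant.
    assert 7 / 2 == float(7) / 2 == 3.5

    # round() called with a single numeric argument already returns an int.
    assert isinstance(round(2.6), int)

    # int(x / y) can become x // y, but only for non-negative operands:
    # int() truncates toward zero, while // floors toward negative infinity.
    assert int(7 / 2) == 7 // 2 == 3
    assert int(-7 / 2) == -3 and -7 // 2 == -4  # they differ on negatives

    # divmod() yields quotient and remainder together, as adopted in
    # networkx/algorithms/bipartite/centrality.py below.
    s, t = divmod(9, 4)
    assert (s, t) == (9 // 4, 9 % 4) == (2, 1)

The int()-to-// caveat is presumably why each call site had to be checked
individually; the quantities divided here are counts, sizes, and degrees.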
-rw-r--r--  examples/algorithms/plot_parallel_betweenness.py  2
-rw-r--r--  examples/drawing/plot_knuth_miles.py  2
-rw-r--r--  examples/graph/plot_napoleon_russian_campaign.py  2
-rw-r--r--  networkx/algorithms/approximation/clustering_coefficient.py  2
-rw-r--r--  networkx/algorithms/approximation/tests/test_approx_clust_coeff.py  16
-rw-r--r--  networkx/algorithms/assortativity/correlation.py  2
-rw-r--r--  networkx/algorithms/assortativity/tests/test_mixing.py  16
-rw-r--r--  networkx/algorithms/bipartite/basic.py  4
-rw-r--r--  networkx/algorithms/bipartite/centrality.py  18
-rw-r--r--  networkx/algorithms/bipartite/cluster.py  8
-rw-r--r--  networkx/algorithms/bipartite/projection.py  6
-rw-r--r--  networkx/algorithms/bipartite/tests/test_basic.py  2
-rw-r--r--  networkx/algorithms/bipartite/tests/test_project.py  2
-rw-r--r--  networkx/algorithms/centrality/current_flow_betweenness.py  4
-rw-r--r--  networkx/algorithms/centrality/current_flow_closeness.py  4
-rw-r--r--  networkx/algorithms/centrality/katz.py  2
-rw-r--r--  networkx/algorithms/centrality/load.py  4
-rw-r--r--  networkx/algorithms/centrality/subgraph_alg.py  2
-rw-r--r--  networkx/algorithms/centrality/tests/test_katz_centrality.py  2
-rw-r--r--  networkx/algorithms/connectivity/edge_augmentation.py  2
-rw-r--r--  networkx/algorithms/connectivity/tests/test_edge_augmentation.py  2
-rw-r--r--  networkx/algorithms/hierarchy.py  2
-rw-r--r--  networkx/algorithms/link_analysis/pagerank_alg.py  8
-rw-r--r--  networkx/algorithms/link_analysis/tests/test_pagerank.py  2
-rw-r--r--  networkx/algorithms/reciprocity.py  4
-rw-r--r--  networkx/algorithms/swap.py  4
-rw-r--r--  networkx/algorithms/tests/test_planar_drawing.py  4
-rw-r--r--  networkx/algorithms/tests/test_threshold.py  2
-rw-r--r--  networkx/algorithms/threshold.py  8
-rw-r--r--  networkx/drawing/layout.py  4
-rw-r--r--  networkx/generators/community.py  2
-rw-r--r--  networkx/generators/degree_seq.py  4
-rw-r--r--  networkx/generators/internet_as_graphs.py  8
-rw-r--r--  networkx/generators/joint_degree_seq.py  2
-rw-r--r--  networkx/generators/random_graphs.py  6
-rw-r--r--  networkx/generators/spectral_graph_forge.py  2
-rw-r--r--  networkx/generators/tests/test_internet_as_graphs.py  4
-rw-r--r--  networkx/utils/random_sequence.py  2
38 files changed, 84 insertions(+), 88 deletions(-)
diff --git a/examples/algorithms/plot_parallel_betweenness.py b/examples/algorithms/plot_parallel_betweenness.py
index aa88652b..d9333f45 100644
--- a/examples/algorithms/plot_parallel_betweenness.py
+++ b/examples/algorithms/plot_parallel_betweenness.py
@@ -38,7 +38,7 @@ def betweenness_centrality_parallel(G, processes=None):
"""Parallel betweenness centrality function"""
p = Pool(processes=processes)
node_divisor = len(p._pool) * 4
- node_chunks = list(chunks(G.nodes(), int(G.order() / node_divisor)))
+ node_chunks = list(chunks(G.nodes(), G.order() // node_divisor))
num_chunks = len(node_chunks)
bt_sc = p.starmap(
nx.betweenness_centrality_subset,
diff --git a/examples/drawing/plot_knuth_miles.py b/examples/drawing/plot_knuth_miles.py
index 45ef9f5c..e0ebea87 100644
--- a/examples/drawing/plot_knuth_miles.py
+++ b/examples/drawing/plot_knuth_miles.py
@@ -66,7 +66,7 @@ def miles_graph():
G.add_node(city)
# assign position - Convert string to lat/long
G.position[city] = (-float(x) / 100, float(y) / 100)
- G.population[city] = float(pop) / 1000.0
+ G.population[city] = float(pop) / 1000
return G
diff --git a/examples/graph/plot_napoleon_russian_campaign.py b/examples/graph/plot_napoleon_russian_campaign.py
index 1dd5b6a6..46ef64d6 100644
--- a/examples/graph/plot_napoleon_russian_campaign.py
+++ b/examples/graph/plot_napoleon_russian_campaign.py
@@ -122,7 +122,7 @@ plt.clf()
colors = ["b", "g", "r"]
for G in g:
c = colors.pop(0)
- node_size = [int(G.pop[n] / 300.0) for n in G]
+ node_size = [G.pop[n] // 300 for n in G]
nx.draw_networkx_edges(G, G.pos, edge_color=c, width=4, alpha=0.5)
nx.draw_networkx_nodes(G, G.pos, node_size=node_size, node_color=c, alpha=0.5)
nx.draw_networkx_nodes(G, G.pos, node_size=5, node_color="k")
diff --git a/networkx/algorithms/approximation/clustering_coefficient.py b/networkx/algorithms/approximation/clustering_coefficient.py
index 7adf7e01..291753db 100644
--- a/networkx/algorithms/approximation/clustering_coefficient.py
+++ b/networkx/algorithms/approximation/clustering_coefficient.py
@@ -62,4 +62,4 @@ def average_clustering(G, trials=1000, seed=None):
u, v = seed.sample(nbrs, 2)
if u in G[v]:
triangles += 1
- return triangles / float(trials)
+ return triangles / trials
diff --git a/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py b/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py
index 220d3850..5eab5c1e 100644
--- a/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py
+++ b/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py
@@ -8,36 +8,34 @@ from networkx.algorithms.approximation import average_clustering
def test_petersen():
# Actual coefficient is 0
G = nx.petersen_graph()
- assert average_clustering(G, trials=int(len(G) / 2)) == nx.average_clustering(G)
+ assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
def test_petersen_seed():
# Actual coefficient is 0
G = nx.petersen_graph()
- assert average_clustering(
- G, trials=int(len(G) / 2), seed=1
- ) == nx.average_clustering(G)
+ assert average_clustering(G, trials=len(G) // 2, seed=1) == nx.average_clustering(G)
def test_tetrahedral():
# Actual coefficient is 1
G = nx.tetrahedral_graph()
- assert average_clustering(G, trials=int(len(G) / 2)) == nx.average_clustering(G)
+ assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
def test_dodecahedral():
# Actual coefficient is 0
G = nx.dodecahedral_graph()
- assert average_clustering(G, trials=int(len(G) / 2)) == nx.average_clustering(G)
+ assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
def test_empty():
G = nx.empty_graph(5)
- assert average_clustering(G, trials=int(len(G) / 2)) == 0
+ assert average_clustering(G, trials=len(G) // 2) == 0
def test_complete():
G = nx.complete_graph(5)
- assert average_clustering(G, trials=int(len(G) / 2)) == 1
+ assert average_clustering(G, trials=len(G) // 2) == 1
G = nx.complete_graph(7)
- assert average_clustering(G, trials=int(len(G) / 2)) == 1
+ assert average_clustering(G, trials=len(G) // 2) == 1
diff --git a/networkx/algorithms/assortativity/correlation.py b/networkx/algorithms/assortativity/correlation.py
index 48801eb6..19ea04e0 100644
--- a/networkx/algorithms/assortativity/correlation.py
+++ b/networkx/algorithms/assortativity/correlation.py
@@ -285,7 +285,7 @@ def _numeric_ac(M, mapping):
import numpy as np
if M.sum() != 1.0:
- M = M / float(M.sum())
+ M = M / M.sum()
x = np.array(list(mapping.keys()))
y = x # x and y have the same support
idx = list(mapping.values())
diff --git a/networkx/algorithms/assortativity/tests/test_mixing.py b/networkx/algorithms/assortativity/tests/test_mixing.py
index c2b44e35..cb4ae072 100644
--- a/networkx/algorithms/assortativity/tests/test_mixing.py
+++ b/networkx/algorithms/assortativity/tests/test_mixing.py
@@ -50,7 +50,7 @@ class TestDegreeMixingMatrix(BaseTestDegreeMixing):
a = nx.degree_mixing_matrix(self.P4, normalized=False)
np.testing.assert_equal(a, a_result)
a = nx.degree_mixing_matrix(self.P4)
- np.testing.assert_equal(a, a_result / float(a_result.sum()))
+ np.testing.assert_equal(a, a_result / a_result.sum())
def test_degree_mixing_matrix_directed(self):
# fmt: off
@@ -62,7 +62,7 @@ class TestDegreeMixingMatrix(BaseTestDegreeMixing):
a = nx.degree_mixing_matrix(self.D, normalized=False)
np.testing.assert_equal(a, a_result)
a = nx.degree_mixing_matrix(self.D)
- np.testing.assert_equal(a, a_result / float(a_result.sum()))
+ np.testing.assert_equal(a, a_result / a_result.sum())
def test_degree_mixing_matrix_multigraph(self):
# fmt: off
@@ -74,7 +74,7 @@ class TestDegreeMixingMatrix(BaseTestDegreeMixing):
a = nx.degree_mixing_matrix(self.M, normalized=False)
np.testing.assert_equal(a, a_result)
a = nx.degree_mixing_matrix(self.M)
- np.testing.assert_equal(a, a_result / float(a_result.sum()))
+ np.testing.assert_equal(a, a_result / a_result.sum())
def test_degree_mixing_matrix_selfloop(self):
# fmt: off
@@ -83,7 +83,7 @@ class TestDegreeMixingMatrix(BaseTestDegreeMixing):
a = nx.degree_mixing_matrix(self.S, normalized=False)
np.testing.assert_equal(a, a_result)
a = nx.degree_mixing_matrix(self.S)
- np.testing.assert_equal(a, a_result / float(a_result.sum()))
+ np.testing.assert_equal(a, a_result / a_result.sum())
def test_degree_mixing_matrix_weighted(self):
a_result = np.array([[0.0, 1.0], [1.0, 6.0]])
@@ -137,7 +137,7 @@ class TestAttributeMixingMatrix(BaseTestAttributeMixing):
)
np.testing.assert_equal(a, a_result)
a = nx.attribute_mixing_matrix(self.G, "fish", mapping=mapping)
- np.testing.assert_equal(a, a_result / float(a_result.sum()))
+ np.testing.assert_equal(a, a_result / a_result.sum())
def test_attribute_mixing_matrix_directed(self):
mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
@@ -147,7 +147,7 @@ class TestAttributeMixingMatrix(BaseTestAttributeMixing):
)
np.testing.assert_equal(a, a_result)
a = nx.attribute_mixing_matrix(self.D, "fish", mapping=mapping)
- np.testing.assert_equal(a, a_result / float(a_result.sum()))
+ np.testing.assert_equal(a, a_result / a_result.sum())
def test_attribute_mixing_matrix_multigraph(self):
mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
@@ -157,7 +157,7 @@ class TestAttributeMixingMatrix(BaseTestAttributeMixing):
)
np.testing.assert_equal(a, a_result)
a = nx.attribute_mixing_matrix(self.M, "fish", mapping=mapping)
- np.testing.assert_equal(a, a_result / float(a_result.sum()))
+ np.testing.assert_equal(a, a_result / a_result.sum())
class TestNumericMixingMatrix(BaseTestNumericMixing):
@@ -179,4 +179,4 @@ class TestNumericMixingMatrix(BaseTestNumericMixing):
)
np.testing.assert_equal(a, a_result)
a = nx.numeric_mixing_matrix(self.F, "margin", mapping=mapping)
- np.testing.assert_equal(a, a_result / float(a_result.sum()))
+ np.testing.assert_equal(a, a_result / a_result.sum())
diff --git a/networkx/algorithms/bipartite/basic.py b/networkx/algorithms/bipartite/basic.py
index 06db112b..ac4686a7 100644
--- a/networkx/algorithms/bipartite/basic.py
+++ b/networkx/algorithms/bipartite/basic.py
@@ -263,9 +263,9 @@ def density(B, nodes):
d = 0.0
else:
if B.is_directed():
- d = m / (2.0 * float(nb * nt))
+ d = m / (2 * nb * nt)
else:
- d = m / float(nb * nt)
+ d = m / (nb * nt)
return d
diff --git a/networkx/algorithms/bipartite/centrality.py b/networkx/algorithms/bipartite/centrality.py
index fa8d3e1c..cc3d370d 100644
--- a/networkx/algorithms/bipartite/centrality.py
+++ b/networkx/algorithms/bipartite/centrality.py
@@ -144,17 +144,15 @@ def betweenness_centrality(G, nodes):
"""
top = set(nodes)
bottom = set(G) - top
- n = float(len(top))
- m = float(len(bottom))
- s = (n - 1) // m
- t = (n - 1) % m
+ n = len(top)
+ m = len(bottom)
+ s, t = divmod(n - 1, m)
bet_max_top = (
((m**2) * ((s + 1) ** 2))
+ (m * (s + 1) * (2 * t - s - 1))
- (t * ((2 * s) - t + 3))
) / 2.0
- p = (m - 1) // n
- r = (m - 1) % n
+ p, r = divmod(m - 1, n)
bet_max_bot = (
((n**2) * ((p + 1) ** 2))
+ (n * (p + 1) * (2 * r - p - 1))
@@ -243,15 +241,15 @@ def closeness_centrality(G, nodes, normalized=True):
path_length = nx.single_source_shortest_path_length
top = set(nodes)
bottom = set(G) - top
- n = float(len(top))
- m = float(len(bottom))
+ n = len(top)
+ m = len(bottom)
for node in top:
sp = dict(path_length(G, node))
totsp = sum(sp.values())
if totsp > 0.0 and len(G) > 1:
closeness[node] = (m + 2 * (n - 1)) / totsp
if normalized:
- s = (len(sp) - 1.0) / (len(G) - 1)
+ s = (len(sp) - 1) / (len(G) - 1)
closeness[node] *= s
else:
closeness[n] = 0.0
@@ -261,7 +259,7 @@ def closeness_centrality(G, nodes, normalized=True):
if totsp > 0.0 and len(G) > 1:
closeness[node] = (n + 2 * (m - 1)) / totsp
if normalized:
- s = (len(sp) - 1.0) / (len(G) - 1)
+ s = (len(sp) - 1) / (len(G) - 1)
closeness[node] *= s
else:
closeness[n] = 0.0
diff --git a/networkx/algorithms/bipartite/cluster.py b/networkx/algorithms/bipartite/cluster.py
index d8aaf06b..a7103980 100644
--- a/networkx/algorithms/bipartite/cluster.py
+++ b/networkx/algorithms/bipartite/cluster.py
@@ -14,15 +14,15 @@ __all__ = [
def cc_dot(nu, nv):
- return float(len(nu & nv)) / len(nu | nv)
+ return len(nu & nv) / len(nu | nv)
def cc_max(nu, nv):
- return float(len(nu & nv)) / max(len(nu), len(nv))
+ return len(nu & nv) / max(len(nu), len(nv))
def cc_min(nu, nv):
- return float(len(nu & nv)) / min(len(nu), len(nv))
+ return len(nu & nv) / min(len(nu), len(nv))
modes = {"dot": cc_dot, "min": cc_min, "max": cc_max}
@@ -205,7 +205,7 @@ def average_clustering(G, nodes=None, mode="dot"):
if nodes is None:
nodes = G
ccs = latapy_clustering(G, nodes=nodes, mode=mode)
- return float(sum(ccs[v] for v in nodes)) / len(nodes)
+ return sum(ccs[v] for v in nodes) / len(nodes)
def robins_alexander_clustering(G):
diff --git a/networkx/algorithms/bipartite/projection.py b/networkx/algorithms/bipartite/projection.py
index c69f761e..8864195e 100644
--- a/networkx/algorithms/bipartite/projection.py
+++ b/networkx/algorithms/bipartite/projection.py
@@ -194,7 +194,7 @@ def weighted_projected_graph(B, nodes, ratio=False):
G = nx.Graph()
G.graph.update(B.graph)
G.add_nodes_from((n, B.nodes[n]) for n in nodes)
- n_top = float(len(B) - len(nodes))
+ n_top = len(B) - len(nodes)
if n_top < 1:
raise NetworkXAlgorithmError(
@@ -402,9 +402,9 @@ def overlap_weighted_projected_graph(B, nodes, jaccard=True):
for v in nbrs2:
vnbrs = set(pred[v])
if jaccard:
- wt = float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)
+ wt = len(unbrs & vnbrs) / len(unbrs | vnbrs)
else:
- wt = float(len(unbrs & vnbrs)) / min(len(unbrs), len(vnbrs))
+ wt = len(unbrs & vnbrs) / min(len(unbrs), len(vnbrs))
G.add_edge(u, v, weight=wt)
return G
diff --git a/networkx/algorithms/bipartite/tests/test_basic.py b/networkx/algorithms/bipartite/tests/test_basic.py
index bacf9bdf..655506b4 100644
--- a/networkx/algorithms/bipartite/tests/test_basic.py
+++ b/networkx/algorithms/bipartite/tests/test_basic.py
@@ -67,7 +67,7 @@ class TestBipartiteBasic:
def test_bipartite_density(self):
G = nx.path_graph(5)
X, Y = bipartite.sets(G)
- density = float(len(list(G.edges()))) / (len(X) * len(Y))
+ density = len(list(G.edges())) / (len(X) * len(Y))
assert bipartite.density(G, X) == density
D = nx.DiGraph(G.edges())
assert bipartite.density(D, X) == density / 2.0
diff --git a/networkx/algorithms/bipartite/tests/test_project.py b/networkx/algorithms/bipartite/tests/test_project.py
index 5eb16dcc..bc2c761e 100644
--- a/networkx/algorithms/bipartite/tests/test_project.py
+++ b/networkx/algorithms/bipartite/tests/test_project.py
@@ -375,7 +375,7 @@ class TestBipartiteWeightedProjection:
def jaccard(G, u, v):
unbrs = set(G[u])
vnbrs = set(G[v])
- return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)
+ return len(unbrs & vnbrs) / len(unbrs | vnbrs)
def my_weight(G, u, v, weight="weight"):
w = 0
diff --git a/networkx/algorithms/centrality/current_flow_betweenness.py b/networkx/algorithms/centrality/current_flow_betweenness.py
index 0753229d..97ef053a 100644
--- a/networkx/algorithms/centrality/current_flow_betweenness.py
+++ b/networkx/algorithms/centrality/current_flow_betweenness.py
@@ -139,7 +139,7 @@ def approximate_current_flow_betweenness_centrality(
else:
factor = nb / 2.0
# remap to original node names and "unnormalize" if required
- return {ordering[k]: float(v * factor) for k, v in betweenness.items()}
+ return {ordering[k]: v * factor for k, v in betweenness.items()}
@not_implemented_for("directed")
@@ -339,4 +339,4 @@ def edge_current_flow_betweenness_centrality(
betweenness[e] += (i + 1 - pos[i]) * row[i]
betweenness[e] += (n - i - pos[i]) * row[i]
betweenness[e] /= nb
- return {(ordering[s], ordering[t]): float(v) for (s, t), v in betweenness.items()}
+ return {(ordering[s], ordering[t]): v for (s, t), v in betweenness.items()}
diff --git a/networkx/algorithms/centrality/current_flow_closeness.py b/networkx/algorithms/centrality/current_flow_closeness.py
index b1e0627d..ab40b910 100644
--- a/networkx/algorithms/centrality/current_flow_closeness.py
+++ b/networkx/algorithms/centrality/current_flow_closeness.py
@@ -90,8 +90,8 @@ def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"):
betweenness[v] += col[v] - 2 * col[w]
betweenness[w] += col[v]
for v in H:
- betweenness[v] = 1.0 / (betweenness[v])
- return {ordering[k]: float(v) for k, v in betweenness.items()}
+ betweenness[v] = 1 / (betweenness[v])
+ return {ordering[k]: v for k, v in betweenness.items()}
information_centrality = current_flow_closeness_centrality
diff --git a/networkx/algorithms/centrality/katz.py b/networkx/algorithms/centrality/katz.py
index 236f2c61..a3acde9b 100644
--- a/networkx/algorithms/centrality/katz.py
+++ b/networkx/algorithms/centrality/katz.py
@@ -318,7 +318,7 @@ def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None):
except AttributeError:
nodelist = list(G)
try:
- b = np.ones((len(nodelist), 1)) * float(beta)
+ b = np.ones((len(nodelist), 1)) * beta
except (TypeError, ValueError, AttributeError) as err:
raise nx.NetworkXError("beta must be a number") from err
diff --git a/networkx/algorithms/centrality/load.py b/networkx/algorithms/centrality/load.py
index 9815041e..358b6560 100644
--- a/networkx/algorithms/centrality/load.py
+++ b/networkx/algorithms/centrality/load.py
@@ -118,7 +118,7 @@ def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None):
for x in pred[v]: # one shortest path.
if x == source: # stop if hit source because all remaining v
break # also have pred[v]==[source]
- between[x] += between[v] / float(num_paths)
+ between[x] += between[v] / num_paths
# remove source
for v in between:
between[v] -= 1
@@ -127,7 +127,7 @@ def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None):
l = len(between)
if l > 2:
# scale by 1/the number of possible paths
- scale = 1.0 / float((l - 1) * (l - 2))
+ scale = 1 / ((l - 1) * (l - 2))
for v in between:
between[v] *= scale
return between
diff --git a/networkx/algorithms/centrality/subgraph_alg.py b/networkx/algorithms/centrality/subgraph_alg.py
index 8766f56b..0f52d514 100644
--- a/networkx/algorithms/centrality/subgraph_alg.py
+++ b/networkx/algorithms/centrality/subgraph_alg.py
@@ -277,7 +277,7 @@ def communicability_betweenness_centrality(G):
B[i, :] = 0
B[:, i] = 0
B -= np.diag(np.diag(B))
- cbc[v] = float(B.sum())
+ cbc[v] = B.sum()
# put row and col back
A[i, :] = row
A[:, i] = col
diff --git a/networkx/algorithms/centrality/tests/test_katz_centrality.py b/networkx/algorithms/centrality/tests/test_katz_centrality.py
index 8f00df5f..da599e13 100644
--- a/networkx/algorithms/centrality/tests/test_katz_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_katz_centrality.py
@@ -339,7 +339,7 @@ class TestKatzEigenvectorVKatz:
def test_eigenvector_v_katz_random(self):
G = nx.gnp_random_graph(10, 0.5, seed=1234)
- l = float(max(np.linalg.eigvals(nx.adjacency_matrix(G).todense())))
+ l = max(np.linalg.eigvals(nx.adjacency_matrix(G).todense()))
e = nx.eigenvector_centrality_numpy(G)
k = nx.katz_centrality_numpy(G, 1.0 / l)
for n in G:
diff --git a/networkx/algorithms/connectivity/edge_augmentation.py b/networkx/algorithms/connectivity/edge_augmentation.py
index 7d03984f..99f31cc4 100644
--- a/networkx/algorithms/connectivity/edge_augmentation.py
+++ b/networkx/algorithms/connectivity/edge_augmentation.py
@@ -808,7 +808,7 @@ def unconstrained_bridge_augmentation(G):
v2 = [n for n in nx.dfs_preorder_nodes(T, root) if T.degree(n) == 1]
# connecting first half of the leafs in pre-order to the second
# half will bridge connect the tree with the fewest edges.
- half = int(math.ceil(len(v2) / 2.0))
+ half = math.ceil(len(v2) / 2)
A2 = list(zip(v2[:half], v2[-half:]))
# collect the edges used to augment the original forest
diff --git a/networkx/algorithms/connectivity/tests/test_edge_augmentation.py b/networkx/algorithms/connectivity/tests/test_edge_augmentation.py
index a1bbf5a5..f7f72c00 100644
--- a/networkx/algorithms/connectivity/tests/test_edge_augmentation.py
+++ b/networkx/algorithms/connectivity/tests/test_edge_augmentation.py
@@ -488,7 +488,7 @@ def _check_unconstrained_bridge_property(G, info1):
p = len([n for n, d in C.degree() if d == 1]) # leafs
q = len([n for n, d in C.degree() if d == 0]) # isolated
if p + q > 1:
- size_target = int(math.ceil(p / 2.0)) + q
+ size_target = math.ceil(p / 2) + q
size_aug = info1["num_edges"]
assert (
size_aug == size_target
diff --git a/networkx/algorithms/hierarchy.py b/networkx/algorithms/hierarchy.py
index fcf25bd0..4a4bc296 100644
--- a/networkx/algorithms/hierarchy.py
+++ b/networkx/algorithms/hierarchy.py
@@ -44,4 +44,4 @@ def flow_hierarchy(G, weight=None):
if not G.is_directed():
raise nx.NetworkXError("G must be a digraph in flow_hierarchy")
scc = nx.strongly_connected_components(G)
- return 1.0 - sum(G.subgraph(c).size(weight) for c in scc) / float(G.size(weight))
+ return 1 - sum(G.subgraph(c).size(weight) for c in scc) / G.size(weight)
diff --git a/networkx/algorithms/link_analysis/pagerank_alg.py b/networkx/algorithms/link_analysis/pagerank_alg.py
index b30b2437..ece444c8 100644
--- a/networkx/algorithms/link_analysis/pagerank_alg.py
+++ b/networkx/algorithms/link_analysis/pagerank_alg.py
@@ -134,21 +134,21 @@ def _pagerank_python(
x = dict.fromkeys(W, 1.0 / N)
else:
# Normalized nstart vector
- s = float(sum(nstart.values()))
+ s = sum(nstart.values())
x = {k: v / s for k, v in nstart.items()}
if personalization is None:
# Assign uniform personalization vector if not given
p = dict.fromkeys(W, 1.0 / N)
else:
- s = float(sum(personalization.values()))
+ s = sum(personalization.values())
p = {k: v / s for k, v in personalization.items()}
if dangling is None:
# Use personalization vector if dangling vector not specified
dangling_weights = p
else:
- s = float(sum(dangling.values()))
+ s = sum(dangling.values())
dangling_weights = {k: v / s for k, v in dangling.items()}
dangling_nodes = [n for n in W if W.out_degree(n, weight=weight) == 0.0]
@@ -359,7 +359,7 @@ def pagerank_numpy(G, alpha=0.85, personalization=None, weight="weight", danglin
ind = np.argmax(eigenvalues)
# eigenvector of largest eigenvalue is at ind, normalized
largest = np.array(eigenvectors[:, ind]).flatten().real
- norm = float(largest.sum())
+ norm = largest.sum()
return dict(zip(G, map(float, largest / norm)))
diff --git a/networkx/algorithms/link_analysis/tests/test_pagerank.py b/networkx/algorithms/link_analysis/tests/test_pagerank.py
index 6f9cd707..4c9722f1 100644
--- a/networkx/algorithms/link_analysis/tests/test_pagerank.py
+++ b/networkx/algorithms/link_analysis/tests/test_pagerank.py
@@ -141,7 +141,7 @@ class TestPageRank:
"""
G = self.G
dangling = self.dangling_edges
- dangling_sum = float(sum(dangling.values()))
+ dangling_sum = sum(dangling.values())
M1 = nx.google_matrix(G, personalization=dangling)
M2 = nx.google_matrix(G, personalization=dangling, dangling=dangling)
for i in range(len(G)):
diff --git a/networkx/algorithms/reciprocity.py b/networkx/algorithms/reciprocity.py
index f3128181..12a8f6b0 100644
--- a/networkx/algorithms/reciprocity.py
+++ b/networkx/algorithms/reciprocity.py
@@ -68,7 +68,7 @@ def _reciprocity_iter(G, nodes):
if n_total == 0:
yield (node, None)
else:
- reciprocity = 2.0 * float(len(overlap)) / float(n_total)
+ reciprocity = 2 * len(overlap) / n_total
yield (node, reciprocity)
@@ -90,4 +90,4 @@ def overall_reciprocity(G):
if n_all_edge == 0:
raise NetworkXError("Not defined for empty graphs")
- return float(n_overlap_edge) / float(n_all_edge)
+ return n_overlap_edge / n_all_edge
diff --git a/networkx/algorithms/swap.py b/networkx/algorithms/swap.py
index 8a06a5f3..208022ef 100644
--- a/networkx/algorithms/swap.py
+++ b/networkx/algorithms/swap.py
@@ -220,7 +220,7 @@ def connected_double_edge_swap(G, nswap=1, _window_threshold=3, seed=None):
fail = True
# If one of the swaps failed, reduce the window size.
if fail:
- window = int(math.ceil(window / 2))
+ window = math.ceil(window / 2)
else:
window += 1
# If the window is large, then there is a good chance that a bunch of
@@ -265,5 +265,5 @@ def connected_double_edge_swap(G, nswap=1, _window_threshold=3, seed=None):
G.remove_edge(u, x)
G.remove_edge(v, y)
swapcount -= 1
- window = int(math.ceil(window / 2))
+ window = math.ceil(window / 2)
return swapcount
diff --git a/networkx/algorithms/tests/test_planar_drawing.py b/networkx/algorithms/tests/test_planar_drawing.py
index 025e8229..af252b45 100644
--- a/networkx/algorithms/tests/test_planar_drawing.py
+++ b/networkx/algorithms/tests/test_planar_drawing.py
@@ -171,10 +171,10 @@ def check_edge_intersections(G, pos):
# https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
px = (x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (
x3 * y4 - y3 * x4
- ) / float(determinant)
+ ) / determinant
py = (x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (
x3 * y4 - y3 * x4
- ) / float(determinant)
+ ) / determinant
# Check if intersection lies between the points
if point_in_between(pos[a], pos[b], (px, py)) and point_in_between(
diff --git a/networkx/algorithms/tests/test_threshold.py b/networkx/algorithms/tests/test_threshold.py
index 9467e8de..c06784a2 100644
--- a/networkx/algorithms/tests/test_threshold.py
+++ b/networkx/algorithms/tests/test_threshold.py
@@ -189,7 +189,7 @@ class TestGeneratorThreshold:
assert wseq == [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]]
wseq = nxt.creation_sequence_to_weights("ddidiiidididid")
- ws = [s / float(12) for s in [6, 6, 5, 7, 4, 4, 4, 8, 3, 9, 2, 10, 1, 11]]
+ ws = [s / 12 for s in [6, 6, 5, 7, 4, 4, 4, 8, 3, 9, 2, 10, 1, 11]]
assert sum(abs(c - d) for c, d in zip(wseq, ws)) < 1e-14
def test_finding_routines(self):
diff --git a/networkx/algorithms/threshold.py b/networkx/algorithms/threshold.py
index 217b70e6..f2bb5dcf 100644
--- a/networkx/algorithms/threshold.py
+++ b/networkx/algorithms/threshold.py
@@ -231,7 +231,7 @@ def creation_sequence_to_weights(creation_sequence):
# Now scale weights
if prev == "d":
w += 1
- wscale = 1.0 / float(w)
+ wscale = 1 / w
return [ww * wscale for ww in wseq]
# return wseq
@@ -492,7 +492,7 @@ def cluster_sequence(creation_sequence):
cseq.append(0)
continue
max_size = (deg * (deg - 1)) // 2
- cseq.append(float(tri) / float(max_size))
+ cseq.append(tri / max_size)
return cseq
@@ -521,7 +521,7 @@ def density(creation_sequence):
N = len(creation_sequence)
two_size = sum(degree_sequence(creation_sequence))
two_possible = N * (N - 1)
- den = two_size / float(two_possible)
+ den = two_size / two_possible
return den
@@ -556,7 +556,7 @@ def degree_correlation(creation_sequence):
if numer == 0:
return 1
raise ValueError(f"Zero Denominator but Numerator is {numer}")
- return numer / float(denom)
+ return numer / denom
def shortest_path(creation_sequence, u, v):
diff --git a/networkx/drawing/layout.py b/networkx/drawing/layout.py
index c3d37d78..23381a24 100644
--- a/networkx/drawing/layout.py
+++ b/networkx/drawing/layout.py
@@ -530,7 +530,7 @@ def _fruchterman_reingold(
t = max(max(pos.T[0]) - min(pos.T[0]), max(pos.T[1]) - min(pos.T[1])) * 0.1
# simple cooling scheme.
# linearly step down by dt on each iteration so last iteration is size dt.
- dt = t / float(iterations + 1)
+ dt = t / (iterations + 1)
delta = np.zeros((pos.shape[0], pos.shape[0], pos.shape[1]), dtype=A.dtype)
# the inscrutable (but fast) version
# this is still O(V^2)
@@ -602,7 +602,7 @@ def _sparse_fruchterman_reingold(
t = max(max(pos.T[0]) - min(pos.T[0]), max(pos.T[1]) - min(pos.T[1])) * 0.1
# simple cooling scheme.
# linearly step down by dt on each iteration so last iteration is size dt.
- dt = t / float(iterations + 1)
+ dt = t / (iterations + 1)
displacement = np.zeros((dim, nnodes))
for iteration in range(iterations):
diff --git a/networkx/generators/community.py b/networkx/generators/community.py
index 2c03f0e7..44db7537 100644
--- a/networkx/generators/community.py
+++ b/networkx/generators/community.py
@@ -367,7 +367,7 @@ def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, seed=N
assigned = 0
sizes = []
while True:
- size = int(seed.gauss(s, float(s) / v + 0.5))
+ size = int(seed.gauss(s, s / v + 0.5))
if size < 1: # how to handle 0 or negative sizes?
continue
if assigned + size >= n:
diff --git a/networkx/generators/degree_seq.py b/networkx/generators/degree_seq.py
index 46244f0e..f442bed0 100644
--- a/networkx/generators/degree_seq.py
+++ b/networkx/generators/degree_seq.py
@@ -428,7 +428,7 @@ def expected_degree_graph(w, seed=None, selfloops=True):
while v < n and p > 0:
if p != 1:
r = seed.random()
- v += int(math.floor(math.log(r, 1 - p)))
+ v += math.floor(math.log(r, 1 - p))
if v < n:
q = min(seq[v] * factor, 1)
if seed.random() < q / p:
@@ -805,7 +805,7 @@ class DegreeSequenceRandomGraph:
def q(self, u, v):
# remaining degree probability
- norm = float(max(self.remaining_degree.values())) ** 2
+ norm = max(self.remaining_degree.values()) ** 2
return self.remaining_degree[u] * self.remaining_degree[v] / norm
def suitable_edge(self):
diff --git a/networkx/generators/internet_as_graphs.py b/networkx/generators/internet_as_graphs.py
index a2bb16e8..c3c12788 100644
--- a/networkx/generators/internet_as_graphs.py
+++ b/networkx/generators/internet_as_graphs.py
@@ -25,7 +25,7 @@ def uniform_int_from_avg(a, m, seed):
assert m >= a
b = 2 * m - a
p = (b - floor(b)) / 2
- X1 = int(round(seed.random() * (floor(b) - a) + a))
+ X1 = round(seed.random() * (floor(b) - a) + a)
if seed.random() < p:
X2 = 1
else:
@@ -94,9 +94,9 @@ class AS_graph_generator:
"""
self.seed = seed
- self.n_t = min(n, int(round(self.seed.random() * 2 + 4))) # num of T nodes
- self.n_m = int(round(0.15 * n)) # number of M nodes
- self.n_cp = int(round(0.05 * n)) # number of CP nodes
+ self.n_t = min(n, round(self.seed.random() * 2 + 4)) # num of T nodes
+ self.n_m = round(0.15 * n) # number of M nodes
+ self.n_cp = round(0.05 * n) # number of CP nodes
self.n_c = max(0, n - self.n_t - self.n_m - self.n_cp) # number of C nodes
self.d_m = 2 + (2.5 * n) / 10000 # average multihoming degree for M nodes
diff --git a/networkx/generators/joint_degree_seq.py b/networkx/generators/joint_degree_seq.py
index 807a68c4..7ab7c76c 100644
--- a/networkx/generators/joint_degree_seq.py
+++ b/networkx/generators/joint_degree_seq.py
@@ -352,7 +352,7 @@ def is_valid_directed_joint_degree(in_degrees, out_degrees, nkk):
return False
for s in S:
- if not float(S[s]) / s[0] == V[s]: # condition 2
+ if not S[s] / s[0] == V[s]: # condition 2
return False
# if all conditions above have been satisfied then the input nkk is
diff --git a/networkx/generators/random_graphs.py b/networkx/generators/random_graphs.py
index 9fabda30..d75bbd3a 100644
--- a/networkx/generators/random_graphs.py
+++ b/networkx/generators/random_graphs.py
@@ -1210,12 +1210,12 @@ def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100):
# get trial sequence
z = nx.utils.powerlaw_sequence(n, exponent=gamma, seed=seed)
# round to integer values in the range [0,n]
- zseq = [min(n, max(int(round(s)), 0)) for s in z]
+ zseq = [min(n, max(round(s), 0)) for s in z]
# another sequence to swap values from
z = nx.utils.powerlaw_sequence(tries, exponent=gamma, seed=seed)
# round to integer values in the range [0,n]
- swap = [min(n, max(int(round(s)), 0)) for s in z]
+ swap = [min(n, max(round(s), 0)) for s in z]
for deg in swap:
# If this degree sequence can be the degree sequence of a tree, return
@@ -1311,6 +1311,6 @@ def random_kernel_graph(n, kernel_integral, kernel_root=None, seed=None):
if kernel_integral(i / n, j / n, 1) <= r:
i, j = i + 1, i + 1
else:
- j = int(math.ceil(n * kernel_root(i / n, j / n, r)))
+ j = math.ceil(n * kernel_root(i / n, j / n, r))
graph.add_edge(i - 1, j - 1)
return graph
diff --git a/networkx/generators/spectral_graph_forge.py b/networkx/generators/spectral_graph_forge.py
index bc3508e5..e46851ec 100644
--- a/networkx/generators/spectral_graph_forge.py
+++ b/networkx/generators/spectral_graph_forge.py
@@ -87,7 +87,7 @@ def spectral_graph_forge(G, alpha, transformation="identity", seed=None):
alpha = np.clip(alpha, 0, 1)
A = nx.to_numpy_array(G)
n = A.shape[1]
- level = int(round(n * alpha))
+ level = round(n * alpha)
if transformation not in available_transformations:
msg = f"{transformation!r} is not a valid transformation. "
diff --git a/networkx/generators/tests/test_internet_as_graphs.py b/networkx/generators/tests/test_internet_as_graphs.py
index 08e54dd7..a3b14e81 100644
--- a/networkx/generators/tests/test_internet_as_graphs.py
+++ b/networkx/generators/tests/test_internet_as_graphs.py
@@ -82,8 +82,8 @@ class TestInternetASTopology:
def test_node_numbers(self):
assert len(self.G.nodes()) == self.n
assert len(self.T) < 7
- assert len(self.M) == int(round(self.n * 0.15))
- assert len(self.CP) == int(round(self.n * 0.05))
+ assert len(self.M) == round(self.n * 0.15)
+ assert len(self.CP) == round(self.n * 0.05)
numb = self.n - len(self.T) - len(self.M) - len(self.CP)
assert len(self.C) == numb
diff --git a/networkx/utils/random_sequence.py b/networkx/utils/random_sequence.py
index ac60b1f3..79372688 100644
--- a/networkx/utils/random_sequence.py
+++ b/networkx/utils/random_sequence.py
@@ -99,7 +99,7 @@ def cumulative_distribution(distribution):
"""Returns normalized cumulative distribution from discrete distribution."""
cdf = [0.0]
- psum = float(sum(distribution))
+ psum = sum(distribution)
for i in range(0, len(distribution)):
cdf.append(cdf[i] + distribution[i] / psum)
return cdf