summary refs log tree commit diff
diff options
context:
space:
mode:
authorGiampaolo Rodola <g.rodola@gmail.com>2017-05-12 16:27:52 +0200
committerGiampaolo Rodola <g.rodola@gmail.com>2017-05-12 16:27:52 +0200
commit971d4ebcb0f8991f3eafdaf543589298f19dcf93 (patch)
treef296fa709204c2c603c73078e6e0144b1b6504d5
parent3775929b7b6dc802abd32028f17585f82fbb12be (diff)
downloadpsutil-971d4ebcb0f8991f3eafdaf543589298f19dcf93.tar.gz
check broken links: also inspect py files
-rw-r--r--Makefile2
-rw-r--r--psutil/_pslinux.py3
-rwxr-xr-xscripts/internal/check_broken_links.py29
3 files changed, 30 insertions, 4 deletions
diff --git a/Makefile b/Makefile
index 31b84d1b..8b40d8c2 100644
--- a/Makefile
+++ b/Makefile
@@ -296,4 +296,4 @@ doc:
# check whether the links mentioned in some files are valid.
check-broken-links:
- git ls-files | grep \\.rst$ | xargs $(PYTHON) -Wa scripts/internal/check_broken_links.py
+ git ls-files | xargs $(PYTHON) -Wa scripts/internal/check_broken_links.py
diff --git a/psutil/_pslinux.py b/psutil/_pslinux.py
index 7c075f41..92e6c22b 100644
--- a/psutil/_pslinux.py
+++ b/psutil/_pslinux.py
@@ -319,7 +319,8 @@ except Exception:
def calculate_avail_vmem(mems):
"""Fallback for kernels < 3.14 where /proc/meminfo does not provide
- "MemAvailable:" column (see: https://blog.famzah.net/2014/09/24/).
+ "MemAvailable:" column, see:
+ https://blog.famzah.net/2014/09/24/
This code reimplements the algorithm outlined here:
https://git.kernel.org/cgit/linux/kernel/git/torvalds/linux.git/
commit/?id=34e431b0ae398fc54ea69ff85ec700722c9da773
diff --git a/scripts/internal/check_broken_links.py b/scripts/internal/check_broken_links.py
index cd9875da..d01db28f 100755
--- a/scripts/internal/check_broken_links.py
+++ b/scripts/internal/check_broken_links.py
@@ -53,7 +53,7 @@ import requests
HERE = os.path.abspath(os.path.dirname(__file__))
REGEX = r'(?:http|ftp|https)?://' \
r'(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
-REQUEST_TIMEOUT = 30
+REQUEST_TIMEOUT = 10
# There are some status codes sent by websites on HEAD request.
# Like 503 by Microsoft, and 401 by Apple
# They need to be sent GET request
@@ -66,16 +66,41 @@ def get_urls_rst(filename, _regex=re.compile(REGEX)):
urls = _regex.findall(text)
# remove duplicates, list for sets are not iterable
urls = list(set(urls))
+ # HISTORY file has a lot of dead links.
+ if filename == 'HISTORY.rst':
+ urls = [
+ x for x in urls if
+ not x.startswith('https://github.com/giampaolo/psutil/issues/')]
# correct urls which are between < and/or >
for i, url in enumerate(urls):
urls[i] = re.sub("[\*<>\(\)\)]", '', url)
return urls
+def get_urls_py(filename, _regex=re.compile(REGEX)):
+ with open(filename) as f:
+ lines = f.readlines()
+ urls = set()
+ for i, line in enumerate(lines):
+ line = line.strip()
+ match = _regex.findall(line)
+ if match:
+ url = match[0]
+ if line.startswith('# '):
+ nextline = lines[i + 1].strip()
+ if re.match('^# .+', nextline):
+ url += nextline[1:].strip()
+ url = re.sub("[\*<>\(\)\)]", '', url)
+ urls.add(url)
+ return urls
+
+
def get_urls(filename):
"""Extracts all URLs available in specified filename."""
if filename.endswith('.rst'):
return get_urls_rst(filename)
+ elif filename.endswith('.py'):
+ return get_urls_py(filename)
else:
return []
@@ -145,7 +170,7 @@ def main():
else:
for fail in fails:
fname, url = fail
- print("%s : %s " % (url, fname))
+ print("%-30s: %s " % (fname, url))
print('-' * 20)
print("total: %s fails!" % len(fails))
sys.exit(1)