summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author: Neetu Mehta <aryacollection.hsp@gmail.com> 2023-02-17 16:43:15 +0530
committer: Neetu Mehta <aryacollection.hsp@gmail.com> 2023-02-17 16:43:15 +0530
commit: 079ed5d4a36a7bf14d5586242650d01ec2b642bc (patch)
tree: 7d9013e811a4cab7969291e31ab3ec9b99189612
parent: f782145cb1b11c9f1da86668c3761f98a7d5ea1c (diff)
download: librsvg-079ed5d4a36a7bf14d5586242650d01ec2b642bc.tar.gz
Complete run-benchmarks.py script
-rwxr-xr-x              benchmark.py                 79
-rwxr-xr-x [-rw-r--r--] benchmarks/run-benchmarks.py 80
2 files changed, 77 insertions, 82 deletions
diff --git a/benchmark.py b/benchmark.py
deleted file mode 100755
index f68dfc86..00000000
--- a/benchmark.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-import tempfile
-import requests
-from subprocess import check_call, check_output, CalledProcessError
-from typing import TextIO
-
-METRICS_URL = "https://librsvg-metrics.fly.dev/api/metrics/"
-
-
-def parse_output_file(file: TextIO):
- """ parse the cachegrind output file for metrics"""
- keys, values = None, None
- for line in file.readlines():
- line = line.strip()
- if line.startswith("events: "):
- keys = line.removeprefix("events: ").split(" ")
- if line.startswith("summary: "):
- values = line.removeprefix("summary: ").split(" ")
-
- if keys is None or values is None:
- raise Exception("Couldn't parse cachegrind file, event names or summary metrics not found")
-
- return {k: v for k, v in zip(keys, values)}
-
-
-def get_commit_details():
- """ Get commit details on which benchmarking is run """
- if os.environ.get("CI_COMMIT_SHA") and os.environ.get("CI_COMMIT_TIMESTAMP"):
- return {
- "commit": os.environ["CI_COMMIT_SHA"],
- "time": os.environ["CI_COMMIT_TIMESTAMP"]
- }
-
- commit_hash = check_output(["git", "show", "--format=%cI"]).strip()
- commit_time = check_output(["git", "show", "--format=%H"]).strip()
- return {
- "commit": str(commit_hash),
- "time": str(commit_time)
- }
-
-
-def submit_metrics(data):
- token = os.environ["METRICS_TOKEN"]
- response = requests.post(METRICS_URL, json=data, headers={"Authorization": f"Token {token}"})
- print(response.status_code, response.reason)
-
-
-def run_benchmark(cmd, path):
- command = ["valgrind", "--tool=cachegrind", f"--cachegrind-out-file={path}", *cmd]
- check_call(command)
-
-
-def check_working_tree():
- cmd = ["git", "diff-index", "--quiet", "HEAD"]
- try:
- check_call(cmd)
- except CalledProcessError as e:
- print("git working tree not clean, exiting.")
- raise e
-
-
-def main():
- check_working_tree()
- with tempfile.NamedTemporaryFile("r+") as file:
- run_benchmark(sys.argv[1:], file.name)
-
- metrics = parse_output_file(file)
- metrics["value"] = metrics["Ir"]
-
- metadata = get_commit_details()
- data = metadata | metrics
- submit_metrics(data)
-
-
-if __name__ == "__main__":
- main()
diff --git a/benchmarks/run-benchmarks.py b/benchmarks/run-benchmarks.py
index 7c8f763c..48ce4306 100644..100755
--- a/benchmarks/run-benchmarks.py
+++ b/benchmarks/run-benchmarks.py
@@ -1,17 +1,91 @@
#!/usr/bin/env python3
+import os
+import tempfile
+import requests
+from subprocess import check_call, check_output, CalledProcessError
+from typing import TextIO
+
+
# Benchmark name, Directory with SVGs to render
BENCHMARKS = [
- [ "hicolor-apps", "./hicolor-apps" ],
- [ "symbolic-icons", "../tests/fixtures/reftests/adwaita" ],
+ ["hicolor-apps", "./hicolor-apps"],
+ ["symbolic-icons", "../tests/fixtures/reftests/adwaita"],
]
+METRICS_URL = "https://librsvg-metrics.fly.dev/api/metrics/"
+PATH_TO_RSVG_BENCH = "../target/release/rsvg-bench"
+
+
+def parse_output_file(file: TextIO):
+ """ parse the cachegrind output file for metrics"""
+ keys, values = None, None
+ for line in file.readlines():
+ line = line.strip()
+ if line.startswith("events: "):
+ keys = line.removeprefix("events: ").split(" ")
+ if line.startswith("summary: "):
+ values = line.removeprefix("summary: ").split(" ")
+
+ if keys is None or values is None:
+ raise Exception("Couldn't parse cachegrind file, event names or summary metrics not found")
+
+ return {k: v for k, v in zip(keys, values)}
+
+
+def get_commit_details():
+ """ Get commit details on which benchmarking is run """
+ if os.environ.get("CI_COMMIT_SHA") and os.environ.get("CI_COMMIT_TIMESTAMP"):
+ return {
+ "commit": os.environ["CI_COMMIT_SHA"],
+ "time": os.environ["CI_COMMIT_TIMESTAMP"]
+ }
+
+ commit_hash = check_output(["git", "show", "--format=%cI"]).strip()
+ commit_time = check_output(["git", "show", "--format=%H"]).strip()
+ return {
+ "commit": str(commit_hash),
+ "time": str(commit_time)
+ }
+
+
+def submit_metrics(data):
+ token = os.environ["METRICS_TOKEN"]
+ response = requests.post(METRICS_URL, json=data, headers={"Authorization": f"Token {token}"})
+ response.raise_for_status()
+
+
+def run_with_cachegrind(directory, path):
+ command = ["valgrind", "--tool=cachegrind", f"--cachegrind-out-file={path}", PATH_TO_RSVG_BENCH, directory]
+ check_call(command)
+
+
+def check_working_tree():
+ cmd = ["git", "diff-index", "--quiet", "HEAD"]
+ try:
+ check_call(cmd)
+ except CalledProcessError as e:
+ print("git working tree not clean, exiting.")
+ raise e
+
def run_benchmark(name, directory):
- # FIXME
+ with tempfile.NamedTemporaryFile("r+") as file:
+ run_with_cachegrind(directory, file.name)
+
+ metrics = parse_output_file(file)
+ metrics["value"] = metrics["Ir"]
+ metrics["name"] = name
+
+ metadata = get_commit_details()
+ data = metadata | metrics
+ submit_metrics(data)
+
def main():
+ check_working_tree()
for name, directory in BENCHMARKS:
run_benchmark(name, directory)
+
if __name__ == "__main__":
main()