author     David Heidelberg <david.heidelberg@collabora.com>   2022-12-17 17:34:36 +0100
committer  David Heidelberg <david.heidelberg@collabora.com>   2022-12-19 16:36:19 +0100
commit     a04720dc1c16bee72a18609069387df0862f00bd (patch)
tree       2e63ea71908296bfbc3f379b3398a00b1cd17e59 /bin
parent     07f2c862e0517038b31ac2b521b191fcc9803186 (diff)
download   mesa-a04720dc1c16bee72a18609069387df0862f00bd.tar.gz
ci: move the CI handling scripts into bin/ci

Any change in the .gitlab-ci/ directory triggers an entire CI run, which
is unnecessary when only the developers' scripts change. A symlink at
`.gitlab-ci/bin` was added to keep the original invocation working.

Reviewed-by: Martin Roukala <martin.roukala@mupuf.org>
Reviewed-by: Guilherme Gallo <guilherme.gallo@collabora.com>
Signed-off-by: David Heidelberg <david.heidelberg@collabora.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/20371>
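For reference, the move plus the compatibility link amount to something like
the following sketch (the relative link target is an assumption; the commit
itself records the actual move):

    git mv .gitlab-ci/bin bin/ci
    ln -s ../bin/ci .gitlab-ci/bin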
Diffstat (limited to 'bin')
-rw-r--r--  bin/ci/.gitignore                    2
-rwxr-xr-x  bin/ci/ci_run_n_monitor.py         308
-rwxr-xr-x  bin/ci/download_gl_schema.sh        11
-rw-r--r--  bin/ci/gitlab_common.py             42
-rwxr-xr-x  bin/ci/gitlab_gql.py               303
-rw-r--r--  bin/ci/job_details.gql               7
-rw-r--r--  bin/ci/pipeline_details.gql         86
-rw-r--r--  bin/ci/requirements.txt              8
-rwxr-xr-x  bin/ci/update_traces_checksum.py   142
9 files changed, 909 insertions, 0 deletions
diff --git a/bin/ci/.gitignore b/bin/ci/.gitignore
new file mode 100644
index 00000000000..9739d32aaae
--- /dev/null
+++ b/bin/ci/.gitignore
@@ -0,0 +1,2 @@
+schema.graphql
+gitlab_gql.py.cache.db
diff --git a/bin/ci/ci_run_n_monitor.py b/bin/ci/ci_run_n_monitor.py
new file mode 100755
index 00000000000..3e497e6dec0
--- /dev/null
+++ b/bin/ci/ci_run_n_monitor.py
@@ -0,0 +1,308 @@
+#!/usr/bin/env python3
+# Copyright © 2020 - 2022 Collabora Ltd.
+# Authors:
+# Tomeu Vizoso <tomeu.vizoso@collabora.com>
+# David Heidelberg <david.heidelberg@collabora.com>
+#
+# SPDX-License-Identifier: MIT
+
+"""
+Helper script to run only the required CI jobs
+and show their logs.
+"""
+
+import argparse
+import re
+from subprocess import check_output
+import sys
+import time
+from concurrent.futures import ThreadPoolExecutor
+from functools import partial
+from itertools import chain
+from typing import Optional
+
+import gitlab
+from colorama import Fore, Style
+from gitlab_common import get_gitlab_project, read_token, wait_for_pipeline
+from gitlab_gql import GitlabGQL, create_job_needs_dag, filter_dag, print_dag
+
+REFRESH_WAIT_LOG = 10
+REFRESH_WAIT_JOBS = 6
+
+URL_START = "\033]8;;"
+URL_END = "\033]8;;\a"
+
+STATUS_COLORS = {
+ "created": "",
+ "running": Fore.BLUE,
+ "success": Fore.GREEN,
+ "failed": Fore.RED,
+ "canceled": Fore.MAGENTA,
+ "manual": "",
+ "pending": "",
+ "skipped": "",
+}
+
+COMPLETED_STATUSES = ["success", "failed"]
+
+
+def print_job_status(job) -> None:
+ """It prints a nice, colored job status with a link to the job."""
+ if job.status == "canceled":
+ return
+
+ print(
+ STATUS_COLORS[job.status]
+ + "🞋 job "
+ + URL_START
+ + f"{job.web_url}\a{job.name}"
+ + URL_END
+ + f" :: {job.status}"
+ + Style.RESET_ALL
+ )
+
+
+def print_job_status_change(job) -> None:
+ """It reports job status changes."""
+ if job.status == "canceled":
+ return
+
+ print(
+ STATUS_COLORS[job.status]
+ + "🗘 job "
+ + URL_START
+ + f"{job.web_url}\a{job.name}"
+ + URL_END
+ + f" has new status: {job.status}"
+ + Style.RESET_ALL
+ )
+
+
+def pretty_wait(sec: int) -> None:
+ """shows progressbar in dots"""
+ for val in range(sec, 0, -1):
+ print(f"⏲ {val} seconds", end="\r")
+ time.sleep(1)
+
+
+def monitor_pipeline(
+ project,
+ pipeline,
+ target_job: Optional[str],
+ dependencies,
+ force_manual: bool,
+ stress: bool,
+) -> tuple[Optional[int], Optional[int]]:
+ """Monitors pipeline and delegate canceling jobs"""
+ statuses = {}
+ target_statuses = {}
+ stress_succ = 0
+ stress_fail = 0
+
+ if target_job:
+ target_jobs_regex = re.compile(target_job.strip())
+
+ while True:
+ to_cancel = []
+ for job in pipeline.jobs.list(all=True, sort="desc"):
+ # target jobs
+ if target_job and target_jobs_regex.match(job.name):
+ if force_manual and job.status == "manual":
+ enable_job(project, job, True)
+
+ if stress and job.status in ["success", "failed"]:
+ if job.status == "success":
+ stress_succ += 1
+ if job.status == "failed":
+ stress_fail += 1
+ retry_job(project, job)
+
+ if (job.id not in target_statuses) or (
+ job.status not in target_statuses[job.id]
+ ):
+ print_job_status_change(job)
+ target_statuses[job.id] = job.status
+ else:
+ print_job_status(job)
+
+ continue
+
+ # all jobs
+ if (job.id not in statuses) or (job.status not in statuses[job.id]):
+ print_job_status_change(job)
+ statuses[job.id] = job.status
+
+ # dependencies and cancelling the rest
+ if job.name in dependencies:
+ if job.status == "manual":
+ enable_job(project, job, False)
+
+ elif target_job and job.status not in [
+ "canceled",
+ "success",
+ "failed",
+ "skipped",
+ ]:
+ to_cancel.append(job)
+
+ if target_job:
+ cancel_jobs(project, to_cancel)
+
+ if stress:
+ print(
+ "∑ succ: " + str(stress_succ) + "; fail: " + str(stress_fail),
+ flush=False,
+ )
+ pretty_wait(REFRESH_WAIT_JOBS)
+ continue
+
+ print("---------------------------------", flush=False)
+
+ if len(target_statuses) == 1 and {"running"}.intersection(
+ target_statuses.values()
+ ):
+ return next(iter(target_statuses)), None
+
+ if {"failed", "canceled"}.intersection(target_statuses.values()):
+ return None, 1
+
+ if {"success", "manual"}.issuperset(target_statuses.values()):
+ return None, 0
+
+ pretty_wait(REFRESH_WAIT_JOBS)
+
+
+def enable_job(project, job, target: bool) -> None:
+ """enable manual job"""
+ pjob = project.jobs.get(job.id, lazy=True)
+ pjob.play()
+ if target:
+ jtype = "🞋 "
+ else:
+ jtype = "(dependency)"
+ print(Fore.MAGENTA + f"{jtype} job {job.name} manually enabled" + Style.RESET_ALL)
+
+
+def retry_job(project, job) -> None:
+ """retry job"""
+ pjob = project.jobs.get(job.id, lazy=True)
+ pjob.retry()
+ jtype = "↻"
+    print(Fore.MAGENTA + f"{jtype} job {job.name} retried" + Style.RESET_ALL)
+
+
+def cancel_job(project, job) -> None:
+ """Cancel GitLab job"""
+ pjob = project.jobs.get(job.id, lazy=True)
+ pjob.cancel()
+ print(f"♲ {job.name}")
+
+
+def cancel_jobs(project, to_cancel) -> None:
+ """Cancel unwanted GitLab jobs"""
+ if not to_cancel:
+ return
+
+ with ThreadPoolExecutor(max_workers=6) as exe:
+ part = partial(cancel_job, project)
+ exe.map(part, to_cancel)
+
+
+def print_log(project, job_id) -> None:
+ """Print job log into output"""
+ printed_lines = 0
+ while True:
+ job = project.jobs.get(job_id)
+
+ # GitLab's REST API doesn't offer pagination for logs, so we have to refetch it all
+ lines = job.trace().decode("raw_unicode_escape").splitlines()
+ for line in lines[printed_lines:]:
+ print(line)
+ printed_lines = len(lines)
+
+ if job.status in COMPLETED_STATUSES:
+ print(Fore.GREEN + f"Job finished: {job.web_url}" + Style.RESET_ALL)
+ return
+ pretty_wait(REFRESH_WAIT_LOG)
+
+
+def parse_args() -> argparse.Namespace:
+ """Parse args"""
+ parser = argparse.ArgumentParser(
+ description="Tool to trigger a subset of container jobs "
+ + "and monitor the progress of a test job",
+ epilog="Example: mesa-monitor.py --rev $(git rev-parse HEAD) "
+ + '--target ".*traces" ',
+ )
+ parser.add_argument("--target", metavar="target-job", help="Target job")
+ parser.add_argument(
+ "--rev", metavar="revision", help="repository git revision (default: HEAD)"
+ )
+ parser.add_argument(
+ "--token",
+ metavar="token",
+ help="force GitLab token, otherwise it's read from ~/.config/gitlab-token",
+ )
+ parser.add_argument(
+ "--force-manual", action="store_true", help="Force jobs marked as manual"
+ )
+ parser.add_argument("--stress", action="store_true", help="Stresstest job(s)")
+ return parser.parse_args()
+
+
+def find_dependencies(target_job: str, project_path, sha: str) -> set[str]:
+    """Compute the set of jobs that the target job(s) transitively need."""
+ gql_instance = GitlabGQL()
+ dag, _ = create_job_needs_dag(
+ gql_instance, {"projectPath": project_path.path_with_namespace, "sha": sha}
+ )
+
+ target_dep_dag = filter_dag(dag, target_job)
+ if not target_dep_dag:
+ print(Fore.RED + "The job(s) were not found in the pipeline." + Fore.RESET)
+ sys.exit(1)
+ print(Fore.YELLOW)
+ print("Detected job dependencies:")
+ print()
+ print_dag(target_dep_dag)
+ print(Fore.RESET)
+ return set(chain.from_iterable(target_dep_dag.values()))
+
+
+if __name__ == "__main__":
+ try:
+ t_start = time.perf_counter()
+
+ args = parse_args()
+
+ token = read_token(args.token)
+
+ gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org", private_token=token)
+
+ cur_project = get_gitlab_project(gl, "mesa")
+
+ REV: str = args.rev
+ if not REV:
+ REV = check_output(['git', 'rev-parse', 'HEAD']).decode('ascii').strip()
+ print(f"Revision: {REV}")
+ pipe = wait_for_pipeline(cur_project, REV)
+ print(f"Pipeline: {pipe.web_url}")
+ deps = set()
+ if args.target:
+ print("🞋 job: " + Fore.BLUE + args.target + Style.RESET_ALL)
+ deps = find_dependencies(
+ target_job=args.target, sha=REV, project_path=cur_project
+ )
+ target_job_id, ret = monitor_pipeline(
+ cur_project, pipe, args.target, deps, args.force_manual, args.stress
+ )
+
+ if target_job_id:
+ print_log(cur_project, target_job_id)
+
+ t_end = time.perf_counter()
+        spent_minutes = (t_end - t_start) / 60
+        print(f"⏲ Duration of script execution: {spent_minutes:0.1f} minutes")
+
+ sys.exit(ret)
+ except KeyboardInterrupt:
+ sys.exit(1)
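A typical invocation, following the script's own epilog (the revision and
target regex are illustrative):

    bin/ci/ci_run_n_monitor.py --rev $(git rev-parse HEAD) --target ".*traces"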
diff --git a/bin/ci/download_gl_schema.sh b/bin/ci/download_gl_schema.sh
new file mode 100755
index 00000000000..41858df4c12
--- /dev/null
+++ b/bin/ci/download_gl_schema.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+# Helper script to download the GraphQL schema from GitLab, so IDEs can
+# assist developers when editing gql files.
+
+SOURCE_DIR=$(dirname "$(realpath "$0")")
+
+(
+    cd "$SOURCE_DIR" || exit 1
+ gql-cli https://gitlab.freedesktop.org/api/graphql --print-schema > schema.graphql
+)
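Since the script changes into its own directory first, it can be run from
anywhere in the tree; the schema lands next to the script and is excluded by
the .gitignore added above:

    bin/ci/download_gl_schema.sh   # writes bin/ci/schema.graphql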
diff --git a/bin/ci/gitlab_common.py b/bin/ci/gitlab_common.py
new file mode 100644
index 00000000000..85313cc1deb
--- /dev/null
+++ b/bin/ci/gitlab_common.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# Copyright © 2020 - 2022 Collabora Ltd.
+# Authors:
+# Tomeu Vizoso <tomeu.vizoso@collabora.com>
+# David Heidelberg <david.heidelberg@collabora.com>
+#
+# SPDX-License-Identifier: MIT
+'''Shared functions between the scripts.'''
+
+import os
+import time
+from typing import Optional
+
+
+def get_gitlab_project(glab, name: str):
+    """Find the named project in the authenticated user's namespace."""
+    glab.auth()
+    username = glab.user.username
+    return glab.projects.get(f"{username}/{name}")
+
+
+def read_token(token_arg: Optional[str]) -> str:
+    """Pick the token from the argument, falling back to the token file."""
+    if token_arg:
+        return token_arg
+    token_file = os.path.expanduser("~/.config/gitlab-token")
+    with open(token_file, encoding="utf-8") as f:
+        return f.readline().rstrip()
+
+
+def wait_for_pipeline(project, sha: str):
+    """Wait until the pipeline for the given SHA appears in GitLab."""
+    print("⏲ waiting for the pipeline to appear..", end="")
+ while True:
+ pipelines = project.pipelines.list(sha=sha)
+ if pipelines:
+ print("", flush=True)
+ return pipelines[0]
+ print("", end=".", flush=True)
+ time.sleep(1)
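read_token() falls back to the first line of ~/.config/gitlab-token, so a
one-time setup sketch looks like this (the token value is a placeholder):

    mkdir -p ~/.config
    printf '%s\n' 'YOUR-GITLAB-TOKEN' > ~/.config/gitlab-token
    chmod 600 ~/.config/gitlab-token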
diff --git a/bin/ci/gitlab_gql.py b/bin/ci/gitlab_gql.py
new file mode 100755
index 00000000000..bd58f320b42
--- /dev/null
+++ b/bin/ci/gitlab_gql.py
@@ -0,0 +1,303 @@
+#!/usr/bin/env python3
+
+import re
+from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace
+from dataclasses import dataclass, field
+from os import getenv
+from pathlib import Path
+from typing import Any, Iterable, Optional, Pattern, Union
+
+import yaml
+from filecache import DAY, filecache
+from gql import Client, gql
+from gql.transport.aiohttp import AIOHTTPTransport
+from graphql import DocumentNode
+
+Dag = dict[str, set[str]]
+TOKEN_DIR = Path(getenv("XDG_CONFIG_HOME") or Path.home() / ".config")
+
+
+def get_token_from_default_dir() -> str:
+    try:
+        token_file = TOKEN_DIR / "gitlab-token"
+        # strict=True makes resolve() raise FileNotFoundError for a missing file
+        return str(token_file.resolve(strict=True))
+    except FileNotFoundError as ex:
+        print(
+            f"Could not find {token_file}, please provide a token file as an argument"
+        )
+        raise ex
+
+
+def get_project_root_dir():
+ root_path = Path(__file__).parent.parent.parent.resolve()
+ gitlab_file = root_path / ".gitlab-ci.yml"
+ assert gitlab_file.exists()
+
+ return root_path
+
+
+@dataclass
+class GitlabGQL:
+ _transport: Any = field(init=False)
+ client: Client = field(init=False)
+ url: str = "https://gitlab.freedesktop.org/api/graphql"
+ token: Optional[str] = None
+
+ def __post_init__(self):
+ self._setup_gitlab_gql_client()
+
+ def _setup_gitlab_gql_client(self) -> Client:
+ # Select your transport with a defined url endpoint
+ headers = {}
+ if self.token:
+ headers["Authorization"] = f"Bearer {self.token}"
+ self._transport = AIOHTTPTransport(url=self.url, headers=headers)
+
+ # Create a GraphQL client using the defined transport
+ self.client = Client(
+ transport=self._transport, fetch_schema_from_transport=True
+ )
+
+ @filecache(DAY)
+ def query(
+ self, gql_file: Union[Path, str], params: dict[str, Any]
+ ) -> dict[str, Any]:
+ # Provide a GraphQL query
+ source_path = Path(__file__).parent
+ pipeline_query_file = source_path / gql_file
+
+ query: DocumentNode
+ with open(pipeline_query_file, "r") as f:
+ pipeline_query = f.read()
+ query = gql(pipeline_query)
+
+ # Execute the query on the transport
+ return self.client.execute(query, variable_values=params)
+
+ def invalidate_query_cache(self):
+ self.query._db.clear()
+
+
+def create_job_needs_dag(
+ gl_gql: GitlabGQL, params
+) -> tuple[Dag, dict[str, dict[str, Any]]]:
+
+ result = gl_gql.query("pipeline_details.gql", params)
+ dag = {}
+ jobs = {}
+ pipeline = result["project"]["pipeline"]
+ if not pipeline:
+ raise RuntimeError(f"Could not find any pipelines for {params}")
+
+ for stage in pipeline["stages"]["nodes"]:
+ for stage_job in stage["groups"]["nodes"]:
+ for job in stage_job["jobs"]["nodes"]:
+ needs = job.pop("needs")["nodes"]
+ jobs[job["name"]] = job
+ dag[job["name"]] = {node["name"] for node in needs}
+
+ for job, needs in dag.items():
+ needs: set
+ partial = True
+
+ while partial:
+ next_depth = {n for dn in needs for n in dag[dn]}
+ partial = not needs.issuperset(next_depth)
+ needs = needs.union(next_depth)
+
+ dag[job] = needs
+
+ return dag, jobs
+
+
+def filter_dag(dag: Dag, regex: Union[Pattern, str]) -> Dag:
+ return {job: needs for job, needs in dag.items() if re.match(regex, job)}
+
+
+def print_dag(dag: Dag) -> None:
+ for job, needs in dag.items():
+ print(f"{job}:")
+ print(f"\t{' '.join(needs)}")
+ print()
+
+
+def fetch_merged_yaml(gl_gql: GitlabGQL, params) -> dict[str, Any]:
+ gitlab_yml_file = get_project_root_dir() / ".gitlab-ci.yml"
+ content = Path(gitlab_yml_file).read_text().strip()
+ params["content"] = content
+ raw_response = gl_gql.query("job_details.gql", params)
+ if merged_yaml := raw_response["ciConfig"]["mergedYaml"]:
+ return yaml.safe_load(merged_yaml)
+
+ gl_gql.invalidate_query_cache()
+ raise ValueError(
+ """
+ Could not fetch any content for merged YAML,
+ please verify if the git SHA exists in remote.
+ Maybe you forgot to `git push`? """
+ )
+
+
+def recursive_fill(job, relationship_field, target_data, acc_data: dict, merged_yaml):
+ if relatives := job.get(relationship_field):
+ if isinstance(relatives, str):
+ relatives = [relatives]
+
+ for relative in relatives:
+ parent_job = merged_yaml[relative]
+            acc_data = recursive_fill(
+                parent_job, relationship_field, target_data, acc_data, merged_yaml
+            )
+
+ acc_data |= job.get(target_data, {})
+
+ return acc_data
+
+
+def get_variables(job, merged_yaml, project_path, sha) -> dict[str, str]:
+ p = get_project_root_dir() / ".gitlab-ci" / "image-tags.yml"
+ image_tags = yaml.safe_load(p.read_text())
+
+ variables = image_tags["variables"]
+ variables |= merged_yaml["variables"]
+ variables |= job["variables"]
+ variables["CI_PROJECT_PATH"] = project_path
+ variables["CI_PROJECT_NAME"] = project_path.split("/")[1]
+ variables["CI_REGISTRY_IMAGE"] = "registry.freedesktop.org/${CI_PROJECT_PATH}"
+ variables["CI_COMMIT_SHA"] = sha
+
+ while recurse_among_variables_space(variables):
+ pass
+
+ return variables
+
+
+# Based on: https://stackoverflow.com/a/2158532/1079223
+def flatten(xs):
+ for x in xs:
+ if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
+ yield from flatten(x)
+ else:
+ yield x
+
+
+def get_full_script(job) -> list[str]:
+ script = []
+ for script_part in ("before_script", "script", "after_script"):
+ script.append(f"# {script_part}")
+ lines = flatten(job.get(script_part, []))
+ script.extend(lines)
+ script.append("")
+
+ return script
+
+
+def recurse_among_variables_space(var_graph) -> bool:
+ updated = False
+ for var, value in var_graph.items():
+ value = str(value)
+ dep_vars = []
+ if match := re.findall(r"(\$[{]?[\w\d_]*[}]?)", value):
+ all_dep_vars = [v.lstrip("${").rstrip("}") for v in match]
+ # print(value, match, all_dep_vars)
+ dep_vars = [v for v in all_dep_vars if v in var_graph]
+
+ for dep_var in dep_vars:
+ dep_value = str(var_graph[dep_var])
+ new_value = var_graph[var]
+ new_value = new_value.replace(f"${{{dep_var}}}", dep_value)
+ new_value = new_value.replace(f"${dep_var}", dep_value)
+ var_graph[var] = new_value
+ updated |= dep_value != new_value
+
+ return updated
+
+
+def get_job_final_definition(job_name, merged_yaml, project_path, sha):
+ job = merged_yaml[job_name]
+ variables = get_variables(job, merged_yaml, project_path, sha)
+
+ print("# --------- variables ---------------")
+ for var, value in sorted(variables.items()):
+ print(f"export {var}={value!r}")
+
+ # TODO: Recurse into needs to get full script
+ # TODO: maybe create a extra yaml file to avoid too much rework
+ script = get_full_script(job)
+ print()
+ print()
+ print("# --------- full script ---------------")
+ print("\n".join(script))
+
+ if image := variables.get("MESA_IMAGE"):
+ print()
+ print()
+ print("# --------- container image ---------------")
+ print(image)
+
+
+def parse_args() -> Namespace:
+ parser = ArgumentParser(
+ formatter_class=ArgumentDefaultsHelpFormatter,
+ description="CLI and library with utility functions to debug jobs via Gitlab GraphQL",
+ epilog=f"""Example:
+ {Path(__file__).name} --rev $(git rev-parse HEAD) --print-job-dag""",
+ )
+ parser.add_argument("-pp", "--project-path", type=str, default="mesa/mesa")
+ parser.add_argument("--sha", "--rev", type=str, required=True)
+ parser.add_argument(
+ "--regex",
+ type=str,
+ required=False,
+ help="Regex pattern for the job name to be considered",
+ )
+ parser.add_argument("--print-dag", action="store_true", help="Print job needs DAG")
+ parser.add_argument(
+ "--print-merged-yaml",
+ action="store_true",
+ help="Print the resulting YAML for the specific SHA",
+ )
+ parser.add_argument(
+ "--print-job-manifest", type=str, help="Print the resulting job data"
+ )
+ parser.add_argument(
+ "--gitlab-token-file",
+ type=str,
+ default=get_token_from_default_dir(),
+ help="force GitLab token, otherwise it's read from $XDG_CONFIG_HOME/gitlab-token",
+ )
+
+ args = parser.parse_args()
+    args.gitlab_token = Path(args.gitlab_token_file).read_text().strip()
+ return args
+
+
+def main():
+ args = parse_args()
+ gl_gql = GitlabGQL(token=args.gitlab_token)
+
+ if args.print_dag:
+ dag, jobs = create_job_needs_dag(
+ gl_gql, {"projectPath": args.project_path, "sha": args.sha}
+ )
+
+ if args.regex:
+ dag = filter_dag(dag, re.compile(args.regex))
+ print_dag(dag)
+
+ if args.print_merged_yaml:
+ print(
+ fetch_merged_yaml(
+ gl_gql, {"projectPath": args.project_path, "sha": args.sha}
+ )
+ )
+
+ if args.print_job_manifest:
+ merged_yaml = fetch_merged_yaml(
+ gl_gql, {"projectPath": args.project_path, "sha": args.sha}
+ )
+    get_job_final_definition(
+ args.print_job_manifest, merged_yaml, args.project_path, args.sha
+ )
+
+
+if __name__ == "__main__":
+ main()
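Following the argparse epilog above, a typical way to inspect the job DAG for
the current revision (the --regex filter is optional and illustrative):

    bin/ci/gitlab_gql.py --rev $(git rev-parse HEAD) --print-dag --regex ".*traces"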
diff --git a/bin/ci/job_details.gql b/bin/ci/job_details.gql
new file mode 100644
index 00000000000..8b8e4b2b885
--- /dev/null
+++ b/bin/ci/job_details.gql
@@ -0,0 +1,7 @@
+query getCiConfigData($projectPath: ID!, $sha: String, $content: String!) {
+ ciConfig(projectPath: $projectPath, sha: $sha, content: $content) {
+ errors
+ mergedYaml
+ __typename
+ }
+}
diff --git a/bin/ci/pipeline_details.gql b/bin/ci/pipeline_details.gql
new file mode 100644
index 00000000000..e735ed8d396
--- /dev/null
+++ b/bin/ci/pipeline_details.gql
@@ -0,0 +1,86 @@
+fragment LinkedPipelineData on Pipeline {
+ id
+ iid
+ path
+ cancelable
+ retryable
+ userPermissions {
+ updatePipeline
+ }
+ status: detailedStatus {
+ id
+ group
+ label
+ icon
+ }
+ sourceJob {
+ id
+ name
+ }
+ project {
+ id
+ name
+ fullPath
+ }
+}
+
+query getPipelineDetails($projectPath: ID!, $sha: String!) {
+ project(fullPath: $projectPath) {
+ id
+ pipeline(sha: $sha) {
+ id
+ iid
+ complete
+ downstream {
+ nodes {
+ ...LinkedPipelineData
+ }
+ }
+ upstream {
+ ...LinkedPipelineData
+ }
+ stages {
+ nodes {
+ id
+ name
+ status: detailedStatus {
+ id
+ action {
+ id
+ icon
+ path
+ title
+ }
+ }
+ groups {
+ nodes {
+ id
+ status: detailedStatus {
+ id
+ label
+ group
+ icon
+ }
+ name
+ size
+ jobs {
+ nodes {
+ id
+ name
+ kind
+ scheduledAt
+ needs {
+ nodes {
+ id
+ name
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/bin/ci/requirements.txt b/bin/ci/requirements.txt
new file mode 100644
index 00000000000..5b67ec7a1ef
--- /dev/null
+++ b/bin/ci/requirements.txt
@@ -0,0 +1,8 @@
+aiohttp==3.8.1
+colorama==0.4.5
+filecache==0.81
+gql==3.4.0
+python-gitlab==3.5.0
+PyYAML==6.0
+ruamel.yaml.clib==0.2.6
+ruamel.yaml==0.17.21
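The pinned dependencies for all the scripts above can be installed in one
step; a typical (illustrative) invocation:

    pip install --user -r bin/ci/requirements.txt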
diff --git a/bin/ci/update_traces_checksum.py b/bin/ci/update_traces_checksum.py
new file mode 100755
index 00000000000..0e9c8437b07
--- /dev/null
+++ b/bin/ci/update_traces_checksum.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python3
+# Copyright © 2022 Collabora Ltd.
+# Authors:
+# David Heidelberg <david.heidelberg@collabora.com>
+#
+# SPDX-License-Identifier: MIT
+
+"""
+Helper script to update trace checksums.
+"""
+
+import argparse
+import bz2
+import glob
+import re
+import json
+import sys
+from ruamel.yaml import YAML
+
+import gitlab
+from colorama import Fore, Style
+from gitlab_common import get_gitlab_project, read_token, wait_for_pipeline
+
+
+DESCRIPTION_FILE = "export PIGLIT_REPLAY_DESCRIPTION_FILE='.*/install/(.*)'$"
+DEVICE_NAME = "export PIGLIT_REPLAY_DEVICE_NAME='(.*)'$"
+
+
+def gather_results(
+ project,
+ pipeline,
+) -> None:
+ """Gather results"""
+
+ target_jobs_regex = re.compile(".*-traces([:].*)?$")
+
+ for job in pipeline.jobs.list(all=True, sort="desc"):
+ if target_jobs_regex.match(job.name) and job.status == "failed":
+ cur_job = project.jobs.get(job.id)
+ # get variables
+ print(f"👁 {job.name}...")
+ log: list[str] = cur_job.trace().decode("unicode_escape").splitlines()
+ filename: str = ''
+ dev_name: str = ''
+ for logline in log:
+ desc_file = re.search(DESCRIPTION_FILE, logline)
+ device_name = re.search(DEVICE_NAME, logline)
+ if desc_file:
+ filename = desc_file.group(1)
+ if device_name:
+ dev_name = device_name.group(1)
+
+ if not filename or not dev_name:
+ print(Fore.RED + "Couldn't find device name or YML file in the logs!" + Style.RESET_ALL)
+ return
+
+ print(f"👁 Found {dev_name} and file {filename}")
+
+ # find filename in Mesa source
+ traces_file = glob.glob('./**/' + filename, recursive=True)
+ # write into it
+ with open(traces_file[0], 'r', encoding='utf-8') as target_file:
+ yaml = YAML()
+ yaml.compact(seq_seq=False, seq_map=False)
+                yaml.version = (1, 2)
+ yaml.width = 2048 # do not break the text fields
+ yaml.default_flow_style = None
+ target = yaml.load(target_file)
+
+ # parse artifact
+ results_json_bz2 = cur_job.artifact(path="results/results.json.bz2", streamed=False)
+ results_json = bz2.decompress(results_json_bz2).decode("utf-8")
+ results = json.loads(results_json)
+
+ for _, value in results["tests"].items():
+ if (
+ not value['images'] or
+ not value['images'][0] or
+ "image_desc" not in value['images'][0]
+ ):
+ continue
+
+ trace: str = value['images'][0]['image_desc']
+ checksum: str = value['images'][0]['checksum_render']
+
+ if not checksum:
+ print(Fore.RED + f"{dev_name}: {trace}: checksum is missing! Crash?" + Style.RESET_ALL)
+ continue
+
+ if checksum == "error":
+ print(Fore.RED + f"{dev_name}: {trace}: crashed" + Style.RESET_ALL)
+ continue
+
+ if target['traces'][trace][dev_name].get('checksum') == checksum:
+ continue
+
+ if "label" in target['traces'][trace][dev_name]:
+ print(f'{dev_name}: {trace}: please verify that label {Fore.BLUE}{target["traces"][trace][dev_name]["label"]}{Style.RESET_ALL} is still valid')
+
+ print(Fore.GREEN + f'{dev_name}: {trace}: checksum updated' + Style.RESET_ALL)
+ target['traces'][trace][dev_name]['checksum'] = checksum
+
+ with open(traces_file[0], 'w', encoding='utf-8') as target_file:
+ yaml.dump(target, target_file)
+
+
+
+def parse_args() -> argparse.Namespace:
+ """Parse args"""
+ parser = argparse.ArgumentParser(
+ description="Tool to generate patch from checksums ",
+ epilog="Example: update_traces_checksum.py --rev $(git rev-parse HEAD) "
+ )
+ parser.add_argument(
+ "--rev", metavar="revision", help="repository git revision", required=True
+ )
+ parser.add_argument(
+ "--token",
+ metavar="token",
+ help="force GitLab token, otherwise it's read from ~/.config/gitlab-token",
+ )
+ return parser.parse_args()
+
+
+if __name__ == "__main__":
+ try:
+ args = parse_args()
+
+ token = read_token(args.token)
+
+ gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org", private_token=token)
+
+ cur_project = get_gitlab_project(gl, "mesa")
+
+ print(f"Revision: {args.rev}")
+ pipe = wait_for_pipeline(cur_project, args.rev)
+ print(f"Pipeline: {pipe.web_url}")
+ gather_results(cur_project, pipe)
+
+ sys.exit()
+ except KeyboardInterrupt:
+ sys.exit(1)
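A sketch of the intended invocation, matching the script's epilog (--rev is
required here):

    bin/ci/update_traces_checksum.py --rev $(git rev-parse HEAD)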