author     Niels De Graef <nielsdegraef@gmail.com>  2020-12-03 22:04:14 +0100
committer  Niels De Graef <nielsdegraef@gmail.com>  2020-12-03 23:18:49 +0000
commit     a13faa52cee9accd20894ed5fdf9ba6d50a6f66e (patch)
tree       9d39496f56124710d2b64239d855e42ed9389d28 /.gitlab
parent     804ba7cfb500118dae3abab37e1c157d26debdc1 (diff)
download   gnome-contacts-a13faa52cee9accd20894ed5fdf9ba6d50a6f66e.tar.gz
ci: Do some basic style checks
Diffstat (limited to '.gitlab')
-rw-r--r--  .gitlab/ci/meson-junit-report.py  109
-rw-r--r--  .gitlab/ci/run-tests.sh            17
-rwxr-xr-x  .gitlab/ci/style-check.sh          77
3 files changed, 203 insertions, 0 deletions
diff --git a/.gitlab/ci/meson-junit-report.py b/.gitlab/ci/meson-junit-report.py
new file mode 100644
index 0000000..248ef6e
--- /dev/null
+++ b/.gitlab/ci/meson-junit-report.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python3
+
+# Turns a Meson testlog.json file into a JUnit XML report
+#
+# Copyright 2019 GNOME Foundation
+#
+# SPDX-License-Identifier: LGPL-2.1-or-later
+#
+# Original author: Emmanuele Bassi
+
+import argparse
+import datetime
+import json
+import os
+import sys
+import xml.etree.ElementTree as ET
+
+aparser = argparse.ArgumentParser(description='Turns a Meson test log into a JUnit report')
+aparser.add_argument('--project-name', metavar='NAME',
+                     help='The project name',
+                     default='unknown')
+aparser.add_argument('--job-id', metavar='ID',
+                     help='The job ID for the report',
+                     default='Unknown')
+aparser.add_argument('--branch', metavar='NAME',
+                     help='Branch of the project being tested',
+                     default='master')
+aparser.add_argument('--output', metavar='FILE',
+                     help='The output file, stdout by default',
+                     type=argparse.FileType('w', encoding='UTF-8'),
+                     default=sys.stdout)
+aparser.add_argument('infile', metavar='FILE',
+                     help='The input testlog.json, stdin by default',
+                     type=argparse.FileType('r', encoding='UTF-8'),
+                     default=sys.stdin)
+
+args = aparser.parse_args()
+
+outfile = args.output
+
+testsuites = ET.Element('testsuites')
+testsuites.set('id', '{}/{}'.format(args.job_id, args.branch))
+testsuites.set('package', args.project_name)
+testsuites.set('timestamp', datetime.datetime.utcnow().isoformat(timespec='minutes'))
+
+suites = {}
+for line in args.infile:
+    data = json.loads(line)
+    (full_suite, unit_name) = data['name'].split(' / ')
+    (project_name, suite_name) = full_suite.split(':')
+
+    duration = data['duration']
+    return_code = data['returncode']
+    log = data['stdout']
+
+    unit = {
+        'suite': suite_name,
+        'name': unit_name,
+        'duration': duration,
+        'returncode': return_code,
+        'stdout': log,
+    }
+
+    units = suites.setdefault(suite_name, [])
+    units.append(unit)
+
+for name, units in suites.items():
+    print('Processing suite {} (units: {})'.format(name, len(units)))
+
+    def if_failed(unit):
+        if unit['returncode'] != 0:
+            return True
+        return False
+
+    def if_succeeded(unit):
+        if unit['returncode'] == 0:
+            return True
+        return False
+
+    successes = list(filter(if_succeeded, units))
+    failures = list(filter(if_failed, units))
+    print(' - {}: {} pass, {} fail'.format(name, len(successes), len(failures)))
+
+    testsuite = ET.SubElement(testsuites, 'testsuite')
+    testsuite.set('name', '{}/{}'.format(args.project_name, name))
+    testsuite.set('tests', str(len(units)))
+    testsuite.set('errors', str(len(failures)))
+    testsuite.set('failures', str(len(failures)))
+
+    for unit in successes:
+        testcase = ET.SubElement(testsuite, 'testcase')
+        testcase.set('classname', '{}/{}'.format(args.project_name, unit['suite']))
+        testcase.set('name', unit['name'])
+        testcase.set('time', str(unit['duration']))
+
+    for unit in failures:
+        testcase = ET.SubElement(testsuite, 'testcase')
+        testcase.set('classname', '{}/{}'.format(args.project_name, unit['suite']))
+        testcase.set('name', unit['name'])
+        testcase.set('time', str(unit['duration']))
+
+        failure = ET.SubElement(testcase, 'failure')
+        failure.set('classname', '{}/{}'.format(args.project_name, unit['suite']))
+        failure.set('name', unit['name'])
+        failure.set('type', 'error')
+        failure.text = unit['stdout']
+
+output = ET.tostring(testsuites, encoding='unicode')
+outfile.write(output)
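
As a quick illustration of the command-line interface defined above, here is a minimal sketch of running the converter by hand. The 'local-run' job ID and 'local-junit.xml' output name are made-up values for this example; the _build path matches run-tests.sh below and assumes a prior local build so that _build/meson-logs/testlog.json exists.

    # Turn an existing Meson test log into a JUnit report (run from the repository root)
    python3 .gitlab/ci/meson-junit-report.py \
        --project-name=gnome-contacts \
        --job-id local-run \
        --output local-junit.xml \
        _build/meson-logs/testlog.json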
diff --git a/.gitlab/ci/run-tests.sh b/.gitlab/ci/run-tests.sh
new file mode 100644
index 0000000..b2a3096
--- /dev/null
+++ b/.gitlab/ci/run-tests.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+set +e
+
+xvfb-run -a -s "-screen 0 1024x768x24" \
+    flatpak build app \
+    meson test -C _build
+
+exit_code=$?
+
+python3 .gitlab/ci/meson-junit-report.py \
+    --project-name=gnome-contacts \
+    --job-id "${CI_JOB_NAME}" \
+    --output "_build/${CI_JOB_NAME}-report.xml" \
+    _build/meson-logs/testlog.json
+
+exit $exit_code
diff --git a/.gitlab/ci/style-check.sh b/.gitlab/ci/style-check.sh
new file mode 100755
index 0000000..f0997ad
--- /dev/null
+++ b/.gitlab/ci/style-check.sh
@@ -0,0 +1,77 @@
+#!/usr/bin/env bash
+
+###############################################################################
+# JUNIT HELPERS
+###############################################################################
+
+JUNIT_REPORT_TESTS_FILE=$(mktemp)
+
+# We need this to make sure we don't send funky stuff into the XML report
+function escape_xml() {
+    echo "$1" | sed -e 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g; s/"/\&quot;/g; s/'"'"'/\&#39;/g'
+}
+
+function append_failed_test_case() {
+    test_name="$1"
+    test_message="$2"
+
+    test_message_esc="$(escape_xml "$test_message")"
+    echo "<testcase name=\"$test_name\"><failure message=\"$test_message_esc\"/></testcase>" >> $JUNIT_REPORT_TESTS_FILE
+    echo >&2 "Test '$test_name' failed: $test_message"
+}
+
+function append_passed_test_case() {
+    test_name="$1"
+    commit="$2"
+
+    echo "<testcase name=\"$test_name\"></testcase>" >> $JUNIT_REPORT_TESTS_FILE
+}
+
+function generate_junit_report() {
+    junit_report_file="$1"
+    num_tests=$(cat "$JUNIT_REPORT_TESTS_FILE" | wc -l)
+    num_failures=$(grep '<failure' "$JUNIT_REPORT_TESTS_FILE" | wc -l)
+
+    echo Generating JUnit report \"$(pwd)/$junit_report_file\" with $num_tests tests and $num_failures failures.
+
+    cat > $junit_report_file << __EOF__
+<?xml version="1.0" encoding="utf-8"?>
+<testsuites tests="$num_tests" errors="0" failures="$num_failures">
+<testsuite name="style-review" tests="$num_tests" errors="0" failures="$num_failures" skipped="0">
+$(< $JUNIT_REPORT_TESTS_FILE)
+</testsuite>
+</testsuites>
+__EOF__
+}
+
+
+###############################################################################
+# STYLE CHECKS
+###############################################################################
+
+TESTNAME="No tabs"
+tabs_occurrences="$(fgrep -nR $'\t' src data)"
+if [[ -z "$tabs_occurrences" ]]; then
+    append_passed_test_case "$TESTNAME"
+else
+    append_failed_test_case "$TESTNAME" \
+        $'Please remove the tabs found at the following places:\n\n'"$tabs_occurrences"
+fi
+
+
+TESTNAME="No trailing whitespace"
+trailing_ws_occurrences="$(grep -nri '[[:blank:]]$' src data)"
+if [[ -z "$trailing_ws_occurrences" ]]; then
+    append_passed_test_case "$TESTNAME"
+else
+    append_failed_test_case "$TESTNAME" \
+        $'Please remove the trailing whitespace at the following places:\n\n'"$trailing_ws_occurrences"
+fi
+
+
+# Generate the report
+# and fail this step if any failure occurred
+generate_junit_report style-check-junit-report.xml
+
+! grep -q '<failure' style-check-junit-report.xml
+exit $?
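
For completeness, a minimal sketch of running the style check by hand from the repository root; the report filename comes from the script itself, and the exit status mirrors what the CI job would report.

    ./.gitlab/ci/style-check.sh    # the file is executable per the diffstat above
    echo $?                        # non-zero if style-check-junit-report.xml records any <failure>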