ci: Use meson junit output

Just use the JUnit XML that is produced by Meson; it works now.

Drop the JUnit conversion script, since we are no longer using it.
Matthias Clasen
2024-11-05 13:17:42 -05:00
parent af87994115
commit 698496b375
4 changed files with 5 additions and 135 deletions
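
For context: recent Meson releases write a JUnit XML log next to testlog.json,
named after the test setup, so the CI can hand those files to GitLab directly
instead of converting the JSON log. A minimal sketch, assuming a build
directory _build and a test setup named x11 as in the diffs below:

    # Run one test setup; meson itself writes the JUnit log.
    meson test -C _build --setup=x11
    # -> _build/meson-logs/testlog-x11.junit.xml
    #    (next to testlog-x11.json, which the deleted script used to consume)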


@@ -62,18 +62,16 @@ style-check-diff:
 .build-fedora-default:
   image: $FEDORA_IMAGE
   artifacts:
+    name: "gtk-${CI_COMMIT_REF_NAME}"
     when: always
     reports:
       junit:
-        - "${CI_PROJECT_DIR}/_build/report-x11.xml"
-        - "${CI_PROJECT_DIR}/_build/report-wayland.xml"
-        - "${CI_PROJECT_DIR}/_build/report-wayland_gl.xml"
-        - "${CI_PROJECT_DIR}/_build/report-broadway.xml"
-    name: "gtk-${CI_COMMIT_REF_NAME}"
+        - "${CI_PROJECT_DIR}/_build/meson-logs/testlog-x11.junit.xml"
+        - "${CI_PROJECT_DIR}/_build/meson-logs/testlog-wayland.junit.xml"
+        - "${CI_PROJECT_DIR}/_build/meson-logs/testlog-wayland_gl.junit.xml"
+        - "${CI_PROJECT_DIR}/_build/meson-logs/testlog-broadway.junit.xml"
     paths:
       - "${CI_PROJECT_DIR}/_build/meson-logs"
-      - "${CI_PROJECT_DIR}/_build/report*.xml"
-      - "${CI_PROJECT_DIR}/_build/report*.html"
       - "${CI_PROJECT_DIR}/_build/testsuite/reftests/output/*/*.png"
       - "${CI_PROJECT_DIR}/_build/testsuite/reftests/output/*/*.node"
       - "${CI_PROJECT_DIR}/_build/testsuite/tools/output/*/*"


@@ -1,114 +0,0 @@
-#!/usr/bin/env python3
-# Turns a Meson testlog.json file into a JUnit XML report
-#
-# Copyright 2019 GNOME Foundation
-#
-# SPDX-License-Identifier: LGPL-2.1-or-later
-#
-# Original author: Emmanuele Bassi
-
-import argparse
-import datetime
-import json
-import os
-import sys
-import xml.etree.ElementTree as ET
-
-aparser = argparse.ArgumentParser(description='Turns a Meson test log into a JUnit report')
-aparser.add_argument('--project-name', metavar='NAME',
-                     help='The project name',
-                     default='unknown')
-aparser.add_argument('--backend', metavar='NAME',
-                     help='The used backend',
-                     default='unknown')
-aparser.add_argument('--job-id', metavar='ID',
-                     help='The job ID for the report',
-                     default='Unknown')
-aparser.add_argument('--branch', metavar='NAME',
-                     help='Branch of the project being tested',
-                     default='main')
-aparser.add_argument('--output', metavar='FILE',
-                     help='The output file, stdout by default',
-                     type=argparse.FileType('w', encoding='UTF-8'),
-                     default=sys.stdout)
-aparser.add_argument('infile', metavar='FILE',
-                     help='The input testlog.json, stdin by default',
-                     type=argparse.FileType('r', encoding='UTF-8'),
-                     default=sys.stdin)
-
-args = aparser.parse_args()
-
-outfile = args.output
-
-testsuites = ET.Element('testsuites')
-testsuites.set('id', '{}/{}'.format(args.job_id, args.branch))
-testsuites.set('package', args.project_name)
-testsuites.set('timestamp', datetime.datetime.now(datetime.UTC).isoformat(timespec='minutes'))
-
-suites = {}
-
-for line in args.infile:
-    data = json.loads(line)
-
-    (full_suite, unit_name) = data['name'].split(' / ')
-    (project_name, suite_name) = full_suite.split(':')
-
-    duration = data['duration']
-    return_code = data['returncode']
-    result = data['result']
-    log = data['stdout']
-
-    unit = {
-        'suite': suite_name,
-        'name': unit_name,
-        'duration': duration,
-        'returncode': return_code,
-        'result': result,
-        'stdout': log,
-    }
-
-    units = suites.setdefault(suite_name, [])
-    units.append(unit)
-
-for name, units in suites.items():
-    print('Processing suite {} (units: {})'.format(name, len(units)))
-
-    def if_failed(unit):
-        if unit['result'] in ['ERROR', 'FAIL', 'UNEXPECTEDPASS', 'TIMEOUT']:
-            return True
-        return False
-
-    def if_succeded(unit):
-        if unit['result'] in ['OK', 'EXPECTEDFAIL', 'SKIP']:
-            return True
-        return False
-
-    successes = list(filter(if_succeded, units))
-    failures = list(filter(if_failed, units))
-
-    print(' - {}: {} pass, {} fail'.format(name, len(successes), len(failures)))
-
-    testsuite = ET.SubElement(testsuites, 'testsuite')
-    testsuite.set('name', '{}/{}'.format(args.project_name, name))
-    testsuite.set('tests', str(len(units)))
-    testsuite.set('errors', str(len(failures)))
-    testsuite.set('failures', str(len(failures)))
-
-    for unit in successes:
-        testcase = ET.SubElement(testsuite, 'testcase')
-        testcase.set('classname', '{}/{}'.format(args.project_name, unit['suite']))
-        testcase.set('name', '{}/{}'.format(args.backend, unit['name']))
-        testcase.set('time', str(unit['duration']))
-
-    for unit in failures:
-        testcase = ET.SubElement(testsuite, 'testcase')
-        testcase.set('classname', '{}/{}'.format(args.project_name, unit['suite']))
-        testcase.set('name', '{}/{}'.format(args.backend, unit['name']))
-        testcase.set('time', str(unit['duration']))
-
-        failure = ET.SubElement(testcase, 'failure')
-        failure.set('classname', '{}/{}'.format(args.project_name, unit['suite']))
-        failure.set('name', '{}/{}'.format(args.backend, unit['name']))
-        failure.set('type', 'error')
-        failure.text = unit['stdout']
-
-output = ET.tostring(testsuites, encoding='unicode')
-
-outfile.write(output)
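
For reference, the deleted script consumed Meson's testlog.json, one JSON
object per line, splitting the "project:suite / test" name as seen in the
parsing above. An illustrative invocation with a fabricated record (the field
values are hypothetical; the keys match what the script reads):

    # Pipe one made-up testlog.json line through the old converter;
    # --output keeps the XML out of the progress prints on stdout.
    echo '{"name": "gtk:gdk / cursor", "duration": 0.1, "returncode": 0, "result": "OK", "stdout": ""}' \
      | ./meson-junit-report.py --project-name=gtk --backend=x11 \
            --job-id="${CI_JOB_NAME:-local}" --output=report.xml -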


@@ -95,13 +95,6 @@ esac
 cd ${builddir}
 
-$srcdir/.gitlab-ci/meson-junit-report.py \
-        --project-name=gtk \
-        --backend="${setup}" \
-        --job-id="${CI_JOB_NAME}" \
-        --output="report-${setup}.xml" \
-        "meson-logs/testlog-${setup}.json"
-
 $srcdir/.gitlab-ci/meson-html-report.py \
         --project-name=gtk \
         --backend="${setup}" \


@@ -39,13 +39,6 @@ xvfb-run -a -s "-screen 0 1024x768x24" \
 # Save the exit code
 exit_code=$?
 
 # We always want to run the report generators
-$srcdir/.gitlab-ci/meson-junit-report.py \
-        --project-name=gtk \
-        --job-id="${CI_JOB_NAME}" \
-        --output=report.xml \
-        meson-logs/testlog.json
-
 $srcdir/.gitlab-ci/meson-html-report.py \
         --project-name=GTK \
         --job-id="${CI_JOB_NAME}" \
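
With the converter gone, this setup-less run relies on the log Meson writes on
its own; under the same assumptions as above, that is the unsuffixed sibling
of the per-setup files:

    # Sketch: a run without --setup logs to unsuffixed files.
    meson test -C _build
    # -> _build/meson-logs/testlog.junit.xml (next to testlog.json, which the
    #    deleted invocation above converted)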