ci: implement unified sections

In after_script, the variable $SCRIPTS_DIR is lost.

Acked-by: Daniel Stone <daniels@collabora.com>
Signed-off-by: David Heidelberg <david.heidelberg@collabora.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/20272>
Author: David Heidelberg
Date: 2022-12-11 17:46:41 +01:00
Committed by: Marge Bot
Parent: 5bfc17b2da
Commit: 4cc0cec473
20 changed files with 192 additions and 59 deletions
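
The commit replaces the hand-written GitLab collapsible-section escape sequences that were duplicated across the CI scripts with shared bash helpers (section_start, section_switch, section_end) defined in the new .gitlab-ci/setup-test-env.sh and sourced by every job. A rough before/after sketch, taken from the ldd_section hunk further down:

# Before: each script emitted the GitLab section markers by hand
echo -e "\e[0Ksection_start:$(date +%s):ldd_section[collapsed=true]\r\e[0KChecking ldd on driver build"
LD_LIBRARY_PATH=install/lib find install/lib -name "*.so" -print -exec ldd {} \;
echo -e "\e[0Ksection_end:$(date +%s):ldd_section\r\e[0K"

# After: the shared helpers emit the same markers, plus a job-relative
# timestamp, and remember the open section in $CURRENT_SECTION
section_start ldd_section "Checking ldd on driver build"
LD_LIBRARY_PATH=install/lib find install/lib -name "*.so" -print -exec ldd {} \;
section_end ldd_section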

View File

@@ -35,10 +35,13 @@ variables:
 default:
   before_script:
-    - echo -e "\e[0Ksection_start:$(date +%s):unset_env_vars_section[collapsed=true]\r\e[0KUnsetting vulnerable environment variables"
-    - echo -n "${CI_JOB_JWT}" > "${CI_JOB_JWT_FILE}"
-    - unset CI_JOB_JWT
-    - echo -e "\e[0Ksection_end:$(date +%s):unset_env_vars_section\r\e[0K"
+    - >
+      export SCRIPTS_DIR=$(mktemp -d) &&
+      curl -L -s --retry 4 -f --retry-all-errors --retry-delay 60 -O --output-dir "${SCRIPTS_DIR}" "${CI_PROJECT_URL}/-/raw/${CI_COMMIT_SHA}/.gitlab-ci/setup-test-env.sh" &&
+      chmod +x ${SCRIPTS_DIR}/setup-test-env.sh &&
+      . ${SCRIPTS_DIR}/setup-test-env.sh &&
+      echo -n "${CI_JOB_JWT}" > "${CI_JOB_JWT_FILE}" &&
+      unset CI_JOB_JWT  # Unsetting vulnerable env variables

   after_script:
     - >

View File

@@ -1,5 +1,7 @@
 #!/bin/bash

+. "$SCRIPTS_DIR"/setup-test-env.sh
+
 BM=$CI_PROJECT_DIR/install/bare-metal
 CI_COMMON=$CI_PROJECT_DIR/install/common

View File

@@ -1,5 +1,7 @@
 #!/bin/bash

+. "$SCRIPTS_DIR"/setup-test-env.sh
+
 # Boot script for devices attached to a PoE switch, using NFS for the root
 # filesystem.

View File

@@ -18,6 +18,7 @@ date +'%F %T'
 cp $CI_COMMON/capture-devcoredump.sh $rootfs_dst/
 cp $CI_COMMON/intel-gpu-freq.sh $rootfs_dst/
+cp "$SCRIPTS_DIR/setup-test-env.sh" "$rootfs_dst/"

 set +x

View File

@@ -21,15 +21,14 @@
   # Use ccache transparently, and print stats before/after
   before_script:
     - !reference [default, before_script]
-    - export PATH="/usr/lib/ccache:$PATH"
-    - export CCACHE_BASEDIR="$PWD"
-    - echo -e "\e[0Ksection_start:$(date +%s):ccache_before[collapsed=true]\r\e[0Kccache stats before build"
-    - ccache --show-stats
-    - echo -e "\e[0Ksection_end:$(date +%s):ccache_before\r\e[0K"
+    - |
+      export PATH="/usr/lib/ccache:$PATH"
+      export CCACHE_BASEDIR="$PWD"
+      section_start ccache_before "ccache stats before build"
+      ccache --show-stats
+      section_end ccache_before
   after_script:
-    - echo -e "\e[0Ksection_start:$(date +%s):ccache_after[collapsed=true]\r\e[0Kccache stats after build"
-    - ccache --show-stats
-    - echo -e "\e[0Ksection_end:$(date +%s):ccache_after\r\e[0K"
+    - ccache --show-stats | grep "cache hit rate"
     - !reference [default, after_script]

 .build-windows:
@@ -186,12 +185,17 @@ debian-build-testing:
       -D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi
       -D b_lto=true
     LLVM_VERSION: 13
-  script:
-    - .gitlab-ci/lava/lava-pytest.sh
-    - .gitlab-ci/run-shellcheck.sh
-    - .gitlab-ci/run-yamllint.sh
-    - .gitlab-ci/meson/build.sh
-    - .gitlab-ci/run-shader-db.sh
+  script: |
+    section_start lava-pytest "lava-pytest"
+    .gitlab-ci/lava/lava-pytest.sh
+    section_switch shellcheck "shellcheck"
+    .gitlab-ci/run-shellcheck.sh
+    section_switch yamllint "yamllint"
+    .gitlab-ci/run-yamllint.sh
+    section_switch meson "meson"
+    .gitlab-ci/meson/build.sh
+    section_switch shader-db "shader-db"
+    .gitlab-ci/run-shader-db.sh

 # Test a release build with -Werror so new warnings don't sneak in.
 debian-release:

View File

@@ -10,6 +10,7 @@ for var in \
     CI_COMMIT_TITLE \
     CI_JOB_ID \
     CI_JOB_JWT_FILE \
+    CI_JOB_STARTED_AT \
     CI_JOB_NAME \
     CI_JOB_URL \
     CI_MERGE_REQUEST_SOURCE_BRANCH_NAME \
@@ -27,6 +28,7 @@ for var in \
     CI_SERVER_URL \
     CROSVM_GALLIUM_DRIVER \
     CROSVM_GPU_ARGS \
+    CURRENT_SECTION \
     DEQP_BIN_DIR \
     DEQP_CONFIG \
     DEQP_EXPECTED_RENDERER \

View File

@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash

 # Make sure to kill itself and all the children process from this script on
 # exiting, since any console output may interfere with LAVA signals handling,
@@ -37,6 +37,7 @@ BACKGROUND_PIDS=
 # running tests.
 . /set-job-env-vars.sh
+. "$SCRIPTS_DIR"/setup-test-env.sh

 set -ex
@@ -160,7 +161,7 @@ fi
 RESULT=fail
 set +e
-sh -c "$HWCI_TEST_SCRIPT"
+bash -c ". $SCRIPTS_DIR/setup-test-env.sh && $HWCI_TEST_SCRIPT"
 EXIT_CODE=$?
 set -e

View File

@@ -1,6 +1,6 @@
 #!/bin/bash

-echo -e "\e[0Ksection_start:$(date +%s):test_setup[collapsed=true]\r\e[0Kpreparing test setup"
+section_start test_setup "deqp: preparing test setup"

 set -ex
@@ -160,11 +160,7 @@ if [ -z "$DEQP_SUITE" ]; then
     fi
 fi

-set +x
-echo -e "\e[0Ksection_end:$(date +%s):test_setup\r\e[0K"
-
-echo -e "\e[0Ksection_start:$(date +%s):deqp[collapsed=false]\r\e[0Kdeqp-runner"
-set -x
+uncollapsed_section_switch deqp "deqp: deqp-runner"

 set +e
 if [ -z "$DEQP_SUITE" ]; then
@@ -197,11 +193,10 @@ fi
 DEQP_EXITCODE=$?
 set +x
-echo -e "\e[0Ksection_end:$(date +%s):deqp\r\e[0K"

 report_load

-echo -e "\e[0Ksection_start:$(date +%s):test_post_process[collapsed=true]\r\e[0Kpost-processing test results"
+section_switch test_post_process "deqp: post-processing test results"
 set -x

 # Remove all but the first 50 individual XML files uploaded as artifacts, to
@@ -243,6 +238,6 @@ fi
 # 0.17s on a Ryzen 5950X (16 threads, 0.95s when limited to 1 thread).
 zstd --rm -T0 -8qc $RESULTS/results.csv -o $RESULTS/results.csv.zst
-echo -e "\e[0Ksection_end:$(date +%s):test_post_process\r\e[0K"
+section_end test_post_process

 exit $DEQP_EXITCODE

View File

@@ -21,7 +21,7 @@ variables:
   ALPINE_X86_BUILD_TAG: "2023-02-26-add-bash-coreutils"
   FEDORA_X86_BUILD_TAG: "2023-02-09-f36"
-  KERNEL_ROOTFS_TAG: "2023-02-23-virglrenderer"
+  KERNEL_ROOTFS_TAG: "2023-02-28-add-CI_JOB_STARTED_AT"
   WINDOWS_X64_VS_PATH: "windows/x64_vs"
   WINDOWS_X64_VS_TAG: "2022-10-20-upgrade-zlib"

View File

@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/usr/bin/env bash
 #
 # Copyright (C) 2022 Collabora Limited
 # Author: Guilherme Gallo <guilherme.gallo@collabora.com>

View File

@@ -18,13 +18,14 @@ mkdir -p results/job-rootfs-overlay/
 cp artifacts/ci-common/capture-devcoredump.sh results/job-rootfs-overlay/
 cp artifacts/ci-common/init-*.sh results/job-rootfs-overlay/
 cp artifacts/ci-common/intel-gpu-freq.sh results/job-rootfs-overlay/
+cp "$SCRIPTS_DIR"/setup-test-env.sh results/job-rootfs-overlay/

 # Prepare env vars for upload.
 KERNEL_IMAGE_BASE_URL="https://${BASE_SYSTEM_HOST_PATH}" \
   artifacts/ci-common/generate-env.sh > results/job-rootfs-overlay/set-job-env-vars.sh
-echo -e "\e[0Ksection_start:$(date +%s):variables[collapsed=true]\r\e[0KVariables passed through:"
+section_start variables "Variables passed through:"
 cat results/job-rootfs-overlay/set-job-env-vars.sh
-echo -e "\e[0Ksection_end:$(date +%s):variables\r\e[0K"
+section_end variables

 tar zcf job-rootfs-overlay.tar.gz -C results/job-rootfs-overlay/ .
 ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" job-rootfs-overlay.tar.gz "https://${JOB_ROOTFS_OVERLAY_PATH}"

View File

@@ -1,4 +1,6 @@
-#!/bin/bash
+#!/usr/bin/env bash
+
+section_switch meson-configure "meson: configure"

 set -e
 set -o xtrace
@@ -81,11 +83,17 @@ meson setup _build \
       ${EXTRA_OPTION}
 cd _build
 meson configure
+
+section_switch meson-build "meson: build"
+
 if command -V mold &> /dev/null ; then
     mold --run ninja
 else
     ninja
 fi
+
+section_switch meson-test "meson: test"
+
 LC_ALL=C.UTF-8 meson test --num-processes ${FDO_CI_CONCURRENT:-4} --print-errorlogs ${MESON_TEST_ARGS}
 if command -V mold &> /dev/null ; then
     mold --run ninja install
@@ -93,3 +101,4 @@ else
     ninja install
 fi
 cd ..
+section_end meson-test

View File

@@ -1,4 +1,6 @@
-#!/bin/bash
+#!/usr/bin/env bash
+
+section_switch prepare-artifacts "artifacts: prepare"

 set -e
 set -o xtrace
@@ -56,3 +58,5 @@ if [ -n "$MINIO_ARTIFACT_NAME" ]; then
   zstd artifacts/install.tar -o ${MINIO_ARTIFACT_NAME}
   ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" ${MINIO_ARTIFACT_NAME} https://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}
 fi
+
+section_end prepare-artifacts

View File

@@ -1,7 +1,6 @@
 #!/usr/bin/env bash
 set -e

-echo -e "\e[0Ksection_start:$(date +%s):shader-db-prepare[collapsed=true]\r\e[0KPreparing shader-db"
 ARTIFACTSDIR=$(pwd)/shader-db
 mkdir -p "$ARTIFACTSDIR"
 export DRM_SHIM_DEBUG=true
@@ -10,32 +9,31 @@ LIBDIR=$(pwd)/install/lib
 export LD_LIBRARY_PATH=$LIBDIR

 cd /usr/local/shader-db

-echo -e "\e[0Ksection_end:$(date +%s):shader-db-prepare\r\e[0K"
 for driver in freedreno intel v3d vc4; do
-    echo -e "\e[0Ksection_start:$(date +%s):shader-db-${driver}[collapsed=true]\r\e[0KRunning shader-db for $driver"
+    section_start shader-db-${driver} "Running shader-db for $driver"
     env LD_PRELOAD="$LIBDIR/lib${driver}_noop_drm_shim.so" \
         ./run -j"${FDO_CI_CONCURRENT:-4}" ./shaders \
             > "$ARTIFACTSDIR/${driver}-shader-db.txt"
-    echo -e "\e[0Ksection_end:$(date +%s):shader-db-${driver}\r\e[0K"
+    section_end shader-db-${driver}
 done

 # Run shader-db over a number of supported chipsets for nouveau
 for chipset in 40 a3 c0 e4 f0 134 162; do
-    echo -e "\e[0Ksection_start:$(date +%s):shader-db-nouveau-${chipset}[collapsed=true]\r\e[0KRunning shader-db for nouveau - ${chipset}"
+    section_start shader-db-nouveau-${chipset} "Running shader-db for nouveau - ${chipset}"
     env LD_PRELOAD="$LIBDIR/libnouveau_noop_drm_shim.so" \
         NOUVEAU_CHIPSET=${chipset} \
         ./run -j"${FDO_CI_CONCURRENT:-4}" ./shaders \
             > "$ARTIFACTSDIR/nouveau-${chipset}-shader-db.txt"
-    echo -e "\e[0Ksection_end:$(date +%s):shader-db-nouveau-${chipset}\r\e[0K"
+    section_end shader-db-nouveau-${chipset}
 done

 # Run shader-db for r300 (RV370 and RV515)
 for chipset in 0x5460 0x7140; do
-    echo -e "\e[0Ksection_start:$(date +%s):shader-db-r300-${chipset}[collapsed=true]\r\e[0KRunning shader-db for r300 - ${chipset}"
+    section_start shader-db-r300-${chipset} "Running shader-db for r300 - ${chipset}"
     env LD_PRELOAD="$LIBDIR/libradeon_noop_drm_shim.so" \
         RADEON_GPU_ID=${chipset} \
         ./run -j"${FDO_CI_CONCURRENT:-4}" -o r300 ./shaders \
             > "$ARTIFACTSDIR/r300-${chipset}-shader-db.txt"
-    echo -e "\e[0Ksection_end:$(date +%s):shader-db-r300-${chipset}\r\e[0K"
+    section_end shader-db-r300-${chipset}
 done

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env bash
function x_off {
if [[ "$-" == *"x"* ]]; then
state_x=1
set +x
else
state_x=0
fi
}
# TODO: implement x_on !
function error {
x_off 2>/dev/null
RED="\e[0;31m"
ENDCOLOR="\e[0m"
# we force the following to be not in a section
section_end $CURRENT_SECTION
DATE_S=$(date -u +"%s")
JOB_START_S=$(date -u +"%s" -d "${CI_JOB_STARTED_AT:?}")
CURR_TIME=$((DATE_S-JOB_START_S))
CURR_MINSEC="$(printf "%02d" $((CURR_TIME/60))):$(printf "%02d" $((CURR_TIME%60)))"
echo -e "\n${RED}[${CURR_MINSEC}] ERROR: $*${ENDCOLOR}\n"
[ "$state_x" -eq 0 ] || set -x
}
function trap_exit {
local ret=$1
shift
error $CURRENT_SECTION: $*
exit $ret
}
function build_section_start {
local section_params=$1
shift
local section_name=$1
CURRENT_SECTION=$section_name
shift
CYAN="\e[0;36m"
ENDCOLOR="\e[0m"
DATE_S=$(date -u +"%s")
JOB_START_S=$(date -u +"%s" -d "${CI_JOB_STARTED_AT:?}")
CURR_TIME=$((DATE_S-JOB_START_S))
CURR_MINSEC="$(printf "%02d" $((CURR_TIME/60))):$(printf "%02d" $((CURR_TIME%60)))"
echo -e "\n\e[0Ksection_start:$(date +%s):$section_name$section_params\r\e[0K${CYAN}[${CURR_MINSEC}] $*${ENDCOLOR}\n"
}
function section_start {
x_off 2>/dev/null
build_section_start "[collapsed=true]" $*
[ "$state_x" -eq 0 ] || set -x
}
function build_section_end {
echo -e "\e[0Ksection_end:$(date +%s):$1\r\e[0K"
CURRENT_SECTION=""
}
function section_end {
x_off >/dev/null
build_section_end $*
[ "$state_x" -eq 0 ] || set -x
}
function section_switch {
x_off 2>/dev/null
if [ -n "$CURRENT_SECTION" ]
then
build_section_end $CURRENT_SECTION
fi
build_section_start "[collapsed=true]" $*
[ "$state_x" -eq 0 ] || set -x
}
function uncollapsed_section_switch {
x_off 2>/dev/null
if [ -n "$CURRENT_SECTION" ]
then
build_section_end $CURRENT_SECTION
fi
build_section_start "" $*
[ "$state_x" -eq 0 ] || set -x
}
export -f x_off
export -f error
export -f trap_exit
export -f build_section_start
export -f section_start
export -f build_section_end
export -f section_end
export -f section_switch
export -f uncollapsed_section_switch
set -E
trap 'trap_exit $?' ERR
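
For reference, a minimal usage sketch of the helpers defined above (a hypothetical job script, not part of this commit): sections opened with section_start are collapsed by default, section_switch closes whatever section is currently open before starting the next one, and the ERR trap installed at the end routes any failing command through trap_exit, which closes the open section and prints a timestamped error.

#!/usr/bin/env bash
# Hypothetical example; the script names below are placeholders.
. "$SCRIPTS_DIR"/setup-test-env.sh

section_start prepare "prepare: fetch test data"   # opens a collapsed section
./download-test-data.sh
section_switch run "run: execute the test suite"   # ends 'prepare', starts 'run'
./run-tests.sh                                     # a non-zero exit fires trap_exit via the ERR trap
section_end run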

View File

@@ -8,9 +8,9 @@
     # Note: Build dir (and thus install) may be dirty due to GIT_STRATEGY
     - rm -rf install
     - tar -xf artifacts/install.tar
-    - echo -e "\e[0Ksection_start:$(date +%s):ldd_section[collapsed=true]\r\e[0KChecking ldd on driver build"
+    - section_start ldd_section "Checking ldd on driver build"
     - LD_LIBRARY_PATH=install/lib find install/lib -name "*.so" -print -exec ldd {} \;
-    - echo -e "\e[0Ksection_end:$(date +%s):ldd_section\r\e[0K"
+    - section_end ldd_section
   artifacts:
     when: always
     name: "mesa_${CI_JOB_NAME}"
@@ -108,9 +108,9 @@ rustfmt:
   variables:
     PIGLIT_REPLAY_EXTRA_ARGS: --keep-image --db-path ${CI_PROJECT_DIR}/replayer-db/ --minio_bucket=mesa-tracie-public --jwt-file=${CI_JOB_JWT_FILE}
   script:
-    - echo -e "\e[0Ksection_start:$(date +%s):variables[collapsed=true]\r\e[0KVariables passed through:"
+    - section_start variables "Variables passed through:"
     - install/common/generate-env.sh
-    - echo -e "\e[0Ksection_end:$(date +%s):variables\r\e[0K"
+    - section_end variables
     - install/piglit/piglit-traces.sh

 .deqp-test:
@@ -148,11 +148,12 @@ rustfmt:
     # instead of fd.o. Set FDO_HTTP_CACHE_URI to an http cache for your test lab to
     # improve it even more (see https://docs.mesa3d.org/ci/bare-metal.html for
     # setup).
-    - echo -e "\e[0Ksection_start:$(date +%s):artifacts_download[collapsed=true]\r\e[0KDownloading artifacts from minio"
+    - section_start artifacts_download "Downloading artifacts from s3"
     # Note: Build dir (and thus install) may be dirty due to GIT_STRATEGY
     - rm -rf install
     - curl -L --retry 4 -f --retry-all-errors --retry-delay 60 ${FDO_HTTP_CACHE_URI:-}https://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}.tar.zst | tar --zstd -x
     - echo -e "\e[0Ksection_end:$(date +%s):artifacts_download\r\e[0K"
+    - section_end artifacts_download
   artifacts:
     when: always
     name: "mesa_${CI_JOB_NAME}"
@@ -220,7 +221,7 @@ rustfmt:
   # like FDO_DISTRIBUTION_TAG for *the* image, there is no way to
   # depend on more than one image per job. So, the job container is
   # built as part of the CI in the boot2container project.
-  image: registry.freedesktop.org/mupuf/valve-infra/mesa-trigger:2022-12-08.1
+  image: registry.freedesktop.org/mupuf/valve-infra/mesa-trigger:2023-02-27.1
   timeout: 1h 40m
   variables:
     # No need by default to pull the whole repo
@@ -283,7 +284,7 @@ rustfmt:
       [ -d "$CI_COMMON_SCRIPTS" ] || exit 1
-      B2C_TEST_SCRIPT="bash -c 'source ./set-job-env-vars.sh; tar xf ${INSTALL_TARBALL_NAME}; ${B2C_TEST_SCRIPT}'"
+      B2C_TEST_SCRIPT="bash -c 'source ./set-job-env-vars.sh; source ./setup-test-env.sh; tar xf ${INSTALL_TARBALL_NAME}; ${B2C_TEST_SCRIPT}'"

       # The Valve CI gateway receives jobs in a YAML format. Create a
       # job description from the CI environment.
@@ -324,6 +325,7 @@ rustfmt:
echo "Variables passed through:" echo "Variables passed through:"
cat ${JOB_FOLDER}/set-job-env-vars.sh cat ${JOB_FOLDER}/set-job-env-vars.sh
echo "export CI_JOB_JWT=${CI_JOB_JWT}" >> ${JOB_FOLDER}/set-job-env-vars.sh echo "export CI_JOB_JWT=${CI_JOB_JWT}" >> ${JOB_FOLDER}/set-job-env-vars.sh
cp ${SCRIPTS_DIR}/setup-test-env.sh ${JOB_FOLDER}/setup-test-env.sh
set -x set -x
# Copy the mesa install tarball to the job folder, for later extraction # Copy the mesa install tarball to the job folder, for later extraction

View File

@@ -66,11 +66,15 @@ llvmpipe:
   variables:
     DEQP_SUITE: llvmpipe
     XDG_RUNTIME_DIR: /run/user
+    XVFB_SCRIPT: "install/deqp-runner.sh"
   extends: .llvmpipe-deqp-test
-  script:
-    - mkdir -p $XDG_RUNTIME_DIR
-    - LD_LIBRARY_PATH=`pwd`/install weston -Bheadless-backend.so -Swayland-0 &
-    - LD_LIBRARY_PATH=`pwd`/install WAYLAND_DISPLAY=wayland-0 xvfb-run --server-args='-noreset' install/deqp-runner.sh
+  script: |
+    . "$SCRIPTS_DIR"/setup-test-env.sh
+    section_start weston "weston: prepare"
+    mkdir -p $XDG_RUNTIME_DIR
+    LD_LIBRARY_PATH=`pwd`/install weston -Bheadless-backend.so -Swayland-0 &
+    section_end weston
+    LD_LIBRARY_PATH=`pwd`/install WAYLAND_DISPLAY=wayland-0 xvfb-run --server-args='-noreset' bash -c ". $SCRIPTS_DIR/setup-test-env.sh && ${XVFB_SCRIPT}"

 llvmpipe-deqp-asan:
   variables:

View File

@@ -20,9 +20,11 @@ softpipe:
     - mesa-swrast
   variables:
     DEQP_SUITE: softpipe
+    XVFB_SCRIPT: "install/deqp-runner.sh"
   extends: .softpipe-deqp-test
-  script:
-    - LD_LIBRARY_PATH=`pwd`/install/lib xvfb-run --server-args='-noreset' install/deqp-runner.sh
+  script: |
+    . "$SCRIPTS_DIR"/setup-test-env.sh
+    LD_LIBRARY_PATH=`pwd`/install/lib xvfb-run --server-args='-noreset' bash -c ". $SCRIPTS_DIR/setup-test-env.sh && ${XVFB_SCRIPT}"

 softpipe-asan-gles31:
   variables:

View File

@@ -6,7 +6,7 @@
     GALLIVM_PERF: nopt
     FLAKES_CHANNEL: "#virgl-ci"
   script:
-    - xvfb-run --server-args='-noreset' sh -c "GALLIUM_DRIVER=virpipe install/deqp-runner.sh"
+    - xvfb-run --server-args='-noreset' bash -c ". $SCRIPTS_DIR/setup-test-env.sh && GALLIUM_DRIVER=virpipe install/deqp-runner.sh"

 virpipe-on-gl:
   extends:
@@ -64,6 +64,7 @@ virgl-traces:
   tags:
     - kvm
   script:
+    - . "$SCRIPTS_DIR"/setup-test-env.sh
     # Use all threads for rendering and only run one job at a time
     # Couldn't get GitLab CI to correctly substitute the variable in the yaml
     - LP_NUM_THREADS=${FDO_CI_CONCURRENT} FDO_CI_CONCURRENT=1 install/crosvm-runner.sh install/piglit/piglit-traces.sh

View File

@@ -41,8 +41,10 @@ zink-lvp:
     # to stdout and aborting on unknown failures.
     ZINK_DEBUG: validation
     VK_LAYER_SETTINGS_PATH: ${CI_PROJECT_DIR}/install/zink-lvp-validation-settings.txt
-  script:
-    - xvfb-run --server-args='-noreset' sh -c "GALLIUM_DRIVER=zink VK_DRIVER=lvp install/deqp-runner.sh"
+    XVFB_SCRIPT: "GALLIUM_DRIVER=zink VK_DRIVER=lvp install/deqp-runner.sh"
+  script: |
+    . "$SCRIPTS_DIR"/setup-test-env.sh
+    xvfb-run --server-args='-noreset' bash -c ". $SCRIPTS_DIR/setup-test-env.sh ${XVFB_SCRIPT}"

 .zink-anv-test:
   extends: