4
# Cancel job if a newer commit is pushed to the same branch
7
GIT_STRATEGY: none # testing doesn't build anything from source
9
- !reference [default, before_script]
10
# Note: Build dir (and thus install) may be dirty due to GIT_STRATEGY
12
- tar -xf artifacts/install.tar
13
- echo -e "\e[0Ksection_start:$(date +%s):ldd_section[collapsed=true]\r\e[0KChecking ldd on driver build"
14
- LD_LIBRARY_PATH=install/lib find install/lib -name "*.so" -print -exec ldd {} \;
15
- echo -e "\e[0Ksection_end:$(date +%s):ldd_section\r\e[0K"
18
name: "mesa_${CI_JOB_NAME}"
25
- .use-debian/x86_test-gl
33
- .use-debian/x86_test-vk
41
- .use-debian/x86_test-gl
44
- debian-clover-testing
49
name: "mesa_${CI_JOB_NAME}"
51
- results/vkd3d-proton.log
53
- ./install/vkd3d-proton/run.sh
57
name: "mesa_${CI_JOB_NAME}"
61
junit: results/junit.xml
64
HWCI_TEST_SCRIPT: "/install/piglit/piglit-runner.sh"
66
- install/piglit/piglit-runner.sh
77
name: "mesa_${CI_JOB_NAME}"
79
junit: results/junit.xml
84
PIGLIT_REPLAY_EXTRA_ARGS: --keep-image --db-path ${CI_PROJECT_DIR}/replayer-db/ --minio_host=minio-packet.freedesktop.org --minio_bucket=mesa-tracie-public --role-session-name=${CI_PROJECT_PATH}:${CI_JOB_ID} --jwt-file=${CI_JOB_JWT_FILE}
86
- install/piglit/piglit-traces.sh
90
- ./install/deqp-runner.sh
93
- results/*.shader_cache
95
junit: results/junit.xml
105
- ./install/fossilize-runner.sh
108
name: "mesa_${CI_JOB_NAME}"
116
# Cancel job if a newer commit is pushed to the same branch
120
- !reference [default, before_script]
121
# Use this instead of gitlab's artifacts download because it hits packet.net
# instead of fd.o. Set FDO_HTTP_CACHE_URI to an http cache for your test lab to
# improve it even more (see https://docs.mesa3d.org/ci/bare-metal.html for
# details).
125
- wget ${FDO_HTTP_CACHE_URI:-}https://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}.tar.gz -S --progress=dot:giga -O- | tar -xz
128
name: "mesa_${CI_JOB_NAME}"
133
- results/*.shader_cache
135
junit: results/junit.xml
137
.baremetal-test-armhf:
141
BM_ROOTFS: /rootfs-armhf
142
MINIO_ARTIFACT_NAME: mesa-armhf
144
.baremetal-test-arm64:
148
BM_ROOTFS: /rootfs-arm64
149
MINIO_ARTIFACT_NAME: mesa-arm64
151
.baremetal-arm64-asan-test:
153
DEQP_RUNNER_OPTIONS: "--env LD_PRELOAD=libasan.so.6:/install/lib/libdlclose-skip.so"
154
MINIO_ARTIFACT_NAME: mesa-arm64-asan
157
- job: debian-arm64-asan
160
.baremetal-deqp-test:
162
HWCI_TEST_SCRIPT: "/install/deqp-runner.sh"
163
FDO_CI_CONCURRENT: 0 # Default to number of CPUs
165
.baremetal-skqp-test:
168
HWCI_TEST_SCRIPT: "/install/skqp-runner.sh"
170
# For Valve's bare-metal testing farm jobs.
# It would be nice to use ci-templates within Mesa CI for this job's
# image:, but the integration is not possible for the current
# use-case. Within this job, two containers are managed. 1) the
# gitlab runner container from which the job is submitted to the
# DUT, and 2) the test container (e.g. debian/x86_test-vk) within
# which the test cases will run on the DUT. Since ci-templates and
# the associated image setting macros in this file rely on variables
# like FDO_DISTRIBUTION_TAG for *the* image, there is no way to
# depend on more than one image per job. So, the job container is
# built as part of the CI in the boot2container project.
182
image: registry.freedesktop.org/mupuf/valve-infra/mesa-trigger:2022-03-03.2
184
- .use-debian/x86_test-vk
186
# No need by default to pull the whole repo
188
# boot2container initrd configuration parameters.
189
B2C_KERNEL_URL: 'https://gitlab.freedesktop.org/mupuf/valve-infra/-/package_files/117/download' # 5.16-for-mesa-ci
190
B2C_INITRAMFS_URL: 'https://gitlab.freedesktop.org/mupuf/boot2container/-/releases/v0.9.4/downloads/initramfs.linux_amd64.cpio.xz'
191
B2C_JOB_SUCCESS_REGEX: '\[.*\]: Execution is over, pipeline status: 0\r$'
192
B2C_JOB_WARN_REGEX: 'null'
194
B2C_POWEROFF_DELAY: 15
195
B2C_SESSION_END_REGEX: '^.*It''s now safe to turn off your computer\r$'
196
B2C_SESSION_REBOOT_REGEX: 'GPU hang detected!'
197
B2C_TIMEOUT_BOOT_MINUTES: 240
198
B2C_TIMEOUT_BOOT_RETRIES: 2
199
B2C_TIMEOUT_FIRST_MINUTES: 5
200
B2C_TIMEOUT_FIRST_RETRIES: 3
201
B2C_TIMEOUT_MINUTES: 2
202
B2C_TIMEOUT_OVERALL_MINUTES: 240
203
B2C_TIMEOUT_RETRIES: 0
205
# As noted in the top description, we make a distinction between the
# container used by gitlab-runner to queue the work, and the container
# used by the DUTs/test machines. To make this distinction quite clear,
# we rename the MESA_IMAGE variable into IMAGE_UNDER_TEST.
209
IMAGE_UNDER_TEST: "$MESA_IMAGE"
211
INSTALL_TARBALL: "./artifacts/install.tar"
212
CI_VALVE_ARTIFACTS: "./artifacts/valve"
213
CI_COMMON_SCRIPTS: "./artifacts/ci-common"
214
GENERATE_ENV_SCRIPT: "${CI_COMMON_SCRIPTS}/generate-env.sh"
215
B2C_JOB_TEMPLATE: "${CI_VALVE_ARTIFACTS}/b2c.yml.jinja2.jinja2"
216
JOB_FOLDER: "job_folder"
218
# We don't want the tarball unpacking of .test, but will take the JWT bits.
219
- !reference [default, before_script]
223
# Useful as a hook point for runner admins. You may edit the
# config.toml for the Gitlab runner and use a bind-mount to
# populate the hook script with some executable commands. This
# allows quicker feedback than resubmitting pipelines and
# potentially having to wait for a debug build of Mesa to
# complete.
229
if [ -x /runner-before-script.sh ]; then
230
echo "Executing runner before-script hook..."
231
sh /runner-before-script.sh
232
if [ $? -ne 0 ]; then
233
echo "Runner hook failed, goodbye"
238
[ -s "$INSTALL_TARBALL" ] || exit 1
239
[ -d "$CI_VALVE_ARTIFACTS" ] || exit 1
240
[ -d "$CI_COMMON_SCRIPTS" ] || exit 1
243
B2C_TEST_SCRIPT="bash -c 'source ./set-job-env-vars.sh ; ${B2C_TEST_SCRIPT}'"
245
# The Valve CI gateway receives jobs in a YAML format. Create a
# job description from the CI environment.
247
python3 "$CI_VALVE_ARTIFACTS"/generate_b2c.py \
248
--ci-job-id "${CI_JOB_ID}" \
249
--container-cmd "${B2C_TEST_SCRIPT}" \
250
--initramfs-url "${B2C_INITRAMFS_URL}" \
251
--job-success-regex "${B2C_JOB_SUCCESS_REGEX}" \
252
--job-warn-regex "${B2C_JOB_WARN_REGEX}" \
253
--kernel-url "${B2C_KERNEL_URL}" \
254
--log-level "${B2C_LOG_LEVEL}" \
255
--poweroff-delay "${B2C_POWEROFF_DELAY}" \
256
--session-end-regex "${B2C_SESSION_END_REGEX}" \
257
--session-reboot-regex "${B2C_SESSION_REBOOT_REGEX}" \
258
--tags "${CI_RUNNER_TAGS}" \
259
--template "${B2C_JOB_TEMPLATE}" \
260
--timeout-boot-minutes "${B2C_TIMEOUT_BOOT_MINUTES}" \
261
--timeout-boot-retries "${B2C_TIMEOUT_BOOT_RETRIES}" \
262
--timeout-first-minutes "${B2C_TIMEOUT_FIRST_MINUTES}" \
263
--timeout-first-retries "${B2C_TIMEOUT_FIRST_RETRIES}" \
264
--timeout-minutes "${B2C_TIMEOUT_MINUTES}" \
265
--timeout-overall-minutes "${B2C_TIMEOUT_OVERALL_MINUTES}" \
266
--timeout-retries "${B2C_TIMEOUT_RETRIES}" \
267
--job-volume-exclusions "${B2C_JOB_VOLUME_EXCLUSIONS}" \
268
--local-container "${IMAGE_UNDER_TEST}" \
269
${B2C_EXTRA_VOLUME_ARGS} \
270
--working-dir "$CI_PROJECT_DIR"
274
rm -rf ${JOB_FOLDER} || true
275
mkdir -v ${JOB_FOLDER}
276
# Create a script to regenerate the CI environment when this job
# begins running on the remote DUT.
279
"$CI_COMMON_SCRIPTS"/generate-env.sh > ${JOB_FOLDER}/set-job-env-vars.sh
280
chmod +x ${JOB_FOLDER}/set-job-env-vars.sh
281
echo "Variables passed through:"
282
cat ${JOB_FOLDER}/set-job-env-vars.sh
283
echo "export CI_JOB_JWT=${CI_JOB_JWT}" >> ${JOB_FOLDER}/set-job-env-vars.sh
286
# Extract the Mesa distribution into the location expected by
# the Mesa CI deqp-runner scripts.
288
tar x -C ${JOB_FOLDER} -f $INSTALL_TARBALL
292
echo "$1" | sed -r s/[~\^]+//g | sed -r s/[^a-zA-Z0-9]+/-/g | sed -r s/^-+\|-+$//g | tr A-Z a-z
295
# Submit the job to Valve's CI gateway service with the CI
# provisioned job_folder.
297
env PYTHONUNBUFFERED=1 executorctl \
298
run -w b2c.yml.jinja2 -j $(slugify "$CI_JOB_NAME") -s ${JOB_FOLDER}
301
# Anything our job places in results/ will be collected by the
# Gitlab coordinator for status presentation. results/junit.xml
# will be parsed by the UI for more detailed explanations of
# test results.
310
name: "mesa_${CI_JOB_NAME}"
312
- ${JOB_FOLDER}/results
314
junit: ${JOB_FOLDER}/results/junit.xml