ci/lava: Forward environment variables to DUT directly
Instead of uploading the environment variables to S3, append them to the job definition. Signed-off-by: Valentine Burley <valentine.burley@collabora.com> Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/35051>
This commit is contained in:
committed by
Marge Bot
parent
ffe8a2e023
commit
8b37cfae2e
@@ -29,7 +29,6 @@ variables:
   BASE_SYSTEM_MAINLINE_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${FDO_UPSTREAM_REPO}/${LAVA_DISTRIBUTION_TAG}"
   BASE_SYSTEM_FORK_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${CI_PROJECT_PATH}/${LAVA_DISTRIBUTION_TAG}"
   # per-job build artifacts
-  JOB_ROOTFS_OVERLAY_PATH: "${JOB_ARTIFACTS_BASE}/job-rootfs-overlay.tar.gz"
   JOB_RESULTS_PATH: "${JOB_ARTIFACTS_BASE}/results.tar.zst"
   LAVA_S3_ARTIFACT_NAME: "mesa-${ARCH}-default-debugoptimized"
   S3_ARTIFACT_NAME: "mesa-python-ci-artifacts"
@@ -44,18 +44,15 @@ ROOTFS_URL="$(get_path_to_artifact lava-rootfs.tar.zst)"
 [ $? != 1 ] || exit 1

 rm -rf results
-mkdir -p results/job-rootfs-overlay/
+mkdir results

-filter_env_vars > results/job-rootfs-overlay/set-job-env-vars.sh
+filter_env_vars > dut-env-vars.sh
 # Set SCRIPTS_DIR to point to the Mesa install we download for the DUT
-echo "export SCRIPTS_DIR='$CI_PROJECT_DIR/install'" >> results/job-rootfs-overlay/set-job-env-vars.sh
-
-tar zcf job-rootfs-overlay.tar.gz -C results/job-rootfs-overlay/ .
-ci-fairy s3cp --token-file "${S3_JWT_FILE}" job-rootfs-overlay.tar.gz "https://${JOB_ROOTFS_OVERLAY_PATH}"
+echo "export SCRIPTS_DIR='$CI_PROJECT_DIR/install'" >> dut-env-vars.sh

-# Prepare env vars for upload.
 section_switch variables "Environment variables passed through to device:"
-cat results/job-rootfs-overlay/set-job-env-vars.sh
+cat dut-env-vars.sh

 section_switch lava_submit "Submitting job for scheduling"
@@ -88,8 +85,9 @@ PYTHONPATH=artifacts/ artifacts/lava/lava_job_submitter.py \
 	--pipeline-info "$CI_JOB_NAME: $CI_PIPELINE_URL on $CI_COMMIT_REF_NAME ${CI_NODE_INDEX}/${CI_NODE_TOTAL}" \
 	--rootfs-url "${ROOTFS_URL}" \
 	--kernel-url-prefix "${KERNEL_IMAGE_BASE}/${DEBIAN_ARCH}" \
-	--first-stage-init artifacts/ci-common/init-stage1.sh \
 	--dtb-filename "${DTB}" \
+	--first-stage-init artifacts/ci-common/init-stage1.sh \
+	--env-file dut-env-vars.sh \
 	--jwt-file "${S3_JWT_FILE}" \
 	--kernel-image-name "${KERNEL_IMAGE_NAME}" \
 	--kernel-image-type "${KERNEL_IMAGE_TYPE}" \
@@ -108,12 +106,6 @@ PYTHONPATH=artifacts/ artifacts/lava/lava_job_submitter.py \
 		--compression=zstd \
 		--path="${CI_PROJECT_DIR}" \
 		--format=tar \
-	append-overlay \
-		--name=job-overlay \
-		--url="https://${JOB_ROOTFS_OVERLAY_PATH}" \
-		--compression=gz \
-		--path="/" \
-		--format=tar \
 	append-overlay \
 		--name=kernel-modules \
 		--url="${KERNEL_IMAGE_BASE}/${DEBIAN_ARCH}/modules.tar.zst" \
||||
@@ -403,6 +403,7 @@ class LAVAJobSubmitter(PathResolver):
     dtb_filename: str = None
     dump_yaml: bool = False  # Whether to dump the YAML payload to stdout
     first_stage_init: str = None
+    env_file: pathlib.Path = None
     jwt_file: pathlib.Path = None
     kernel_image_name: str = None
     kernel_image_type: str = ""
||||
@@ -1,5 +1,7 @@
 from io import StringIO
 from typing import TYPE_CHECKING, Any
+import base64
+import shlex

 from ruamel.yaml import YAML
||||
@@ -256,6 +258,15 @@ class LAVAJobDefinition:
                 + '-o "/lib/firmware/qcom/sm8350/a660_zap.mbn"'
             )

+        # Forward environmental variables to the DUT
+        # base64-encoded to avoid YAML quoting issues
+        with open(self.job_submitter.env_file, "rb") as f:
+            encoded = base64.b64encode(f.read()).decode()
+        safe_encoded = shlex.quote(encoded)
+        run_steps += [
+            f'echo "eval \\\"$(echo {safe_encoded} | base64 -d)\\\"" >> /set-job-env-vars.sh',
+        ]
+
         run_steps.append("export CURRENT_SECTION=dut_boot")

         return run_steps
|
||||
|
||||
@@ -89,6 +89,7 @@ actions:
       steps:
       - |-
         echo test FASTBOOT
+        echo "eval \"$(echo ZWNobyB0ZXN0IEZBU1RCT09U | base64 -d)\"" >> /set-job-env-vars.sh
         export CURRENT_SECTION=dut_boot
       - export -p > /dut-env-vars.sh
       - test:
||||
@@ -85,6 +85,7 @@ actions:
     run:
       steps:
       - echo test FASTBOOT
+      - echo "eval \"$(echo ZWNobyB0ZXN0IEZBU1RCT09U | base64 -d)\"" >> /set-job-env-vars.sh
       - export CURRENT_SECTION=dut_boot
      - set -e
      - echo Could not find jwt file, disabling S3 requests...
||||
@@ -60,6 +60,7 @@ actions:
       steps:
       - |-
         echo test UBOOT
+        echo "eval \"$(echo ZWNobyB0ZXN0IFVCT09U | base64 -d)\"" >> /set-job-env-vars.sh
         export CURRENT_SECTION=dut_boot
       - export -p > /dut-env-vars.sh
       - test:
||||
@@ -58,6 +58,7 @@ actions:
     run:
       steps:
       - echo test UBOOT
+      - echo "eval \"$(echo ZWNobyB0ZXN0IFVCT09U | base64 -d)\"" >> /set-job-env-vars.sh
       - export CURRENT_SECTION=dut_boot
      - set -e
      - echo Could not find jwt file, disabling S3 requests...
|
||||
@@ -58,6 +58,7 @@ def job_submitter_factory(mode: Literal["UBOOT", "FASTBOOT"], shell_file):
         farm="test_farm",
         dtb_filename="my_dtb_filename",
         first_stage_init=shell_file,
+        env_file=shell_file,
         job_timeout_min=job_timeout_min,
         mesa_job_name=mesa_job_name,
         pipeline_info=pipeline_info,
||||
Reference in New Issue
Block a user