Module: Mesa
Branch: main
Commit: 77c3091fdd3fcc776e5a9b634c79b46f0380ec7c
URL:    http://cgit.freedesktop.org/mesa/mesa/commit/?id=77c3091fdd3fcc776e5a9b634c79b46f0380ec7c

Author: Guilherme Gallo <guilherme.ga...@collabora.com>
Date:   Wed Oct 25 12:07:27 2023 -0300

ci/lava: Refactor UART definition building blocks

Break it into smaller pieces of variable size (fastboot has 3 deploy
actions and uboot only one) so the base definition can be built cleanly
at the end.

Extract the kernel/dtb attachment and the init_stage1 steps into
functions so they can be reused later by the SSH job definition.
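
Roughly, the refactored entry point composes the new helpers like this
(a sketch using the names introduced by this patch; see the diff below
for the actual code):

    def generate_lava_yaml_payload(args):
        values = generate_metadata(args)
        nfsrootfs = {
            "url": f"{args.rootfs_url_prefix}/lava-rootfs.tar.zst",
            "compression": "zstd",
        }
        if args.boot_method == "fastboot":
            # fastboot needs three deploy actions (nfs, prepare, fastboot)
            values["actions"] = [
                *fastboot_deploy_actions(args, nfsrootfs),
                {"boot": fastboot_boot_action(args)},
            ]
        else:  # tftp/uboot uses a single deploy action
            values["actions"] = [
                *tftp_deploy_actions(args, nfsrootfs),
                {"boot": tftp_boot_action(args)},
            ]
        values["actions"].extend(test_actions(args))
        return values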

Signed-off-by: Guilherme Gallo <guilherme.ga...@collabora.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/25912>

---

 .gitlab-ci/lava/utils/uart_job_definition.py | 194 ++++++++++++++++-----------
 1 file changed, 116 insertions(+), 78 deletions(-)

diff --git a/.gitlab-ci/lava/utils/uart_job_definition.py b/.gitlab-ci/lava/utils/uart_job_definition.py
index 53cbd7a0485..8a6386d3ffb 100644
--- a/.gitlab-ci/lava/utils/uart_job_definition.py
+++ b/.gitlab-ci/lava/utils/uart_job_definition.py
@@ -1,22 +1,27 @@
-from typing import Any
-from .lava_job_definition import (
-    generate_metadata,
-    NUMBER_OF_ATTEMPTS_LAVA_BOOT,
-    artifact_download_steps,
-)
+from typing import TYPE_CHECKING, Any
 
+if TYPE_CHECKING:
+    from lava.lava_job_submitter import LAVAJobSubmitter
 
-def generate_lava_yaml_payload(args) -> dict[str, Any]:
-    values = generate_metadata(args)
+from .lava_job_definition import (NUMBER_OF_ATTEMPTS_LAVA_BOOT,
+                                  artifact_download_steps, generate_metadata)
 
-    # URLs to our kernel rootfs to boot from, both generated by the base
-    # container build
+# Use the same image that is being used for the hardware enablement and health-checks.
+# They are pretty small (<100MB) and have all the tools we need to run LAVA, so it is a safe choice.
+# You can find the Dockerfile here:
+# https://gitlab.collabora.com/lava/health-check-docker/-/blob/main/Dockerfile
+# And the registry here: https://gitlab.collabora.com/lava/health-check-docker/container_registry/
+DOCKER_IMAGE = "registry.gitlab.collabora.com/lava/health-check-docker"
 
-    nfsrootfs = {
-        "url": f"{args.rootfs_url_prefix}/lava-rootfs.tar.zst",
-        "compression": "zstd",
-    }
 
+def attach_kernel_and_dtb(args, deploy_field):
+    if args.kernel_image_type:
+        deploy_field["kernel"]["type"] = args.kernel_image_type
+    if args.dtb_filename:
+        deploy_field["dtb"] = {"url": 
f"{args.kernel_url_prefix}/{args.dtb_filename}.dtb"}
+
+
+def fastboot_deploy_actions(args: "LAVAJobSubmitter", nfsrootfs) -> list[dict[str, Any]]:
     fastboot_deploy_nfs = {
         "timeout": {"minutes": 10},
         "to": "nfs",
@@ -34,7 +39,7 @@ def generate_lava_yaml_payload(args) -> dict[str, Any]:
         },
         "postprocess": {
             "docker": {
-                "image": 
"registry.gitlab.collabora.com/lava/health-check-docker",
+                "image": DOCKER_IMAGE,
                 "steps": [
                     f"cat Image.gz {args.dtb_filename}.dtb > Image.gz+dtb",
                     "mkbootimg --kernel Image.gz+dtb"
@@ -44,56 +49,63 @@ def generate_lava_yaml_payload(args) -> dict[str, Any]:
             }
         },
     }
-    if args.kernel_image_type:
-        fastboot_deploy_prepare["images"]["kernel"]["type"] = args.kernel_image_type
-    if args.dtb_filename:
-        fastboot_deploy_prepare["images"]["dtb"] = {
-            "url": f"{args.kernel_url_prefix}/{args.dtb_filename}.dtb"
-        }
-
-    tftp_deploy = {
-        "timeout": {"minutes": 5},
-        "to": "tftp",
-        "os": "oe",
-        "kernel": {
-            "url": f"{args.kernel_url_prefix}/{args.kernel_image_name}",
-        },
-        "nfsrootfs": nfsrootfs,
-    }
-    if args.kernel_image_type:
-        tftp_deploy["kernel"]["type"] = args.kernel_image_type
-    if args.dtb_filename:
-        tftp_deploy["dtb"] = {
-            "url": f"{args.kernel_url_prefix}/{args.dtb_filename}.dtb"
-        }
 
     fastboot_deploy = {
         "timeout": {"minutes": 2},
         "to": "fastboot",
         "docker": {
-            "image": "registry.gitlab.collabora.com/lava/health-check-docker",
+            "image": DOCKER_IMAGE,
         },
         "images": {
             "boot": {"url": "downloads://boot.img"},
         },
     }
 
-    fastboot_boot = {
-        "timeout": {"minutes": 2},
-        "docker": {"image": 
"registry.gitlab.collabora.com/lava/health-check-docker"},
-        "failure_retry": NUMBER_OF_ATTEMPTS_LAVA_BOOT,
-        "method": args.boot_method,
-        "prompts": ["lava-shell:"],
-        "commands": ["set_active a"],
-    }
+    # URLs to our kernel rootfs to boot from, both generated by the base
+    # container build
+    attach_kernel_and_dtb(args, fastboot_deploy_prepare)
 
-    tftp_boot = {
-        "failure_retry": NUMBER_OF_ATTEMPTS_LAVA_BOOT,
-        "method": args.boot_method,
-        "prompts": ["lava-shell:"],
-        "commands": "nfs",
+    return [{"deploy": d} for d in (fastboot_deploy_nfs, 
fastboot_deploy_prepare, fastboot_deploy)]
+
+
+def tftp_deploy_actions(args: "LAVAJobSubmitter", nfsrootfs) -> list[dict[str, Any]]:
+    tftp_deploy = {
+        "timeout": {"minutes": 5},
+        "to": "tftp",
+        "os": "oe",
+        "kernel": {
+            "url": f"{args.kernel_url_prefix}/{args.kernel_image_name}",
+        },
+        "nfsrootfs": nfsrootfs,
     }
+    attach_kernel_and_dtb(args, tftp_deploy)
+
+    return [{"deploy": d} for d in [tftp_deploy]]
+
+
+def init_stage1_steps(args: "LAVAJobSubmitter") -> list[str]:
+    run_steps = []
+    # job execution script:
+    #   - inline .gitlab-ci/common/init-stage1.sh
+    #   - fetch and unpack per-pipeline build artifacts from build job
+    #   - fetch and unpack per-job environment from lava-submit.sh
+    #   - exec .gitlab-ci/common/init-stage2.sh
 
+    with open(args.first_stage_init, "r") as init_sh:
+        run_steps += [x.rstrip() for x in init_sh if not x.startswith("#") and x.rstrip()]
+    # We cannot distribute the Adreno 660 shader firmware inside rootfs,
+    # since the license isn't bundled inside the repository
+    if args.device_type == "sm8350-hdk":
+        run_steps.append(
+            "curl -L --retry 4 -f --retry-all-errors --retry-delay 60 "
+            + "https://github.com/allahjasif1990/hdk888-firmware/raw/main/a660_zap.mbn "
+            + '-o "/lib/firmware/qcom/sm8350/a660_zap.mbn"'
+        )
+
+    return run_steps
+
+
+def test_actions(args: "LAVAJobSubmitter") -> list[dict[str, Any]]:
     # skeleton test definition: only declaring each job as a single 'test'
     # since LAVA's test parsing is not useful to us
     run_steps = []
@@ -120,25 +132,7 @@ def generate_lava_yaml_payload(args) -> dict[str, Any]:
         ],
     }
 
-    # job execution script:
-    #   - inline .gitlab-ci/common/init-stage1.sh
-    #   - fetch and unpack per-pipeline build artifacts from build job
-    #   - fetch and unpack per-job environment from lava-submit.sh
-    #   - exec .gitlab-ci/common/init-stage2.sh
-
-    with open(args.first_stage_init, "r") as init_sh:
-        run_steps += [
-            x.rstrip() for x in init_sh if not x.startswith("#") and x.rstrip()
-        ]
-    # We cannot distribute the Adreno 660 shader firmware inside rootfs,
-    # since the license isn't bundled inside the repository
-    if args.device_type == "sm8350-hdk":
-        run_steps.append(
-            "curl -L --retry 4 -f --retry-all-errors --retry-delay 60 "
-            + "https://github.com/allahjasif1990/hdk888-firmware/raw/main/a660_zap.mbn "
-            + '-o "/lib/firmware/qcom/sm8350/a660_zap.mbn"'
-        )
-
+    run_steps += init_stage1_steps(args)
     run_steps += artifact_download_steps(args)
 
     run_steps += [
@@ -153,19 +147,63 @@ def generate_lava_yaml_payload(args) -> dict[str, Any]:
         f"lava-test-case '{args.project_name}_{args.mesa_job_name}' --shell 
/init-stage2.sh",
     ]
 
+    return [{"test": t} for t in [test]]
+
+
+def tftp_boot_action(args: "LAVAJobSubmitter") -> dict[str, Any]:
+    tftp_boot = {
+        "failure_retry": NUMBER_OF_ATTEMPTS_LAVA_BOOT,
+        "method": args.boot_method,
+        "prompts": ["lava-shell:"],
+        "commands": "nfs",
+    }
+
+    return tftp_boot
+
+
+def fastboot_boot_action(args: "LAVAJobSubmitter") -> dict[str, Any]:
+    fastboot_boot = {
+        "timeout": {"minutes": 2},
+        "docker": {"image": DOCKER_IMAGE},
+        "failure_retry": NUMBER_OF_ATTEMPTS_LAVA_BOOT,
+        "method": args.boot_method,
+        "prompts": ["lava-shell:"],
+        "commands": ["set_active a"],
+    }
+
+    return fastboot_boot
+
+
+def generate_lava_yaml_payload(args: "LAVAJobSubmitter") -> dict[str, Any]:
+    """
+    Generates a YAML payload for submitting a LAVA job, based on the provided arguments.
+
+    Args:
+      args ("LAVAJobSubmitter"): The `args` parameter is an instance of the 
`LAVAJobSubmitter`
+        class. It contains various properties and methods that are used to 
configure and submit a
+        LAVA job.
+
+    Returns:
+        a dictionary containing the values generated by the `generate_metadata` function and the
+        actions for the LAVA job submission.
+    """
+    values = generate_metadata(args)
+    nfsrootfs = {
+        "url": f"{args.rootfs_url_prefix}/lava-rootfs.tar.zst",
+        "compression": "zstd",
+    }
+
     if args.boot_method == "fastboot":
         values["actions"] = [
-            {"deploy": fastboot_deploy_nfs},
-            {"deploy": fastboot_deploy_prepare},
-            {"deploy": fastboot_deploy},
-            {"boot": fastboot_boot},
-            {"test": test},
+            *fastboot_deploy_actions(args, nfsrootfs),
+            {"boot": fastboot_boot_action(args)},
         ]
     else:  # tftp
         values["actions"] = [
-            {"deploy": tftp_deploy},
-            {"boot": tftp_boot},
-            {"test": test},
+            *tftp_deploy_actions(args, nfsrootfs),
+            {"boot": tftp_boot_action(args)},
         ]
 
+    values["actions"].extend(test_actions(args))
+
     return values
