This is an automated email from the ASF dual-hosted git repository.
shahar pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 3b5cc50fed Fix cloud_build system tests (#42306)
3b5cc50fed is described below
commit 3b5cc50fedbaaf603e3bbdd15283452ca5eff386
Author: VladaZakharova <[email protected]>
AuthorDate: Thu Sep 19 21:01:37 2024 +0200
Fix cloud_build system tests (#42306)
Co-authored-by: Ulada Zakharava <[email protected]>
---
.../cloud/cloud_build/example_cloud_build.py | 10 ++--
.../cloud_build/example_cloud_build_trigger.py | 56 ++++++++++++++++------
2 files changed, 47 insertions(+), 19 deletions(-)
diff --git a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py b/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py
index 0b6b25dc65..d31c799d95 100644
--- a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py
+++ b/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py
@@ -44,7 +44,7 @@ from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-DAG_ID = "example_gcp_cloud_build"
+DAG_ID = "gcp_cloud_build"
GCP_SOURCE_ARCHIVE_URL = "gs://airflow-system-tests-resources/cloud-build/file.tar.gz"
# Repository with this name is expected created within the project $SYSTEM_TESTS_GCP_PROJECT
@@ -52,21 +52,21 @@ GCP_SOURCE_ARCHIVE_URL = "gs://airflow-system-tests-resources/cloud-build/file.t
# 1. Create Cloud Source Repository
# 2. Push into a master branch the following file:
# tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml
-GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repo"
+GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repository"
CURRENT_FOLDER = Path(__file__).parent
# [START howto_operator_gcp_create_build_from_storage_body]
CREATE_BUILD_FROM_STORAGE_BODY = {
"source": {"storage_source": GCP_SOURCE_ARCHIVE_URL},
- "steps": [{"name": "ubuntu", "args": ["echo", "Hello world"]}],
+ "steps": [{"name": "ubuntu", "args": ["echo", "Hello world", "sleep
200"]}],
}
# [END howto_operator_gcp_create_build_from_storage_body]
# [START howto_operator_create_build_from_repo_body]
CREATE_BUILD_FROM_REPO_BODY: dict[str, Any] = {
"source": {"repo_source": {"repo_name": GCP_SOURCE_REPOSITORY_NAME,
"branch_name": "master"}},
- "steps": [{"name": "ubuntu", "args": ["echo", "Hello world"]}],
+ "steps": [{"name": "ubuntu", "args": ["echo", "Hello world", "sleep
200"]}],
}
# [END howto_operator_create_build_from_repo_body]
@@ -76,7 +76,7 @@ with DAG(
schedule="@once",
start_date=datetime(2021, 1, 1),
catchup=False,
- tags=["example"],
+ tags=["example", "cloud_build"],
) as dag:
@task_group(group_id="build_from_storage")
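For context, the build bodies adjusted above are handed to CloudBuildCreateBuildOperator inside the example's task groups. A minimal sketch of that wiring, assuming the imports and DAG context from the file (the task id here is illustrative, not part of this diff):

from airflow.providers.google.cloud.operators.cloud_build import CloudBuildCreateBuildOperator

# Sketch only: the body defined above, now including the extra "sleep 200"
# argument, is passed to the operator through its `build` parameter.
create_build_from_storage = CloudBuildCreateBuildOperator(
    task_id="create_build_from_storage",
    project_id=PROJECT_ID,
    build=CREATE_BUILD_FROM_STORAGE_BODY,
)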
diff --git a/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py b/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py
index fa8bf9b7f3..47d4e9f5d1 100644
--- a/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py
+++ b/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py
@@ -25,7 +25,11 @@ import os
from datetime import datetime
from typing import Any, cast
-from airflow.models.baseoperator import chain
+from googleapiclient.discovery import build
+from googleapiclient.errors import HttpError
+
+from airflow.decorators import task
+from airflow.exceptions import AirflowException
from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
from airflow.providers.google.cloud.operators.cloud_build import (
@@ -42,16 +46,17 @@ from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-DAG_ID = "example_gcp_cloud_build_trigger"
+DAG_ID = "gcp_cloud_build_trigger"
# Repository with this name is expected created within the project $SYSTEM_TESTS_GCP_PROJECT
# If you'd like to run this system test locally, please
# 1. Create Cloud Source Repository
# 2. Push into a master branch the following file:
# tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml
-GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repo"
+GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repository"
-TRIGGER_NAME = f"cloud-build-trigger-{ENV_ID}"
+TRIGGER_NAME = f"cloud-build-trigger-{ENV_ID}".replace("_", "-")
+PROJECT_NUMBER = "{{ task_instance.xcom_pull('get_project_number') }}"
# [START howto_operator_gcp_create_build_trigger_body]
create_build_trigger_body = {
@@ -59,9 +64,10 @@ create_build_trigger_body = {
"trigger_template": {
"project_id": PROJECT_ID,
"repo_name": GCP_SOURCE_REPOSITORY_NAME,
- "branch_name": "main",
+ "branch_name": "master",
},
"filename": "example_cloud_build.yaml",
+ "service_account":
f"projects/{PROJECT_ID}/serviceAccounts/{PROJECT_NUMBER}[email protected]",
}
# [END howto_operator_gcp_create_build_trigger_body]
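Note on the new service_account field: PROJECT_NUMBER above is a Jinja template that Airflow renders at run time from the XCom pushed by the get_project_number task added further down. A hypothetical helper, only for illustration (the account's mail domain is redacted in this message, so it is left as a parameter):

def format_service_account(project_id: str, project_number: str, domain: str) -> str:
    # Hypothetical, not part of the diff: rebuild the service_account
    # resource name used in the trigger bodies from its parts.
    return f"projects/{project_id}/serviceAccounts/{project_number}@{domain}"

# e.g. format_service_account("my-project", "123456789012", "example.com")
# -> "projects/my-project/serviceAccounts/[email protected]"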
@@ -74,23 +80,41 @@ update_build_trigger_body = {
"branch_name": "master",
},
"filename": "example_cloud_build.yaml",
+ "service_account":
f"projects/{PROJECT_ID}/serviceAccounts/{PROJECT_NUMBER}[email protected]",
}
# [END START howto_operator_gcp_update_build_trigger_body]
# [START howto_operator_create_build_from_repo_body]
create_build_from_repo_body: dict[str, Any] = {
"source": {"repo_source": {"repo_name": GCP_SOURCE_REPOSITORY_NAME,
"branch_name": "master"}},
- "steps": [{"name": "ubuntu", "args": ["echo", "Hello world"]}],
+ "steps": [{"name": "ubuntu", "args": ["echo", "Hello world", "sleep
200"]}],
}
# [END howto_operator_create_build_from_repo_body]
+@task(task_id="get_project_number")
+def get_project_number():
+ """Helper function to retrieve the number of the project based on
PROJECT_ID"""
+ try:
+ with build("cloudresourcemanager", "v1") as service:
+ response = service.projects().get(projectId=PROJECT_ID).execute()
+ return response["projectNumber"]
+ except HttpError as exc:
+ if exc.status_code == 403:
+ raise AirflowException(
+ "No project found with specified name, "
+ "or caller does not have permissions to read specified project"
+ )
+ else:
+ raise exc
+
+
with DAG(
DAG_ID,
schedule="@once",
start_date=datetime(2021, 1, 1),
catchup=False,
- tags=["example"],
+ tags=["example", "cloud_build_trigger"],
) as dag:
# [START howto_operator_create_build_trigger]
create_build_trigger = CloudBuildCreateBuildTriggerOperator(
@@ -144,13 +168,17 @@ with DAG(
)
# [END howto_operator_list_build_triggers]
- chain(
- create_build_trigger,
- run_build_trigger,
- update_build_trigger,
- get_build_trigger,
- delete_build_trigger,
- list_build_triggers,
+ (
+ # TEST SETUP
+ get_project_number()
+ # TEST BODY
+ >> create_build_trigger
+ >> run_build_trigger
+ >> update_build_trigger
+ >> get_build_trigger
+ # TEST TEARDOWN
+ >> delete_build_trigger
+ >> list_build_triggers
)
from tests.system.utils.watcher import watcher
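The excerpt ends at the watcher import; in Airflow system tests that import is normally followed by the standard tail below, shown here only as a sketch for context (it is unchanged by this commit):

# Watcher marks the test run as failed when the teardown task (which uses a
# trigger rule) fails, and get_test_run makes the DAG runnable via pytest.
list(dag.tasks) >> watcher()

from tests.system.utils import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)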