This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 42dfa7eee1d Small fix of system tests (#44815)
42dfa7eee1d is described below
commit 42dfa7eee1d4816dfb95863c7e472c250eb5765b
Author: VladaZakharova <[email protected]>
AuthorDate: Wed Dec 11 14:50:02 2024 +0100
Small fix of system tests (#44815)
Co-authored-by: Ulada Zakharava <[email protected]>
---
.../google/cloud/dataproc/example_dataproc_batch_persistent.py | 2 +-
.../example_dataproc_cluster_create_existing_stopped_cluster.py | 2 +-
.../google/cloud/dataproc/example_dataproc_cluster_deferrable.py | 2 +-
providers/tests/system/google/cloud/gcs/example_firestore.py | 2 +-
.../cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py | 2 +-
.../system/google/marketing_platform/example_campaign_manager.py | 8 +++++++-
scripts/ci/pre_commit/check_system_tests.py | 5 +++--
7 files changed, 15 insertions(+), 8 deletions(-)
diff --git
a/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py
b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py
index 5bf500aeef3..fbd9f748521 100644
---
a/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py
+++
b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py
@@ -42,7 +42,7 @@ ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_batch_ps"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}".replace("-", "_")
-REGION = "europe-north1"
+REGION = "us-east4"
CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else CLUSTER_NAME_FULL
diff --git
a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
index e64a0914941..97697284ed6 100644
---
a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
+++
b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
@@ -46,7 +46,7 @@ PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TE
CLUSTER_NAME_BASE = f"{DAG_ID}".replace("_", "-")
CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else CLUSTER_NAME_FULL
-REGION = "europe-north1"
+REGION = "us-east4"
# Cluster definition
CLUSTER_CONFIG = {
diff --git
a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
index 2f7153d4405..543e5044e75 100644
---
a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
+++
b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
@@ -43,7 +43,7 @@ PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TE
CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else CLUSTER_NAME_FULL
-REGION = "europe-north1"
+REGION = "us-east4"
# Cluster definition
diff --git a/providers/tests/system/google/cloud/gcs/example_firestore.py
b/providers/tests/system/google/cloud/gcs/example_firestore.py
index 1860d30c0d5..4f4a5777a70 100644
--- a/providers/tests/system/google/cloud/gcs/example_firestore.py
+++ b/providers/tests/system/google/cloud/gcs/example_firestore.py
@@ -142,7 +142,7 @@ with DAG(
environment={
"tempLocation": f"gs://{BUCKET_NAME}/tmp",
},
- location="us-central1",
+ location="us-east4",
append_job_name=False,
trigger_rule=TriggerRule.ALL_DONE,
)
diff --git
a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
index ea68e55fa26..92133b82718 100644
---
a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
+++
b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
@@ -59,7 +59,7 @@ VIDEO_DATASET = {
VIDEO_DATA_CONFIG = [
{
"import_schema_uri": schema.dataset.ioformat.video.classification,
- "gcs_source": {"uris": [f"gs://{RESOURCE_DATA_BUCKET}/vertex-ai/datasets/video-dataset.csv"]},
+ "gcs_source": {"uris": [f"gs://{RESOURCE_DATA_BUCKET}/automl/datasets/video/classification.csv"]},
},
]
diff --git
a/providers/tests/system/google/marketing_platform/example_campaign_manager.py
b/providers/tests/system/google/marketing_platform/example_campaign_manager.py
index 70e8c208d50..6bf68adc415 100644
---
a/providers/tests/system/google/marketing_platform/example_campaign_manager.py
+++
b/providers/tests/system/google/marketing_platform/example_campaign_manager.py
@@ -326,7 +326,13 @@ with DAG(
# This test needs watcher in order to properly mark success/failure
# when "tearDown" task with trigger rule is part of the DAG
- list(dag.tasks) >> watcher()
+
+ # Excluding sensor because we expect it to fail due to cancelled operation
+ [
+ task
+ for task in dag.tasks
+ if task.task_id not in ["insert_conversion", "update_conversion", "delete_connection"]
+ ] >> watcher()
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
diff --git a/scripts/ci/pre_commit/check_system_tests.py
b/scripts/ci/pre_commit/check_system_tests.py
index 1eceafaab1d..3d5c743b54f 100755
--- a/scripts/ci/pre_commit/check_system_tests.py
+++ b/scripts/ci/pre_commit/check_system_tests.py
@@ -35,6 +35,7 @@ console = Console(color_system="standard", width=200)
errors: list[str] = []
WATCHER_APPEND_INSTRUCTION = "list(dag.tasks) >> watcher()"
+WATCHER_APPEND_INSTRUCTION_SHORT = " >> watcher()"
PYTEST_FUNCTION = """
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
@@ -52,13 +53,13 @@ PYTEST_FUNCTION_PATTERN = re.compile(
def _check_file(file: Path):
content = file.read_text()
if "from tests_common.test_utils.watcher import watcher" in content:
- index = content.find(WATCHER_APPEND_INSTRUCTION)
+ index = content.find(WATCHER_APPEND_INSTRUCTION_SHORT)
if index == -1:
errors.append(
f"[red]The example {file} imports tests_common.test_utils.watcher "
f"but does not use it properly![/]\n\n"
"[yellow]Make sure you have:[/]\n\n"
- f" {WATCHER_APPEND_INSTRUCTION}\n\n"
+ f" {WATCHER_APPEND_INSTRUCTION_SHORT}\n\n"
"[yellow]as the last instruction in your example DAG.[/]\n"
)
else: