This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 32e81b11d59 Fix managed kafka system tests to use correct network name (#56654)
32e81b11d59 is described below

commit 32e81b11d59614a2f6d737563410fa16e80308e8
Author: VladaZakharova <[email protected]>
AuthorDate: Wed Oct 15 14:54:43 2025 +0000

    Fix managed kafka system tests to use correct network name (#56654)
---
 .../managed_kafka/example_managed_kafka_consumer_group.py   | 13 ++++++++++++-
 .../cloud/managed_kafka/example_managed_kafka_topic.py      | 13 ++++++++++++-
 2 files changed, 24 insertions(+), 2 deletions(-)

diff --git a/providers/google/tests/system/google/cloud/managed_kafka/example_managed_kafka_consumer_group.py b/providers/google/tests/system/google/cloud/managed_kafka/example_managed_kafka_consumer_group.py
index 2df76653374..54ad325776b 100644
--- a/providers/google/tests/system/google/cloud/managed_kafka/example_managed_kafka_consumer_group.py
+++ b/providers/google/tests/system/google/cloud/managed_kafka/example_managed_kafka_consumer_group.py
@@ -18,6 +18,15 @@
 
 """
 Example Airflow DAG for Google Cloud Managed Service for Apache Kafka testing Topic operations.
+
+Requirements:
+    The operator that creates a cluster requires the GOOGLE_PROVIDER_NETWORK environment variable,
+    which must contain the name of the network to be used for cluster creation.
+
+    Please note that if you are running this operator in Google Cloud Composer, this value is set
+    automatically and does not require any additional configuration.
+    In other cases, the cluster should be created in the same network that your machine
+    is running in.
 """
 
 from __future__ import annotations
@@ -59,6 +68,8 @@ from tests_common.test_utils.api_client_helpers import create_airflow_connection
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
+IS_COMPOSER = bool(os.environ.get("COMPOSER_ENVIRONMENT", ""))
+NETWORK = os.environ.get("GOOGLE_PROVIDER_NETWORK") if not IS_COMPOSER else "default"
 DAG_ID = "managed_kafka_consumer_group_operations"
 LOCATION = "us-central1"
 
@@ -67,7 +78,7 @@ CLUSTER_CONF = {
     "gcp_config": {
         "access_config": {
             "network_configs": [
-                {"subnet": 
f"projects/{PROJECT_ID}/regions/{LOCATION}/subnetworks/default"},
+                {"subnet": 
f"projects/{PROJECT_ID}/regions/{LOCATION}/subnetworks/{NETWORK}"},
             ],
         },
     },
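
For reference, a minimal standalone sketch of the resolution pattern the hunks above introduce: outside Cloud Composer the network name comes from GOOGLE_PROVIDER_NETWORK, inside Composer (detected via COMPOSER_ENVIRONMENT) the "default" network is used, and the result is interpolated into the subnet path. The resolve_subnet helper and the explicit error for a missing variable are illustrative only and are not part of the patch.

    import os

    def resolve_subnet(project_id: str, location: str) -> str:
        # Cloud Composer sets COMPOSER_ENVIRONMENT; in that case the patch uses the
        # "default" network, otherwise it reads GOOGLE_PROVIDER_NETWORK.
        is_composer = bool(os.environ.get("COMPOSER_ENVIRONMENT", ""))
        network = "default" if is_composer else os.environ.get("GOOGLE_PROVIDER_NETWORK")
        if network is None:
            # Illustrative guard: the patch itself simply leaves NETWORK as None here.
            raise ValueError("GOOGLE_PROVIDER_NETWORK must be set outside Cloud Composer")
        return f"projects/{project_id}/regions/{location}/subnetworks/{network}"

    # Example (hypothetical values):
    #   os.environ["GOOGLE_PROVIDER_NETWORK"] = "my-test-network"
    #   resolve_subnet("my-project", "us-central1")
    #   -> "projects/my-project/regions/us-central1/subnetworks/my-test-network"
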
diff --git a/providers/google/tests/system/google/cloud/managed_kafka/example_managed_kafka_topic.py b/providers/google/tests/system/google/cloud/managed_kafka/example_managed_kafka_topic.py
index 2797e22c3f9..8354df3cd0d 100644
--- a/providers/google/tests/system/google/cloud/managed_kafka/example_managed_kafka_topic.py
+++ b/providers/google/tests/system/google/cloud/managed_kafka/example_managed_kafka_topic.py
@@ -19,6 +19,15 @@
 
 """
 Example Airflow DAG for Google Cloud Managed Service for Apache Kafka testing Topic operations.
+
+Requirements:
+    The operator that creates a cluster requires the GOOGLE_PROVIDER_NETWORK environment variable,
+    which must contain the name of the network to be used for cluster creation.
+
+    Please note that if you are running this operator in Google Cloud Composer, this value is set
+    automatically and does not require any additional configuration.
+    In other cases, the cluster should be created in the same network that your machine
+    is running in.
 """
 
 from __future__ import annotations
@@ -45,6 +54,8 @@ except ImportError:
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
+IS_COMPOSER = bool(os.environ.get("COMPOSER_ENVIRONMENT", ""))
+NETWORK = os.environ.get("GOOGLE_PROVIDER_NETWORK") if not IS_COMPOSER else "default"
 DAG_ID = "managed_kafka_topic_operations"
 LOCATION = "us-central1"
 
@@ -53,7 +64,7 @@ CLUSTER_CONF = {
     "gcp_config": {
         "access_config": {
             "network_configs": [
-                {"subnet": 
f"projects/{PROJECT_ID}/regions/{LOCATION}/subnetworks/default"},
+                {"subnet": 
f"projects/{PROJECT_ID}/regions/{LOCATION}/subnetworks/{NETWORK}"},
             ],
         },
     },
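
The same pattern applies in the topic example. For clarity, a sketch of the cluster configuration that results once NETWORK has been resolved; the values below are illustrative, and the real CLUSTER_CONF in these files may carry additional fields not shown in this diff.

    # Assuming PROJECT_ID = "my-project", LOCATION = "us-central1", NETWORK = "my-test-network",
    # the changed line produces a subnet path like this inside CLUSTER_CONF:
    CLUSTER_CONF = {
        "gcp_config": {
            "access_config": {
                "network_configs": [
                    {"subnet": "projects/my-project/regions/us-central1/subnetworks/my-test-network"},
                ],
            },
        },
    }
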
