This is an automated email from the ASF dual-hosted git repository.

taragolis pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 99fdc2fa8d Fix system test for GoogleCalendarToGCSOperator (#34452)
99fdc2fa8d is described below

commit 99fdc2fa8d49e75a22c3329d7cb5723f8235ba8e
Author: Maksim <[email protected]>
AuthorDate: Thu Nov 2 12:44:12 2023 +0100

    Fix system test for GoogleCalendarToGCSOperator (#34452)
    
    * Fix system test for GoogleCalendarToGCSOperator
    
    * Update variables
    
    * Add gcp_conn_id variable
---
 .../google/cloud/gcs/example_calendar_to_gcs.py    | 42 ++++++++++++++++++++--
 1 file changed, 39 insertions(+), 3 deletions(-)

diff --git a/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py b/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py
index d0d1c96f28..37e2f0265d 100644
--- a/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py
+++ b/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py
@@ -17,22 +17,30 @@
 # under the License.
 from __future__ import annotations
 
+import json
 import os
 from datetime import datetime
 
+from airflow.decorators import task
+from airflow.models import Connection
 from airflow.models.dag import DAG
+from airflow.operators.bash import BashOperator
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.calendar_to_gcs import GoogleCalendarToGCSOperator
+from airflow.settings import Session
 from airflow.utils.trigger_rule import TriggerRule
 
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 DAG_ID = "example_calendar_to_gcs"
 
 BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
-CALENDAR_ID = os.environ.get("CALENDAR_ID", "1234567890qwerty")
+CALENDAR_ID = os.environ.get("CALENDAR_ID", "primary")
 API_VERSION = "v3"
 
+CONNECTION_ID = f"connection_{DAG_ID}_{ENV_ID}"
+
+
 with DAG(
     DAG_ID,
     schedule="@once",  # Override to match your needs
@@ -44,15 +52,41 @@ with DAG(
         task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
     )
 
+    @task
+    def create_temp_gcp_connection():
+        conn = Connection(
+            conn_id=CONNECTION_ID,
+            conn_type="google_cloud_platform",
+        )
+        conn_extra = {
+        "scope": "https://www.googleapis.com/auth/calendar",
+            "project": PROJECT_ID,
+            "keyfile_dict": "",  # Override to match your needs
+        }
+        conn_extra_json = json.dumps(conn_extra)
+        conn.set_extra(conn_extra_json)
+
+        session: Session = Session()
+        session.add(conn)
+        session.commit()
+
+    create_temp_gcp_connection_task = create_temp_gcp_connection()
+
     # [START upload_calendar_to_gcs]
     upload_calendar_to_gcs = GoogleCalendarToGCSOperator(
         task_id="upload_calendar_to_gcs",
         destination_bucket=BUCKET_NAME,
         calendar_id=CALENDAR_ID,
         api_version=API_VERSION,
+        gcp_conn_id=CONNECTION_ID,
     )
     # [END upload_calendar_to_gcs]
 
+    delete_temp_gcp_connection_task = BashOperator(
+        task_id="delete_temp_gcp_connection",
+        bash_command=f"airflow connections delete {CONNECTION_ID}",
+    )
+
     delete_bucket = GCSDeleteBucketOperator(
         task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE
     )
@@ -60,9 +94,11 @@ with DAG(
     (
         # TEST SETUP
         create_bucket
+        >> create_temp_gcp_connection_task
         # TEST BODY
         >> upload_calendar_to_gcs
         # TEST TEARDOWN
+        >> delete_temp_gcp_connection_task
         >> delete_bucket
     )
 

Reply via email to