pankajkoti commented on code in PR #22808:
URL: https://github.com/apache/airflow/pull/22808#discussion_r848088855


##########
airflow/providers/google/cloud/example_dags/example_gcs.py:
##########
@@ -37,35 +37,44 @@
 from airflow.providers.google.cloud.sensors.gcs import (
     GCSObjectExistenceSensor,
     GCSObjectsWithPrefixExistenceSensor,
+    GCSObjectUpdateSensor,
+    GCSUploadSessionCompleteSensor,
 )
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import 
GCSToGCSOperator
 from airflow.providers.google.cloud.transfers.gcs_to_local import 
GCSToLocalFilesystemOperator
 from airflow.providers.google.cloud.transfers.local_to_gcs import 
LocalFilesystemToGCSOperator
 
 START_DATE = datetime(2021, 1, 1)
 
-PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-id")
-BUCKET_1 = os.environ.get("GCP_GCS_BUCKET_1", "test-gcs-example-bucket")
-GCS_ACL_ENTITY = os.environ.get("GCS_ACL_ENTITY", "allUsers")
+PROJECT_ID = os.getenv("GCP_PROJECT_ID", "example-id")
+BUCKET_1 = os.getenv("GCP_GCS_BUCKET_1", "test-gcs-example-bucket")
+GCS_ACL_ENTITY = os.getenv("GCS_ACL_ENTITY", "allUsers")
 GCS_ACL_BUCKET_ROLE = "OWNER"
 GCS_ACL_OBJECT_ROLE = "OWNER"
 
-BUCKET_2 = os.environ.get("GCP_GCS_BUCKET_2", "test-gcs-example-bucket-2")
+BUCKET_2 = os.getenv("GCP_GCS_BUCKET_2", "test-gcs-example-bucket-2")
 
 temp_dir_path = gettempdir()
-PATH_TO_TRANSFORM_SCRIPT = os.environ.get(
+PATH_TO_TRANSFORM_SCRIPT = os.getenv(
     "GCP_GCS_PATH_TO_TRANSFORM_SCRIPT", os.path.join(temp_dir_path, 
"transform_script.py")
 )
-PATH_TO_UPLOAD_FILE = os.environ.get(
+PATH_TO_UPLOAD_FILE = os.getenv(
     "GCP_GCS_PATH_TO_UPLOAD_FILE", os.path.join(temp_dir_path, 
"test-gcs-example-upload.txt")
 )
-PATH_TO_UPLOAD_FILE_PREFIX = 
os.environ.get("GCP_GCS_PATH_TO_UPLOAD_FILE_PREFIX", "test-gcs-")
-PATH_TO_SAVED_FILE = os.environ.get(
+PATH_TO_UPLOAD_FILE_PREFIX = os.getenv("GCP_GCS_PATH_TO_UPLOAD_FILE_PREFIX", 
"test-gcs-")
+PATH_TO_SAVED_FILE = os.getenv(
     "GCP_GCS_PATH_TO_SAVED_FILE", os.path.join(temp_dir_path, 
"test-gcs-example-download.txt")
 )
 
 BUCKET_FILE_LOCATION = PATH_TO_UPLOAD_FILE.rpartition("/")[-1]
 
+# Upload 'test-gcs-manual-example-upload.txt' manually in the <BUCKET_1> after 
triggering the DAG.

Review Comment:
   Right. Since, we are starting in parallel, it may happen that, the upload 
task is picked up before the sensor task begins and it may not detect the 
change as expected. Hence, added the comment to manually upload the file. :)
   
   Also, the gcs_object_update_sensor_task needs to be activated after 
gcs_upload_session_complete_task (and not in parallel with it), as the object is 
expected to be detected by the sensor, and the object_update task is there to 
confirm that the manual upload has happened prior to it.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to