sdevani commented on a change in pull request #4792: [AIRFLOW-3659] Create Google Cloud Transfer Service Operators
URL: https://github.com/apache/airflow/pull/4792#discussion_r260962340
##########
File path: airflow/contrib/example_dags/example_gcp_transfer.py
##########

@@ -0,0 +1,254 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Example Airflow DAG that demonstrates interactions with Google Cloud Transfer.
+
+This DAG relies on the following OS environment variables:
+
+* GCP_PROJECT_ID - Google Cloud Project to use for the Google Cloud Transfer Service.
+* GCP_DESCRIPTION - Description of the transfer job.
+* GCP_TRANSFER_SOURCE_AWS_BUCKET - Amazon S3 bucket from which files are copied.
+* GCP_TRANSFER_FIRST_TARGET_BUCKET - Google Cloud Storage bucket to which files are copied from AWS.
+  It is also the source bucket for the next step.
+* GCP_TRANSFER_SECOND_TARGET_BUCKET - Google Cloud Storage bucket to which files are copied
+  from the first target bucket.
+* WAIT_FOR_OPERATION_POKE_INTERVAL - Interval, in seconds, at which to check the status of the operation.
+"""
+import os
+from datetime import datetime, timedelta
+
+from airflow import models
+from airflow.contrib.hooks.gcp_transfer_hook import GcpTransferOperationStatus, GcpTransferJobsStatus
+from airflow.contrib.operators.gcp_transfer_operator import (
+    GcpTransferServiceJobCreateOperator,
+    GcpTransferServiceJobDeleteOperator,
+    GcpTransferServiceJobUpdateOperator,
+    GcpTransferServiceOperationsListOperator,
+    GcpTransferServiceOperationGetOperator,
+    GcpTransferServiceOperationPauseOperator,
+    GcpTransferServiceOperationResumeOperator,
+    GcpTransferServiceOperationCancelOperator,
+)
+from airflow.contrib.sensors.gcp_transfer_sensor import GCPTransferServiceWaitForJobStatusSensor
+from airflow.utils.dates import days_ago
+
+# [START howto_operator_gct_common_variables]
+GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
+GCP_DESCRIPTION = os.environ.get('GCP_DESCRIPTION', 'description')
+GCP_TRANSFER_TARGET_BUCKET = os.environ.get('GCP_TRANSFER_TARGET_BUCKET')
+# Environment variables are strings, so cast the poke interval to an int.
+WAIT_FOR_OPERATION_POKE_INTERVAL = int(os.environ.get('WAIT_FOR_OPERATION_POKE_INTERVAL', 5))
+
+GCP_TRANSFER_SOURCE_AWS_BUCKET = os.environ.get('GCP_TRANSFER_SOURCE_AWS_BUCKET')
+GCP_TRANSFER_FIRST_TARGET_BUCKET = os.environ.get(
+    'GCP_TRANSFER_FIRST_TARGET_BUCKET', 'gcp-transfer-first-target'
+)
+GCP_TRANSFER_SECOND_TARGET_BUCKET = os.environ.get(
+    'GCP_TRANSFER_SECOND_TARGET_BUCKET', 'gcp-transfer-second-target'
+)
+# [END howto_operator_gct_common_variables]
+
+# [START howto_operator_gct_create_job_body_aws]
+create_body_aws = {
+    "description": GCP_DESCRIPTION,
+    "status": GcpTransferJobsStatus.ENABLED,
+    "projectId": GCP_PROJECT_ID,
+    "schedule": {
+        "scheduleStartDate": datetime(2015, 1, 1).date(),
+        "scheduleEndDate": datetime(2030, 1, 1).date(),
+        "startTimeOfDay": datetime.utcnow() + timedelta(minutes=2),
+    },
+    "transferSpec": {
+        "awsS3DataSource": {"bucketName": GCP_TRANSFER_SOURCE_AWS_BUCKET},
+        "gcsDataSink": {"bucketName": GCP_TRANSFER_FIRST_TARGET_BUCKET},
+        "transferOptions": {"overwriteObjectsAlreadyExistingInSink": True},
+    },
+}
+# [END howto_operator_gct_create_job_body_aws]
+
+# [START howto_operator_gct_create_job_body_gcp]
+create_body_gcs = {
+    "description": GCP_DESCRIPTION,
+    "status": GcpTransferJobsStatus.ENABLED,
+    "projectId": GCP_PROJECT_ID,
+    "schedule": {
+        "scheduleStartDate": datetime(2015, 1, 1).date(),
+        "scheduleEndDate": datetime(2030, 1, 1).date(),
+        "startTimeOfDay": datetime.utcnow() + timedelta(minutes=2),
+    },
+    "transferSpec": {
+        "gcsDataSource": {"bucketName": GCP_TRANSFER_FIRST_TARGET_BUCKET},
+        "gcsDataSink": {"bucketName": GCP_TRANSFER_SECOND_TARGET_BUCKET},
+        "transferOptions": {"overwriteObjectsAlreadyExistingInSink": True},
+    },
+}
+# [END howto_operator_gct_create_job_body_gcp]
+
+# [START howto_operator_gct_update_job_body]
+update_body = {
+    "projectId": GCP_PROJECT_ID,
+    "transferJob": {"description": "{}_updated".format(GCP_DESCRIPTION)},
+    "updateTransferJobFieldMask": "description",
+}
+# [END howto_operator_gct_update_job_body]
+
+list_filter_dict = {"projectId": GCP_PROJECT_ID, "jobNames": []}
+
+# [START howto_operator_gct_default_args]
+default_args = {'start_date': days_ago(1)}
+# [END howto_operator_gct_default_args]
+
+with models.DAG(
+    'example_gcp_transfer', default_args=default_args, schedule_interval=None  # Override to match your needs
+) as dag:
+
+    def next_dep(task, prev):
+        prev >> task
+        return task
+
+    # [START howto_operator_gct_create_job]
+    create_transfer_job_from_aws = GcpTransferServiceJobCreateOperator(
+        task_id="create_transfer_job_from_aws", body=create_body_aws
+    )
+    # [END howto_operator_gct_create_job]
+
+    prev_task = create_transfer_job_from_aws

Review comment:
   We should set all of the dependencies at the end of the DAG, similar to https://github.com/apache/airflow/blob/master/airflow/contrib/example_dags/example_gcp_compute.py#L113. `next_dep` is essentially a wrapper around `>>`; it doesn't seem necessary.
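For illustration, here is a minimal, self-contained sketch of the pattern the reviewer is pointing at: declare the tasks first, then wire every dependency with `>>` in one place at the end of the DAG block, instead of threading a `prev_task` variable through a `next_dep()` helper. The DAG id and the DummyOperator stand-ins below are hypothetical and are not part of this PR.

# Hypothetical sketch of "all dependencies declared once at the end", in the
# style of example_gcp_compute.py; DummyOperator tasks stand in for the
# transfer operators defined in this PR.
from airflow import models
from airflow.operators.dummy_operator import DummyOperator
from airflow.utils.dates import days_ago

with models.DAG(
    'example_dependency_wiring',
    default_args={'start_date': days_ago(1)},
    schedule_interval=None,
) as dag:
    create_job = DummyOperator(task_id='create_job')      # e.g. GcpTransferServiceJobCreateOperator
    wait_for_job = DummyOperator(task_id='wait_for_job')  # e.g. GCPTransferServiceWaitForJobStatusSensor
    update_job = DummyOperator(task_id='update_job')      # e.g. GcpTransferServiceJobUpdateOperator
    delete_job = DummyOperator(task_id='delete_job')      # e.g. GcpTransferServiceJobDeleteOperator

    # All dependencies in one place, at the end of the DAG definition:
    create_job >> wait_for_job >> update_job >> delete_job

With this layout, inserting or reordering a step is a one-line change to the chain rather than an edit to the threading logic at each task definition.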
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services