ferruzzi commented on code in PR #23428:
URL: https://github.com/apache/airflow/pull/23428#discussion_r865028544
##########
airflow/providers/amazon/aws/example_dags/example_athena.py:
##########
@@ -82,66 +65,60 @@ def read_results_from_s3(query_execution_id):
dag_id='example_athena',
schedule_interval=None,
start_date=datetime(2021, 1, 1),
- dagrun_timeout=timedelta(minutes=60),
tags=['example'],
catchup=False,
) as dag:
- # [START howto_athena_operator_and_sensor]
- # Using a task-decorated function to create a CSV file in S3
- add_sample_data_to_s3 = add_sample_data_to_s3()
+ upload_sample_data = S3CreateObjectOperator(
+ task_id='upload_sample_data',
+ s3_bucket=S3_BUCKET,
+ s3_key=f'{S3_KEY}/{ATHENA_TABLE}/{SAMPLE_FILENAME}',
+ data=SAMPLE_DATA,
+ replace=True,
+ )
create_table = AthenaOperator(
- task_id='setup__create_table',
+ task_id='create_table',
query=QUERY_CREATE_TABLE,
database=ATHENA_DATABASE,
output_location=f's3://{S3_BUCKET}/{S3_KEY}',
- sleep_time=30,
- max_tries=None,
)
+ # [START howto_athena_operator]
read_table = AthenaOperator(
- task_id='query__read_table',
+ task_id='query_table',
Review Comment:
Sure, I can change that quickly
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]