This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 509b277dce Update sample dag and doc for RDS (#23651)
509b277dce is described below
commit 509b277dce50fb1fbc25aea565182933bb506ee2
Author: Vincent <[email protected]>
AuthorDate: Sun May 22 07:40:21 2022 -0400
Update sample dag and doc for RDS (#23651)
---
.../amazon/aws/example_dags/example_rds.py | 149 ---------------
.../amazon/aws/example_dags/example_rds_event.py | 58 ++++++
.../amazon/aws/example_dags/example_rds_export.py | 71 ++++++++
.../aws/example_dags/example_rds_snapshot.py | 76 ++++++++
airflow/providers/amazon/aws/sensors/rds.py | 4 +-
.../operators/rds.rst | 200 +++++++++------------
6 files changed, 296 insertions(+), 262 deletions(-)
diff --git a/airflow/providers/amazon/aws/example_dags/example_rds.py
b/airflow/providers/amazon/aws/example_dags/example_rds.py
deleted file mode 100644
index f30404b9d6..0000000000
--- a/airflow/providers/amazon/aws/example_dags/example_rds.py
+++ /dev/null
@@ -1,149 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from datetime import datetime
-
-from airflow import DAG
-from airflow.providers.amazon.aws.operators.rds import (
- RdsCancelExportTaskOperator,
- RdsCopyDbSnapshotOperator,
- RdsCreateDbSnapshotOperator,
- RdsCreateEventSubscriptionOperator,
- RdsDeleteDbSnapshotOperator,
- RdsDeleteEventSubscriptionOperator,
- RdsStartExportTaskOperator,
-)
-from airflow.providers.amazon.aws.sensors.rds import
RdsExportTaskExistenceSensor, RdsSnapshotExistenceSensor
-
-# [START rds_snapshots_howto_guide]
-with DAG(
- dag_id='rds_snapshots', start_date=datetime(2021, 1, 1),
schedule_interval=None, catchup=False
-) as dag:
- # [START howto_guide_rds_create_snapshot]
- create_snapshot = RdsCreateDbSnapshotOperator(
- task_id='create_snapshot',
- db_type='instance',
- db_identifier='auth-db',
- db_snapshot_identifier='auth-db-snap',
- aws_conn_id='aws_default',
- hook_params={'region_name': 'us-east-1'},
- )
- # [END howto_guide_rds_create_snapshot]
-
- # [START howto_guide_rds_copy_snapshot]
- copy_snapshot = RdsCopyDbSnapshotOperator(
- task_id='copy_snapshot',
- db_type='instance',
- target_db_snapshot_identifier='auth-db-snap-backup',
- source_db_snapshot_identifier='auth-db-snap',
- aws_conn_id='aws_default',
- hook_params={'region_name': 'us-east-1'},
- )
- # [END howto_guide_rds_copy_snapshot]
-
- # [START howto_guide_rds_delete_snapshot]
- delete_snapshot = RdsDeleteDbSnapshotOperator(
- task_id='delete_snapshot',
- db_type='instance',
- db_snapshot_identifier='auth-db-snap-backup',
- aws_conn_id='aws_default',
- hook_params={'region_name': 'us-east-1'},
- )
- # [END howto_guide_rds_delete_snapshot]
-
- create_snapshot >> copy_snapshot >> delete_snapshot
-# [END rds_snapshots_howto_guide]
-
-# [START rds_exports_howto_guide]
-with DAG(dag_id='rds_exports', start_date=datetime(2021, 1, 1),
schedule_interval=None, catchup=False) as dag:
- # [START howto_guide_rds_start_export]
- start_export = RdsStartExportTaskOperator(
- task_id='start_export',
- export_task_identifier='export-auth-db-snap-{{ ds }}',
- source_arn='arn:aws:rds:<region>:<account
number>:snapshot:auth-db-snap',
- s3_bucket_name='my_s3_bucket',
- s3_prefix='some/prefix',
- iam_role_arn='arn:aws:iam:<region>:<account number>:role/MyRole',
- kms_key_id='arn:aws:kms:<region>:<account
number>:key/*****-****-****-****-********',
- aws_conn_id='aws_default',
- hook_params={'region_name': 'us-east-1'},
- )
- # [END howto_guide_rds_start_export]
-
- # [START howto_guide_rds_cancel_export]
- cancel_export = RdsCancelExportTaskOperator(
- task_id='cancel_export',
- export_task_identifier='export-auth-db-snap-{{ ds }}',
- aws_conn_id='aws_default',
- hook_params={'region_name': 'us-east-1'},
- )
- # [END howto_guide_rds_cancel_export]
-
- start_export >> cancel_export
-# [END rds_exports_howto_guide]
-
-# [START rds_events_howto_guide]
-with DAG(dag_id='rds_events', start_date=datetime(2021, 1, 1),
schedule_interval=None, catchup=False) as dag:
- # [START howto_guide_rds_create_subscription]
- create_subscription = RdsCreateEventSubscriptionOperator(
- task_id='create_subscription',
- subscription_name='my-topic-subscription',
- sns_topic_arn='arn:aws:sns:<region>:<account number>:MyTopic',
- source_type='db-instance',
- source_ids=['auth-db'],
- event_categories=['Availability', 'Backup'],
- aws_conn_id='aws_default',
- hook_params={'region_name': 'us-east-1'},
- )
- # [END howto_guide_rds_create_subscription]
-
- # [START howto_guide_rds_delete_subscription]
- delete_subscription = RdsDeleteEventSubscriptionOperator(
- task_id='delete_subscription',
- subscription_name='my-topic-subscription',
- aws_conn_id='aws_default',
- hook_params={'region_name': 'us-east-1'},
- )
- # [END howto_guide_rds_delete_subscription]
-
- create_subscription >> delete_subscription
-# [END rds_events_howto_guide]
-
-# [START rds_sensors_howto_guide]
-with DAG(dag_id='rds_events', start_date=datetime(2021, 1, 1),
schedule_interval=None, catchup=False) as dag:
- # [START howto_guide_rds_snapshot_sensor]
- snapshot_sensor = RdsSnapshotExistenceSensor(
- task_id='snapshot_sensor',
- db_type='instance',
- db_snapshot_identifier='auth-db-snap-{{ ds }}',
- target_statuses=['available'],
- aws_conn_id='aws_default',
- hook_params={'region_name': 'us-east-1'},
- )
- # [END howto_guide_rds_snapshot_sensor]
-
- # [START howto_guide_rds_export_sensor]
- export_sensor = RdsExportTaskExistenceSensor(
- task_id='export_sensor',
- export_task_identifier='export-auth-db-snap-{{ ds }}',
- target_statuses=['starting', 'in_progress', 'complete', 'canceling',
'canceled'],
- aws_conn_id='aws_default',
- hook_params={'region_name': 'us-east-1'},
- )
- # [END howto_guide_rds_export_sensor]
-# [END rds_sensors_howto_guide]
diff --git a/airflow/providers/amazon/aws/example_dags/example_rds_event.py
b/airflow/providers/amazon/aws/example_dags/example_rds_event.py
new file mode 100644
index 0000000000..4ec8b6f5be
--- /dev/null
+++ b/airflow/providers/amazon/aws/example_dags/example_rds_event.py
@@ -0,0 +1,58 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from datetime import datetime
+from os import getenv
+
+from airflow import DAG
+from airflow.models.baseoperator import chain
+from airflow.providers.amazon.aws.operators.rds import (
+ RdsCreateEventSubscriptionOperator,
+ RdsDeleteEventSubscriptionOperator,
+)
+
+SUBSCRIPTION_NAME = getenv("SUBSCRIPTION_NAME", "subscription-name")
+SNS_TOPIC_ARN = getenv("SNS_TOPIC_ARN", "arn:aws:sns:<region>:<account
number>:MyTopic")
+RDS_DB_IDENTIFIER = getenv("RDS_DB_IDENTIFIER", "database-identifier")
+
+with DAG(
+ dag_id='example_rds_event',
+ schedule_interval=None,
+ start_date=datetime(2021, 1, 1),
+ tags=['example'],
+ catchup=False,
+) as dag:
+ # [START howto_operator_rds_create_event_subscription]
+ create_subscription = RdsCreateEventSubscriptionOperator(
+ task_id='create_subscription',
+ subscription_name=SUBSCRIPTION_NAME,
+ sns_topic_arn=SNS_TOPIC_ARN,
+ source_type='db-instance',
+ source_ids=[RDS_DB_IDENTIFIER],
+ event_categories=['availability'],
+ )
+ # [END howto_operator_rds_create_event_subscription]
+
+ # [START howto_operator_rds_delete_event_subscription]
+ delete_subscription = RdsDeleteEventSubscriptionOperator(
+ task_id='delete_subscription',
+ subscription_name=SUBSCRIPTION_NAME,
+ )
+ # [END howto_operator_rds_delete_event_subscription]
+
+ chain(create_subscription, delete_subscription)
diff --git a/airflow/providers/amazon/aws/example_dags/example_rds_export.py
b/airflow/providers/amazon/aws/example_dags/example_rds_export.py
new file mode 100644
index 0000000000..1dce580491
--- /dev/null
+++ b/airflow/providers/amazon/aws/example_dags/example_rds_export.py
@@ -0,0 +1,71 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from datetime import datetime
+from os import getenv
+
+from airflow import DAG
+from airflow.models.baseoperator import chain
+from airflow.providers.amazon.aws.operators.rds import
RdsCancelExportTaskOperator, RdsStartExportTaskOperator
+from airflow.providers.amazon.aws.sensors.rds import
RdsExportTaskExistenceSensor
+
+RDS_EXPORT_TASK_IDENTIFIER = getenv("RDS_EXPORT_TASK_IDENTIFIER",
"export-task-identifier")
+RDS_EXPORT_SOURCE_ARN = getenv(
+ "RDS_EXPORT_SOURCE_ARN", "arn:aws:rds:<region>:<account
number>:snapshot:snap-id"
+)
+BUCKET_NAME = getenv("BUCKET_NAME", "bucket-name")
+BUCKET_PREFIX = getenv("BUCKET_PREFIX", "bucket-prefix")
+ROLE_ARN = getenv("ROLE_ARN", "arn:aws:iam::<account number>:role/Role")
+KMS_KEY_ID = getenv("KMS_KEY_ID", "arn:aws:kms:<region>:<account
number>:key/key-id")
+
+
+with DAG(
+ dag_id='example_rds_export',
+ schedule_interval=None,
+ start_date=datetime(2021, 1, 1),
+ tags=['example'],
+ catchup=False,
+) as dag:
+ # [START howto_operator_rds_start_export_task]
+ start_export = RdsStartExportTaskOperator(
+ task_id='start_export',
+ export_task_identifier=RDS_EXPORT_TASK_IDENTIFIER,
+ source_arn=RDS_EXPORT_SOURCE_ARN,
+ s3_bucket_name=BUCKET_NAME,
+ s3_prefix=BUCKET_PREFIX,
+ iam_role_arn=ROLE_ARN,
+ kms_key_id=KMS_KEY_ID,
+ )
+ # [END howto_operator_rds_start_export_task]
+
+ # [START howto_operator_rds_cancel_export]
+ cancel_export = RdsCancelExportTaskOperator(
+ task_id='cancel_export',
+ export_task_identifier=RDS_EXPORT_TASK_IDENTIFIER,
+ )
+ # [END howto_operator_rds_cancel_export]
+
+ # [START howto_sensor_rds_export_task_existence]
+ export_sensor = RdsExportTaskExistenceSensor(
+ task_id='export_sensor',
+ export_task_identifier=RDS_EXPORT_TASK_IDENTIFIER,
+ target_statuses=['canceled'],
+ )
+ # [END howto_sensor_rds_export_task_existence]
+
+ chain(start_export, cancel_export, export_sensor)
diff --git a/airflow/providers/amazon/aws/example_dags/example_rds_snapshot.py
b/airflow/providers/amazon/aws/example_dags/example_rds_snapshot.py
new file mode 100644
index 0000000000..f7e1d02e07
--- /dev/null
+++ b/airflow/providers/amazon/aws/example_dags/example_rds_snapshot.py
@@ -0,0 +1,76 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from datetime import datetime
+from os import getenv
+
+from airflow import DAG
+from airflow.models.baseoperator import chain
+from airflow.providers.amazon.aws.operators.rds import (
+ RdsCopyDbSnapshotOperator,
+ RdsCreateDbSnapshotOperator,
+ RdsDeleteDbSnapshotOperator,
+)
+from airflow.providers.amazon.aws.sensors.rds import RdsSnapshotExistenceSensor
+
+RDS_DB_IDENTIFIER = getenv("RDS_DB_IDENTIFIER", "database-identifier")
+RDS_DB_SNAPSHOT_IDENTIFIER = getenv("RDS_DB_SNAPSHOT_IDENTIFIER",
"database-1-snap")
+
+with DAG(
+ dag_id='example_rds_snapshot',
+ schedule_interval=None,
+ start_date=datetime(2021, 1, 1),
+ tags=['example'],
+ catchup=False,
+) as dag:
+ # [START howto_operator_rds_create_db_snapshot]
+ create_snapshot = RdsCreateDbSnapshotOperator(
+ task_id='create_snapshot',
+ db_type='instance',
+ db_identifier=RDS_DB_IDENTIFIER,
+ db_snapshot_identifier=RDS_DB_SNAPSHOT_IDENTIFIER,
+ )
+ # [END howto_operator_rds_create_db_snapshot]
+
+ # [START howto_sensor_rds_snapshot_existence]
+ snapshot_sensor = RdsSnapshotExistenceSensor(
+ task_id='snapshot_sensor',
+ db_type='instance',
+ db_snapshot_identifier=RDS_DB_IDENTIFIER,
+ target_statuses=['available'],
+ )
+ # [END howto_sensor_rds_snapshot_existence]
+
+ # [START howto_operator_rds_copy_snapshot]
+ copy_snapshot = RdsCopyDbSnapshotOperator(
+ task_id='copy_snapshot',
+ db_type='instance',
+ source_db_snapshot_identifier=RDS_DB_IDENTIFIER,
+ target_db_snapshot_identifier=f'{RDS_DB_IDENTIFIER}-copy',
+ )
+ # [END howto_operator_rds_copy_snapshot]
+
+ # [START howto_operator_rds_delete_snapshot]
+ delete_snapshot = RdsDeleteDbSnapshotOperator(
+ task_id='delete_snapshot',
+ db_type='instance',
+ db_snapshot_identifier=RDS_DB_IDENTIFIER,
+ )
+ # [END howto_operator_rds_delete_snapshot]
+
+ chain(create_snapshot, snapshot_sensor, copy_snapshot, delete_snapshot)
diff --git a/airflow/providers/amazon/aws/sensors/rds.py
b/airflow/providers/amazon/aws/sensors/rds.py
index 1c74d5ae8f..3c24c82fbf 100644
--- a/airflow/providers/amazon/aws/sensors/rds.py
+++ b/airflow/providers/amazon/aws/sensors/rds.py
@@ -71,7 +71,7 @@ class RdsSnapshotExistenceSensor(RdsBaseSensor):
.. seealso::
For more information on how to use this operator, take a look at the
guide:
- :ref:`howto/operator:RdsSnapshotExistenceSensor`
+ :ref:`howto/sensor:RdsSnapshotExistenceSensor`
:param db_type: Type of the DB - either "instance" or "cluster"
:param db_snapshot_identifier: The identifier for the DB snapshot
@@ -113,7 +113,7 @@ class RdsExportTaskExistenceSensor(RdsBaseSensor):
.. seealso::
For more information on how to use this operator, take a look at the
guide:
- :ref:`howto/operator:RdsExportTaskExistenceSensor`
+ :ref:`howto/sensor:RdsExportTaskExistenceSensor`
:param export_task_identifier: A unique identifier for the snapshot export
task.
:param target_statuses: Target status of export task
diff --git a/docs/apache-airflow-providers-amazon/operators/rds.rst
b/docs/apache-airflow-providers-amazon/operators/rds.rst
index ee65a55660..7e48a983f4 100644
--- a/docs/apache-airflow-providers-amazon/operators/rds.rst
+++ b/docs/apache-airflow-providers-amazon/operators/rds.rst
@@ -15,186 +15,164 @@
specific language governing permissions and limitations
under the License.
-RDS management operators
-=====================================
+======================================================
+Amazon Relational Database Service Documentation (RDS)
+======================================================
+
+`Amazon Relational Database Service (Amazon RDS)
<https://aws.amazon.com/rds/>`__ is a web service that makes it
+easier to set up, operate, and scale a relational database in the cloud.
+It provides cost-efficient, resizable capacity for an industry-standard
relational database and manages
+common database administration tasks.
+
+Prerequisite Tasks
+------------------
-.. contents::
- :depth: 1
- :local:
+.. include:: _partials/prerequisite_tasks.rst
+Operators
+---------
.. _howto/operator:RDSCreateDBSnapshotOperator:
-Create DB snapshot
-""""""""""""""""""
+Create a database snapshot
+==========================
-To create a snapshot of AWS RDS DB instance or DB cluster snapshot you can use
+To create a snapshot of an Amazon RDS database instance or cluster you can use
:class:`~airflow.providers.amazon.aws.operators.rds.RDSCreateDBSnapshotOperator`.
-The source DB instance must be in the ``available`` or
``storage-optimization`` state.
+The source database instance must be in the ``available`` or
``storage-optimization`` state.
-.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds.py
+.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds_snapshot.py
:language: python
- :start-after: [START rds_snapshots_howto_guide]
- :end-before: [END rds_snapshots_howto_guide]
-
-
-This Operator leverages the AWS CLI
-`create-db-snapshot
<https://docs.aws.amazon.com/cli/latest/reference/rds/create-db-snapshot.html>`__
API
-`create-db-cluster-snapshot
<https://docs.aws.amazon.com/cli/latest/reference/rds/create-db-cluster-snapshot.html>`__
API
-
+ :dedent: 4
+ :start-after: [START howto_operator_rds_create_db_snapshot]
+ :end-before: [END howto_operator_rds_create_db_snapshot]
.. _howto/operator:RDSCopyDBSnapshotOperator:
-Copy DB snapshot
-""""""""""""""""
+Copy a database snapshot
+========================
-To copy AWS RDS DB instance or DB cluster snapshot you can use
+To copy a snapshot of an Amazon RDS database instance or cluster you can use
:class:`~airflow.providers.amazon.aws.operators.rds.RDSCopyDBSnapshotOperator`.
-The source DB snapshot must be in the ``available`` state.
+The source database snapshot must be in the ``available`` state.
-.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds.py
+.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds_snapshot.py
:language: python
- :start-after: [START howto_guide_rds_copy_snapshot]
- :end-before: [END howto_guide_rds_copy_snapshot]
-
-This Operator leverages the AWS CLI
-`copy-db-snapshot
<https://docs.aws.amazon.com/cli/latest/reference/rds/copy-db-snapshot.html>`__
API
-`copy-db-cluster-snapshot
<https://docs.aws.amazon.com/cli/latest/reference/rds/copy-db-cluster-snapshot.html>`__
API
-
+ :dedent: 4
+ :start-after: [START howto_operator_rds_copy_snapshot]
+ :end-before: [END howto_operator_rds_copy_snapshot]
.. _howto/operator:RDSDeleteDBSnapshotOperator:
-Delete DB snapshot
-""""""""""""""""""
+Delete a database snapshot
+==========================
-To delete AWS RDS DB instance or DB cluster snapshot you can use
+To delete a snapshot of an Amazon RDS database instance or cluster you can use
:class:`~airflow.providers.amazon.aws.operators.rds.RDSDeleteDBSnapshotOperator`.
-The DB snapshot must be in the ``available`` state to be deleted.
+The database snapshot must be in the ``available`` state to be deleted.
-.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds.py
+.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds_snapshot.py
:language: python
- :start-after: [START howto_guide_rds_delete_snapshot]
- :end-before: [END howto_guide_rds_delete_snapshot]
-
-This Operator leverages the AWS CLI
-`delete-db-snapshot
<https://docs.aws.amazon.com/cli/latest/reference/rds/delete-db-snapshot.html>`__
API
-`delete-db-cluster-snapshot
<https://docs.aws.amazon.com/cli/latest/reference/rds/delete-db-cluster-snapshot.html>`__
API
-
+ :dedent: 4
+ :start-after: [START howto_operator_rds_delete_snapshot]
+ :end-before: [END howto_operator_rds_delete_snapshot]
.. _howto/operator:RDSStartExportTaskOperator:
-Start export task
-"""""""""""""""""
+Export an Amazon RDS snapshot to Amazon S3
+==========================================
-To start task that exports RDS snapshot to S3 you can use
+To export an Amazon RDS snapshot to Amazon S3 you can use
:class:`~airflow.providers.amazon.aws.operators.rds.RDSStartExportTaskOperator`.
The provided IAM role must have access to the S3 bucket.
-.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds.py
+.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds_export.py
:language: python
- :start-after: [START howto_guide_rds_start_export]
- :end-before: [END howto_guide_rds_start_export]
-
-This Operator leverages the AWS CLI
-`start-export-task
<https://docs.aws.amazon.com/cli/latest/reference/rds/start-export-task.html>`__
API
-
+ :dedent: 4
+ :start-after: [START howto_operator_rds_start_export_task]
+ :end-before: [END howto_operator_rds_start_export_task]
.. _howto/operator:RDSCancelExportTaskOperator:
-Cancel export task
-""""""""""""""""""
+Cancel an Amazon RDS export task
+================================
-To cancel task that exports RDS snapshot to S3 you can use
+To cancel an Amazon RDS export task to S3 you can use
:class:`~airflow.providers.amazon.aws.operators.rds.RDSCancelExportTaskOperator`.
Any data that has already been written to the S3 bucket isn't removed.
-.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds.py
+.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds_export.py
:language: python
- :start-after: [START howto_guide_rds_cancel_export]
- :end-before: [END howto_guide_rds_cancel_export]
-
-This Operator leverages the AWS CLI
-`cancel-export-task
<https://docs.aws.amazon.com/cli/latest/reference/rds/cancel-export-task.html>`__
API
-
+ :dedent: 4
+ :start-after: [START howto_operator_rds_cancel_export]
+ :end-before: [END howto_operator_rds_cancel_export]
.. _howto/operator:RDSCreateEventSubscriptionOperator:
-Create event subscription
-"""""""""""""""""""""""""
+Subscribe to an Amazon RDS event notification
+=============================================
-To create event subscription you can use
+To create an Amazon RDS event subscription you can use
:class:`~airflow.providers.amazon.aws.operators.rds.RDSCreateEventSubscriptionOperator`.
-This action requires a topic Amazon Resource Name (ARN) created by either the
RDS console, the SNS console, or the SNS API.
-To obtain an ARN with SNS, you must create a topic in Amazon SNS and subscribe
to the topic.
-RDS event notification is only available for not encrypted SNS topics.
+This action requires an Amazon SNS topic Amazon Resource Name (ARN).
+Amazon RDS event notification is only available for unencrypted SNS topics.
If you specify an encrypted SNS topic, event notifications are not sent for
the topic.
-.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds.py
+.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds_event.py
:language: python
- :start-after: [START howto_guide_rds_create_subscription]
- :end-before: [END howto_guide_rds_create_subscription]
-
-This Operator leverages the AWS CLI
-`create-event-subscription
<https://docs.aws.amazon.com/cli/latest/reference/rds/create-event-subscription.html>`__
API
-
+ :dedent: 4
+ :start-after: [START howto_operator_rds_create_event_subscription]
+ :end-before: [END howto_operator_rds_create_event_subscription]
.. _howto/operator:RDSDeleteEventSubscriptionOperator:
-Delete event subscription
-"""""""""""""""""""""""""
+Unsubscribe from an Amazon RDS event notification
+=================================================
-To delete event subscription you can use
-:class:`~airflow.providers.amazon.aws.operators.rds.RDSDeleteEventSubscriptionOperator`
+To delete an Amazon RDS event subscription you can use
+:class:`~airflow.providers.amazon.aws.operators.rds.RDSDeleteEventSubscriptionOperator`.
-.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds.py
+.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds_event.py
:language: python
- :start-after: [START howto_guide_rds_delete_subscription]
- :end-before: [END howto_guide_rds_delete_subscription]
+ :dedent: 4
+ :start-after: [START howto_operator_rds_delete_event_subscription]
+ :end-before: [END howto_operator_rds_delete_event_subscription]
-This Operator leverages the AWS CLI
-`delete-event-subscription
<https://docs.aws.amazon.com/cli/latest/reference/rds/delete-event-subscription.html>`__
API
+Sensors
+-------
+.. _howto/sensor:RdsSnapshotExistenceSensor:
-RDS management sensors
+Wait on an Amazon RDS snapshot status
=====================================
-.. contents::
- :depth: 1
- :local:
-
-
-.. _howto/operator:RdsSnapshotExistenceSensor:
-
-DB snapshot sensor
-""""""""""""""""""
-
-To wait a snapshot with certain statuses of AWS RDS DB instance or DB cluster
snapshot you can use
+To wait for an Amazon RDS snapshot with specific statuses you can use
:class:`~airflow.providers.amazon.aws.sensors.rds.RdsSnapshotExistenceSensor`.
-By default, sensor waits existence of snapshot with status ``available``.
+By default, the sensor waits for the existence of a snapshot with status
``available``.
-.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds.py
+.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds_snapshot.py
:language: python
- :start-after: [START howto_guide_rds_snapshot_sensor]
- :end-before: [END howto_guide_rds_snapshot_sensor]
+ :dedent: 4
+ :start-after: [START howto_sensor_rds_snapshot_existence]
+ :end-before: [END howto_sensor_rds_snapshot_existence]
-.. _howto/operator:RdsExportTaskExistenceSensor:
+.. _howto/sensor:RdsExportTaskExistenceSensor:
-Export task sensor
-""""""""""""""""""
+Wait on an Amazon RDS export task status
+========================================
-To wait a snapshot export task with certain statuses you can use
+To wait for an Amazon RDS snapshot export task with specific statuses you
can use
:class:`~airflow.providers.amazon.aws.sensors.rds.RdsExportTaskExistenceSensor`.
-By default, sensor waits existence of export task with status ``available``.
+By default, the sensor waits for the existence of an export task with status
``available``.
-.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds.py
+.. exampleinclude::
/../../airflow/providers/amazon/aws/example_dags/example_rds_export.py
:language: python
- :start-after: [START howto_guide_rds_export_sensor]
- :end-before: [END howto_guide_rds_export_sensor]
+ :dedent: 4
+ :start-after: [START howto_sensor_rds_export_task_existence]
+ :end-before: [END howto_sensor_rds_export_task_existence]
Reference
---------
-For further information, look at:
-
-* `Boto3 Library Documentation for RDS
<https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/rds.html>`__
+* `AWS boto3 library documentation for RDS
<https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/rds.html>`__