This is an automated email from the ASF dual-hosted git repository.

joshfell pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 7354d2eac9 Add sample dag and doc for S3ListOperator (#23449)
7354d2eac9 is described below

commit 7354d2eac990208d5a631c05649c482560bd827b
Author: Vincent <[email protected]>
AuthorDate: Mon May 9 12:21:51 2022 -0600

    Add sample dag and doc for S3ListOperator (#23449)
    
    * Add sample dag and doc for S3ListOperator
    
    * Fix doc
---
 airflow/providers/amazon/aws/example_dags/example_s3.py | 10 ++++++++++
 airflow/providers/amazon/aws/operators/s3.py            |  4 ++++
 docs/apache-airflow-providers-amazon/operators/s3.rst   | 15 +++++++++++++++
 3 files changed, 29 insertions(+)

diff --git a/airflow/providers/amazon/aws/example_dags/example_s3.py 
b/airflow/providers/amazon/aws/example_dags/example_s3.py
index ecd9d374cf..9627703c0b 100644
--- a/airflow/providers/amazon/aws/example_dags/example_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_s3.py
@@ -30,6 +30,7 @@ from airflow.providers.amazon.aws.operators.s3 import (
     S3DeleteObjectsOperator,
     S3FileTransformOperator,
     S3GetBucketTaggingOperator,
+    S3ListOperator,
     S3PutBucketTaggingOperator,
 )
 from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor, 
S3KeysUnchangedSensor
@@ -115,6 +116,14 @@ with DAG(
     )
     # [END howto_operator_s3_create_object]
 
+    # [START howto_operator_s3_list]
+    list_keys = S3ListOperator(
+        task_id="s3_list_operator",
+        bucket=BUCKET_NAME,
+        prefix=PREFIX,
+    )
+    # [END howto_operator_s3_list]
+
     # [START howto_sensor_s3_key_single_key]
     # Check if a file exists
     s3_sensor_one_key = S3KeySensor(
@@ -193,6 +202,7 @@ with DAG(
         get_tagging,
         delete_tagging,
         s3_create_object,
+        list_keys,
         [s3_sensor_one_key, s3_sensor_two_keys, s3_sensor_key_function],
         s3_copy_object,
         s3_sensor_keys_unchanged,
diff --git a/airflow/providers/amazon/aws/operators/s3.py 
b/airflow/providers/amazon/aws/operators/s3.py
index 9238a1bb51..1f4e49755f 100644
--- a/airflow/providers/amazon/aws/operators/s3.py
+++ b/airflow/providers/amazon/aws/operators/s3.py
@@ -618,6 +618,10 @@ class S3ListOperator(BaseOperator):
     This operator returns a python list with the name of objects which can be
     used by `xcom` in the downstream task.
 
+    .. seealso::
+        For more information on how to use this operator, take a look at the 
guide:
+        :ref:`howto/operator:S3ListOperator`
+
     :param bucket: The S3 bucket where to find the objects. (templated)
     :param prefix: Prefix string to filter the objects whose names begin with
         this prefix. (templated)
diff --git a/docs/apache-airflow-providers-amazon/operators/s3.rst 
b/docs/apache-airflow-providers-amazon/operators/s3.rst
index 00f1fe1143..0122220566 100644
--- a/docs/apache-airflow-providers-amazon/operators/s3.rst
+++ b/docs/apache-airflow-providers-amazon/operators/s3.rst
@@ -196,6 +196,21 @@ To create a new (or replace an existing) Amazon S3 object, you can use
     :start-after: [START howto_operator_s3_create_object]
     :end-before: [END howto_operator_s3_create_object]
 
+.. _howto/operator:S3ListOperator:
+
+List Amazon S3 objects
+----------------------
+
+To list all Amazon S3 objects within an Amazon S3 bucket, you can use
+:class:`~airflow.providers.amazon.aws.operators.s3.S3ListOperator`.
+You can specify a ``prefix`` to filter the objects whose names begin with that prefix.
+
+.. exampleinclude:: 
/../../airflow/providers/amazon/aws/example_dags/example_s3.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_s3_list]
+    :end-before: [END howto_operator_s3_list]
+
 .. _howto/operator:S3CopyObjectOperator:
 
 Copy an Amazon S3 object

Reply via email to