This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new ffe8fab  Added select_query to the templated fields in RedshiftToS3Operator (#16767)
ffe8fab is described below

commit ffe8fab6536ac4eec076d48548d7b2e814a55b1f
Author: Weiping He <[email protected]>
AuthorDate: Sat Jul 3 05:50:50 2021 -0700

    Added select_query to the templated fields in RedshiftToS3Operator (#16767)
    
    Co-authored-by: Weiping He <[email protected]>
---
 airflow/providers/amazon/aws/transfers/redshift_to_s3.py    | 9 ++++-----
 tests/providers/amazon/aws/transfers/test_redshift_to_s3.py | 1 +
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
index bbd2c40..98c784f 100644
--- a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
@@ -71,7 +71,7 @@ class RedshiftToS3Operator(BaseOperator):
     :type table_as_file_name: bool
     """
 
-    template_fields = ('s3_bucket', 's3_key', 'schema', 'table', 'unload_options')
+    template_fields = ('s3_bucket', 's3_key', 'schema', 'table', 'unload_options', 'select_query')
     template_ext = ()
     ui_color = '#ededed'
 
@@ -105,11 +105,10 @@ class RedshiftToS3Operator(BaseOperator):
         self.include_header = include_header
         self.table_as_file_name = table_as_file_name
 
-        self._select_query = None
         if select_query:
-            self._select_query = select_query
+            self.select_query = select_query
         elif self.schema and self.table:
-            self._select_query = f"SELECT * FROM {self.schema}.{self.table}"
+            self.select_query = f"SELECT * FROM {self.schema}.{self.table}"
         else:
             raise ValueError(
                 'Please provide both `schema` and `table` params or `select_query` to fetch the data.'
@@ -140,7 +139,7 @@ class RedshiftToS3Operator(BaseOperator):
         unload_options = '\n\t\t\t'.join(self.unload_options)
 
         unload_query = self._build_unload_query(
-            credentials_block, self._select_query, self.s3_key, unload_options
+            credentials_block, self.select_query, self.s3_key, unload_options
         )
 
         self.log.info('Executing UNLOAD command...')
diff --git a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
index b818823..89797e9 100644
--- a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
@@ -213,4 +213,5 @@ class TestRedshiftToS3Transfer(unittest.TestCase):
             'schema',
             'table',
             'unload_options',
+            'select_query',
         )

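As a usage illustration (not part of this commit), the sketch below shows how the newly templated select_query field could render Jinja values such as {{ ds }} before the UNLOAD statement is built. The DAG id, task id, bucket, key, and query below are placeholder assumptions, not values taken from the change.

    # Hypothetical DAG sketch: with 'select_query' in template_fields, Jinja
    # expressions like {{ ds }} are rendered at runtime before UNLOAD runs.
    # Bucket/key/table names here are illustrative placeholders.
    from datetime import datetime

    from airflow import DAG
    from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator

    with DAG(
        dag_id="redshift_unload_example",
        start_date=datetime(2021, 7, 1),
        schedule_interval=None,
    ) as dag:
        unload = RedshiftToS3Operator(
            task_id="unload_orders",
            s3_bucket="my-example-bucket",
            s3_key="exports/orders",
            select_query="SELECT * FROM public.orders WHERE order_date = '{{ ds }}'",
            table_as_file_name=False,  # no table is passed, so keep the S3 key as given
            unload_options=["CSV"],
        )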