ashb commented on a change in pull request #11227:
URL: https://github.com/apache/airflow/pull/11227#discussion_r507643421



##########
File path: airflow/providers/amazon/aws/transfers/s3_to_redshift.py
##########
@@ -17,8 +17,10 @@
 
 from typing import List, Optional, Union
 
+
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.amazon.aws.utils.redshift import build_credentials_block

Review comment:
       Please don't create a new file just for this -- add this method onto 
the Hook instead.

##########
File path: tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
##########
@@ -66,24 +70,79 @@ def test_execute(
             task_id="task_id",
             table_as_file_name=table_as_file_name,
             dag=None,
-        ).execute(None)
+        )
+
+        op.execute(None)
 
         unload_options = '\n\t\t\t'.join(unload_options)
         select_query = "SELECT * FROM {schema}.{table}".format(schema=schema, 
table=table)
-        unload_query = """
-                    UNLOAD ('{select_query}')
-                    TO 's3://{s3_bucket}/{s3_key}'
-                    with credentials
-                    
'aws_access_key_id={access_key};aws_secret_access_key={secret_key}'
-                    {unload_options};
-                    """.format(
-            select_query=select_query,
+        credentials_block = build_credentials_block(mock_session.return_value)
+
+        unload_query = op._build_unload_query(
+            credentials_block, select_query, expected_s3_key, unload_options
+        )
+
+        assert mock_run.call_count == 1
+        assert access_key in unload_query
+        assert secret_key in unload_query
+        assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], 
unload_query)
+
+    @parameterized.expand(
+        [
+            [True, "key/table_"],
+            [False, "key"],
+        ]
+    )
+    @mock.patch("boto3.session.Session")
+    @mock.patch("airflow.providers.postgres.hooks.postgres.PostgresHook.run")
+    def test_execute_sts_token(
+        self,
+        table_as_file_name,
+        expected_s3_key,
+        mock_run,
+        mock_session,
+    ):
+        access_key = "ASIA_aws_access_key_id"
+        secret_key = "aws_secret_access_key"
+        token = "token"
+        mock_session.return_value = Session(access_key, secret_key, token)
+        mock_session.return_value.access_key = access_key
+        mock_session.return_value.secret_key = secret_key
+        mock_session.return_value.token = token
+        schema = "schema"
+        table = "table"
+        s3_bucket = "bucket"
+        s3_key = "key"
+        unload_options = [
+            'HEADER',
+        ]
+
+        op = RedshiftToS3Operator(
+            schema=schema,
+            table=table,
             s3_bucket=s3_bucket,
-            s3_key=expected_s3_key,
-            access_key=access_key,
-            secret_key=secret_key,
+            s3_key=s3_key,
             unload_options=unload_options,
+            include_header=True,
+            redshift_conn_id="redshift_conn_id",
+            aws_conn_id="aws_conn_id",
+            task_id="task_id",
+            table_as_file_name=table_as_file_name,
+            dag=None,
+        )
+
+        op.execute(None)
+
+        unload_options = '\n\t\t\t'.join(unload_options)
+        select_query = "SELECT * FROM {schema}.{table}".format(schema=schema, 
table=table)
+        credentials_block = build_credentials_block(mock_session.return_value)
+
+        unload_query = op._build_unload_query(

Review comment:
       Same problem here -- this just runs the code again; it doesn't actually 
test what we do in that code.

##########
File path: tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
##########
@@ -66,24 +70,79 @@ def test_execute(
             task_id="task_id",
             table_as_file_name=table_as_file_name,
             dag=None,
-        ).execute(None)
+        )
+
+        op.execute(None)
 
         unload_options = '\n\t\t\t'.join(unload_options)
         select_query = "SELECT * FROM {schema}.{table}".format(schema=schema, 
table=table)
-        unload_query = """
-                    UNLOAD ('{select_query}')
-                    TO 's3://{s3_bucket}/{s3_key}'
-                    with credentials
-                    
'aws_access_key_id={access_key};aws_secret_access_key={secret_key}'
-                    {unload_options};
-                    """.format(
-            select_query=select_query,
+        credentials_block = build_credentials_block(mock_session.return_value)
+
+        unload_query = op._build_unload_query(
+            credentials_block, select_query, expected_s3_key, unload_options
+        )

Review comment:
       By calling this method in the tests, you don't actually test the query 
correctly -- you could change the query to return `GOATS` and the tests would 
still pass. You need to test against a "fixed" string, as the tests were doing 
beforehand.




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to