Brooke-white commented on a change in pull request #18447:
URL: https://github.com/apache/airflow/pull/18447#discussion_r716999098



##########
File path: airflow/providers/amazon/aws/hooks/redshift_statement.py
##########
@@ -0,0 +1,144 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Interact with AWS Redshift, using the boto3 library."""
+
+from typing import Callable, Dict, Optional, Tuple, Union
+
+import redshift_connector
+from redshift_connector import Connection as RedshiftConnection
+
+from airflow.hooks.dbapi import DbApiHook
+
+
+class RedshiftStatementHook(DbApiHook):

Review comment:
       That sounds good to me — I'm happy to make this change if the others here 
agree with taking this route @josh-fell @JavierLopezT 

##########
File path: airflow/providers/amazon/aws/hooks/redshift_statement.py
##########
@@ -0,0 +1,159 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Execute statements against Amazon Redshift, using redshift_connector."""
+
+from typing import Callable, Dict, Optional, Union
+
+import redshift_connector
+from redshift_connector import Connection as RedshiftConnection
+
+from airflow.hooks.dbapi import DbApiHook
+
+
+class RedshiftStatementHook(DbApiHook):
+    """
+    Execute statements against Amazon Redshift, using redshift_connector
+
+    This hook requires the redshift_conn_id connection. This connection must
+    be initialized with the host, port, login, password. Additional connection
+    options can be passed to extra as a JSON string.
+
+    :param redshift_conn_id: reference to
+        :ref:`Amazon Redshift connection id<howto/connection:redshift>`
+    :type redshift_conn_id: str
+
+    .. note::
+        get_sqlalchemy_engine() and get_uri() depend on 
sqlalchemy-amazon-redshift
+    """
+
+    conn_name_attr = 'redshift_conn_id'
+    default_conn_name = 'redshift_default'
+    conn_type = 'redshift+redshift_connector'
+    hook_name = 'Amazon Redshift'
+    supports_autocommit = True
+
+    @staticmethod
+    def get_ui_field_behavior() -> Dict:
+        """Returns custom field behavior"""
+        return {
+            "hidden_fields": [],
+            "relabeling": {'login': 'User', 'schema': 'Database'},
+        }
+
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+
+    def _get_conn_params(self) -> Dict[str, Union[str, int]]:
+        """Helper method to retrieve connection args"""
+        conn = self.get_connection(
+            self.redshift_conn_id  # type: ignore[attr-defined]  # pylint: 
disable=no-member
+        )
+
+        conn_params: Dict[str, Union[str, int]] = {
+            "user": conn.login or '',
+            "password": conn.password or '',
+            "host": conn.host or '',
+            "port": conn.port or 5439,
+            "database": conn.schema or '',
+        }
+
+        return conn_params
+
+    def _get_conn_kwargs(self) -> Dict:
+        """Helper method to retrieve connection kwargs"""
+        conn = self.get_connection(

Review comment:
       cached property `conn` added, and `_get_conn_kwargs` removed in a1de44e

##########
File path: airflow/providers/amazon/aws/hooks/redshift_statement.py
##########
@@ -0,0 +1,159 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Execute statements against Amazon Redshift, using redshift_connector."""
+
+from typing import Callable, Dict, Optional, Union
+
+import redshift_connector
+from redshift_connector import Connection as RedshiftConnection
+
+from airflow.hooks.dbapi import DbApiHook
+
+
+class RedshiftStatementHook(DbApiHook):
+    """
+    Execute statements against Amazon Redshift, using redshift_connector
+
+    This hook requires the redshift_conn_id connection. This connection must
+    be initialized with the host, port, login, password. Additional connection
+    options can be passed to extra as a JSON string.
+
+    :param redshift_conn_id: reference to
+        :ref:`Amazon Redshift connection id<howto/connection:redshift>`
+    :type redshift_conn_id: str
+
+    .. note::
+        get_sqlalchemy_engine() and get_uri() depend on 
sqlalchemy-amazon-redshift
+    """
+
+    conn_name_attr = 'redshift_conn_id'
+    default_conn_name = 'redshift_default'
+    conn_type = 'redshift+redshift_connector'
+    hook_name = 'Amazon Redshift'
+    supports_autocommit = True
+
+    @staticmethod
+    def get_ui_field_behavior() -> Dict:
+        """Returns custom field behavior"""
+        return {
+            "hidden_fields": [],
+            "relabeling": {'login': 'User', 'schema': 'Database'},
+        }
+
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+
+    def _get_conn_params(self) -> Dict[str, Union[str, int]]:
+        """Helper method to retrieve connection args"""
+        conn = self.get_connection(
+            self.redshift_conn_id  # type: ignore[attr-defined]  # pylint: 
disable=no-member
+        )
+
+        conn_params: Dict[str, Union[str, int]] = {
+            "user": conn.login or '',
+            "password": conn.password or '',
+            "host": conn.host or '',
+            "port": conn.port or 5439,
+            "database": conn.schema or '',
+        }
+
+        return conn_params
+
+    def _get_conn_kwargs(self) -> Dict:
+        """Helper method to retrieve connection kwargs"""
+        conn = self.get_connection(
+            self.redshift_conn_id  # type: ignore[attr-defined]  # pylint: 
disable=no-member
+        )
+
+        return conn.extra_dejson
+
+    def get_uri(self) -> str:
+        """
+        Override DbApiHook get_uri method for get_sqlalchemy_engine()
+
+        .. note::
+            Value passed to connection extra parameter will be excluded
+            from returned uri but passed to get_sqlalchemy_engine()
+            by default
+        """
+        from sqlalchemy.engine.url import URL
+
+        conn_params = self._get_conn_params()
+
+        conn = self.get_connection(
+            self.redshift_conn_id  # type: ignore[attr-defined]  # pylint: 
disable=no-member
+        )
+
+        conn_type = RedshiftStatementHook.conn_type if not conn.conn_type else 
conn.conn_type

Review comment:
       done in d90b760

##########
File path: airflow/providers/amazon/aws/hooks/redshift_statement.py
##########
@@ -0,0 +1,159 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Execute statements against Amazon Redshift, using redshift_connector."""
+
+from typing import Callable, Dict, Optional, Union
+
+import redshift_connector
+from redshift_connector import Connection as RedshiftConnection
+
+from airflow.hooks.dbapi import DbApiHook
+
+
+class RedshiftStatementHook(DbApiHook):
+    """
+    Execute statements against Amazon Redshift, using redshift_connector
+
+    This hook requires the redshift_conn_id connection. This connection must
+    be initialized with the host, port, login, password. Additional connection
+    options can be passed to extra as a JSON string.
+
+    :param redshift_conn_id: reference to
+        :ref:`Amazon Redshift connection id<howto/connection:redshift>`
+    :type redshift_conn_id: str
+
+    .. note::
+        get_sqlalchemy_engine() and get_uri() depend on 
sqlalchemy-amazon-redshift
+    """
+
+    conn_name_attr = 'redshift_conn_id'
+    default_conn_name = 'redshift_default'
+    conn_type = 'redshift+redshift_connector'
+    hook_name = 'Amazon Redshift'
+    supports_autocommit = True
+
+    @staticmethod
+    def get_ui_field_behavior() -> Dict:
+        """Returns custom field behavior"""
+        return {
+            "hidden_fields": [],
+            "relabeling": {'login': 'User', 'schema': 'Database'},
+        }
+
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+
+    def _get_conn_params(self) -> Dict[str, Union[str, int]]:
+        """Helper method to retrieve connection args"""
+        conn = self.get_connection(
+            self.redshift_conn_id  # type: ignore[attr-defined]  # pylint: 
disable=no-member
+        )
+
+        conn_params: Dict[str, Union[str, int]] = {
+            "user": conn.login or '',
+            "password": conn.password or '',
+            "host": conn.host or '',
+            "port": conn.port or 5439,

Review comment:
       Addressed this in 6b3976b. The default port number of 5439 has been 
removed, and I opted to selectively add kwargs as mentioned.

##########
File path: airflow/providers/amazon/aws/hooks/redshift_statement.py
##########
@@ -0,0 +1,159 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Execute statements against Amazon Redshift, using redshift_connector."""
+
+from typing import Callable, Dict, Optional, Union
+
+import redshift_connector
+from redshift_connector import Connection as RedshiftConnection
+
+from airflow.hooks.dbapi import DbApiHook
+
+
+class RedshiftStatementHook(DbApiHook):
+    """
+    Execute statements against Amazon Redshift, using redshift_connector
+
+    This hook requires the redshift_conn_id connection. This connection must
+    be initialized with the host, port, login, password. Additional connection
+    options can be passed to extra as a JSON string.
+
+    :param redshift_conn_id: reference to
+        :ref:`Amazon Redshift connection id<howto/connection:redshift>`
+    :type redshift_conn_id: str
+
+    .. note::
+        get_sqlalchemy_engine() and get_uri() depend on 
sqlalchemy-amazon-redshift
+    """
+
+    conn_name_attr = 'redshift_conn_id'
+    default_conn_name = 'redshift_default'
+    conn_type = 'redshift+redshift_connector'
+    hook_name = 'Amazon Redshift'
+    supports_autocommit = True
+
+    @staticmethod
+    def get_ui_field_behavior() -> Dict:
+        """Returns custom field behavior"""
+        return {
+            "hidden_fields": [],
+            "relabeling": {'login': 'User', 'schema': 'Database'},
+        }
+
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+
+    def _get_conn_params(self) -> Dict[str, Union[str, int]]:
+        """Helper method to retrieve connection args"""
+        conn = self.get_connection(
+            self.redshift_conn_id  # type: ignore[attr-defined]  # pylint: 
disable=no-member
+        )
+
+        conn_params: Dict[str, Union[str, int]] = {
+            "user": conn.login or '',
+            "password": conn.password or '',
+            "host": conn.host or '',
+            "port": conn.port or 5439,
+            "database": conn.schema or '',
+        }
+
+        return conn_params
+
+    def _get_conn_kwargs(self) -> Dict:
+        """Helper method to retrieve connection kwargs"""
+        conn = self.get_connection(
+            self.redshift_conn_id  # type: ignore[attr-defined]  # pylint: 
disable=no-member
+        )
+
+        return conn.extra_dejson
+
+    def get_uri(self) -> str:
+        """
+        Override DbApiHook get_uri method for get_sqlalchemy_engine()
+
+        .. note::
+            Value passed to connection extra parameter will be excluded
+            from returned uri but passed to get_sqlalchemy_engine()
+            by default
+        """
+        from sqlalchemy.engine.url import URL
+
+        conn_params = self._get_conn_params()
+
+        conn = self.get_connection(
+            self.redshift_conn_id  # type: ignore[attr-defined]  # pylint: 
disable=no-member
+        )
+
+        conn_type = RedshiftStatementHook.conn_type if not conn.conn_type else 
conn.conn_type
+
+        return URL(
+            drivername=conn_type,
+            username=conn_params['user'],
+            password=conn_params['password'],
+            host=conn_params['host'],
+            port=conn_params['port'],
+            database=conn_params['database'],
+        ).__str__()
+
+    def get_sqlalchemy_engine(self, engine_kwargs=None):
+        """Overrides DbApiHook get_sqlalchemy_engine to pass 
redshift_connector specific kwargs"""
+        conn_kwargs = self._get_conn_kwargs()
+        if engine_kwargs is None:
+            engine_kwargs = {}
+
+        if "connect_args" in engine_kwargs:
+            engine_kwargs["connect_args"] = {**conn_kwargs, 
**engine_kwargs["connect_args"]}
+        else:
+            engine_kwargs["connect_args"] = conn_kwargs
+
+        return super().get_sqlalchemy_engine(engine_kwargs=engine_kwargs)
+
+    def get_conn(self) -> RedshiftConnection:
+        """Returns a redshift_connector.Connection object"""
+        conn_params = self._get_conn_params()
+        conn_kwargs = self._get_conn_kwargs()
+        conn_kwargs: Dict = {**conn_params, **conn_kwargs}
+        conn: RedshiftConnection = redshift_connector.connect(**conn_kwargs)
+
+        return conn
+
+    def run(
+        self,
+        sql: Union[str, list],
+        autocommit: bool = False,
+        parameters: Optional[dict] = None,
+        handler: Optional[Callable] = None,
+    ):
+        """
+        Runs a command or a list of commands. Pass a list of sql
+        statements to the sql parameter to get them to execute
+        sequentially
+
+        :param sql: the sql statement to be executed (str) or a list of
+            sql statements to execute
+        :type sql: str or list
+        :param autocommit: What to set the connection's autocommit setting to
+            before executing the query.
+        :type autocommit: bool
+        :param parameters: The parameters to render the SQL query with.
+        :type parameters: dict or iterable
+        :param handler: The result handler which is called with the result of 
each statement.
+        :type handler: callable
+        :return: query results if handler was provided.
+        """
+        return super().run(sql, autocommit=False, parameters=parameters, 
handler=handler)

Review comment:
       Removed the method override. The hard-coded `autocommit=False` was left 
over from testing, so it is not needed. 1efe5a9

##########
File path: airflow/providers/amazon/aws/hooks/redshift_statement.py
##########
@@ -0,0 +1,131 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Execute statements against Amazon Redshift, using redshift_connector."""
+try:
+    from functools import cached_property
+except ImportError:
+    from cached_property import cached_property
+from typing import Dict, Union
+
+import redshift_connector
+from redshift_connector import Connection as RedshiftConnection
+
+from airflow.hooks.dbapi import DbApiHook
+
+
+class RedshiftStatementHook(DbApiHook):
+    """
+    Execute statements against Amazon Redshift, using redshift_connector
+
+    This hook requires the redshift_conn_id connection. This connection must
+    be initialized with the host, port, login, password. Additional connection
+    options can be passed to extra as a JSON string.
+
+    :param redshift_conn_id: reference to
+        :ref:`Amazon Redshift connection id<howto/connection:redshift>`
+    :type redshift_conn_id: str
+
+    .. note::
+        get_sqlalchemy_engine() and get_uri() depend on 
sqlalchemy-amazon-redshift
+    """
+
+    conn_name_attr = 'redshift_conn_id'
+    default_conn_name = 'redshift_default'
+    conn_type = 'redshift+redshift_connector'
+    hook_name = 'Amazon Redshift'
+    supports_autocommit = True
+
+    @staticmethod
+    def get_ui_field_behavior() -> Dict:
+        """Returns custom field behavior"""
+        return {
+            "hidden_fields": [],
+            "relabeling": {'login': 'User', 'schema': 'Database'},
+        }
+
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+
+    @cached_property
+    def conn(self):
+        return self.get_connection(
+            self.redshift_conn_id  # type: ignore[attr-defined]  # pylint: 
disable=no-member
+        )
+
+    def _get_conn_params(self) -> Dict[str, Union[str, int]]:
+        """Helper method to retrieve connection args"""
+        conn = self.conn
+
+        conn_params: Dict[str, Union[str, int]] = {}
+
+        if conn.login:
+            conn_params['user'] = conn.login
+        if conn.password:
+            conn_params['password'] = conn.password
+        if conn.host:
+            conn_params['host'] = conn.host
+        if conn.port:
+            conn_params['port'] = conn.port
+        if conn.schema:
+            conn_params['database'] = conn.schema
+
+        return conn_params
+
+    def get_uri(self) -> str:
+        """
+        Override DbApiHook get_uri method for get_sqlalchemy_engine()
+
+        .. note::
+            Value passed to connection extra parameter will be excluded
+            from returned uri but passed to get_sqlalchemy_engine()
+            by default
+        """
+        from sqlalchemy.engine.url import URL
+
+        conn_params = self._get_conn_params()
+
+        conn = self.conn
+
+        conn_type = conn.conn_type or RedshiftStatementHook.conn_type
+
+        if 'user' in conn_params:
+            conn_params['username'] = conn_params.pop('user')
+
+        return URL(drivername=conn_type, **conn_params).__str__()

Review comment:
       addressed in 949ff473e493a9af022924f5b1aea35ff5ff9af3

##########
File path: airflow/providers/amazon/aws/hooks/redshift_statement.py
##########
@@ -0,0 +1,131 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Execute statements against Amazon Redshift, using redshift_connector."""
+try:
+    from functools import cached_property
+except ImportError:
+    from cached_property import cached_property
+from typing import Dict, Union
+
+import redshift_connector
+from redshift_connector import Connection as RedshiftConnection
+
+from airflow.hooks.dbapi import DbApiHook
+
+
+class RedshiftStatementHook(DbApiHook):
+    """
+    Execute statements against Amazon Redshift, using redshift_connector
+
+    This hook requires the redshift_conn_id connection. This connection must
+    be initialized with the host, port, login, password. Additional connection
+    options can be passed to extra as a JSON string.
+
+    :param redshift_conn_id: reference to
+        :ref:`Amazon Redshift connection id<howto/connection:redshift>`
+    :type redshift_conn_id: str
+
+    .. note::
+        get_sqlalchemy_engine() and get_uri() depend on 
sqlalchemy-amazon-redshift
+    """
+
+    conn_name_attr = 'redshift_conn_id'
+    default_conn_name = 'redshift_default'
+    conn_type = 'redshift+redshift_connector'
+    hook_name = 'Amazon Redshift'
+    supports_autocommit = True
+
+    @staticmethod
+    def get_ui_field_behavior() -> Dict:
+        """Returns custom field behavior"""
+        return {
+            "hidden_fields": [],
+            "relabeling": {'login': 'User', 'schema': 'Database'},
+        }
+
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+
+    @cached_property
+    def conn(self):
+        return self.get_connection(
+            self.redshift_conn_id  # type: ignore[attr-defined]  # pylint: 
disable=no-member

Review comment:
       addressed in c9e182047b12474f00e9e5abca13a90364a44a0c

##########
File path: tests/providers/amazon/aws/hooks/test_redshift_statement.py
##########
@@ -0,0 +1,72 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+import json
+import unittest
+from unittest import mock
+
+from airflow.models import Connection
+from airflow.providers.amazon.aws.hooks.redshift_statement import 
RedshiftStatementHook
+
+
+class TestRedshiftStatementHookConn(unittest.TestCase):
+    def setUp(self):
+        super().setUp()
+
+        self.connection = Connection(login='login', password='password', 
host='host', port=5439, schema="dev")
+
+        class UnitTestRedshiftStatementHook(RedshiftStatementHook):
+            conn_name_attr = "redshift_conn_id"
+            conn_type = 'redshift+redshift_connector'
+
+        self.db_hook = UnitTestRedshiftStatementHook()
+        self.db_hook.get_connection = mock.Mock()
+        self.db_hook.get_connection.return_value = self.connection
+
+    def test_get_uri(self):
+        uri_shouldbe = 
'redshift+redshift_connector://login:password@host:5439/dev'
+        x = self.db_hook.get_uri()
+        assert uri_shouldbe == x

Review comment:
       addressed in 01091dcace8f3c6efd57a2f7d144f7a9336ecbcd

##########
File path: tests/providers/amazon/aws/operators/test_redshift.py
##########
@@ -0,0 +1,57 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import unittest
+from unittest import mock
+
+from airflow.models.dag import DAG
+from airflow.providers.amazon.aws.operators.redshift import RedshiftOperator
+from airflow.utils import timezone
+
+DEFAULT_DATE = timezone.datetime(2015, 1, 1)
+DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
+DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
+TEST_DAG_ID = 'unit_test_dag'
+
+
+class TestRedshiftOperator(unittest.TestCase):
+    def setUp(self):
+        super().setUp()
+        args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
+        dag = DAG(TEST_DAG_ID, default_args=args)
+        self.dag = dag
+
+    
@mock.patch("airflow.providers.amazon.aws.operators.redshift.RedshiftOperator.get_hook")
+    def test_redshift_operator(self, mock_get_hook):
+        sql = """
+        CREATE TABLE IF NOT EXISTS test_airflow (
+            dummy VARCHAR(50)
+        );
+        """
+        operator = RedshiftOperator(task_id='redshift_operator', sql=sql, 
dag=self.dag)
+        operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, 
ignore_ti_state=True)
+
+    
@mock.patch("airflow.providers.amazon.aws.operators.redshift.RedshiftOperator.get_hook")
+    def test_redshift_operator_test_multi(self, mock_get_hook):
+        sql = [
+            "CREATE TABLE IF NOT EXISTS test_airflow (dummy VARCHAR(50))",
+            "TRUNCATE TABLE test_airflow",
+            "INSERT INTO test_airflow VALUES ('X')",
+        ]
+        operator = RedshiftOperator(task_id='redshift_operator_test_multi', 
sql=sql, dag=self.dag)
+        operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, 
ignore_ti_state=True)

Review comment:
       Thank you for providing the rewrite and the very thorough explanation. I've 
addressed this in 7205a8cd7d0d05ed70dc7e5f87bf70d0227f0184, and added some 
parameterization for autocommit and parameters.

##########
File path: airflow/providers/amazon/aws/example_dags/example_redshift.py
##########
@@ -0,0 +1,65 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This is an example dag for using `RedshiftOperator` to authenticate with 
Amazon Redshift
+using IAM authentication then executing a simple select statement

Review comment:
       addressed in 7205a8cd7d0d05ed70dc7e5f87bf70d0227f0184

##########
File path: airflow/providers/amazon/aws/hooks/redshift_statement.py
##########
@@ -0,0 +1,131 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Execute statements against Amazon Redshift, using redshift_connector."""
+try:
+    from functools import cached_property
+except ImportError:
+    from cached_property import cached_property
+from typing import Dict, Union
+
+import redshift_connector
+from redshift_connector import Connection as RedshiftConnection
+
+from airflow.hooks.dbapi import DbApiHook
+
+
+class RedshiftStatementHook(DbApiHook):
+    """
+    Execute statements against Amazon Redshift, using redshift_connector
+
+    This hook requires the redshift_conn_id connection. This connection must
+    be initialized with the host, port, login, password. Additional connection
+    options can be passed to extra as a JSON string.
+
+    :param redshift_conn_id: reference to
+        :ref:`Amazon Redshift connection id<howto/connection:redshift>`
+    :type redshift_conn_id: str
+
+    .. note::
+        get_sqlalchemy_engine() and get_uri() depend on 
sqlalchemy-amazon-redshift
+    """
+
+    conn_name_attr = 'redshift_conn_id'
+    default_conn_name = 'redshift_default'
+    conn_type = 'redshift+redshift_connector'
+    hook_name = 'Amazon Redshift'
+    supports_autocommit = True
+
+    @staticmethod
+    def get_ui_field_behavior() -> Dict:
+        """Returns custom field behavior"""
+        return {
+            "hidden_fields": [],
+            "relabeling": {'login': 'User', 'schema': 'Database'},
+        }
+
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)

Review comment:
       addressed in ddd9d493




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to