Taragolis commented on code in PR #39217:
URL: https://github.com/apache/airflow/pull/39217#discussion_r1588818546


##########
airflow/providers/teradata/transfers/azure_blob_to_teradata.py:
##########
@@ -0,0 +1,95 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+from airflow.providers.teradata.hooks.teradata import TeradataHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class AzureBlobStorageToTeradataOperator(BaseOperator):
+    """
+
+    Loads CSV, JSON and Parquet format data from Azure Blob Storage to Teradata.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:AzureBlobStorageToTeradataOperator`
+
+    :param blob_source_key: The URI format specifying the location of the Azure blob object store.(templated)
+        The URI format is `/az/YOUR-STORAGE-ACCOUNT.blob.core.windows.net/YOUR-CONTAINER/YOUR-BLOB-LOCATION`.
+        Refer to
+        https://docs.teradata.com/search/documents?query=native+object+store&sort=last_update&virtual-field=title_only&content-lang=en-US
+    :param azure_conn_id: The Airflow WASB connection used for azure blob credentials.
+    :param teradata_table: The name of the teradata table to which the data is transferred.(templated)
+    :param teradata_conn_id: The connection ID used to connect to Teradata
+        :ref:`Teradata connection <howto/connection:Teradata>`
+
+    Note that ``blob_source_key`` and ``teradata_table`` are
+    templated, so you can use variables in them if you wish.
+    """
+
+    template_fields: Sequence[str] = ("blob_source_key", "teradata_table")
+    ui_color = "#e07c24"
+
+    def __init__(
+        self,
+        *,
+        blob_source_key: str,
+        azure_conn_id: str = "azure_default",
+        teradata_table: str,
+        teradata_conn_id: str = "teradata_default",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.blob_source_key = blob_source_key
+        self.azure_conn_id = azure_conn_id
+        self.teradata_table = teradata_table
+        self.teradata_conn_id = teradata_conn_id
+
+    def execute(self, context: Context) -> None:
+        self.log.info(
+            "transferring data from %s to teradata table %s...", 
self.blob_source_key, self.teradata_table
+        )
+        azure_hook = WasbHook(wasb_conn_id=self.azure_conn_id)
+        conn = azure_hook.get_connection(self.azure_conn_id)
+        # Obtaining the Azure client ID and Azure secret in order to access a specified Blob container
+        access_id = conn.login if conn.login is not None else ""
+        access_secret = conn.password if conn.password is not None else ""
+        teradata_hook = TeradataHook(teradata_conn_id=self.teradata_conn_id)
+        sql = f"""
+                    CREATE MULTISET TABLE {self.teradata_table}  AS
+                    (
+                        SELECT * FROM (
+                            LOCATION = '{self.blob_source_key}'
+                            ACCESS_ID= '{access_id}'
+                            ACCESS_KEY= '{access_secret}'
+                    ) AS d
+                    ) WITH DATA
+                """

Review Comment:
   ```suggestion
           sql = dedent(f"""
               CREATE MULTISET TABLE {self.teradata_table} AS
                   (
                       SELECT * FROM (
                           LOCATION = '{self.blob_source_key}'
                           ACCESS_ID = '{access_id}'
                           ACCESS_KEY = '{access_secret}'
                       ) AS d
                   ) WITH DATA
           """).rstrip()
   ```
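   
   A small note on the assumption behind this suggestion: `dedent` here is the standard-library `textwrap.dedent`, so the module would also need the corresponding import, e.g.:
   
   ```python
   # Assumed import for the suggestion above; textwrap.dedent strips the common
   # leading whitespace from every line of the triple-quoted SQL string.
   from textwrap import dedent
   ```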



##########
tests/system/providers/teradata/example_s3_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of S3StorageToTeradataOperator.
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from S3
+to Teradata tables. In the example Directed Acyclic Graph (DAG) below, it assumes Airflow
+Connections with the IDs `teradata_default` and `aws_default` already exist. The DAG creates
+tables using data from the S3, reports the number of rows inserted into
+the table, and subsequently drops the table.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+from airflow.providers.teradata.transfers.s3_to_teradata import S3ToTeradataOperator
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not 
available", allow_module_level=True)
+
+# [START s3_to_teradata_transfer_operator_howto_guide]
+
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_s3_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    transfer_data_csv = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_csv",
+        s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/CSVDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_csv",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_csv;
+                """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    drop_table_csv = TeradataOperator(
+        task_id="drop_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_s3_teradata_csv;
+            """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json]
+    transfer_data_json = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_json",
+        s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/JSONDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_json",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json]
+    # [START s3_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    read_data_table_json = TeradataOperator(
+        task_id="read_data_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_json;
+                """,

Review Comment:
   ```suggestion
           sql="SELECT * from example_s3_teradata_json;",
   ```



##########
airflow/providers/teradata/transfers/s3_to_teradata.py:
##########
@@ -0,0 +1,100 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.teradata.hooks.teradata import TeradataHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class S3ToTeradataOperator(BaseOperator):
+    """
+    Loads CSV, JSON and Parquet format data from Amazon S3 to Teradata.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:S3ToTeradataOperator`
+
+    :param s3_source_key: The URI format specifying the location of the S3 object store.(templated)
+        The URI format is /s3/YOUR-BUCKET.s3.amazonaws.com/YOUR-BUCKET-NAME.
+        Refer to
+        https://docs.teradata.com/search/documents?query=native+object+store&sort=last_update&virtual-field=title_only&content-lang=en-US
+    :param teradata_table: The name of the teradata table to which the data is transferred.(templated)
+    :param aws_conn_id: The Airflow AWS connection used for AWS credentials.
+    :param teradata_conn_id: The connection ID used to connect to Teradata
+        :ref:`Teradata connection <howto/connection:Teradata>`.
+
+    Note that ``s3_source_key`` and ``teradata_table`` are
+    templated, so you can use variables in them if you wish.
+    """
+
+    template_fields: Sequence[str] = ("s3_source_key", "teradata_table")
+    ui_color = "#e07c24"
+
+    def __init__(
+        self,
+        *,
+        s3_source_key: str,
+        teradata_table: str,
+        aws_conn_id: str = "aws_default",
+        teradata_conn_id: str = "teradata_default",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.s3_source_key = s3_source_key
+        self.teradata_table = teradata_table
+        self.aws_conn_id = aws_conn_id
+        self.teradata_conn_id = teradata_conn_id
+
+    def execute(self, context: Context) -> None:
+        self.log.info(
+            "transferring data from %s to teradata table %s...", 
self.s3_source_key, self.teradata_table
+        )
+
+        s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
+        access_key = (
+            s3_hook.conn_config.aws_access_key_id if s3_hook.conn_config.aws_access_key_id is not None else ""
+        )
+        access_secret = (
+            s3_hook.conn_config.aws_secret_access_key
+            if s3_hook.conn_config.aws_secret_access_key is not None
+            else ""
+        )

Review Comment:
   This one is not semantically correct in the case of an [AWS Connection](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/connections/aws.html#default-connection-ids).
   
   The connection itself might not contain `aws_access_key_id` / `aws_secret_access_key`; they could be obtained by the [`botocore` / `boto3` credential strategy](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials).
   
   You should call the [`get_credentials`](https://github.com/apache/airflow/blob/0a74928894fb57b0160208262ccacad12da23fc7/airflow/providers/amazon/aws/hooks/base_aws.py#L764-L778) method, which returns a named tuple with frozen credentials:
   
   ```python
   ReadOnlyCredentials = namedtuple(
       'ReadOnlyCredentials', ['access_key', 'secret_key', 'token']
   )
   ```
   
   Please note that if `token` is not None you should also provide it, because without it (the STS Session Token) the credentials are not valid.
   
   I also can't find how to do this within Teradata, because the manual doesn't contain such information (https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-VantageTM-Native-Object-Store-Getting-Started-Guide-17.20/Authentication-for-External-Object-Stores/Using-AWS-Assume-Role/Setting-Up-Assume-Role-on-Analytics-Database); however, [this KB](https://support.teradata.com/knowledge?id=kb_article_view&sys_kb_id=a6b8ad8cc36ae990e2bcb41f050131a7) shows that it is somehow supported.
   
   And finally, anonymous access is pretty difficult to handle, because there is no out-of-the-box solution for that in the AWS hooks, so I would recommend adding a separate parameter for it, so we could skip obtaining the connection entirely if this kind of access is required.
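   
   A rough sketch of the `get_credentials` approach (illustrative only, not the final implementation; it just shows the call and the token caveat from above):
   
   ```python
   from airflow.providers.amazon.aws.hooks.s3 import S3Hook
   
   # Go through the hook so the botocore/boto3 credential chain
   # (env vars, shared config, assume-role, ...) is honoured.
   s3_hook = S3Hook(aws_conn_id="aws_default")
   credentials = s3_hook.get_credentials()  # ReadOnlyCredentials(access_key, secret_key, token)
   access_key = credentials.access_key or ""
   access_secret = credentials.secret_key or ""
   # If token is not None (temporary / STS credentials), it must be passed to
   # Teradata as well; the key pair alone is not valid in that case.
   token = credentials.token
   ```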



##########
tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of AzureBlobStorageToTeradataOperator
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from Azure Blob storage
+to Teradata tables. In the example Directed Acyclic Graph (DAG) below, it assumes Airflow
+Connections with the IDs `teradata_default` and `wasb_default` already exist. The DAG creates
+tables using data from the Azure Blob location, reports the number of rows inserted into
+the table, and subsequently drops the table.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+    from airflow.providers.teradata.transfers.azure_blob_to_teradata import AzureBlobStorageToTeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not available", allow_module_level=True)
+
+# [START azure_blob_to_teradata_transfer_operator_howto_guide]
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_azure_blob_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    transfer_data_csv = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_csv",
+        blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/CSVDATA/09380000/2018/06/",
+        teradata_table="example_blob_teradata_csv",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    # [START azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_csv;
+            """,
+    )
+    # [END azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    # [START azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    drop_table_csv = TeradataOperator(
+        task_id="drop_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_blob_teradata_csv;
+            """,
+    )
+    # [END azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    # [START azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json]
+    transfer_data_json = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_json",
+        blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/JSONDATA/09380000/2018/06/",
+        teradata_table="example_blob_teradata_json",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json]
+    # [START azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    read_data_table_json = TeradataOperator(
+        task_id="read_data_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_json;
+            """,

Review Comment:
   ```suggestion
           sql="SELECT count(1) from example_blob_teradata_json;",
   ```



##########
tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of AzureBlobStorageToTeradataOperator
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from Azure 
Blob storage
+to Teradata tables. In the example Directed Acyclic Graph (DAG) below, it 
assumes Airflow
+Connections with the IDs `teradata_default` and `wasb_default` already exist. 
The DAG creates
+tables using data from the Azure Blob location, reports the number of rows 
inserted into
+the table, and subsequently drops the table.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+    from airflow.providers.teradata.transfers.azure_blob_to_teradata import 
AzureBlobStorageToTeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not 
available", allow_module_level=True)
+
+# [START azure_blob_to_teradata_transfer_operator_howto_guide]
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_azure_blob_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    transfer_data_csv = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_csv",
+        
blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/CSVDATA/09380000/2018/06/",
+        teradata_table="example_blob_teradata_csv",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_csv;
+            """,
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    drop_table_csv = TeradataOperator(
+        task_id="drop_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_blob_teradata_csv;
+            """,

Review Comment:
   ```suggestion
           sql="DROP TABLE example_blob_teradata_csv;",
   ```



##########
tests/system/providers/teradata/example_s3_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of S3StorageToTeradataOperator.
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from S3
+to Teradata tables. In the example Directed Acyclic Graph (DAG) below, it 
assumes Airflow
+Connections with the IDs `teradata_default` and `aws_default` already exist. 
The DAG creates
+tables using data from the S3, reports the number of rows inserted into
+the table, and subsequently drops the table.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+from airflow.providers.teradata.transfers.s3_to_teradata import 
S3ToTeradataOperator
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not 
available", allow_module_level=True)
+
+# [START s3_to_teradata_transfer_operator_howto_guide]
+
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_s3_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START 
s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    transfer_data_csv = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_csv",
+        
s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/CSVDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_csv",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_csv;
+                """,

Review Comment:
   ```suggestion
           sql="SELECT * from example_s3_teradata_csv;",
   ```



##########
tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of AzureBlobStorageToTeradataOperator
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from Azure 
Blob storage
+to Teradata tables. In the example Directed Acyclic Graph (DAG) below, it 
assumes Airflow
+Connections with the IDs `teradata_default` and `wasb_default` already exist. 
The DAG creates
+tables using data from the Azure Blob location, reports the number of rows 
inserted into
+the table, and subsequently drops the table.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+    from airflow.providers.teradata.transfers.azure_blob_to_teradata import 
AzureBlobStorageToTeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not 
available", allow_module_level=True)
+
+# [START azure_blob_to_teradata_transfer_operator_howto_guide]
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_azure_blob_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    transfer_data_csv = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_csv",
+        
blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/CSVDATA/09380000/2018/06/",
+        teradata_table="example_blob_teradata_csv",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_csv;
+            """,
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    drop_table_csv = TeradataOperator(
+        task_id="drop_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_blob_teradata_csv;
+            """,
+    )
+    # [END azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json]
+    transfer_data_json = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_json",
+        
blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/JSONDATA/09380000/2018/06/",
+        teradata_table="example_blob_teradata_json",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    read_data_table_json = TeradataOperator(
+        task_id="read_data_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_json;
+            """,
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_json]
+    drop_table_json = TeradataOperator(
+        task_id="drop_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_blob_teradata_json;
+            """,
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_json]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_parquet]
+    transfer_data_parquet = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_parquet",
+        
blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/PARQUETDATA/09394500/2018/06/",
+        teradata_table="example_blob_teradata_parquet",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_parquet]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_parquet]
+    read_data_table_parquet = TeradataOperator(
+        task_id="read_data_table_parquet",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_parquet;
+            """,

Review Comment:
   ```suggestion
           sql="SELECT count(1) from example_blob_teradata_parquet;",
   ```



##########
tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of AzureBlobStorageToTeradataOperator
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from Azure 
Blob storage
+to Teradata tables. In the example Directed Acyclic Graph (DAG) below, it 
assumes Airflow
+Connections with the IDs `teradata_default` and `wasb_default` already exist. 
The DAG creates
+tables using data from the Azure Blob location, reports the number of rows 
inserted into
+the table, and subsequently drops the table.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+    from airflow.providers.teradata.transfers.azure_blob_to_teradata import 
AzureBlobStorageToTeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not 
available", allow_module_level=True)
+
+# [START azure_blob_to_teradata_transfer_operator_howto_guide]
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_azure_blob_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    transfer_data_csv = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_csv",
+        
blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/CSVDATA/09380000/2018/06/",
+        teradata_table="example_blob_teradata_csv",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_csv;
+            """,

Review Comment:
   There are a couple of places where the triple-quoted string literals are redundant / not required.
   
   ```suggestion
           sql="SELECT count(1) from example_blob_teradata_csv;",
   ```



##########
tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of AzureBlobStorageToTeradataOperator
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from Azure 
Blob storage
+to Teradata tables. In the example Directed Acyclic Graph (DAG) below, it 
assumes Airflow
+Connections with the IDs `teradata_default` and `wasb_default` already exist. 
The DAG creates
+tables using data from the Azure Blob location, reports the number of rows 
inserted into
+the table, and subsequently drops the table.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+    from airflow.providers.teradata.transfers.azure_blob_to_teradata import 
AzureBlobStorageToTeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not 
available", allow_module_level=True)
+
+# [START azure_blob_to_teradata_transfer_operator_howto_guide]
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_azure_blob_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    transfer_data_csv = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_csv",
+        
blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/CSVDATA/09380000/2018/06/",
+        teradata_table="example_blob_teradata_csv",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_csv;
+            """,
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    drop_table_csv = TeradataOperator(
+        task_id="drop_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_blob_teradata_csv;
+            """,
+    )
+    # [END azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json]
+    transfer_data_json = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_json",
+        
blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/JSONDATA/09380000/2018/06/",
+        teradata_table="example_blob_teradata_json",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    read_data_table_json = TeradataOperator(
+        task_id="read_data_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_json;
+            """,
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_json]
+    drop_table_json = TeradataOperator(
+        task_id="drop_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_blob_teradata_json;
+            """,
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_json]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_parquet]
+    transfer_data_parquet = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_parquet",
+        
blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/PARQUETDATA/09394500/2018/06/",
+        teradata_table="example_blob_teradata_parquet",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_parquet]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_parquet]
+    read_data_table_parquet = TeradataOperator(
+        task_id="read_data_table_parquet",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_parquet;
+            """,
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_parquet]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_parquet]
+    drop_table_parquet = TeradataOperator(
+        task_id="drop_table_parquet",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_blob_teradata_parquet;
+            """,

Review Comment:
   ```suggestion
           sql="DROP TABLE example_blob_teradata_parquet;",
   ```



##########
tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of AzureBlobStorageToTeradataOperator
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from Azure 
Blob storage
+to Teradata tables. In the example Directed Acyclic Graph (DAG) below, it 
assumes Airflow
+Connections with the IDs `teradata_default` and `wasb_default` already exist. 
The DAG creates
+tables using data from the Azure Blob location, reports the number of rows 
inserted into
+the table, and subsequently drops the table.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+    from airflow.providers.teradata.transfers.azure_blob_to_teradata import 
AzureBlobStorageToTeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not 
available", allow_module_level=True)
+
+# [START azure_blob_to_teradata_transfer_operator_howto_guide]
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_azure_blob_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    transfer_data_csv = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_csv",
+        
blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/CSVDATA/09380000/2018/06/",
+        teradata_table="example_blob_teradata_csv",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_csv;
+            """,
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    drop_table_csv = TeradataOperator(
+        task_id="drop_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_blob_teradata_csv;
+            """,
+    )
+    # [END azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json]
+    transfer_data_json = AzureBlobStorageToTeradataOperator(
+        task_id="transfer_data_blob_to_teradata_json",
+        
blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/JSONDATA/09380000/2018/06/",
+        teradata_table="example_blob_teradata_json",
+        azure_conn_id="wasb_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    read_data_table_json = TeradataOperator(
+        task_id="read_data_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                SELECT count(1) from example_blob_teradata_json;
+            """,
+    )
+    # [END 
azure_blob_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    # [START 
azure_blob_to_teradata_transfer_operator_howto_guide_drop_table_json]
+    drop_table_json = TeradataOperator(
+        task_id="drop_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_blob_teradata_json;
+            """,

Review Comment:
   ```suggestion
           sql="DROP TABLE example_blob_teradata_json;",
   ```



##########
tests/system/providers/teradata/example_s3_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of S3StorageToTeradataOperator.
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from S3
+to Teradata tables. In the example Directed Acyclic Graph (DAG) below, it 
assumes Airflow
+Connections with the IDs `teradata_default` and `aws_default` already exist. 
The DAG creates
+tables using data from the S3, reports the number of rows inserted into
+the table, and subsequently drops the table.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+from airflow.providers.teradata.transfers.s3_to_teradata import 
S3ToTeradataOperator
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not 
available", allow_module_level=True)
+
+# [START s3_to_teradata_transfer_operator_howto_guide]
+
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_s3_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START 
s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    transfer_data_csv = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_csv",
+        
s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/CSVDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_csv",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_csv;
+                """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    drop_table_csv = TeradataOperator(
+        task_id="drop_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_s3_teradata_csv;
+            """,

Review Comment:
   ```suggestion
           sql="DROP TABLE example_s3_teradata_csv;",
   ```



##########
tests/system/providers/teradata/example_s3_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of S3StorageToTeradataOperator.
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from S3
+to Teradata tables. In the example Directed Acyclic Graph (DAG) below, it 
assumes Airflow
+Connections with the IDs `teradata_default` and `aws_default` already exist. 
The DAG creates
+tables using data from the S3, reports the number of rows inserted into
+the table, and subsequently drops the table.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+from airflow.providers.teradata.transfers.s3_to_teradata import 
S3ToTeradataOperator
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not 
available", allow_module_level=True)
+
+# [START s3_to_teradata_transfer_operator_howto_guide]
+
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_s3_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START 
s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    transfer_data_csv = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_csv",
+        
s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/CSVDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_csv",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_csv;
+                """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    drop_table_csv = TeradataOperator(
+        task_id="drop_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_s3_teradata_csv;
+            """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    # [START 
s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json]
+    transfer_data_json = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_json",
+        
s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/JSONDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_json",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json]
+    # [START s3_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    read_data_table_json = TeradataOperator(
+        task_id="read_data_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_json;
+                """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    # [START s3_to_teradata_transfer_operator_howto_guide_drop_table_json]
+    drop_table_json = TeradataOperator(
+        task_id="drop_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                    DROP TABLE example_s3_teradata_json;
+                """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_drop_table_json]
+    # [START 
s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_parquet]
+    transfer_data_parquet = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_parquet",
+        
s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/PARQUETDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_parquet",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END 
s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_parquet]
+    # [START 
s3_to_teradata_transfer_operator_howto_guide_read_data_table_parquet]
+    read_data_table_parquet = TeradataOperator(
+        task_id="read_data_table_parquet",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_parquet;
+                """,

Review Comment:
   ```suggestion
           sql="SELECT * from example_s3_teradata_parquet;",
   ```



##########
tests/system/providers/teradata/example_s3_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of S3ToTeradataOperator.
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from S3
+to Teradata tables. The example Directed Acyclic Graph (DAG) below assumes that
+Airflow connections with the IDs `teradata_default` and `aws_default` already exist.
+The DAG creates tables using data from S3, reports the number of rows inserted
+into each table, and subsequently drops the tables.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+from airflow.providers.teradata.transfers.s3_to_teradata import S3ToTeradataOperator
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not 
available", allow_module_level=True)
+
+# [START s3_to_teradata_transfer_operator_howto_guide]
+
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_s3_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    transfer_data_csv = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_csv",
+        s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/CSVDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_csv",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_csv;
+                """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    drop_table_csv = TeradataOperator(
+        task_id="drop_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_s3_teradata_csv;
+            """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json]
+    transfer_data_json = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_json",
+        s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/JSONDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_json",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json]
+    # [START s3_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    read_data_table_json = TeradataOperator(
+        task_id="read_data_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_json;
+                """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    # [START s3_to_teradata_transfer_operator_howto_guide_drop_table_json]
+    drop_table_json = TeradataOperator(
+        task_id="drop_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                    DROP TABLE example_s3_teradata_json;
+                """,

Review Comment:
   ```suggestion
           sql="DROP TABLE example_s3_teradata_json;",
   ```



##########
tests/system/providers/teradata/example_s3_to_teradata_transfer.py:
##########
@@ -0,0 +1,163 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG to show usage of S3ToTeradataOperator.
+
+The transfer operator moves files in CSV, JSON, and PARQUET formats from S3
+to Teradata tables. The example Directed Acyclic Graph (DAG) below assumes that
+Airflow connections with the IDs `teradata_default` and `aws_default` already exist.
+The DAG creates tables using data from S3, reports the number of rows inserted
+into each table, and subsequently drops the tables.
+"""
+
+from __future__ import annotations
+
+import datetime
+import os
+
+import pytest
+
+from airflow import DAG
+from airflow.providers.teradata.transfers.s3_to_teradata import S3ToTeradataOperator
+
+try:
+    from airflow.providers.teradata.operators.teradata import TeradataOperator
+except ImportError:
+    pytest.skip("Teradata provider apache-airflow-provider-teradata not 
available", allow_module_level=True)
+
+# [START s3_to_teradata_transfer_operator_howto_guide]
+
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_s3_to_teradata_transfer_operator"
+CONN_ID = "teradata_default"
+
+with DAG(
+    dag_id=DAG_ID,
+    start_date=datetime.datetime(2020, 2, 2),
+    schedule="@once",
+    catchup=False,
+    default_args={"conn_id": "teradata_default"},
+) as dag:
+    # [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    transfer_data_csv = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_csv",
+        s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/CSVDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_csv",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    read_data_table_csv = TeradataOperator(
+        task_id="read_data_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_csv;
+                """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_read_data_table_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    drop_table_csv = TeradataOperator(
+        task_id="drop_table_csv",
+        conn_id=CONN_ID,
+        sql="""
+                DROP TABLE example_s3_teradata_csv;
+            """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_drop_table_csv]
+    # [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json]
+    transfer_data_json = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_json",
+        s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/JSONDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_json",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json]
+    # [START s3_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    read_data_table_json = TeradataOperator(
+        task_id="read_data_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_json;
+                """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_read_data_table_json]
+    # [START s3_to_teradata_transfer_operator_howto_guide_drop_table_json]
+    drop_table_json = TeradataOperator(
+        task_id="drop_table_json",
+        conn_id=CONN_ID,
+        sql="""
+                    DROP TABLE example_s3_teradata_json;
+                """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_drop_table_json]
+    # [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_parquet]
+    transfer_data_parquet = S3ToTeradataOperator(
+        task_id="transfer_data_s3_to_teradata_parquet",
+        s3_source_key="/s3/td-usgs-public.s3.amazonaws.com/PARQUETDATA/09394500/2018/06/",
+        teradata_table="example_s3_teradata_parquet",
+        aws_conn_id="aws_default",
+        teradata_conn_id="teradata_default",
+        trigger_rule="all_done",
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_parquet]
+    # [START s3_to_teradata_transfer_operator_howto_guide_read_data_table_parquet]
+    read_data_table_parquet = TeradataOperator(
+        task_id="read_data_table_parquet",
+        conn_id=CONN_ID,
+        sql="""
+                    SELECT * from example_s3_teradata_parquet;
+                """,
+    )
+    # [END s3_to_teradata_transfer_operator_howto_guide_read_data_table_parquet]
+    # [START s3_to_teradata_transfer_operator_howto_guide_drop_table_parquet]
+    drop_table_parquet = TeradataOperator(
+        task_id="drop_table_parquet",
+        conn_id=CONN_ID,
+        sql="""
+                    DROP TABLE example_s3_teradata_parquet;
+                """,

Review Comment:
   ```suggestion
           sql="DROP TABLE example_s3_teradata_parquet;",
   ```



##########
airflow/providers/teradata/transfers/s3_to_teradata.py:
##########
@@ -0,0 +1,100 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.teradata.hooks.teradata import TeradataHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class S3ToTeradataOperator(BaseOperator):
+    """
+    Loads CSV, JSON and Parquet format data from Amazon S3 to Teradata.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:S3ToTeradataOperator`
+
+    :param s3_source_key: The URI specifying the location of the S3 object store. (templated)
+        The URI format is /s3/YOUR-BUCKET.s3.amazonaws.com/YOUR-BUCKET-NAME.
+        Refer to
+        https://docs.teradata.com/search/documents?query=native+object+store&sort=last_update&virtual-field=title_only&content-lang=en-US
+    :param teradata_table: The name of the Teradata table to which the data is transferred. (templated)
+    :param aws_conn_id: The Airflow AWS connection used for AWS credentials.
+    :param teradata_conn_id: The connection ID used to connect to Teradata
+        :ref:`Teradata connection <howto/connection:Teradata>`.
+
+    Note that ``s3_source_key`` and ``teradata_table`` are
+    templated, so you can use variables in them if you wish.
+    """
+
+    template_fields: Sequence[str] = ("s3_source_key", "teradata_table")
+    ui_color = "#e07c24"
+
+    def __init__(
+        self,
+        *,
+        s3_source_key: str,
+        teradata_table: str,
+        aws_conn_id: str = "aws_default",
+        teradata_conn_id: str = "teradata_default",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.s3_source_key = s3_source_key
+        self.teradata_table = teradata_table
+        self.aws_conn_id = aws_conn_id
+        self.teradata_conn_id = teradata_conn_id
+
+    def execute(self, context: Context) -> None:
+        self.log.info(
+            "transferring data from %s to teradata table %s...", 
self.s3_source_key, self.teradata_table
+        )
+
+        s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
+        access_key = (
+            s3_hook.conn_config.aws_access_key_id if s3_hook.conn_config.aws_access_key_id is not None else ""
+        )
+        access_secret = (
+            s3_hook.conn_config.aws_secret_access_key
+            if s3_hook.conn_config.aws_secret_access_key is not None
+            else ""
+        )
+
+        teradata_hook = TeradataHook(teradata_conn_id=self.teradata_conn_id)
+        sql = f"""
+                        CREATE MULTISET TABLE {self.teradata_table} AS
+                        (
+                            SELECT * FROM (
+                                LOCATION = '{self.s3_source_key}'
+                                ACCESS_ID= '{access_key}'
+                                ACCESS_KEY= '{access_secret}'
+                            ) AS d
+                        ) WITH DATA
+                        """

Review Comment:
   I would recommend importing `from textwrap import dedent` and removing the redundant whitespace:
   
   ```suggestion
           sql = dedent(f"""
               CREATE MULTISET TABLE {self.teradata_table} AS
                   (
                       SELECT * FROM (
                           LOCATION = '{self.s3_source_key}'
                           ACCESS_ID = '{access_key}'
                           ACCESS_KEY = '{access_secret}'
                   ) AS d
               ) WITH DATA
           """).rstrip()
   ```
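   
   For illustration, a minimal standalone sketch of what the suggested `dedent(...).rstrip()` pattern does to an indented f-string; the table name and location below are placeholders for demonstration only, not values from this PR:
   
   ```python
   from textwrap import dedent
   
   # Placeholder values purely for demonstration.
   table = "example_table"
   location = "/s3/example-bucket.s3.amazonaws.com/DATA/"
   
   sql = dedent(f"""
       CREATE MULTISET TABLE {table} AS
       (
           SELECT * FROM (
               LOCATION = '{location}'
           ) AS d
       ) WITH DATA
   """).rstrip()
   
   # dedent removes the indentation common to all lines and rstrip drops the
   # trailing newline, so the statement keeps only its relative indentation.
   print(sql)
   ```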


