This is an automated email from the ASF dual-hosted git repository.

taragolis pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 5802469fbc Migrate amazon provider transfer tests from `unittests` to 
`pytest` (#28145)
5802469fbc is described below

commit 5802469fbc452a5727c938f033f2753571989d92
Author: Adrian Castro <[email protected]>
AuthorDate: Wed Dec 7 19:35:45 2022 +0100

    Migrate amazon provider transfer tests from `unittests` to `pytest` (#28145)
    
    Related: #28039 #28139
    
    Migrate Amazon provider's transfer tests to `pytest`.
    
    All changes are more or less straightforward:
    
    - Get rid of the unittest.TestCase class and TestCase.assert* methods
    - Convert setUp* and tearDown* methods to the appropriate pytest alternatives
    - Replace the `parameterized.expand` decorator with `pytest.mark.parametrize`.
    - Rename `@patch` to `@mock.patch` to conform to other tests
---
 tests/providers/amazon/aws/sensors/test_batch.py   |  2 +-
 .../amazon/aws/transfers/test_dynamodb_to_s3.py    |  7 ++--
 .../amazon/aws/transfers/test_ftp_to_s3.py         |  3 +-
 .../amazon/aws/transfers/test_google_api_to_s3.py  |  5 +--
 .../amazon/aws/transfers/test_hive_to_dynamodb.py  |  5 +--
 .../aws/transfers/test_imap_attachment_to_s3.py    | 11 +++--
 .../amazon/aws/transfers/test_local_to_s3.py       | 29 +++++++------
 .../amazon/aws/transfers/test_mongo_to_s3.py       |  5 +--
 .../amazon/aws/transfers/test_redshift_to_s3.py    | 49 ++++++++--------------
 .../amazon/aws/transfers/test_s3_to_ftp.py         |  3 +-
 .../amazon/aws/transfers/test_s3_to_redshift.py    |  3 +-
 .../amazon/aws/transfers/test_s3_to_sftp.py        | 22 ++++------
 .../amazon/aws/transfers/test_salesforce_to_s3.py  |  3 +-
 .../amazon/aws/transfers/test_sftp_to_s3.py        | 25 ++++-------
 .../amazon/aws/transfers/test_sql_to_s3.py         | 15 ++++---
 15 files changed, 74 insertions(+), 113 deletions(-)

diff --git a/tests/providers/amazon/aws/sensors/test_batch.py 
b/tests/providers/amazon/aws/sensors/test_batch.py
index d7905d563f..835b99ad0a 100644
--- a/tests/providers/amazon/aws/sensors/test_batch.py
+++ b/tests/providers/amazon/aws/sensors/test_batch.py
@@ -42,7 +42,7 @@ class TestBatchSensor:
     @mock.patch.object(BatchClientHook, "get_job_description")
     def test_poke_on_success_state(self, mock_get_job_description):
         mock_get_job_description.return_value = {"status": "SUCCEEDED"}
-        assert self.batch_sensor.poke({})
+        assert self.batch_sensor.poke({}) is True
         mock_get_job_description.assert_called_once_with(JOB_ID)
 
     @mock.patch.object(BatchClientHook, "get_job_description")
diff --git a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
index 2de653fb4a..6dcac793a4 100644
--- a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
@@ -18,14 +18,13 @@
 from __future__ import annotations
 
 import json
-import unittest
 from decimal import Decimal
 from unittest.mock import MagicMock, patch
 
 from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import 
DynamoDBToS3Operator, JSONEncoder
 
 
-class JSONEncoderTest(unittest.TestCase):
+class JSONEncoderTest:
     def test_jsonencoder_with_decimal(self):
         """Test JSONEncoder correctly encodes and decodes decimal values."""
 
@@ -36,8 +35,8 @@ class JSONEncoderTest(unittest.TestCase):
             self.assertAlmostEqual(decoded, org)
 
 
-class DynamodbToS3Test(unittest.TestCase):
-    def setUp(self):
+class DynamodbToS3Test:
+    def setup_method(self):
         self.output_queue = []
 
     def mock_upload_file(self, Filename, Bucket, Key):
diff --git a/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py
index 9bd05cbb5c..3c0ead8cfe 100644
--- a/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 from airflow.providers.amazon.aws.transfers.ftp_to_s3 import FTPToS3Operator
@@ -32,7 +31,7 @@ S3_KEY_MULTIPLE = "test/"
 FTP_PATH_MULTIPLE = "/tmp/"
 
 
-class TestFTPToS3Operator(unittest.TestCase):
+class TestFTPToS3Operator:
     def assert_execute(
         self, mock_local_tmp_file, mock_s3_hook_load_file, 
mock_ftp_hook_retrieve_file, ftp_file, s3_file
     ):
diff --git a/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
index 1fac172f6e..6ddc968d3b 100644
--- a/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest.mock import Mock, patch
 
 import pytest
@@ -29,8 +28,8 @@ from airflow.providers.amazon.aws.transfers.google_api_to_s3 
import GoogleApiToS
 from airflow.utils import db
 
 
-class TestGoogleApiToS3(unittest.TestCase):
-    def setUp(self):
+class TestGoogleApiToS3:
+    def setup_method(self):
         conf.load_test_config()
 
         db.merge_conn(
diff --git a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py 
b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
index c1a2b76676..07676540e4 100644
--- a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
+++ b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
@@ -19,7 +19,6 @@ from __future__ import annotations
 
 import datetime
 import json
-import unittest
 from unittest import mock
 
 import pandas as pd
@@ -34,8 +33,8 @@ DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
 DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
 
 
-class TestHiveToDynamoDBOperator(unittest.TestCase):
-    def setUp(self):
+class TestHiveToDynamoDBOperator:
+    def setup_method(self):
         args = {"owner": "airflow", "start_date": DEFAULT_DATE}
         dag = DAG("test_dag_id", default_args=args)
         self.dag = dag
diff --git a/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
index 1663f90eac..acb5bb7134 100644
--- a/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
@@ -17,14 +17,13 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
-from unittest.mock import patch
+from unittest import mock
 
 from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import 
ImapAttachmentToS3Operator
 
 
-class TestImapAttachmentToS3Operator(unittest.TestCase):
-    def setUp(self):
+class TestImapAttachmentToS3Operator:
+    def setup_method(self):
         self.kwargs = dict(
             imap_attachment_name="test_file",
             s3_bucket="test_bucket",
@@ -37,8 +36,8 @@ class TestImapAttachmentToS3Operator(unittest.TestCase):
             dag=None,
         )
 
-    
@patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.S3Hook")
-    
@patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapHook")
+    
@mock.patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.S3Hook")
+    
@mock.patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapHook")
     def test_execute(self, mock_imap_hook, mock_s3_hook):
         mock_imap_hook.return_value.__enter__ = mock_imap_hook
         mock_imap_hook.return_value.retrieve_mail_attachments.return_value = 
[("test_file", b"Hello World")]
diff --git a/tests/providers/amazon/aws/transfers/test_local_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
index 2d53d19256..ad811a0a0c 100644
--- a/tests/providers/amazon/aws/transfers/test_local_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
@@ -19,20 +19,19 @@ from __future__ import annotations
 
 import datetime
 import os
-import unittest
 
 import boto3
+import pytest
 from moto import mock_s3
 
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.transfers.local_to_s3 import 
LocalFilesystemToS3Operator
 
+CONFIG = {"verify": False, "replace": False, "encrypt": False, "gzip": False}
 
-class TestFileToS3Operator(unittest.TestCase):
 
-    _config = {"verify": False, "replace": False, "encrypt": False, "gzip": 
False}
-
-    def setUp(self):
+class TestFileToS3Operator:
+    def setup_method(self):
         args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 
1)}
         self.dag = DAG("test_dag_id", default_args=args)
         self.dest_key = "test/test1.csv"
@@ -41,7 +40,7 @@ class TestFileToS3Operator(unittest.TestCase):
         with open(self.testfile1, "wb") as f:
             f.write(b"x" * 393216)
 
-    def tearDown(self):
+    def teardown_method(self):
         os.remove(self.testfile1)
 
     def test_init(self):
@@ -51,15 +50,15 @@ class TestFileToS3Operator(unittest.TestCase):
             filename=self.testfile1,
             dest_key=self.dest_key,
             dest_bucket=self.dest_bucket,
-            **self._config,
+            **CONFIG,
         )
         assert operator.filename == self.testfile1
         assert operator.dest_key == self.dest_key
         assert operator.dest_bucket == self.dest_bucket
-        assert operator.verify == self._config["verify"]
-        assert operator.replace == self._config["replace"]
-        assert operator.encrypt == self._config["encrypt"]
-        assert operator.gzip == self._config["gzip"]
+        assert operator.verify == CONFIG["verify"]
+        assert operator.replace == CONFIG["replace"]
+        assert operator.encrypt == CONFIG["encrypt"]
+        assert operator.gzip == CONFIG["gzip"]
 
     def test_execute_exception(self):
         operator = LocalFilesystemToS3Operator(
@@ -68,9 +67,9 @@ class TestFileToS3Operator(unittest.TestCase):
             filename=self.testfile1,
             dest_key=f"s3://dummy/{self.dest_key}",
             dest_bucket=self.dest_bucket,
-            **self._config,
+            **CONFIG,
         )
-        with self.assertRaises(TypeError):
+        with pytest.raises(TypeError):
             operator.execute(None)
 
     @mock_s3
@@ -83,7 +82,7 @@ class TestFileToS3Operator(unittest.TestCase):
             filename=self.testfile1,
             dest_key=self.dest_key,
             dest_bucket=self.dest_bucket,
-            **self._config,
+            **CONFIG,
         )
         operator.execute(None)
 
@@ -102,7 +101,7 @@ class TestFileToS3Operator(unittest.TestCase):
             dag=self.dag,
             filename=self.testfile1,
             dest_key=f"s3://dummy/{self.dest_key}",
-            **self._config,
+            **CONFIG,
         )
         operator.execute(None)
 
diff --git a/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
index 9fb06866fb..f2bd53318c 100644
--- a/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 from airflow.models import DAG, DagRun, TaskInstance
@@ -40,8 +39,8 @@ MOCK_MONGO_RETURN = [
 ]
 
 
-class TestMongoToS3Operator(unittest.TestCase):
-    def setUp(self):
+class TestMongoToS3Operator:
+    def setup_method(self):
         args = {"owner": "airflow", "start_date": DEFAULT_DATE}
 
         self.dag = DAG("test_dag_id", default_args=args)
diff --git a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
index a94a3fce6d..91a1ae32da 100644
--- a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
@@ -17,11 +17,10 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
+import pytest
 from boto3.session import Session
-from parameterized import parameterized
 
 from airflow.models.connection import Connection
 from airflow.providers.amazon.aws.transfers.redshift_to_s3 import 
RedshiftToS3Operator
@@ -29,25 +28,20 @@ from airflow.providers.amazon.aws.utils.redshift import 
build_credentials_block
 from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
 
 
-class TestRedshiftToS3Transfer(unittest.TestCase):
-    @parameterized.expand(
-        [
-            [True, "key/table_"],
-            [False, "key"],
-        ]
-    )
+class TestRedshiftToS3Transfer:
+    @pytest.mark.parametrize("table_as_file_name, expected_s3_key", [[True, 
"key/table_"], [False, "key"]])
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
     
@mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_table_unloading(
         self,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "aws_access_key_id"
         secret_key = "aws_secret_access_key"
@@ -94,24 +88,19 @@ class TestRedshiftToS3Transfer(unittest.TestCase):
         assert secret_key in unload_query
         assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], 
unload_query)
 
-    @parameterized.expand(
-        [
-            [True, "key/table_"],
-            [False, "key"],
-        ]
-    )
+    @pytest.mark.parametrize("table_as_file_name, expected_s3_key", [[True, 
"key/table_"], [False, "key"]])
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
     
@mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_execute_sts_token(
         self,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "ASIA_aws_access_key_id"
         secret_key = "aws_secret_access_key"
@@ -160,13 +149,14 @@ class TestRedshiftToS3Transfer(unittest.TestCase):
         assert token in unload_query
         assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], 
unload_query)
 
-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "table, table_as_file_name, expected_s3_key",
         [
             ["table", True, "key/table_"],
             ["table", False, "key"],
             [None, False, "key"],
             [None, True, "key"],
-        ]
+        ],
     )
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
@@ -174,13 +164,13 @@ class TestRedshiftToS3Transfer(unittest.TestCase):
     
@mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_custom_select_query_unloading(
         self,
-        table,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "aws_access_key_id"
         secret_key = "aws_secret_access_key"
@@ -225,24 +215,19 @@ class TestRedshiftToS3Transfer(unittest.TestCase):
         assert secret_key in unload_query
         assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], 
unload_query)
 
-    @parameterized.expand(
-        [
-            [True, "key/table_"],
-            [False, "key"],
-        ]
-    )
+    @pytest.mark.parametrize("table_as_file_name, expected_s3_key", [[True, 
"key/table_"], [False, "key"]])
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
     
@mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_table_unloading_role_arn(
         self,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "aws_access_key_id"
         secret_key = "aws_secret_access_key"
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py 
b/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py
index 3bb28b692a..6308d34ac0 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 from airflow.providers.amazon.aws.transfers.s3_to_ftp import S3ToFTPOperator
@@ -30,7 +29,7 @@ AWS_CONN_ID = "aws_default"
 FTP_CONN_ID = "ftp_default"
 
 
-class TestS3ToFTPOperator(unittest.TestCase):
+class TestS3ToFTPOperator:
     @mock.patch("airflow.providers.ftp.hooks.ftp.FTPHook.store_file")
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_key")
     
@mock.patch("airflow.providers.amazon.aws.transfers.s3_to_ftp.NamedTemporaryFile")
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py 
b/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
index 56c72a0354..e69673b27e 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 import pytest
@@ -29,7 +28,7 @@ from airflow.providers.amazon.aws.transfers.s3_to_redshift 
import S3ToRedshiftOp
 from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
 
 
-class TestS3ToRedshiftTransfer(unittest.TestCase):
+class TestS3ToRedshiftTransfer:
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py 
b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
index cef89e3967..74b5273f65 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
@@ -17,13 +17,13 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
-
 import boto3
 from moto import mock_s3
 
 from airflow.models import DAG
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator
+from airflow.providers.ssh.hooks.ssh import SSHHook
 from airflow.providers.ssh.operators.ssh import SSHOperator
 from airflow.utils.timezone import datetime
 from tests.test_utils.config import conf_vars
@@ -42,14 +42,10 @@ TEST_DAG_ID = "unit_tests_s3_to_sftp"
 DEFAULT_DATE = datetime(2018, 1, 1)
 
 
-class TestS3ToSFTPOperator(unittest.TestCase):
-    @mock_s3
-    def setUp(self):
-        from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-        from airflow.providers.ssh.hooks.ssh import SSHHook
+class TestS3ToSFTPOperator:
+    def setup_method(self):
 
         hook = SSHHook(ssh_conn_id="ssh_default")
-        s3_hook = S3Hook("aws_default")
         hook.no_host_key_check = True
         dag = DAG(
             f"{TEST_DAG_ID}test_schedule_dag_once",
@@ -58,7 +54,6 @@ class TestS3ToSFTPOperator(unittest.TestCase):
         )
 
         self.hook = hook
-        self.s3_hook = s3_hook
 
         self.ssh_client = self.hook.get_conn()
         self.sftp_client = self.ssh_client.open_sftp()
@@ -71,6 +66,7 @@ class TestS3ToSFTPOperator(unittest.TestCase):
     @mock_s3
     @conf_vars({("core", "enable_xcom_pickling"): "True"})
     def test_s3_to_sftp_operation(self):
+        s3_hook = S3Hook(aws_conn_id=None)
         # Setting
         test_remote_file_content = (
             "This is remote file content \n which is also multiline "
@@ -80,11 +76,11 @@ class TestS3ToSFTPOperator(unittest.TestCase):
         # Test for creation of s3 bucket
         conn = boto3.client("s3")
         conn.create_bucket(Bucket=self.s3_bucket)
-        assert self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert s3_hook.check_for_bucket(self.s3_bucket)
 
         with open(LOCAL_FILE_PATH, "w") as file:
             file.write(test_remote_file_content)
-        self.s3_hook.load_file(LOCAL_FILE_PATH, self.s3_key, 
bucket_name=BUCKET)
+        s3_hook.load_file(LOCAL_FILE_PATH, self.s3_key, bucket_name=BUCKET)
 
         # Check if object was created in s3
         objects_in_dest_bucket = conn.list_objects(Bucket=self.s3_bucket, 
Prefix=self.s3_key)
@@ -122,7 +118,7 @@ class TestS3ToSFTPOperator(unittest.TestCase):
         # Clean up after finishing with test
         conn.delete_object(Bucket=self.s3_bucket, Key=self.s3_key)
         conn.delete_bucket(Bucket=self.s3_bucket)
-        assert not self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert not s3_hook.check_for_bucket(self.s3_bucket)
 
     def delete_remote_resource(self):
         # check the remote file content
@@ -136,5 +132,5 @@ class TestS3ToSFTPOperator(unittest.TestCase):
         assert remove_file_task is not None
         remove_file_task.execute(None)
 
-    def tearDown(self):
+    def teardown_method(self):
         self.delete_remote_resource()
diff --git a/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
index b861a2cdc8..5dcc1890c2 100644
--- a/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from collections import OrderedDict
 from unittest import mock
 
@@ -55,7 +54,7 @@ REPLACE = ENCRYPT = GZIP = False
 ACL_POLICY = None
 
 
-class TestSalesforceToGcsOperator(unittest.TestCase):
+class TestSalesforceToGcsOperator:
     @mock.patch.object(S3Hook, "load_file")
     @mock.patch.object(SalesforceHook, "write_object_to_file")
     @mock.patch.object(SalesforceHook, "make_query")
diff --git a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
index 4a23d54d63..d3a301c823 100644
--- a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
@@ -17,11 +17,9 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
-
 import boto3
+import pytest
 from moto import mock_s3
-from parameterized import parameterized
 
 from airflow.models import DAG
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -44,12 +42,10 @@ TEST_DAG_ID = "unit_tests_sftp_tos3_op"
 DEFAULT_DATE = datetime(2018, 1, 1)
 
 
-class TestSFTPToS3Operator(unittest.TestCase):
-    @mock_s3
-    def setUp(self):
+class TestSFTPToS3Operator:
+    def setup_method(self):
         hook = SSHHook(ssh_conn_id="ssh_default")
 
-        s3_hook = S3Hook("aws_default")
         hook.no_host_key_check = True
         dag = DAG(
             f"{TEST_DAG_ID}test_schedule_dag_once",
@@ -58,7 +54,6 @@ class TestSFTPToS3Operator(unittest.TestCase):
         )
 
         self.hook = hook
-        self.s3_hook = s3_hook
 
         self.ssh_client = self.hook.get_conn()
         self.sftp_client = self.ssh_client.open_sftp()
@@ -68,15 +63,10 @@ class TestSFTPToS3Operator(unittest.TestCase):
         self.sftp_path = SFTP_PATH
         self.s3_key = S3_KEY
 
-    @parameterized.expand(
-        [
-            (True,),
-            (False,),
-        ]
-    )
+    @pytest.mark.parametrize("use_temp_file", [True, False])
     @mock_s3
     @conf_vars({("core", "enable_xcom_pickling"): "True"})
-    def test_sftp_to_s3_operation(self, use_temp_file=True):
+    def test_sftp_to_s3_operation(self, use_temp_file):
         # Setting
         test_remote_file_content = (
             "This is remote file content \n which is also multiline "
@@ -95,9 +85,10 @@ class TestSFTPToS3Operator(unittest.TestCase):
         create_file_task.execute(None)
 
         # Test for creation of s3 bucket
+        s3_hook = S3Hook(aws_conn_id=None)
         conn = boto3.client("s3")
         conn.create_bucket(Bucket=self.s3_bucket)
-        assert self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert s3_hook.check_for_bucket(self.s3_bucket)
 
         # get remote file to local
         run_task = SFTPToS3Operator(
@@ -125,4 +116,4 @@ class TestSFTPToS3Operator(unittest.TestCase):
         # Clean up after finishing with test
         conn.delete_object(Bucket=self.s3_bucket, Key=self.s3_key)
         conn.delete_bucket(Bucket=self.s3_bucket)
-        assert not self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert not s3_hook.check_for_bucket(self.s3_bucket)
diff --git a/tests/providers/amazon/aws/transfers/test_sql_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
index fd7f25a009..84db615eeb 100644
--- a/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
@@ -17,19 +17,17 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from tempfile import NamedTemporaryFile
 from unittest import mock
 
 import pandas as pd
 import pytest
-from parameterized import parameterized
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.transfers.sql_to_s3 import SqlToS3Operator
 
 
-class TestSqlToS3Operator(unittest.TestCase):
+class TestSqlToS3Operator:
     
@mock.patch("airflow.providers.amazon.aws.transfers.sql_to_s3.NamedTemporaryFile")
     @mock.patch("airflow.providers.amazon.aws.transfers.sql_to_s3.S3Hook")
     def test_execute_csv(self, mock_s3_hook, temp_mock):
@@ -146,13 +144,14 @@ class TestSqlToS3Operator(unittest.TestCase):
                 replace=True,
             )
 
-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "params",
         [
-            ("with-csv", {"file_format": "csv", "null_string_result": None}),
-            ("with-parquet", {"file_format": "parquet", "null_string_result": 
"None"}),
-        ]
+            pytest.param({"file_format": "csv", "null_string_result": None}, 
id="with-csv"),
+            pytest.param({"file_format": "parquet", "null_string_result": 
"None"}, id="with-parquet"),
+        ],
     )
-    def test_fix_dtypes(self, _, params):
+    def test_fix_dtypes(self, params):
         op = SqlToS3Operator(
             query="query",
             s3_bucket="s3_bucket",

Reply via email to