This is an automated email from the ASF dual-hosted git repository.

joshfell pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 6621874bf5 Resolve deprecation warnings in Azure FileShare-to-GCS tests (#39599)
6621874bf5 is described below

commit 6621874bf523c0f1caa5ac51e482434528ee13e4
Author: Josh Fell <[email protected]>
AuthorDate: Tue May 14 12:55:07 2024 -0400

    Resolve deprecation warnings in Azure FileShare-to-GCS tests (#39599)
    
    Related: #39485
    
    AzureFileShareToGCSOperator has a deprecated parameter, `directory_name`,
    which was replaced by `directory_path`. This PR updates the system and unit
    tests, as well as the operator docs, to use the new parameter.
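
    For illustration only (not part of the commit itself), a task using the new
    parameter might look like the sketch below; the share, directory, and bucket
    values are hypothetical placeholders:

        from airflow.providers.google.cloud.transfers.azure_fileshare_to_gcs import (
            AzureFileShareToGCSOperator,
        )

        # Transfer files from an Azure FileShare directory to a GCS bucket,
        # using directory_path instead of the deprecated directory_name.
        sync_azure_files_with_gcs = AzureFileShareToGCSOperator(
            task_id="sync_azure_files_with_gcs",
            share_name="test-azure-share",
            directory_path="test-azure-dir",
            dest_gcs="gs://gcs-bucket/data/",
            replace=False,
            gzip=True,
        )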
---
 .../google/cloud/transfers/azure_fileshare_to_gcs.py          | 11 +++++++----
 tests/always/test_example_dags.py                             |  1 -
 .../google/cloud/transfers/test_azure_fileshare_to_gcs.py     | 10 +++++-----
 .../google/cloud/azure/example_azure_fileshare_to_gcs.py      |  8 +++++---
 4 files changed, 17 insertions(+), 13 deletions(-)

diff --git a/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py b/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py
index cca318001c..6b9970481d 100644
--- a/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py
@@ -43,8 +43,10 @@ class AzureFileShareToGCSOperator(BaseOperator):
     Does not include subdirectories.  May be filtered by prefix.
 
     :param share_name: The Azure FileShare share where to find the objects. (templated)
-    :param directory_name: (Optional) Path to Azure FileShare directory which content is to be transferred.
+    :param directory_name: (Deprecated) Path to Azure FileShare directory which content is to be transferred.
         Defaults to root directory (templated)
+    :param directory_path: (Optional) Path to Azure FileShare directory which content is to be transferred.
+        Defaults to root directory. Use this instead of ``directory_name``. (templated)
     :param prefix: Prefix string which filters objects whose name begin with
         such prefix. (templated)
     :param azure_fileshare_conn_id: The source WASB connection
@@ -63,13 +65,14 @@ class AzureFileShareToGCSOperator(BaseOperator):
         Service Account Token Creator IAM role to the directly preceding identity, with first
         account from the list granting this role to the originating account (templated).
 
-    Note that ``share_name``, ``directory_name``, ``prefix``, ``delimiter`` and ``dest_gcs`` are
+    Note that ``share_name``, ``directory_path``, ``prefix``, and ``dest_gcs`` are
     templated, so you can use variables in them if you wish.
     """
 
     template_fields: Sequence[str] = (
         "share_name",
         "directory_name",
+        "directory_path",
         "prefix",
         "dest_gcs",
     )
@@ -94,8 +97,8 @@ class AzureFileShareToGCSOperator(BaseOperator):
         self.share_name = share_name
         self.directory_path = directory_path
         self.directory_name = directory_name
-        if self.directory_path is None:
-            self.directory_path = directory_name
+        if self.directory_path is None and self.directory_name is not None:
+            self.directory_path = self.directory_name
             warnings.warn(
                 "Use 'directory_path' instead of 'directory_name'.",
                 AirflowProviderDeprecationWarning,
diff --git a/tests/always/test_example_dags.py b/tests/always/test_example_dags.py
index cd207ac60b..79f11eb517 100644
--- a/tests/always/test_example_dags.py
+++ b/tests/always/test_example_dags.py
@@ -51,7 +51,6 @@ IGNORE_AIRFLOW_PROVIDER_DEPRECATION_WARNING: tuple[str, ...] = (
     "tests/system/providers/amazon/aws/example_eks_with_nodegroups.py",
     "tests/system/providers/amazon/aws/example_emr.py",
     "tests/system/providers/amazon/aws/example_emr_notebook_execution.py",
-    "tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py",
     "tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py",
     "tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py",
     "tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py",
diff --git a/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py b/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py
index 0ef2f4a5b9..39b5bb62f3 100644
--- a/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py
@@ -22,7 +22,7 @@ from airflow.providers.google.cloud.transfers.azure_fileshare_to_gcs import Azur
 
 TASK_ID = "test-azure-fileshare-to-gcs"
 AZURE_FILESHARE_SHARE = "test-share"
-AZURE_FILESHARE_DIRECTORY_NAME = "/path/to/dir"
+AZURE_FILESHARE_DIRECTORY_PATH = "/path/to/dir"
 GCS_PATH_PREFIX = "gs://gcs-bucket/data/"
 MOCK_FILES = ["TEST1.csv", "TEST2.csv", "TEST3.csv"]
 AZURE_FILESHARE_CONN_ID = "azure_fileshare_default"
@@ -37,7 +37,7 @@ class TestAzureFileShareToGCSOperator:
         operator = AzureFileShareToGCSOperator(
             task_id=TASK_ID,
             share_name=AZURE_FILESHARE_SHARE,
-            directory_name=AZURE_FILESHARE_DIRECTORY_NAME,
+            directory_path=AZURE_FILESHARE_DIRECTORY_PATH,
             azure_fileshare_conn_id=AZURE_FILESHARE_CONN_ID,
             gcp_conn_id=GCS_CONN_ID,
             dest_gcs=GCS_PATH_PREFIX,
@@ -46,7 +46,7 @@ class TestAzureFileShareToGCSOperator:
 
         assert operator.task_id == TASK_ID
         assert operator.share_name == AZURE_FILESHARE_SHARE
-        assert operator.directory_name == AZURE_FILESHARE_DIRECTORY_NAME
+        assert operator.directory_path == AZURE_FILESHARE_DIRECTORY_PATH
         assert operator.azure_fileshare_conn_id == AZURE_FILESHARE_CONN_ID
         assert operator.gcp_conn_id == GCS_CONN_ID
         assert operator.dest_gcs == GCS_PATH_PREFIX
@@ -60,7 +60,7 @@ class TestAzureFileShareToGCSOperator:
         operator = AzureFileShareToGCSOperator(
             task_id=TASK_ID,
             share_name=AZURE_FILESHARE_SHARE,
-            directory_name=AZURE_FILESHARE_DIRECTORY_NAME,
+            directory_path=AZURE_FILESHARE_DIRECTORY_PATH,
             azure_fileshare_conn_id=AZURE_FILESHARE_CONN_ID,
             gcp_conn_id=GCS_CONN_ID,
             dest_gcs=GCS_PATH_PREFIX,
@@ -97,7 +97,7 @@ class TestAzureFileShareToGCSOperator:
         operator = AzureFileShareToGCSOperator(
             task_id=TASK_ID,
             share_name=AZURE_FILESHARE_SHARE,
-            directory_name=AZURE_FILESHARE_DIRECTORY_NAME,
+            directory_path=AZURE_FILESHARE_DIRECTORY_PATH,
             azure_fileshare_conn_id=AZURE_FILESHARE_CONN_ID,
             gcp_conn_id=GCS_CONN_ID,
             dest_gcs=GCS_PATH_PREFIX,
diff --git a/tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py b/tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py
index c46287db08..ce86a4b694 100644
--- a/tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py
+++ b/tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py
@@ -30,7 +30,7 @@ DAG_ID = "azure_fileshare_to_gcs_example"
 
 BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
 AZURE_SHARE_NAME = os.environ.get("AZURE_SHARE_NAME", "test-azure-share")
-AZURE_DIRECTORY_NAME = "test-azure-dir"
+AZURE_DIRECTORY_PATH = "test-azure-dir"
 
 with DAG(
     dag_id=DAG_ID,
@@ -49,7 +49,9 @@ with DAG(
     tags=["example", "azure"],
 ) as dag:
     create_bucket = GCSCreateBucketOperator(
-        task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
+        task_id="create_bucket",
+        bucket_name=BUCKET_NAME,
+        project_id=PROJECT_ID,  # type: ignore[arg-type]
     )
 
     # [START howto_operator_azure_fileshare_to_gcs_basic]
@@ -57,7 +59,7 @@ with DAG(
         task_id="sync_azure_files_with_gcs",
         share_name=AZURE_SHARE_NAME,
         dest_gcs=BUCKET_NAME,
-        directory_name=AZURE_DIRECTORY_NAME,
+        directory_path=AZURE_DIRECTORY_PATH,
         replace=False,
         gzip=True,
         google_impersonation_chain=None,

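As a minimal sketch of the constructor fallback changed in azure_fileshare_to_gcs.py
above: passing only the deprecated `directory_name` is expected to emit an
AirflowProviderDeprecationWarning and copy the value into `directory_path`. This
sketch is not taken from the commit; the task ID, share, and destination values
are hypothetical placeholders, and the operator is instantiated outside a DAG
purely for illustration:

    import warnings

    from airflow.exceptions import AirflowProviderDeprecationWarning
    from airflow.providers.google.cloud.transfers.azure_fileshare_to_gcs import (
        AzureFileShareToGCSOperator,
    )

    # Passing only the deprecated directory_name should warn and populate
    # directory_path with the same value.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        op = AzureFileShareToGCSOperator(
            task_id="legacy-directory-name-example",
            share_name="test-share",
            directory_name="/path/to/dir",
            dest_gcs="gs://gcs-bucket/data/",
        )

    assert op.directory_path == "/path/to/dir"
    assert any(issubclass(w.category, AirflowProviderDeprecationWarning) for w in caught)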