This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 8da1ff573b Clean microsoft azure provider by deleting the custom 
prefix from conn extra fields (#30558)
8da1ff573b is described below

commit 8da1ff573bf598225f79899636efa0f9c55c4881
Author: Hussein Awala <[email protected]>
AuthorDate: Mon Aug 7 22:32:04 2023 +0200

    Clean microsoft azure provider by deleting the custom prefix from conn 
extra fields (#30558)
---
 airflow/providers/microsoft/azure/hooks/adx.py     | 12 ++++---
 .../providers/microsoft/azure/hooks/base_azure.py  | 37 +++++++++++++-------
 .../microsoft/azure/hooks/container_volume.py      |  3 +-
 airflow/providers/microsoft/azure/hooks/cosmos.py  |  3 +-
 .../microsoft/azure/hooks/data_factory.py          | 29 +++++++++++++---
 .../providers/microsoft/azure/hooks/data_lake.py   |  3 +-
 .../providers/microsoft/azure/hooks/fileshare.py   | 38 ++++----------------
 airflow/providers/microsoft/azure/hooks/wasb.py    | 32 -----------------
 airflow/providers/microsoft/azure/utils.py         | 31 -----------------
 .../connections/azure_container_volume.rst         |  4 +--
 .../connections/azure_synapse.rst                  |  6 ++--
 tests/providers/microsoft/azure/hooks/test_adx.py  | 32 ++++++++---------
 .../microsoft/azure/hooks/test_azure_batch.py      |  4 +--
 .../azure/hooks/test_azure_container_volume.py     |  4 +--
 .../microsoft/azure/hooks/test_azure_cosmos.py     |  4 +--
 .../azure/hooks/test_azure_data_factory.py         | 40 ++++++++++------------
 .../microsoft/azure/hooks/test_azure_data_lake.py  | 21 +-----------
 .../microsoft/azure/hooks/test_azure_fileshare.py  | 30 ----------------
 .../microsoft/azure/hooks/test_azure_synapse.py    | 10 +++---
 tests/providers/microsoft/azure/hooks/test_wasb.py | 32 -----------------
 .../microsoft/azure/operators/test_azure_batch.py  |  4 +--
 .../azure/operators/test_azure_synapse.py          |  6 ++--
 tests/providers/microsoft/azure/test_utils.py      | 17 ---------
 23 files changed, 122 insertions(+), 280 deletions(-)

diff --git a/airflow/providers/microsoft/azure/hooks/adx.py 
b/airflow/providers/microsoft/azure/hooks/adx.py
index e3cda1d08f..705db4b5b3 100644
--- a/airflow/providers/microsoft/azure/hooks/adx.py
+++ b/airflow/providers/microsoft/azure/hooks/adx.py
@@ -32,9 +32,8 @@ from azure.kusto.data.exceptions import KustoServiceError
 from azure.kusto.data.request import ClientRequestProperties, KustoClient, 
KustoConnectionStringBuilder
 from azure.kusto.data.response import KustoResponseDataSetV2
 
-from airflow.exceptions import AirflowException
+from airflow.exceptions import AirflowException, 
AirflowProviderDeprecationWarning
 from airflow.hooks.base import BaseHook
-from airflow.providers.microsoft.azure.utils import _ensure_prefixes
 
 
 class AzureDataExplorerHook(BaseHook):
@@ -95,7 +94,6 @@ class AzureDataExplorerHook(BaseHook):
         }
 
     @staticmethod
-    @_ensure_prefixes(conn_type="azure_data_explorer")
     def get_ui_field_behaviour() -> dict[str, Any]:
         """Returns custom field behaviour."""
         return {
@@ -148,7 +146,13 @@ class AzureDataExplorerHook(BaseHook):
             value = extras.get(name)
             if value:
                 warn_if_collison(name, backcompat_key)
-            if not value:
+            if not value and extras.get(backcompat_key):
+                warnings.warn(
+                    f"`{backcompat_key}` is deprecated in azure connection 
extra,"
+                    f" please use `{name}` instead",
+                    AirflowProviderDeprecationWarning,
+                    stacklevel=2,
+                )
                 value = extras.get(backcompat_key)
             if not value:
                 raise AirflowException(f"Required connection parameter is 
missing: `{name}`")
diff --git a/airflow/providers/microsoft/azure/hooks/base_azure.py 
b/airflow/providers/microsoft/azure/hooks/base_azure.py
index 2a4d250e1c..214cbd5f20 100644
--- a/airflow/providers/microsoft/azure/hooks/base_azure.py
+++ b/airflow/providers/microsoft/azure/hooks/base_azure.py
@@ -16,12 +16,13 @@
 # under the License.
 from __future__ import annotations
 
+import warnings
 from typing import Any
 
 from azure.common.client_factory import get_client_from_auth_file, 
get_client_from_json_dict
 from azure.common.credentials import ServicePrincipalCredentials
 
-from airflow.exceptions import AirflowException
+from airflow.exceptions import AirflowException, 
AirflowProviderDeprecationWarning
 from airflow.hooks.base import BaseHook
 
 
@@ -50,12 +51,8 @@ class AzureBaseHook(BaseHook):
         from wtforms import StringField
 
         return {
-            "extra__azure__tenantId": StringField(
-                lazy_gettext("Azure Tenant ID"), widget=BS3TextFieldWidget()
-            ),
-            "extra__azure__subscriptionId": StringField(
-                lazy_gettext("Azure Subscription ID"), 
widget=BS3TextFieldWidget()
-            ),
+            "tenantId": StringField(lazy_gettext("Azure Tenant ID"), 
widget=BS3TextFieldWidget()),
+            "subscriptionId": StringField(lazy_gettext("Azure Subscription 
ID"), widget=BS3TextFieldWidget()),
         }
 
     @staticmethod
@@ -79,8 +76,8 @@ class AzureBaseHook(BaseHook):
                 ),
                 "login": "client_id (token credentials auth)",
                 "password": "secret (token credentials auth)",
-                "extra__azure__tenantId": "tenantId (token credentials auth)",
-                "extra__azure__subscriptionId": "subscriptionId (token 
credentials auth)",
+                "tenantId": "tenantId (token credentials auth)",
+                "subscriptionId": "subscriptionId (token credentials auth)",
             },
         }
 
@@ -96,10 +93,24 @@ class AzureBaseHook(BaseHook):
         :return: the authenticated client.
         """
         conn = self.get_connection(self.conn_id)
-        tenant = conn.extra_dejson.get("extra__azure__tenantId") or 
conn.extra_dejson.get("tenantId")
-        subscription_id = 
conn.extra_dejson.get("extra__azure__subscriptionId") or conn.extra_dejson.get(
-            "subscriptionId"
-        )
+        tenant = conn.extra_dejson.get("tenantId")
+        if not tenant and conn.extra_dejson.get("extra__azure__tenantId"):
+            warnings.warn(
+                "`extra__azure__tenantId` is deprecated in azure connection 
extra, "
+                "please use `tenantId` instead",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
+            tenant = conn.extra_dejson.get("extra__azure__tenantId")
+        subscription_id = conn.extra_dejson.get("subscriptionId")
+        if not subscription_id and 
conn.extra_dejson.get("extra__azure__subscriptionId"):
+            warnings.warn(
+                "`extra__azure__subscriptionId` is deprecated in azure 
connection extra, "
+                "please use `subscriptionId` instead",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
+            subscription_id = 
conn.extra_dejson.get("extra__azure__subscriptionId")
 
         key_path = conn.extra_dejson.get("key_path")
         if key_path:
diff --git a/airflow/providers/microsoft/azure/hooks/container_volume.py 
b/airflow/providers/microsoft/azure/hooks/container_volume.py
index a40f4d4249..fd8f3fa4c7 100644
--- a/airflow/providers/microsoft/azure/hooks/container_volume.py
+++ b/airflow/providers/microsoft/azure/hooks/container_volume.py
@@ -22,7 +22,7 @@ from typing import Any
 from azure.mgmt.containerinstance.models import AzureFileVolume, Volume
 
 from airflow.hooks.base import BaseHook
-from airflow.providers.microsoft.azure.utils import _ensure_prefixes, get_field
+from airflow.providers.microsoft.azure.utils import get_field
 
 
 class AzureContainerVolumeHook(BaseHook):
@@ -65,7 +65,6 @@ class AzureContainerVolumeHook(BaseHook):
         }
 
     @staticmethod
-    @_ensure_prefixes(conn_type="azure_container_volume")
     def get_ui_field_behaviour() -> dict[str, Any]:
         """Returns custom field behaviour."""
         return {
diff --git a/airflow/providers/microsoft/azure/hooks/cosmos.py 
b/airflow/providers/microsoft/azure/hooks/cosmos.py
index 45b1b0dab2..44a44b7b7e 100644
--- a/airflow/providers/microsoft/azure/hooks/cosmos.py
+++ b/airflow/providers/microsoft/azure/hooks/cosmos.py
@@ -34,7 +34,7 @@ from azure.cosmos.exceptions import CosmosHttpResponseError
 
 from airflow.exceptions import AirflowBadRequest
 from airflow.hooks.base import BaseHook
-from airflow.providers.microsoft.azure.utils import _ensure_prefixes, get_field
+from airflow.providers.microsoft.azure.utils import get_field
 
 
 class AzureCosmosDBHook(BaseHook):
@@ -71,7 +71,6 @@ class AzureCosmosDBHook(BaseHook):
         }
 
     @staticmethod
-    @_ensure_prefixes(conn_type="azure_cosmos")  # todo: remove when min 
airflow version >= 2.5
     def get_ui_field_behaviour() -> dict[str, Any]:
         """Returns custom field behaviour."""
         return {
diff --git a/airflow/providers/microsoft/azure/hooks/data_factory.py 
b/airflow/providers/microsoft/azure/hooks/data_factory.py
index 590f2af92b..cd00b1b631 100644
--- a/airflow/providers/microsoft/azure/hooks/data_factory.py
+++ b/airflow/providers/microsoft/azure/hooks/data_factory.py
@@ -32,6 +32,7 @@ from __future__ import annotations
 
 import inspect
 import time
+import warnings
 from functools import wraps
 from typing import Any, Callable, TypeVar, Union, cast
 
@@ -56,7 +57,7 @@ from azure.mgmt.datafactory.models import (
     TriggerResource,
 )
 
-from airflow.exceptions import AirflowException
+from airflow.exceptions import AirflowException, 
AirflowProviderDeprecationWarning
 from airflow.hooks.base import BaseHook
 from airflow.typing_compat import TypedDict
 
@@ -85,9 +86,15 @@ def provide_targeted_factory(func: Callable) -> Callable:
                 self = args[0]
                 conn = self.get_connection(self.conn_id)
                 extras = conn.extra_dejson
-                default_value = extras.get(default_key) or extras.get(
-                    f"extra__azure_data_factory__{default_key}"
-                )
+                default_value = extras.get(default_key)
+                if not default_value and 
extras.get(f"extra__azure_data_factory__{default_key}"):
+                    warnings.warn(
+                        f"`extra__azure_data_factory__{default_key}` is 
deprecated in azure connection extra,"
+                        f" please use `{default_key}` instead",
+                        AirflowProviderDeprecationWarning,
+                        stacklevel=2,
+                    )
+                    default_value = 
extras.get(f"extra__azure_data_factory__{default_key}")
                 if not default_value:
                     raise AirflowException("Could not determine the targeted 
data factory.")
 
@@ -139,6 +146,12 @@ def get_field(extras: dict, field_name: str, strict: bool 
= False):
         return extras[field_name] or None
     prefixed_name = f"{backcompat_prefix}{field_name}"
     if prefixed_name in extras:
+        warnings.warn(
+            f"`{prefixed_name}` is deprecated in azure connection extra,"
+            f" please use `{field_name}` instead",
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
+        )
         return extras[prefixed_name] or None
     if strict:
         raise KeyError(f"Field {field_name} not found in extras")
@@ -1086,6 +1099,14 @@ def provide_targeted_factory_async(func: T) -> T:
                 default_value = extras.get(default_key) or extras.get(
                     f"extra__azure_data_factory__{default_key}"
                 )
+                if not default_value and 
extras.get(f"extra__azure_data_factory__{default_key}"):
+                    warnings.warn(
+                        f"`extra__azure_data_factory__{default_key}` is 
deprecated in azure connection extra,"
+                        f" please use `{default_key}` instead",
+                        AirflowProviderDeprecationWarning,
+                        stacklevel=2,
+                    )
+                    default_value = 
extras.get(f"extra__azure_data_factory__{default_key}")
                 if not default_value:
                     raise AirflowException("Could not determine the targeted 
data factory.")
 
diff --git a/airflow/providers/microsoft/azure/hooks/data_lake.py 
b/airflow/providers/microsoft/azure/hooks/data_lake.py
index 5098113baa..ef84ec51c2 100644
--- a/airflow/providers/microsoft/azure/hooks/data_lake.py
+++ b/airflow/providers/microsoft/azure/hooks/data_lake.py
@@ -33,7 +33,7 @@ from azure.storage.filedatalake import (
 
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
-from airflow.providers.microsoft.azure.utils import _ensure_prefixes, get_field
+from airflow.providers.microsoft.azure.utils import get_field
 
 
 class AzureDataLakeHook(BaseHook):
@@ -73,7 +73,6 @@ class AzureDataLakeHook(BaseHook):
         }
 
     @staticmethod
-    @_ensure_prefixes(conn_type="azure_data_lake")
     def get_ui_field_behaviour() -> dict[str, Any]:
         """Returns custom field behaviour."""
         return {
diff --git a/airflow/providers/microsoft/azure/hooks/fileshare.py 
b/airflow/providers/microsoft/azure/hooks/fileshare.py
index 0244b7ce72..e23319734e 100644
--- a/airflow/providers/microsoft/azure/hooks/fileshare.py
+++ b/airflow/providers/microsoft/azure/hooks/fileshare.py
@@ -18,44 +18,14 @@
 from __future__ import annotations
 
 import warnings
-from functools import wraps
 from typing import IO, Any
 
 from azure.storage.file import File, FileService
 
+from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.hooks.base import BaseHook
 
 
-def _ensure_prefixes(conn_type):
-    """
-    Deprecated.
-
-    Remove when provider min airflow version >= 2.5.0 since this is handled by
-    provider manager from that version.
-    """
-
-    def dec(func):
-        @wraps(func)
-        def inner():
-            field_behaviors = func()
-            conn_attrs = {"host", "schema", "login", "password", "port", 
"extra"}
-
-            def _ensure_prefix(field):
-                if field not in conn_attrs and not field.startswith("extra__"):
-                    return f"extra__{conn_type}__{field}"
-                else:
-                    return field
-
-            if "placeholders" in field_behaviors:
-                placeholders = field_behaviors["placeholders"]
-                field_behaviors["placeholders"] = {_ensure_prefix(k): v for k, 
v in placeholders.items()}
-            return field_behaviors
-
-        return inner
-
-    return dec
-
-
 class AzureFileShareHook(BaseHook):
     """
     Interacts with Azure FileShare Storage.
@@ -94,7 +64,6 @@ class AzureFileShareHook(BaseHook):
         }
 
     @staticmethod
-    @_ensure_prefixes(conn_type="azure_fileshare")
     def get_ui_field_behaviour() -> dict[str, Any]:
         """Returns custom field behaviour."""
         return {
@@ -138,6 +107,11 @@ class AzureFileShareHook(BaseHook):
                 check_for_conflict(key)
             elif key.startswith(backcompat_prefix):
                 short_name = key[len(backcompat_prefix) :]
+                warnings.warn(
+                    f"`{key}` is deprecated in azure connection extra please 
use `{short_name}` instead",
+                    AirflowProviderDeprecationWarning,
+                    stacklevel=2,
+                )
                 if short_name not in service_options:  # prefer values 
provided with short name
                     service_options[short_name] = value
             else:
diff --git a/airflow/providers/microsoft/azure/hooks/wasb.py 
b/airflow/providers/microsoft/azure/hooks/wasb.py
index d129cc4ce5..5d29f6b0b6 100644
--- a/airflow/providers/microsoft/azure/hooks/wasb.py
+++ b/airflow/providers/microsoft/azure/hooks/wasb.py
@@ -27,7 +27,6 @@ from __future__ import annotations
 
 import logging
 import os
-from functools import wraps
 from typing import Any, Union
 
 from asgiref.sync import sync_to_async
@@ -51,36 +50,6 @@ from airflow.hooks.base import BaseHook
 AsyncCredentials = Union[AsyncClientSecretCredential, 
AsyncDefaultAzureCredential]
 
 
-def _ensure_prefixes(conn_type):
-    """
-    Deprecated.
-
-    Remove when provider min airflow version >= 2.5.0 since this is handled by
-    provider manager from that version.
-    """
-
-    def dec(func):
-        @wraps(func)
-        def inner():
-            field_behaviors = func()
-            conn_attrs = {"host", "schema", "login", "password", "port", 
"extra"}
-
-            def _ensure_prefix(field):
-                if field not in conn_attrs and not field.startswith("extra__"):
-                    return f"extra__{conn_type}__{field}"
-                else:
-                    return field
-
-            if "placeholders" in field_behaviors:
-                placeholders = field_behaviors["placeholders"]
-                field_behaviors["placeholders"] = {_ensure_prefix(k): v for k, 
v in placeholders.items()}
-            return field_behaviors
-
-        return inner
-
-    return dec
-
-
 class WasbHook(BaseHook):
     """
     Interacts with Azure Blob Storage through the ``wasb://`` protocol.
@@ -124,7 +93,6 @@ class WasbHook(BaseHook):
         }
 
     @staticmethod
-    @_ensure_prefixes(conn_type="wasb")
     def get_ui_field_behaviour() -> dict[str, Any]:
         """Returns custom field behaviour."""
         return {
diff --git a/airflow/providers/microsoft/azure/utils.py 
b/airflow/providers/microsoft/azure/utils.py
index e4161a1b12..0a8edcf7c7 100644
--- a/airflow/providers/microsoft/azure/utils.py
+++ b/airflow/providers/microsoft/azure/utils.py
@@ -18,37 +18,6 @@
 from __future__ import annotations
 
 import warnings
-from functools import wraps
-
-
-def _ensure_prefixes(conn_type):
-    """
-    Deprecated.
-
-    Remove when provider min airflow version >= 2.5.0 since this is handled by
-    provider manager from that version.
-    """
-
-    def dec(func):
-        @wraps(func)
-        def inner():
-            field_behaviors = func()
-            conn_attrs = {"host", "schema", "login", "password", "port", 
"extra"}
-
-            def _ensure_prefix(field):
-                if field not in conn_attrs and not field.startswith("extra__"):
-                    return f"extra__{conn_type}__{field}"
-                else:
-                    return field
-
-            if "placeholders" in field_behaviors:
-                placeholders = field_behaviors["placeholders"]
-                field_behaviors["placeholders"] = {_ensure_prefix(k): v for k, 
v in placeholders.items()}
-            return field_behaviors
-
-        return inner
-
-    return dec
 
 
 def get_field(*, conn_id: str, conn_type: str, extras: dict, field_name: str):
diff --git 
a/docs/apache-airflow-providers-microsoft-azure/connections/azure_container_volume.rst
 
b/docs/apache-airflow-providers-microsoft-azure/connections/azure_container_volume.rst
index c81be9a88d..e3fe8efe76 100644
--- 
a/docs/apache-airflow-providers-microsoft-azure/connections/azure_container_volume.rst
+++ 
b/docs/apache-airflow-providers-microsoft-azure/connections/azure_container_volume.rst
@@ -34,7 +34,7 @@ There are four ways to connect to Azure Container Volume 
using Airflow.
    i.e. add specific credentials (client_id, secret) and subscription id to 
the Airflow connection.
 2. Use a `Connection String
    
<https://docs.microsoft.com/en-us/azure/data-explorer/kusto/api/connection-strings/storage>`_
-   i.e. add connection string to 
``extra__azure_container_volume__connection_string`` in the Airflow connection.
+   i.e. add connection string to ``connection_string`` in the Airflow 
connection.
 
 Only one authorization method can be used at a time. If you need to manage 
multiple credentials or keys then you should
 configure multiple connections.
@@ -61,7 +61,7 @@ Extra (optional)
     Specify the extra parameters (as json dictionary) that can be used in 
Azure connection.
     The following parameters are all optional:
 
-    * ``extra__azure_container_volume__connection_string``: Connection string 
for use with connection string authentication.
+    * ``connection_string``: Connection string for use with connection string 
authentication.
 
 When specifying the connection in environment variable you should specify
 it using URI syntax.
diff --git 
a/docs/apache-airflow-providers-microsoft-azure/connections/azure_synapse.rst 
b/docs/apache-airflow-providers-microsoft-azure/connections/azure_synapse.rst
index dd8dfd0293..2327b1474c 100644
--- 
a/docs/apache-airflow-providers-microsoft-azure/connections/azure_synapse.rst
+++ 
b/docs/apache-airflow-providers-microsoft-azure/connections/azure_synapse.rst
@@ -55,15 +55,13 @@ Secret
     It can be left out to fall back on ``DefaultAzureCredential``.
 
 Tenant ID
-    Specify the Azure tenant ID used for the initial connection.
+    Specify the ``tenantId`` used for the initial connection.
     This is needed for *token credentials* authentication mechanism.
     It can be left out to fall back on ``DefaultAzureCredential``.
-    Use the key ``extra__azure_synapse__tenantId`` to pass in the tenant ID.
 
 Subscription ID
-    A subscription ID is required for the connection.
+    ``subscriptionId`` is required for the connection.
     This is needed for all authentication mechanisms.
-    Use the key ``extra__azure_synapse__subscriptionId`` to pass in the Azure 
subscription ID.
 
 Synapse Workspace URL
     Specify the Azure Synapse endpoint to interface with.
diff --git a/tests/providers/microsoft/azure/hooks/test_adx.py 
b/tests/providers/microsoft/azure/hooks/test_adx.py
index 22264fe463..dd4787eb06 100644
--- a/tests/providers/microsoft/azure/hooks/test_adx.py
+++ b/tests/providers/microsoft/azure/hooks/test_adx.py
@@ -54,7 +54,7 @@ class TestAzureDataExplorerHook:
         )
         with pytest.raises(AirflowException) as ctx:
             AzureDataExplorerHook(azure_data_explorer_conn_id=ADX_TEST_CONN_ID)
-            assert "is missing: `extra__azure_data_explorer__auth_method`" in 
str(ctx.value)
+            assert "is missing: `data_explorer__auth_method`" in str(ctx.value)
 
     def test_conn_unknown_method(self):
         db.merge_conn(
@@ -64,7 +64,7 @@ class TestAzureDataExplorerHook:
                 login="client_id",
                 password="client secret",
                host="https://help.kusto.windows.net",
-                extra=json.dumps({"extra__azure_data_explorer__auth_method": 
"AAD_OTHER"}),
+                extra=json.dumps({"auth_method": "AAD_OTHER"}),
             )
         )
         with pytest.raises(AirflowException) as ctx:
@@ -97,8 +97,8 @@ class TestAzureDataExplorerHook:
                host="https://help.kusto.windows.net",
                 extra=json.dumps(
                     {
-                        "extra__azure_data_explorer__tenant": "tenant",
-                        "extra__azure_data_explorer__auth_method": "AAD_CREDS",
+                        "tenant": "tenant",
+                        "auth_method": "AAD_CREDS",
                     }
                 ),
             )
@@ -122,8 +122,8 @@ class TestAzureDataExplorerHook:
                host="https://help.kusto.windows.net",
                 extra=json.dumps(
                     {
-                        "extra__azure_data_explorer__tenant": "tenant",
-                        "extra__azure_data_explorer__auth_method": "AAD_APP",
+                        "tenant": "tenant",
+                        "auth_method": "AAD_APP",
                     }
                 ),
             )
@@ -146,10 +146,10 @@ class TestAzureDataExplorerHook:
                host="https://help.kusto.windows.net",
                 extra=json.dumps(
                     {
-                        "extra__azure_data_explorer__tenant": "tenant",
-                        "extra__azure_data_explorer__auth_method": 
"AAD_APP_CERT",
-                        "extra__azure_data_explorer__certificate": "PEM",
-                        "extra__azure_data_explorer__thumbprint": "thumbprint",
+                        "tenant": "tenant",
+                        "auth_method": "AAD_APP_CERT",
+                        "certificate": "PEM",
+                        "thumbprint": "thumbprint",
                     }
                 ),
             )
@@ -169,7 +169,7 @@ class TestAzureDataExplorerHook:
                 conn_id=ADX_TEST_CONN_ID,
                 conn_type="azure_data_explorer",
                host="https://help.kusto.windows.net",
-                extra=json.dumps({"extra__azure_data_explorer__auth_method": 
"AAD_DEVICE"}),
+                extra=json.dumps({"auth_method": "AAD_DEVICE"}),
             )
         )
         AzureDataExplorerHook(azure_data_explorer_conn_id=ADX_TEST_CONN_ID)
@@ -185,7 +185,7 @@ class TestAzureDataExplorerHook:
                 conn_id=ADX_TEST_CONN_ID,
                 conn_type="azure_data_explorer",
                host="https://help.kusto.windows.net",
-                extra=json.dumps({"extra__azure_data_explorer__auth_method": 
"AAD_DEVICE"}),
+                extra=json.dumps({"auth_method": "AAD_DEVICE"}),
             )
         )
         hook = 
AzureDataExplorerHook(azure_data_explorer_conn_id=ADX_TEST_CONN_ID)
@@ -203,10 +203,10 @@ class TestAzureDataExplorerHook:
         assert 
list(AzureDataExplorerHook.get_ui_field_behaviour()["placeholders"].keys()) == [
             "login",
             "password",
-            "extra__azure_data_explorer__auth_method",
-            "extra__azure_data_explorer__tenant",
-            "extra__azure_data_explorer__certificate",
-            "extra__azure_data_explorer__thumbprint",
+            "auth_method",
+            "tenant",
+            "certificate",
+            "thumbprint",
         ]
         if 
get_provider_min_airflow_version("apache-airflow-providers-microsoft-azure") >= 
(2, 5):
             raise Exception(
diff --git a/tests/providers/microsoft/azure/hooks/test_azure_batch.py 
b/tests/providers/microsoft/azure/hooks/test_azure_batch.py
index acd6fc53eb..05a8864f50 100644
--- a/tests/providers/microsoft/azure/hooks/test_azure_batch.py
+++ b/tests/providers/microsoft/azure/hooks/test_azure_batch.py
@@ -50,7 +50,7 @@ class TestAzureBatchHook:
             Connection(
                 conn_id=self.test_vm_conn_id,
                 conn_type="azure_batch",
-                extra=json.dumps({"extra__azure_batch__account_url": 
self.test_account_url}),
+                extra=json.dumps({"account_url": self.test_account_url}),
             )
         )
         # connect with cloud service
@@ -58,7 +58,7 @@ class TestAzureBatchHook:
             Connection(
                 conn_id=self.test_cloud_conn_id,
                 conn_type="azure_batch",
-                extra=json.dumps({"extra__azure_batch__account_url": 
self.test_account_url}),
+                extra=json.dumps({"account_url": self.test_account_url}),
             )
         )
 
diff --git 
a/tests/providers/microsoft/azure/hooks/test_azure_container_volume.py 
b/tests/providers/microsoft/azure/hooks/test_azure_container_volume.py
index 0a60ec9a37..3dcbfd9a89 100644
--- a/tests/providers/microsoft/azure/hooks/test_azure_container_volume.py
+++ b/tests/providers/microsoft/azure/hooks/test_azure_container_volume.py
@@ -50,7 +50,7 @@ class TestAzureContainerVolumeHook:
                 conn_type="wasb",
                 login="login",
                 password="key",
-                
extra=json.dumps({"extra__azure_container_volume__connection_string": 
"a=b;AccountKey=1"}),
+                extra=json.dumps({"connection_string": "a=b;AccountKey=1"}),
             )
         )
         hook = AzureContainerVolumeHook(
@@ -75,7 +75,7 @@ class TestAzureContainerVolumeHook:
         assert 
list(AzureContainerVolumeHook.get_ui_field_behaviour()["placeholders"].keys()) 
== [
             "login",
             "password",
-            "extra__azure_container_volume__connection_string",
+            "connection_string",
         ]
         if 
get_provider_min_airflow_version("apache-airflow-providers-microsoft-azure") >= 
(2, 5):
             raise Exception(
diff --git a/tests/providers/microsoft/azure/hooks/test_azure_cosmos.py 
b/tests/providers/microsoft/azure/hooks/test_azure_cosmos.py
index 63d134b0b9..af649f1d6e 100644
--- a/tests/providers/microsoft/azure/hooks/test_azure_cosmos.py
+++ b/tests/providers/microsoft/azure/hooks/test_azure_cosmos.py
@@ -264,8 +264,8 @@ class TestAzureCosmosDbHook:
         assert 
list(AzureCosmosDBHook.get_ui_field_behaviour()["placeholders"].keys()) == [
             "login",
             "password",
-            "extra__azure_cosmos__database_name",
-            "extra__azure_cosmos__collection_name",
+            "database_name",
+            "collection_name",
         ]
         if 
get_provider_min_airflow_version("apache-airflow-providers-microsoft-azure") >= 
(2, 5):
             raise Exception(
diff --git a/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py 
b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py
index 57f7bc6178..4ca2eb4920 100644
--- a/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py
+++ b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py
@@ -799,9 +799,7 @@ class TestAzureDataFactoryAsyncHook:
         Test get_pipeline_run function without passing the resource name to 
check the decorator function and
         raise exception
         """
-        mock_connection = Connection(
-            extra=json.dumps({"extra__azure_data_factory__factory_name": 
DATAFACTORY_NAME})
-        )
+        mock_connection = Connection(extra=json.dumps({"factory_name": 
DATAFACTORY_NAME}))
         mock_get_connection.return_value = mock_connection
         mock_conn.return_value.pipeline_runs.get.return_value = 
mock_pipeline_run
         hook = AzureDataFactoryAsyncHook(AZURE_DATA_FACTORY_CONN_ID)
@@ -819,10 +817,10 @@ class TestAzureDataFactoryAsyncHook:
             password="clientSecret",
             extra=json.dumps(
                 {
-                    "extra__azure_data_factory__tenantId": "tenantId",
-                    "extra__azure_data_factory__subscriptionId": 
"subscriptionId",
-                    "extra__azure_data_factory__resource_group_name": 
RESOURCE_GROUP_NAME,
-                    "extra__azure_data_factory__factory_name": 
DATAFACTORY_NAME,
+                    "tenantId": "tenantId",
+                    "subscriptionId": "subscriptionId",
+                    "resource_group_name": RESOURCE_GROUP_NAME,
+                    "factory_name": DATAFACTORY_NAME,
                 }
             ),
         )
@@ -840,10 +838,10 @@ class TestAzureDataFactoryAsyncHook:
             conn_type="azure_data_factory",
             extra=json.dumps(
                 {
-                    "extra__azure_data_factory__tenantId": "tenantId",
-                    "extra__azure_data_factory__subscriptionId": 
"subscriptionId",
-                    "extra__azure_data_factory__resource_group_name": 
RESOURCE_GROUP_NAME,
-                    "extra__azure_data_factory__factory_name": 
DATAFACTORY_NAME,
+                    "tenantId": "tenantId",
+                    "subscriptionId": "subscriptionId",
+                    "resource_group_name": RESOURCE_GROUP_NAME,
+                    "factory_name": DATAFACTORY_NAME,
                 }
             ),
         )
@@ -857,9 +855,9 @@ class TestAzureDataFactoryAsyncHook:
         "mock_connection_params",
         [
             {
-                "extra__azure_data_factory__tenantId": "tenantId",
-                "extra__azure_data_factory__resource_group_name": 
RESOURCE_GROUP_NAME,
-                "extra__azure_data_factory__factory_name": DATAFACTORY_NAME,
+                "tenantId": "tenantId",
+                "resource_group_name": RESOURCE_GROUP_NAME,
+                "factory_name": DATAFACTORY_NAME,
             }
         ],
     )
@@ -883,9 +881,9 @@ class TestAzureDataFactoryAsyncHook:
         "mock_connection_params",
         [
             {
-                "extra__azure_data_factory__subscriptionId": "subscriptionId",
-                "extra__azure_data_factory__resource_group_name": 
RESOURCE_GROUP_NAME,
-                "extra__azure_data_factory__factory_name": DATAFACTORY_NAME,
+                "subscriptionId": "subscriptionId",
+                "resource_group_name": RESOURCE_GROUP_NAME,
+                "factory_name": DATAFACTORY_NAME,
             },
         ],
     )
@@ -911,10 +909,10 @@ class TestAzureDataFactoryAsyncHook:
             conn_type="azure_data_factory",
             extra=json.dumps(
                 {
-                    "extra__azure_data_factory__tenantId": "tenantId",
-                    "extra__azure_data_factory__subscriptionId": 
"subscriptionId",
-                    "extra__azure_data_factory__resource_group_name": 
RESOURCE_GROUP_NAME,
-                    "extra__azure_data_factory__factory_name": 
DATAFACTORY_NAME,
+                    "tenantId": "tenantId",
+                    "subscriptionId": "subscriptionId",
+                    "resource_group_name": RESOURCE_GROUP_NAME,
+                    "factory_name": DATAFACTORY_NAME,
                 }
             ),
         )
diff --git a/tests/providers/microsoft/azure/hooks/test_azure_data_lake.py 
b/tests/providers/microsoft/azure/hooks/test_azure_data_lake.py
index 161b8d9995..122949beac 100644
--- a/tests/providers/microsoft/azure/hooks/test_azure_data_lake.py
+++ b/tests/providers/microsoft/azure/hooks/test_azure_data_lake.py
@@ -25,9 +25,8 @@ import pytest
 from azure.storage.filedatalake._models import FileSystemProperties
 
 from airflow.models import Connection
-from airflow.providers.microsoft.azure.hooks.data_lake import 
AzureDataLakeHook, AzureDataLakeStorageV2Hook
+from airflow.providers.microsoft.azure.hooks.data_lake import 
AzureDataLakeStorageV2Hook
 from airflow.utils import db
-from tests.test_utils.providers import get_provider_min_airflow_version
 
 
 class TestAzureDataLakeHook:
@@ -138,24 +137,6 @@ class TestAzureDataLakeHook:
         hook.remove("filepath", True)
         mock_fs.return_value.remove.assert_called_once_with("filepath", 
recursive=True)
 
-    def test_get_ui_field_behaviour_placeholders(self):
-        """
-        Check that ensure_prefixes decorator working properly
-
-        Note: remove this test and the _ensure_prefixes decorator after min 
airflow version >= 2.5.0
-        """
-        assert 
list(AzureDataLakeHook.get_ui_field_behaviour()["placeholders"].keys()) == [
-            "login",
-            "password",
-            "extra__azure_data_lake__tenant",
-            "extra__azure_data_lake__account_name",
-        ]
-        if 
get_provider_min_airflow_version("apache-airflow-providers-microsoft-azure") >= 
(2, 5):
-            raise Exception(
-                "You must now remove `_ensure_prefixes` from azure utils."
-                " The functionality is now taken care of by providers manager."
-            )
-
 
 class TestAzureDataLakeStorageV2Hook:
     def setup_class(self) -> None:
diff --git a/tests/providers/microsoft/azure/hooks/test_azure_fileshare.py 
b/tests/providers/microsoft/azure/hooks/test_azure_fileshare.py
index dfdc41adaa..1529eadcd9 100644
--- a/tests/providers/microsoft/azure/hooks/test_azure_fileshare.py
+++ b/tests/providers/microsoft/azure/hooks/test_azure_fileshare.py
@@ -37,7 +37,6 @@ from pytest import param
 from airflow.models import Connection
 from airflow.providers.microsoft.azure.hooks.fileshare import 
AzureFileShareHook
 from airflow.utils import db
-from tests.test_utils.providers import get_provider_min_airflow_version, 
object_exists
 
 
 class TestAzureFileshareHook:
@@ -231,35 +230,6 @@ class TestAzureFileshareHook:
         assert status is False
         assert msg == "Test Connection Failure"
 
-    def test__ensure_prefixes_removal(self):
-        """Ensure that _ensure_prefixes is removed from snowflake when airflow 
min version >= 2.5.0."""
-        path = 
"airflow.providers.microsoft.azure.hooks.fileshare._ensure_prefixes"
-        if not object_exists(path):
-            raise Exception(
-                "You must remove this test. It only exists to "
-                "remind us to remove decorator `_ensure_prefixes`."
-            )
-
-        if 
get_provider_min_airflow_version("apache-airflow-providers-microsoft-azure") >= 
(2, 5):
-            raise Exception(
-                "You must now remove `_ensure_prefixes` from 
AzureFileShareHook."
-                " The functionality is now taken care of by providers manager."
-            )
-
-    def test___ensure_prefixes(self):
-        """
-        Check that ensure_prefixes decorator working properly
-
-        Note: remove this test when removing ensure_prefixes (after min 
airflow version >= 2.5.0
-        """
-        assert 
list(AzureFileShareHook.get_ui_field_behaviour()["placeholders"].keys()) == [
-            "login",
-            "password",
-            "extra__azure_fileshare__sas_token",
-            "extra__azure_fileshare__connection_string",
-            "extra__azure_fileshare__protocol",
-        ]
-
     @pytest.mark.parametrize(
         "uri",
         [
diff --git a/tests/providers/microsoft/azure/hooks/test_azure_synapse.py 
b/tests/providers/microsoft/azure/hooks/test_azure_synapse.py
index 0472112977..3a82efd812 100644
--- a/tests/providers/microsoft/azure/hooks/test_azure_synapse.py
+++ b/tests/providers/microsoft/azure/hooks/test_azure_synapse.py
@@ -51,8 +51,8 @@ def setup_module():
         password="clientSecret",
         extra=json.dumps(
             {
-                "extra__azure_synapse__tenantId": "tenantId",
-                "extra__azure_synapse__subscriptionId": "subscriptionId",
+                "tenantId": "tenantId",
+                "subscriptionId": "subscriptionId",
             }
         ),
     )
@@ -62,7 +62,7 @@ def setup_module():
         host="https://testsynapse.dev.azuresynapse.net";,
         extra=json.dumps(
             {
-                "extra__azure_synapse__subscriptionId": "subscriptionId",
+                "subscriptionId": "subscriptionId",
             }
         ),
     )
@@ -74,7 +74,7 @@ def setup_module():
         password="clientSecret",
         extra=json.dumps(
             {
-                "extra__azure_synapse__tenantId": "tenantId",
+                "tenantId": "tenantId",
             }
         ),
     )
@@ -86,7 +86,7 @@ def setup_module():
         password="clientSecret",
         extra=json.dumps(
             {
-                "extra__azure_synapse__subscriptionId": "subscriptionId",
+                "subscriptionId": "subscriptionId",
             }
         ),
     )
diff --git a/tests/providers/microsoft/azure/hooks/test_wasb.py 
b/tests/providers/microsoft/azure/hooks/test_wasb.py
index 52826781bb..6e5d3ebd7b 100644
--- a/tests/providers/microsoft/azure/hooks/test_wasb.py
+++ b/tests/providers/microsoft/azure/hooks/test_wasb.py
@@ -27,7 +27,6 @@ from azure.storage.blob._models import BlobProperties
 from airflow.exceptions import AirflowException
 from airflow.models import Connection
 from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
-from tests.test_utils.providers import get_provider_min_airflow_version, 
object_exists
 
 # connection_string has a format
 CONN_STRING = (
@@ -641,34 +640,3 @@ class TestWasbHook:
         status, msg = hook.test_connection()
         assert status is False
         assert msg == "Authentication failed."
-
-    def test__ensure_prefixes_removal(self):
-        """Ensure that _ensure_prefixes is removed from snowflake when airflow 
min version >= 2.5.0."""
-        path = "airflow.providers.microsoft.azure.hooks.wasb._ensure_prefixes"
-        if not object_exists(path):
-            raise Exception(
-                "You must remove this test. It only exists to "
-                "remind us to remove decorator `_ensure_prefixes`."
-            )
-
-        if 
get_provider_min_airflow_version("apache-airflow-providers-microsoft-azure") >= 
(2, 5):
-            raise Exception(
-                "You must now remove `_ensure_prefixes` from WasbHook.  The 
functionality is now taken"
-                "care of by providers manager."
-            )
-
-    def test___ensure_prefixes(self):
-        """
-        Check that ensure_prefixes decorator working properly
-        Note: remove this test when removing ensure_prefixes (after min 
airflow version >= 2.5.0
-        """
-        assert list(WasbHook.get_ui_field_behaviour()["placeholders"].keys()) 
== [
-            "login",
-            "password",
-            "host",
-            "extra__wasb__connection_string",
-            "extra__wasb__tenant_id",
-            "extra__wasb__shared_access_key",
-            "extra__wasb__sas_token",
-            "extra",
-        ]
diff --git a/tests/providers/microsoft/azure/operators/test_azure_batch.py 
b/tests/providers/microsoft/azure/operators/test_azure_batch.py
index 147687b877..0e3947732b 100644
--- a/tests/providers/microsoft/azure/operators/test_azure_batch.py
+++ b/tests/providers/microsoft/azure/operators/test_azure_batch.py
@@ -64,7 +64,7 @@ class TestAzureBatchOperator:
             Connection(
                 conn_id=self.test_vm_conn_id,
                 conn_type="azure_batch",
-                extra=json.dumps({"extra__azure_batch__account_url": 
self.test_account_url}),
+                extra=json.dumps({"account_url": self.test_account_url}),
             )
         )
         # connect with cloud service
@@ -72,7 +72,7 @@ class TestAzureBatchOperator:
             Connection(
                 conn_id=self.test_cloud_conn_id,
                 conn_type="azure_batch",
-                extra=json.dumps({"extra__azure_batch__account_url": 
self.test_account_url}),
+                extra=json.dumps({"account_url": self.test_account_url}),
             )
         )
         self.operator = AzureBatchOperator(
diff --git a/tests/providers/microsoft/azure/operators/test_azure_synapse.py 
b/tests/providers/microsoft/azure/operators/test_azure_synapse.py
index 24fadb9dea..c43b11ef7b 100644
--- a/tests/providers/microsoft/azure/operators/test_azure_synapse.py
+++ b/tests/providers/microsoft/azure/operators/test_azure_synapse.py
@@ -31,9 +31,9 @@ SUBSCRIPTION_ID = "my-subscription-id"
 TASK_ID = "run_spark_op"
 AZURE_SYNAPSE_CONN_ID = "azure_synapse_test"
 CONN_EXTRAS = {
-    "extra__azure_synapse__subscriptionId": SUBSCRIPTION_ID,
-    "extra__azure_synapse__tenantId": "my-tenant-id",
-    "extra__azure_synapse__spark_pool": "my-spark-pool",
+    "synapse__subscriptionId": SUBSCRIPTION_ID,
+    "synapse__tenantId": "my-tenant-id",
+    "synapse__spark_pool": "my-spark-pool",
 }
 JOB_RUN_RESPONSE = {"id": 123}
 
diff --git a/tests/providers/microsoft/azure/test_utils.py 
b/tests/providers/microsoft/azure/test_utils.py
index 24dc447852..02718f46bb 100644
--- a/tests/providers/microsoft/azure/test_utils.py
+++ b/tests/providers/microsoft/azure/test_utils.py
@@ -20,23 +20,6 @@ from __future__ import annotations
 import pytest
 
 from airflow.providers.microsoft.azure.utils import get_field
-from tests.test_utils.providers import get_provider_min_airflow_version, 
object_exists
-
-
-def test__ensure_prefixes_removal():
-    """Ensure that _ensure_prefixes is removed from snowflake when airflow min 
version >= 2.5.0."""
-    path = "airflow.providers.microsoft.azure.utils._ensure_prefixes"
-    if not object_exists(path):
-        raise Exception(
-            "You must remove this test. It only exists to "
-            "remind us to remove decorator `_ensure_prefixes`."
-        )
-
-    if 
get_provider_min_airflow_version("apache-airflow-providers-microsoft-azure") >= 
(2, 5):
-        raise Exception(
-            "You must now remove `_ensure_prefixes` from azure utils."
-            " The functionality is now taken care of by providers manager."
-        )
 
 
 def test_get_field_warns_on_dupe():


Reply via email to