gopidesupavan commented on code in PR #62867:
URL: https://github.com/apache/airflow/pull/62867#discussion_r3006231995


##########
providers/common/sql/src/airflow/providers/common/sql/datafusion/engine.py:
##########
@@ -158,6 +158,65 @@ def _fetch_extra_configs(keys: list[str]) -> dict[str, Any]:
                 credentials = self._remove_none_values(credentials)
                 extra_config = _fetch_extra_configs(["region", "endpoint"])
 
+            case "google_cloud_platform":
+                try:
+                    # Imported as a feature gate only: verifies the Google provider is installed.
+                    from airflow.providers.google.common.hooks.base_google import GoogleBaseHook  # noqa: F401
+                except ImportError:
+                    from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
+
+                    raise AirflowOptionalProviderFeatureException(
+                        "Failed to import GoogleBaseHook. To use the GCS storage functionality, please install the "
+                        "apache-airflow-providers-google package."
+                    )
+                # Deferred to GCSObjectStorageProvider, provide_gcp_credential_file_as_context to handle key_path, keyfile_dict, and ADC.
+                extra_config = {}
+
+            case "wasb":
+                try:
+                    # Imported as a feature gate only: verifies the Azure provider is installed
+                    from airflow.providers.microsoft.azure.hooks.wasb import WasbHook  # noqa: F401
+                except ImportError:
+                    from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
+
+                    raise AirflowOptionalProviderFeatureException(
+                        "Failed to import WasbHook. To use the Azure storage functionality, please install the "
+                        "apache-airflow-providers-microsoft-azure package."
+                    )
+                tenant_id = conn.extra_dejson.get("tenant_id")
+                if tenant_id:
+                    # Service Principal auth: conn.host holds the storage account (name or full URL);
+                    # conn.login is the client_id (AAD app ID), matching WasbHook convention.
+                    # DataFusion requires just the account name, so strip any URL components.
+                    from urllib.parse import urlparse

Review Comment:
   Could you move this import to the top of the file?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to