feluelle commented on a change in pull request #11015:
URL: https://github.com/apache/airflow/pull/11015#discussion_r501609552



##########
File path: airflow/providers/microsoft/azure/hooks/azure_data_factory.py
##########
@@ -0,0 +1,674 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Any, Optional, Tuple
+
+from azure.mgmt.datafactory import DataFactoryManagementClient
+from azure.mgmt.datafactory.models import (
+    CreateRunResponse,
+    Dataset,
+    DatasetResource,
+    Factory,
+    LinkedService,
+    LinkedServiceResource,
+    PipelineResource,
+    PipelineRun,
+    Trigger,
+    TriggerResource,
+)
+from msrestazure.azure_operation import AzureOperationPoller
+
+from airflow.exceptions import AirflowException
+from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
+
+
+class AzureDataFactoryHook(AzureBaseHook):
+    """
+    A hook to interact with Azure Data Factory.
+
+    :param conn_id: The Azure Data Factory connection id.
+    """
+
+    def __init__(self, conn_id: str = "azure_data_factory_default"):
+        super().__init__(sdk_client=DataFactoryManagementClient, 
conn_id=conn_id)
+
+        self._conn = self.get_conn()

Review comment:
       ```suggestion
   ```
   Please don't establish a connection during `__init__`, because `__init__` will
be called every time the DAG file is parsed — far more often than a connection is
actually needed. You only want the connection to be established when an actual task runs. 

##########
File path: airflow/providers/microsoft/azure/hooks/azure_data_factory.py
##########
@@ -0,0 +1,674 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Any, Optional, Tuple
+
+from azure.mgmt.datafactory import DataFactoryManagementClient
+from azure.mgmt.datafactory.models import (
+    CreateRunResponse,
+    Dataset,
+    DatasetResource,
+    Factory,
+    LinkedService,
+    LinkedServiceResource,
+    PipelineResource,
+    PipelineRun,
+    Trigger,
+    TriggerResource,
+)
+from msrestazure.azure_operation import AzureOperationPoller
+
+from airflow.exceptions import AirflowException
+from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
+
+
+class AzureDataFactoryHook(AzureBaseHook):
+    """
+    A hook to interact with Azure Data Factory.
+
+    :param conn_id: The Azure Data Factory connection id.
+    """
+
+    def __init__(self, conn_id: str = "azure_data_factory_default"):
+        super().__init__(sdk_client=DataFactoryManagementClient, 
conn_id=conn_id)
+
+        self._conn = self.get_conn()
+
+    def get_conn(self) -> DataFactoryManagementClient:
+        """
+        Return a Data Factory client.
+        """
+
+        conn = self.get_connection(self.conn_id)
+
+        self._resource_group_name = conn.extra_dejson.get("resourceGroup")
+        self._factory_name = conn.extra_dejson.get("factory")
+
+        return super().get_conn()
+
+    def _get_targeted_factory(
+        self, resource_group_name: Optional[str], factory_name: Optional[str]
+    ) -> Tuple[str, str]:

Review comment:
       You could also implement this as an annotation / decorator. 
   
   Take a look at s3 for example:
   - The creation: 
https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/s3.py#L45-L65
   - The call: 
https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/s3.py#L140

##########
File path: airflow/providers/microsoft/azure/hooks/azure_data_factory.py
##########
@@ -0,0 +1,674 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Any, Optional, Tuple
+
+from azure.mgmt.datafactory import DataFactoryManagementClient
+from azure.mgmt.datafactory.models import (
+    CreateRunResponse,
+    Dataset,
+    DatasetResource,
+    Factory,
+    LinkedService,
+    LinkedServiceResource,
+    PipelineResource,
+    PipelineRun,
+    Trigger,
+    TriggerResource,
+)
+from msrestazure.azure_operation import AzureOperationPoller
+
+from airflow.exceptions import AirflowException
+from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
+
+
+class AzureDataFactoryHook(AzureBaseHook):
+    """
+    A hook to interact with Azure Data Factory.
+
+    :param conn_id: The Azure Data Factory connection id.
+    """
+
+    def __init__(self, conn_id: str = "azure_data_factory_default"):
+        super().__init__(sdk_client=DataFactoryManagementClient, 
conn_id=conn_id)
+
+        self._conn = self.get_conn()
+
+    def get_conn(self) -> DataFactoryManagementClient:
+        """
+        Return a Data Factory client.
+        """
+
+        conn = self.get_connection(self.conn_id)
+
+        self._resource_group_name = conn.extra_dejson.get("resourceGroup")
+        self._factory_name = conn.extra_dejson.get("factory")

Review comment:
       WDYT of putting this into `_get_targeted_factory` ?

##########
File path: airflow/providers/microsoft/azure/hooks/azure_data_factory.py
##########
@@ -0,0 +1,674 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Any, Optional, Tuple
+
+from azure.mgmt.datafactory import DataFactoryManagementClient
+from azure.mgmt.datafactory.models import (
+    CreateRunResponse,
+    Dataset,
+    DatasetResource,
+    Factory,
+    LinkedService,
+    LinkedServiceResource,
+    PipelineResource,
+    PipelineRun,
+    Trigger,
+    TriggerResource,
+)
+from msrestazure.azure_operation import AzureOperationPoller
+
+from airflow.exceptions import AirflowException
+from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
+
+
+class AzureDataFactoryHook(AzureBaseHook):
+    """
+    A hook to interact with Azure Data Factory.
+
+    :param conn_id: The Azure Data Factory connection id.
+    """
+
+    def __init__(self, conn_id: str = "azure_data_factory_default"):
+        super().__init__(sdk_client=DataFactoryManagementClient, 
conn_id=conn_id)
+
+        self._conn = self.get_conn()

Review comment:
       ```suggestion
   ```
   Please don't establish a connection during `__init__`, because `__init__` will be
called every time the DAG file is parsed — far more often than a connection is actually needed.
You only want the connection to be established when an actual task runs. 

##########
File path: airflow/providers/microsoft/azure/hooks/azure_data_factory.py
##########
@@ -0,0 +1,674 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Any, Optional, Tuple
+
+from azure.mgmt.datafactory import DataFactoryManagementClient
+from azure.mgmt.datafactory.models import (
+    CreateRunResponse,
+    Dataset,
+    DatasetResource,
+    Factory,
+    LinkedService,
+    LinkedServiceResource,
+    PipelineResource,
+    PipelineRun,
+    Trigger,
+    TriggerResource,
+)
+from msrestazure.azure_operation import AzureOperationPoller
+
+from airflow.exceptions import AirflowException
+from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
+
+
+class AzureDataFactoryHook(AzureBaseHook):
+    """
+    A hook to interact with Azure Data Factory.
+
+    :param conn_id: The Azure Data Factory connection id.
+    """
+
+    def __init__(self, conn_id: str = "azure_data_factory_default"):
+        super().__init__(sdk_client=DataFactoryManagementClient, 
conn_id=conn_id)
+
+        self._conn = self.get_conn()

Review comment:
       ```suggestion
   ```
   Please don't establish a connection during `__init__`, because `__init__` will be
called every time the DAG file is parsed — far more often than you actually need a
connection. You only want the connection to be established when an actual task runs. 

##########
File path: tests/providers/microsoft/azure/hooks/test_azure_data_factory.py
##########
@@ -0,0 +1,440 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import json
+from re import A
+from unittest.mock import patch
+
+import pytest
+from pytest import fixture
+
+from airflow.exceptions import AirflowException
+from airflow.models.connection import Connection
+from airflow.providers.microsoft.azure.hooks.azure_data_factory import 
AzureDataFactoryHook
+from airflow.utils import db
+
+
+DEFAULT_RESOURCE_GROUP = "defaultResourceGroup"
+RESOURCE_GROUP = "testResourceGroup"
+
+DEFAULT_FACTORY = "defaultFactory"
+FACTORY = "testFactory"
+
+MODEL = object()
+
+
+def setup_module(module):
+    connection = Connection(
+        conn_id="azure_data_factory_test",
+        conn_type="azure_data_factory",
+        login="clientId",
+        password="clientSecret",
+        extra=json.dumps(
+            {
+                "tenantId": "tenantId",
+                "subscriptionId": "subscriptionId",
+                "resourceGroup": DEFAULT_RESOURCE_GROUP,
+                "factory": DEFAULT_FACTORY,
+            }
+        ),
+    )
+
+    db.merge_conn(connection)
+
+
+@fixture
+def hook():
+    with 
patch("airflow.providers.microsoft.azure.hooks.base_azure.ServicePrincipalCredentials"):
+        with 
patch("airflow.providers.microsoft.azure.hooks.azure_data_factory.DataFactoryManagementClient"):
+            return AzureDataFactoryHook(conn_id="azure_data_factory_test")
+
+
+def test_hook_initialization(hook: AzureDataFactoryHook):
+    assert hook._resource_group_name == DEFAULT_RESOURCE_GROUP
+    assert hook._factory_name == DEFAULT_FACTORY
+
+
+def test_targeted_factory(hook: AzureDataFactoryHook):
+    expected = (RESOURCE_GROUP, FACTORY)
+
+    result = hook._get_targeted_factory(*expected)
+
+    assert result == expected
+
+
+def test_targeted_factory_defaults(hook: AzureDataFactoryHook):
+    expected = (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY)
+
+    result = hook._get_targeted_factory(None, None)
+
+    assert result == expected

Review comment:
   For tests that take different input and/or output parameters but share essentially
the same testing logic,
[parameterized](https://pypi.org/project/parameterized/) tests are a great tool
:)
   
   See 
[this](https://github.com/apache/airflow/blob/9549274d110f689a0bd709db829a4d69e274eed9/tests/providers/google/cloud/operators/test_cloud_build.py#L48-L67)
 for example.




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to