kaxil closed pull request #4094: [AIRFLOW-3236] Create AzureDataLakeStorageListOperator
URL: https://github.com/apache/incubator-airflow/pull/4094

This is a PR merged from a forked repository. As GitHub hides the original
diff on merge, it is displayed below for the sake of provenance:

diff --git a/airflow/contrib/operators/adls_list_operator.py b/airflow/contrib/operators/adls_list_operator.py
new file mode 100644
index 0000000000..7d03e86b17
--- /dev/null
+++ b/airflow/contrib/operators/adls_list_operator.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook
+from airflow.models import BaseOperator
+from airflow.utils.decorators import apply_defaults
+
+
+class AzureDataLakeStorageListOperator(BaseOperator):
+    """
+    List all files from the specified path.
+
+    This operator returns a Python list with the names of files, which can be
+    used by downstream tasks via `xcom`.
+
+    :param path: The Azure Data Lake path to find the objects. Supports glob
+        strings (templated)
+    :type path: str
+    :param azure_data_lake_conn_id: The connection ID to use when
+        connecting to Azure Data Lake Storage.
+    :type azure_data_lake_conn_id: str
+
+    **Example**:
+        The following operator would list all the Parquet files from the
+        ``folder/output/`` folder in the specified ADLS account::
+
+            adls_files = AzureDataLakeStorageListOperator(
+                task_id='adls_files',
+                path='folder/output/*.parquet',
+                azure_data_lake_conn_id='azure_data_lake_default'
+            )
+    """
+    template_fields = ('path',)
+    ui_color = '#901dd2'
+
+    @apply_defaults
+    def __init__(self,
+                 path,
+                 azure_data_lake_conn_id='azure_data_lake_default',
+                 *args,
+                 **kwargs):
+        super(AzureDataLakeStorageListOperator, self).__init__(*args, **kwargs)
+        self.path = path
+        self.azure_data_lake_conn_id = azure_data_lake_conn_id
+
+    def execute(self, context):
+
+        hook = AzureDataLakeHook(
+            azure_data_lake_conn_id=self.azure_data_lake_conn_id
+        )
+
+        self.log.info('Getting list of ADLS files in path: %s', self.path)
+
+        return hook.list(path=self.path)
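
For context on how the returned list reaches downstream tasks: the value
returned by `execute` is pushed to XCom automatically, so another task can
pull it by task id. A minimal sketch of such a pipeline follows; the DAG
definition and the `process_files` callable are illustrative and not part of
this change:

    from datetime import datetime

    from airflow import DAG
    from airflow.contrib.operators.adls_list_operator import (
        AzureDataLakeStorageListOperator
    )
    from airflow.operators.python_operator import PythonOperator


    def process_files(**context):
        # The operator's return value was pushed to XCom on success,
        # so it can be pulled here using the upstream task id.
        files = context['ti'].xcom_pull(task_ids='adls_files')
        print(files)


    with DAG('adls_list_example',
             start_date=datetime(2018, 1, 1),
             schedule_interval=None) as dag:
        adls_files = AzureDataLakeStorageListOperator(
            task_id='adls_files',
            path='folder/output/*.parquet',
            azure_data_lake_conn_id='azure_data_lake_default'
        )
        process = PythonOperator(
            task_id='process_files',
            python_callable=process_files,
            provide_context=True
        )
        adls_files >> process
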
diff --git a/docs/code.rst b/docs/code.rst
index 211e1abafe..5b4a494911 100644
--- a/docs/code.rst
+++ b/docs/code.rst
@@ -112,6 +112,7 @@ Operators
 ^^^^^^^^^
 .. Alphabetize this list
 
+.. autoclass:: airflow.contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator
 .. autoclass:: airflow.contrib.operators.awsbatch_operator.AWSBatchOperator
 .. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryCheckOperator
 .. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator
diff --git a/docs/integration.rst b/docs/integration.rst
index 67298b15b6..d12b94b8b6 100644
--- a/docs/integration.rst
+++ b/docs/integration.rst
@@ -145,7 +145,7 @@ Airflow can be configured to read and write task logs in Azure Blob Storage.
 See :ref:`write-logs-azure`.
 
 Azure Data Lake
-''''''''''''''''''
+'''''''''''''''
 
 AzureDataLakeHook communicates via a REST API compatible with WebHDFS. Make sure that an
 Airflow connection of type `azure_data_lake` exists. Authorization can be done by supplying a
@@ -153,14 +153,22 @@ login (=Client ID), password (=Client Secret) and extra fields tenant (Tenant) a
  (see connection `azure_data_lake_default` for an example).
 
 - :ref:`AzureDataLakeHook`: Interface with Azure Data Lake.
+- :ref:`AzureDataLakeStorageListOperator`: Lists the files located in a specified Azure Data Lake path.
 
 .. _AzureDataLakeHook:
 
 AzureDataLakeHook
-"""""""""
+"""""""""""""""""
 
 .. autoclass:: airflow.contrib.hooks.azure_data_lake_hook.AzureDataLakeHook
 
+.. _AzureDataLakeStorageListOperator:
+
+AzureDataLakeStorageListOperator
+""""""""""""""""""""""""""""""""
+
+.. autoclass:: airflow.contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator
+
 .. _AWS:
 
 AWS: Amazon Web Services
@@ -170,7 +178,7 @@ Airflow has extensive support for Amazon Web Services. But note that the Hooks,
 Operators are in the contrib section.
 
 AWS EMR
-''''''''
+'''''''
 
 - :ref:`EmrAddStepsOperator` : Adds steps to an existing EMR JobFlow.
 - :ref:`EmrCreateJobFlowOperator` : Creates an EMR JobFlow, reading the config from the EMR connection.
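
To make the connection setup described in the docs above concrete, such a
connection can also be created programmatically. The sketch below assumes the
extra-field keys `tenant` and `account_name` that the contrib hook reads; the
angle-bracket values are placeholders, not real credentials:

    from airflow import settings
    from airflow.models import Connection

    # login maps to the Azure AD client ID, password to the client secret;
    # tenant and account_name come from the connection's extra JSON.
    conn = Connection(
        conn_id='azure_data_lake_default',
        conn_type='azure_data_lake',
        login='<client_id>',
        password='<client_secret>',
        extra='{"tenant": "<tenant_id>", "account_name": "<store_name>"}'
    )

    session = settings.Session()
    session.add(conn)
    session.commit()
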
diff --git a/tests/contrib/operators/test_adls_list_operator.py b/tests/contrib/operators/test_adls_list_operator.py
new file mode 100644
index 0000000000..52decd0cd2
--- /dev/null
+++ b/tests/contrib/operators/test_adls_list_operator.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import unittest
+
+from airflow.contrib.operators.adls_list_operator import AzureDataLakeStorageListOperator
+
+try:
+    from unittest import mock
+except ImportError:
+    try:
+        import mock
+    except ImportError:
+        mock = None
+
+TASK_ID = 'test-adls-list-operator'
+TEST_PATH = 'test/*'
+MOCK_FILES = ["test/TEST1.csv", "test/TEST2.csv", "test/path/TEST3.csv",
+              "test/path/PARQUET.parquet", "test/path/PIC.png"]
+
+
+class AzureDataLakeStorageListOperatorTest(unittest.TestCase):
+
+    @mock.patch('airflow.contrib.operators.adls_list_operator.AzureDataLakeHook')
+    def test_execute(self, mock_hook):
+        mock_hook.return_value.list.return_value = MOCK_FILES
+
+        operator = AzureDataLakeStorageListOperator(task_id=TASK_ID,
+                                                    path=TEST_PATH)
+
+        files = operator.execute(None)
+        mock_hook.return_value.list.assert_called_once_with(
+            path=TEST_PATH
+        )
+        self.assertEqual(sorted(files), sorted(MOCK_FILES))