[GitHub] apraovjr closed pull request #4288: [AIRFLOW-3282] Implement an Azure Kubernetes Service Operator

2018-12-06 Thread GitBox
apraovjr closed pull request #4288: [AIRFLOW-3282] Implement an Azure Kubernetes Service Operator
URL: https://github.com/apache/incubator-airflow/pull/4288

This is a PR from a forked repository. As GitHub does not show the
original diff for foreign (fork) pull requests once they are closed, it
is reproduced below for the sake of provenance:

diff --git a/airflow/contrib/example_dags/example_azure_kubernetes_container_operator.py b/airflow/contrib/example_dags/example_azure_kubernetes_container_operator.py
new file mode 100644
index 00..79fa5c5c16
--- /dev/null
+++ b/airflow/contrib/example_dags/example_azure_kubernetes_container_operator.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from airflow import DAG
+from airflow.contrib.operators.aks_operator import AzureKubernetesOperator
+from datetime import datetime, timedelta
+
+seven_days_ago = datetime.combine(datetime.today() - timedelta(7),
+                                  datetime.min.time())
+default_args = {
+    'owner': 'airflow',
+    'depends_on_past': False,
+    'start_date': seven_days_ago,
+    'email': ['em...@microsoft.com'],
+    'email_on_failure': False,
+    'email_on_retry': False,
+    'retries': 1,
+    'retry_delay': timedelta(minutes=5),
+}
+
+dag = DAG(
+    dag_id='aks_container',
+    default_args=default_args,
+    schedule_interval=None,
+)
+
+start_aks_container = AzureKubernetesOperator(
+    task_id="start_aks_container",
+    ci_conn_id='azure_kubernetes_default',
+    resource_group="apraotest1",
+    name="akres1",
+    ssh_key_value=None,
+    dns_name_prefix=None,
+    location="eastus",
+    tags=None,
+    dag=dag)
diff --git a/airflow/contrib/hooks/azure_kubernetes_hook.py b/airflow/contrib/hooks/azure_kubernetes_hook.py
new file mode 100644
index 00..dfd9200f64
--- /dev/null
+++ b/airflow/contrib/hooks/azure_kubernetes_hook.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+
+from airflow.hooks.base_hook import BaseHook
+from airflow.exceptions import AirflowException
+
+from azure.common.credentials import ServicePrincipalCredentials
+from azure.mgmt.containerservice import ContainerServiceClient
+from azure.mgmt.resource import ResourceManagementClient
+from airflow.contrib.utils.aks_utils import load_json
+
+
+class AzureKubernetesServiceHook(BaseHook):
+
+    def __init__(self, conn_id=None):
+        self.conn_id = conn_id
+        # Initialise the credential attributes first; get_conn() (called last)
+        # populates them from the key file, so it must not be overwritten after.
+        self.configData = None
+        self.credentials = None
+        self.subscription_id = None
+        self.clientId = None
+        self.clientSecret = None
+        self.connection = self.get_conn()
+
+    def get_conn(self):
+        if self.conn_id:
+            conn = self.get_connection(self.conn_id)
+            key_path = conn.extra_dejson.get('key_path', False)
+            if key_path:
+                if key_path.endswith('.json'):
+                    self.log.info('Getting connection using a JSON key file.')
+
+                    self.configData = load_json(self, key_path)
+                else:
+                    raise AirflowException('Unrecognised extension for key file.')
+
+        if os.environ.get('AZURE_AUTH_LOCATION'):
+            key_path = os.environ.get('AZURE_AUTH_LOCATION')
+            if key_path.endswith('.json'):
+                self.log.info('Getting connection using a JSON key file.')
+                self.configData = load_json(self, key_path)
+            else:
+                raise AirflowException('Unrecognised extension for key file.')
+
+        self.credentials =
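
The archived message cuts off at this point, so the rest of get_conn is missing.
Given the imports above (ServicePrincipalCredentials, ContainerServiceClient) and
the attributes declared in __init__, the method presumably finishes by building
service-principal credentials from the loaded auth file and returning a
container-service client. A minimal sketch of that continuation, assuming the
standard Azure SDK auth-file field names (clientId, clientSecret, tenantId,
subscriptionId); it is not part of the original diff:

        # Hypothetical continuation -- builds credentials from the JSON auth
        # file loaded into self.configData above.
        self.clientId = self.configData['clientId']
        self.clientSecret = self.configData['clientSecret']
        self.subscription_id = self.configData['subscriptionId']
        self.credentials = ServicePrincipalCredentials(
            client_id=self.clientId,
            secret=self.clientSecret,
            tenant=self.configData['tenantId'])
        # Return a management client scoped to the subscription so the operator
        # can create and poll AKS managed clusters.
        return ContainerServiceClient(self.credentials, str(self.subscription_id))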

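For completeness: get_conn looks for the JSON key file either in the connection's
key_path extra or in the AZURE_AUTH_LOCATION environment variable, so the
azure_kubernetes_default connection referenced by the example DAG needs that extra
set. A minimal sketch of wiring this up; the conn_type value and file path are
illustrative assumptions, and the JSON fields follow the format emitted by
"az ad sp create-for-rbac --sdk-auth":

# Sample /path/to/azure_auth.json (placeholder values):
# {
#     "clientId": "<service-principal-app-id>",
#     "clientSecret": "<service-principal-password>",
#     "subscriptionId": "<azure-subscription-id>",
#     "tenantId": "<azure-ad-tenant-id>"
# }

import json

from airflow import settings
from airflow.models import Connection

# Register the connection the example DAG passes as ci_conn_id.
session = settings.Session()
session.add(Connection(
    conn_id='azure_kubernetes_default',
    conn_type='azure_kubernetes',  # assumed type name, not confirmed by the diff
    extra=json.dumps({'key_path': '/path/to/azure_auth.json'})))
session.commit()

Alternatively, exporting AZURE_AUTH_LOCATION=/path/to/azure_auth.json achieves the
same result through the second branch of get_conn, without touching connection extras.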