Revert "[AIRFLOW-1517] Add minikube for kubernetes integration tests"
This reverts commit 0197931609685a98181387014f7c8f3b5cd5f9a8.

Project: http://git-wip-us.apache.org/repos/asf/incubator-airflow/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-airflow/commit/a42dbb4f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-airflow/tree/a42dbb4f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-airflow/diff/a42dbb4f

Branch: refs/heads/master
Commit: a42dbb4f4d95abcbf91b50a9c408db6fa315daed
Parents: 7c9e3c1
Author: Daniel Imberman <[email protected]>
Authored: Fri Dec 29 13:23:27 2017 -0800
Committer: Daniel Imberman <[email protected]>
Committed: Thu Jan 11 15:29:16 2018 -0800

----------------------------------------------------------------------
 .travis.yml                                    |  9 ---
 airflow/contrib/kubernetes/kube_client.py      |  8 +-
 .../ci/kubernetes/minikube/start_minikube.sh   | 58 ++++-----------
 scripts/ci/run_tests.sh                        |  2 +-
 scripts/ci/travis_script.sh                    |  7 +-
 tests/contrib/minikube_tests/__init__.py       | 13 ----
 .../test_kubernetes_pod_operator.py            | 78 --------------------
 .../operators/test_kubernetes_pod_operator.py  | 69 +++++++++++++++++
 8 files changed, 94 insertions(+), 150 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/a42dbb4f/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index dec9181..6b45153 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -54,8 +54,6 @@ env:
     - TOX_ENV=py35-backend_sqlite
     - TOX_ENV=py35-backend_postgres
     - TOX_ENV=flake8
-    - TOX_ENV=py27-backend_postgres KUBERNETES_VERSION=v1.7.0
-    - TOX_ENV=py27-backend_postgres KUBERNETES_VERSION=v1.8.0
 matrix:
   exclude:
     - python: "3.5"
@@ -72,13 +70,6 @@ matrix:
       env: TOX_ENV=py35-backend_postgres
     - python: "2.7"
       env: TOX_ENV=flake8
-    - python: "3.5"
-      env: TOX_ENV=py27-backend_postgres KUBERNETES_VERSION=v1.7.0
-    - python: "3.5"
-      env: TOX_ENV=py27-backend_postgres KUBERNETES_VERSION=v1.8.0
-  allow_failures:
-    - env: TOX_ENV=py27-backend_postgres KUBERNETES_VERSION=v1.7.0
-    - env: TOX_ENV=py27-backend_postgres KUBERNETES_VERSION=v1.8.0
 cache:
   directories:
     - $HOME/.wheelhouse/

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/a42dbb4f/airflow/contrib/kubernetes/kube_client.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/kubernetes/kube_client.py b/airflow/contrib/kubernetes/kube_client.py
index ecb3d55..cd68caf 100644
--- a/airflow/contrib/kubernetes/kube_client.py
+++ b/airflow/contrib/kubernetes/kube_client.py
@@ -16,15 +16,17 @@
 # under the License.
 
-def _load_kube_config(in_cluster):
+def load_kube_config(in_cluster=True):
     from kubernetes import config, client
     if in_cluster:
         config.load_incluster_config()
-        return client.CoreV1Api()
     else:
         config.load_kube_config()
     return client.CoreV1Api()
 
 
 def get_kube_client(in_cluster=True):
     # TODO: This should also allow people to point to a cluster.
-    return _load_kube_config(in_cluster)
+
+    from kubernetes import client
+    load_kube_config(in_cluster)
+    return client.CoreV1Api()

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/a42dbb4f/scripts/ci/kubernetes/minikube/start_minikube.sh
----------------------------------------------------------------------
diff --git a/scripts/ci/kubernetes/minikube/start_minikube.sh b/scripts/ci/kubernetes/minikube/start_minikube.sh
index 1da23d0..f78cb3a 100755
--- a/scripts/ci/kubernetes/minikube/start_minikube.sh
+++ b/scripts/ci/kubernetes/minikube/start_minikube.sh
@@ -15,8 +15,8 @@
 #  specific language governing permissions and limitations          *
 #  under the License.                                                *
-#!/usr/bin/env bash
 # Guard against a kubernetes cluster already being up
+#!/usr/bin/env bash
 kubectl get pods &> /dev/null
 if [ $? -eq 0 ]; then
     echo "kubectl get pods returned 0 exit code, exiting early"
@@ -24,8 +24,8 @@ if [ $? -eq 0 ]; then
 fi
 #
-curl -Lo minikube https://storage.googleapis.com/minikube/releases/v0.24.1/minikube-linux-amd64 && chmod +x minikube
-curl -Lo kubectl https://storage.googleapis.com/kubernetes-release/release/${KUBERNETES_VERSION}/bin/linux/amd64/kubectl && chmod +x kubectl
+curl -Lo minikube https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64 && chmod +x minikube
+curl -Lo kubectl https://storage.googleapis.com/kubernetes-release/release/v1.7.0/bin/linux/amd64/kubectl && chmod +x kubectl
 
 sudo mkdir -p /usr/local/bin
 sudo mv minikube /usr/local/bin/minikube
@@ -39,43 +39,15 @@ mkdir $HOME/.kube || true
 touch $HOME/.kube/config
 export KUBECONFIG=$HOME/.kube/config
-
-start_minikube(){
-  sudo -E minikube start --vm-driver=none --kubernetes-version="${KUBERNETES_VERSION}"
-
-  # this for loop waits until kubectl can access the api server that minikube has created
-  for i in {1..90} # timeout 3 minutes
-  do
-    echo "------- Running kubectl get pods -------"
-    STDERR=$(kubectl get pods 2>&1 >/dev/null)
-    if [ $? -ne 1 ]; then
-      echo $STDERR
-
-      # We do not need dynamic hostpath provisioning, so disable the default storageclass
-      sudo -E minikube addons disable default-storageclass && kubectl delete storageclasses --all
-
-      # We need to give permission to watch pods to the airflow scheduler.
-      # The easiest way to do that is by giving admin access to the default serviceaccount (NOT SAFE!)
-      kubectl create clusterrolebinding add-on-cluster-admin --clusterrole=cluster-admin --serviceaccount=default:default
-      exit 0
-    fi
-    echo $STDERR
-    sleep 2
-  done
-}
-
-cleanup_minikube(){
-  sudo -E minikube stop
-  sudo -E minikube delete
-  docker stop $(docker ps -a -q) || true
-  docker rm $(docker ps -a -q) || true
-  sleep 1
-}
-
-start_minikube
-echo "Minikube cluster creation timedout. Attempting to restart the minikube cluster."
-cleanup_minikube
-start_minikube
-echo "Minikube cluster creation timedout a second time. Failing."
-
-exit 1
+sudo -E minikube start --vm-driver=none
+
+# this for loop waits until kubectl can access the api server that minikube has created
+for i in {1..150} # timeout for 5 minutes
+do
+    echo "------- Running kubectl get pods -------"
+    kubectl get po &> /dev/null
+    if [ $? -ne 1 ]; then
+        break
+    fi
+    sleep 2
+done

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/a42dbb4f/scripts/ci/run_tests.sh
----------------------------------------------------------------------
diff --git a/scripts/ci/run_tests.sh b/scripts/ci/run_tests.sh
index 8c47ee8..1253686 100755
--- a/scripts/ci/run_tests.sh
+++ b/scripts/ci/run_tests.sh
@@ -44,5 +44,5 @@ fi
 
 if [[ "$SKIP_TESTS" != "true" ]]; then
     echo Backend: $AIRFLOW__CORE__SQL_ALCHEMY_CONN
-    ./run_unit_tests.sh $@
+    ./run_unit_tests.sh
 fi

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/a42dbb4f/scripts/ci/travis_script.sh
----------------------------------------------------------------------
diff --git a/scripts/ci/travis_script.sh b/scripts/ci/travis_script.sh
index 86c086a..a51e742 100755
--- a/scripts/ci/travis_script.sh
+++ b/scripts/ci/travis_script.sh
@@ -19,12 +19,13 @@ DIRNAME=$(cd "$(dirname "$0")"; pwd)
 AIRFLOW_ROOT="$DIRNAME/../.."
 cd $AIRFLOW_ROOT && pip --version && ls -l $HOME/.wheelhouse && tox --version
 
-if [ -z "$KUBERNETES_VERSION" ];
+if [ -z "$RUN_KUBE_INTEGRATION" ];
 then
+    $DIRNAME/kubernetes/setup_kubernetes.sh
     tox -e $TOX_ENV
 else
-    KUBERNETES_VERSION=${KUBERNETES_VERSION} $DIRNAME/kubernetes/setup_kubernetes.sh && \
-    tox -e $TOX_ENV -- tests.contrib.minikube_tests \
+    $DIRNAME/kubernetes/setup_kubernetes.sh && \
+    tox -e $TOX_ENV -- tests.contrib.executors.integration \
                       --with-coverage \
                       --cover-erase \
                       --cover-html \

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/a42dbb4f/tests/contrib/minikube_tests/__init__.py
----------------------------------------------------------------------
diff --git a/tests/contrib/minikube_tests/__init__.py b/tests/contrib/minikube_tests/__init__.py
deleted file mode 100644
index 9d7677a..0000000
--- a/tests/contrib/minikube_tests/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/a42dbb4f/tests/contrib/minikube_tests/test_kubernetes_pod_operator.py
----------------------------------------------------------------------
diff --git a/tests/contrib/minikube_tests/test_kubernetes_pod_operator.py b/tests/contrib/minikube_tests/test_kubernetes_pod_operator.py
deleted file mode 100644
index 18e614b..0000000
--- a/tests/contrib/minikube_tests/test_kubernetes_pod_operator.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import unittest
-from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
-from airflow import AirflowException
-from subprocess import check_call, CalledProcessError
-
-
-try:
-    check_call(["kubectl", "get", "pods"])
-except CalledProcessError:
-    raise unittest.SkipTest(
-        "Kubernetes integration tests require a minikube cluster; Skipping tests"
-    )
-
-
-class KubernetesPodOperatorTest(unittest.TestCase):
-
-    def test_working_pod(self):
-        k = KubernetesPodOperator(namespace='default',
-                                  image="ubuntu:16.04",
-                                  cmds=["bash", "-cx"],
-                                  arguments=["echo", "10"],
-                                  labels={"foo": "bar"},
-                                  name="test",
-                                  task_id="task"
-                                  )
-
-        k.execute(None)
-
-    def test_faulty_image(self):
-        bad_image_name = "foobar"
-        k = KubernetesPodOperator(namespace='default',
-                                  image=bad_image_name,
-                                  cmds=["bash", "-cx"],
-                                  arguments=["echo", "10"],
-                                  labels={"foo": "bar"},
-                                  name="test",
-                                  task_id="task",
-                                  startup_timeout_seconds=5
-                                  )
-        with self.assertRaises(AirflowException) as cm:
-            k.execute(None),
-
-        print("exception: {}".format(cm))
-
-    def test_pod_failure(self):
-        """
-        Tests that the task fails when a pod reports a failure
-        """
-
-        bad_internal_command = "foobar"
-        k = KubernetesPodOperator(namespace='default',
-                                  image="ubuntu:16.04",
-                                  cmds=["bash", "-cx"],
-                                  arguments=[bad_internal_command, "10"],
-                                  labels={"foo": "bar"},
-                                  name="test",
-                                  task_id="task"
-                                  )
-
-        with self.assertRaises(AirflowException):
-            k.execute(None)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/a42dbb4f/tests/contrib/operators/test_kubernetes_pod_operator.py
----------------------------------------------------------------------
diff --git a/tests/contrib/operators/test_kubernetes_pod_operator.py b/tests/contrib/operators/test_kubernetes_pod_operator.py
new file mode 100644
index 0000000..205f183
--- /dev/null
+++ b/tests/contrib/operators/test_kubernetes_pod_operator.py
@@ -0,0 +1,69 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import unittest
+from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
+from airflow import AirflowException
+
+
+class KubernetesPodOperatorTest(unittest.TestCase):
+
+    def test_working_pod(self):
+        k = KubernetesPodOperator(namespace='default',
+                                  image="ubuntu:16.04",
+                                  cmds=["bash", "-cx"],
+                                  arguments=["echo", "10"],
+                                  labels={"foo": "bar"},
+                                  name="test",
+                                  task_id="task"
+                                  )
+
+        k.execute(None)
+
+    def test_faulty_image(self):
+        bad_image_name = "foobar"
+        k = KubernetesPodOperator(namespace='default',
+                                  image=bad_image_name,
+                                  cmds=["bash", "-cx"],
+                                  arguments=["echo", "10"],
+                                  labels={"foo": "bar"},
+                                  name="test",
+                                  task_id="task",
+                                  startup_timeout_seconds=5
+                                  )
+        with self.assertRaises(AirflowException) as cm:
+            k.execute(None),
+
+        print("exception: {}".format(cm))
+
+    def test_pod_failure(self):
+        """
+        Tests that the task fails when a pod reports a failure
+        """
+
+        bad_internal_command = "foobar"
+        k = KubernetesPodOperator(namespace='default',
+                                  image="ubuntu:16.04",
+                                  cmds=["bash", "-cx"],
+                                  arguments=[bad_internal_command, "10"],
+                                  labels={"foo": "bar"},
+                                  name="test",
+                                  task_id="task"
+                                  )
+
+        with self.assertRaises(AirflowException):
+            k.execute(None)
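
Editor's note: for reference, a minimal sketch of how the kube_client helper restored by this
revert is typically consumed. It is not part of the commit above; the "default" namespace and
the pod-listing call are illustrative assumptions, not values taken from the diff.

    # Sketch only: exercises the post-revert get_kube_client() shown in the
    # kube_client.py hunk above. The namespace below is an assumed example.
    from airflow.contrib.kubernetes.kube_client import get_kube_client

    kube = get_kube_client(in_cluster=False)               # loads the local kubeconfig outside a cluster
    pods = kube.list_namespaced_pod(namespace="default")   # standard CoreV1Api call from the kubernetes client
    print([pod.metadata.name for pod in pods.items])

Passing in_cluster=True instead loads the in-cluster service-account configuration, per the
load_kube_config() function shown in the hunk.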
