potiuk commented on a change in pull request #4938: [AIRFLOW-4117] 
Multi-staging Image - Travis CI tests [Step 3/3]
URL: https://github.com/apache/airflow/pull/4938#discussion_r272790743
 
 

 ##########
 File path: scripts/ci/in_container/entrypoint_ci.sh
 ##########
 @@ -0,0 +1,233 @@
+#!/usr/bin/env bash
+#
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing,
+#  software distributed under the License is distributed on an
+#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+#  KIND, either express or implied.  See the License for the
+#  specific language governing permissions and limitations
+#  under the License.
+
# Bash sanity settings (error on exit, complain for undefined vars, error when pipe fails)
set -euo pipefail

# Directory this script lives in. Use '&&' (not ';') so that if the cd ever
# fails, pwd cannot run in the wrong directory and silently mis-set MY_DIR.
MY_DIR=$(cd "$(dirname "$0")" && pwd)
+
# Trace every command when verbose CI output is requested.
if [[ ${AIRFLOW_CI_VERBOSE:="false"} == "true" ]]; then
    set -x
fi

# Shared guard that aborts when this script is run outside the CI container.
# Quoted: MY_DIR may contain spaces.
. "${MY_DIR}/_check_in_container.sh"

AIRFLOW_ROOT="${MY_DIR}/../.."

# Defaults for the test matrix; the caller (docker-compose / Travis) may override.
PYTHON_VERSION=${PYTHON_VERSION:=3.6}
ENV=${ENV:=docker}
BACKEND=${BACKEND:=sqlite}
KUBERNETES_VERSION=${KUBERNETES_VERSION:=}

RUN_TESTS=${RUN_TESTS:="true"}

# NOTE(review): 'ARGS=$@' flattened all arguments into a single string and
# lost word boundaries (ShellCheck SC2124). Keep them as an array and expand
# later with "${ARGS[@]}" — confirm downstream consumers use that form.
ARGS=("$@")
+
# AIRFLOW_HOME must be provided by the image / compose environment.
# Diagnostics go to stderr, consistent with the HADOOP_HOME / SQLAlchemy
# checks further down.
if [[ -z "${AIRFLOW_HOME:=}" ]]; then
    echo
    echo "AIRFLOW_HOME not set !!!!" >&2
    echo
    exit 1
fi
+
# Default CLEAN_FILES so the checks below do not abort under 'set -u' when the
# caller does not export it. NOTE(review): confirm no caller relies on it
# being unset vs. "false".
CLEAN_FILES=${CLEAN_FILES:="false"}

# Install JS dependencies only when sources are mounted from the host and the
# clean-up job is not the one running.
if [[ ! -d "${AIRFLOW_HOME}/airflow/www/node_modules" && "${CLEAN_FILES}" == "false" ]]; then
    echo
    echo "Installing NPM modules as they are not yet installed (Sources mounted from Host)"
    echo
    pushd "${AIRFLOW_HOME}/airflow/www/"
    npm ci
    echo
    popd
fi
# Build the production assets once if they are missing ("${CLEAN_FILES}" is
# now quoted here too, matching the check above).
if [[ ! -d "${AIRFLOW_HOME}/airflow/www/static/dist" && "${CLEAN_FILES}" == "false" ]]; then
    pushd "${AIRFLOW_HOME}/airflow/www/"
    echo
    echo "Building production version of javascript files (Sources mounted from Host)"
    echo
    echo
    npm run prod
    echo
    echo
    popd
fi
+
# Abort with a blank-line-padded message (stderr) and the given exit code
# when the supplied value is empty.
require_env() {
    local value=$1
    local message=$2
    local code=$3
    if [[ -z "${value}" ]]; then
        echo
        echo "${message}" >&2
        echo
        exit "${code}"
    fi
}

# Both variables must be supplied by the environment; ':=' assigns an empty
# default so the expansions are safe under 'set -u'.
require_env "${HADOOP_HOME:=}" "HADOOP_HOME not set - abort" 1

export HADOOP_HOME

require_env "${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}" "AIRFLOW__CORE__SQL_ALCHEMY_CONN not set - abort" 2
+
echo
echo "Using ${HADOOP_DISTRO:=} distribution of Hadoop from ${HADOOP_HOME}"
echo

# NOTE(review): this overwrites the AIRFLOW_ROOT computed earlier in the
# script ("${MY_DIR}/../..") with a path one level higher — one of the two
# must be wrong; confirm which directory layout is intended.
# '&&' (not ';') so a failed cd cannot produce a bogus path.
AIRFLOW_ROOT="$(cd "${MY_DIR}" && cd ../../.. && pwd)"

export AIRFLOW__CORE__DAGS_FOLDER="${AIRFLOW_ROOT}/tests/dags"

# NOTE(review): despite the comment, this only sets PYTHONPATH when it is
# unset — it does not append tests/test_utils to an existing PYTHONPATH.
# (TODO: Do we need it?)
export PYTHONPATH=${PYTHONPATH:-${AIRFLOW_ROOT}/tests/test_utils}

echo
echo "Backend connection: ${AIRFLOW__CORE__SQL_ALCHEMY_CONN}"
echo

export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}}

echo
echo "Airflow home: ${AIRFLOW_HOME}"
echo

# Force unit-test mode so the tests use the in-container test configuration.
export AIRFLOW__CORE__UNIT_TEST_MODE=True
export HADOOP_DISTRO

echo "#######################################################################"
echo
echo "  You can drop into ipdb debugger by adding this line to your code:"
echo
echo "         import ipdb; ipdb.set_trace()"
echo
echo "  Then run your tests with 'run-tests <TEST> --nocapture' "
echo
echo "#######################################################################"
+
+
# Fix codecov build path
# TODO: Check this - this should be made travis-independent
if [[ ! -h /home/travis/build/apache/airflow ]]; then
  sudo mkdir -p /home/travis/build/apache
  sudo ln -s "${AIRFLOW_ROOT}" /home/travis/build/apache/airflow
fi

# Fix file permissions on minikube state (created as root by the k8s setup).
if [[ -d "${HOME}/.minikube" ]]; then
    # 'user:group' is the standard chown separator; the historic 'user.group'
    # form is deprecated and ambiguous for user names containing dots.
    sudo chown -R airflow:airflow "${HOME}/.kube" "${HOME}/.minikube"
fi

# Pick the pip binary matching the Python major version under test.
if [[ ${PYTHON_VERSION} == 3* ]]; then
    PIP=pip3
else
    PIP=pip2
fi
+
# For the plain docker environment, start the local services the tests talk
# to: a Hadoop MiniCluster (backgrounded, output discarded) and an ssh server
# with passwordless key authentication for the current user.
# NOTE(review): failures of these commands are silenced via >/dev/null 2>&1 —
# confirm that is intentional rather than hiding setup errors.
if [[ "${ENV}" == "docker" ]]; then
    # Start MiniCluster
    java -cp "/tmp/minicluster-1.1-SNAPSHOT/*" com.ing.minicluster.MiniCluster >/dev/null 2>&1 &

    # Set up ssh keys
    echo 'yes' | ssh-keygen -t rsa -C [email protected] -P '' -f ~/.ssh/id_rsa >/dev/null 2>&1
    cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
    ln -s -f ~/.ssh/authorized_keys ~/.ssh/authorized_keys2
    chmod 600 ~/.ssh/*

    # SSH Service
    sudo service ssh restart >/dev/null 2>&1
fi
+
+
# Setting up kerberos

# $(...) instead of deprecated backticks (nests cleanly, easier to read).
FQDN=$(hostname)
ADMIN="admin"
PASS="airflow"
# NOTE(review): KRB5_KTNAME is assigned but never exported here — confirm
# whatever consumes it (kadmin/kinit later in the script) actually sees it.
KRB5_KTNAME=/etc/airflow.keytab

echo
echo "Hosts:"
echo
cat /etc/hosts
echo
echo "Hostname: ${FQDN}"
echo

# Quoted: MY_DIR may contain spaces.
sudo cp "${MY_DIR}/krb5/krb5.conf" /etc/krb5.conf
 
 Review comment:
  I will look into that while fixing the last failing kerberos test, but I 
think it is important that the Dockerfile can be run stand-alone without 
mounting anything to it. This makes the Dockerfile self-runnable as well: you 
should be able to just run `docker run airflow/airflow-ci:latest-3.6`, enter 
bash, and run the sqlite tests immediately — without having to use 
docker-compose etc.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to