diff --git a/.travis.yml b/.travis.yml
index 5bd750453a563..589fefff4519d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -19,6 +19,8 @@
 sudo: true
 dist: trusty
 language: python
+services:
+  - postgresql
 env:
   global:
     - DOCKER_COMPOSE_VERSION=1.20.0
@@ -26,14 +28,14 @@ env:
     - TRAVIS_CACHE=$HOME/.travis_cache/
   matrix:
     - TOX_ENV=flake8
-    - TOX_ENV=py27-backend_mysql
-    - TOX_ENV=py27-backend_sqlite
-    - TOX_ENV=py27-backend_postgres
-    - TOX_ENV=py35-backend_mysql PYTHON_VERSION=3
-    - TOX_ENV=py35-backend_sqlite PYTHON_VERSION=3
-    - TOX_ENV=py35-backend_postgres PYTHON_VERSION=3
-    - TOX_ENV=py27-backend_postgres KUBERNETES_VERSION=v1.9.0
-    - TOX_ENV=py35-backend_postgres KUBERNETES_VERSION=v1.10.0 PYTHON_VERSION=3
+    - TOX_ENV=py27-backend_mysql-env_docker
+    - TOX_ENV=py27-backend_sqlite-env_docker
+    - TOX_ENV=py27-backend_postgres-env_docker
+    - TOX_ENV=py35-backend_mysql-env_docker PYTHON_VERSION=3
+    - TOX_ENV=py35-backend_sqlite-env_docker PYTHON_VERSION=3
+    - TOX_ENV=py35-backend_postgres-env_docker PYTHON_VERSION=3
+    - TOX_ENV=py27-backend_postgres_k8s-env_kubernetes KUBERNETES_VERSION=v1.9.0
+    - TOX_ENV=py35-backend_postgres_k8s-env_kubernetes KUBERNETES_VERSION=v1.10.0 PYTHON_VERSION=3
 cache:
   directories:
     - $HOME/.wheelhouse/
@@ -43,12 +45,19 @@ before_install:
   - sudo ls -lh $HOME/.cache/pip/
   - sudo rm -rf $HOME/.cache/pip/* $HOME/.wheelhouse/*
   - sudo chown -R travis:travis $HOME/.cache/pip
+  - ./scripts/ci/docker-registry.sh
 install:
   # Use recent docker-compose version
   - sudo rm /usr/local/bin/docker-compose
   - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose
   - chmod +x docker-compose
   - sudo mv docker-compose /usr/local/bin
+  - psql -c 'create database airflow;' -U postgres
   - pip install --upgrade pip
+  - pip install tox
+  - pip install codecov
 script:
-  - docker-compose --log-level ERROR -f scripts/ci/docker-compose.yml run airflow-testing /app/scripts/ci/run-ci.sh
+  - ./scripts/ci/run-ci.sh
+after_success:
+  - sudo chown -R travis.travis .
+  - codecov
diff --git a/scripts/ci/daemon.json b/scripts/ci/daemon.json
new file mode 100644
index 0000000000000..ae8b9e46d6d68
--- /dev/null
+++ b/scripts/ci/daemon.json
@@ -0,0 +1,3 @@
+{
+    "insecure-registries" : ["10.192.0.1:5000"]
+}
diff --git a/scripts/ci/docker-registry.sh b/scripts/ci/docker-registry.sh
new file mode 100755
index 0000000000000..a77db43c912a0
--- /dev/null
+++ b/scripts/ci/docker-registry.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+
+_UNAME_OUT=$(uname -s)
+case "${_UNAME_OUT}" in
+    Linux*)     _MY_OS=linux;;
+    Darwin*)    _MY_OS=darwin;;
+    *)          echo "${_UNAME_OUT} is unsupported."
+                exit 1;;
+esac
+
+if [ "$_MY_OS" = "linux" ]; then
+  export _REGISTRY_IP=10.192.0.1
+else
+  export _REGISTRY_IP=`ipconfig getifaddr en0`
+fi
+
+echo "Deploying insecure registry"
+if [ "$_MY_OS" = "linux" ]; then
+  DIRNAME=$(cd "$(dirname "$0")"; pwd)
+#  sudo rm /etc/docker/daemon.json
+#  sudo cp $DIRNAME/daemon.json /etc/docker/
+  sudo sed -i "s/\DOCKER_OPTS=\"/DOCKER_OPTS=\"--insecure-registry=$_REGISTRY_IP:5000 /g" /etc/default/docker
+  sudo cat /etc/default/docker
+  sudo service docker restart
+fi
+
+
diff --git a/scripts/ci/kubernetes/docker/build.sh b/scripts/ci/kubernetes/docker/build.sh
index b93c6b1715ddb..3c28978b757a8 100755
--- a/scripts/ci/kubernetes/docker/build.sh
+++ b/scripts/ci/kubernetes/docker/build.sh
@@ -22,11 +22,24 @@ TAG=${2:-latest}
 DIRNAME=$(cd "$(dirname "$0")"; pwd)
 AIRFLOW_ROOT="$DIRNAME/../../../.."
 
-ENVCONFIG=$(minikube docker-env)
-if [ $? -eq 0 ]; then
-  eval $ENVCONFIG
-fi
+#ENVCONFIG=$(minikube docker-env)
+#if [ $? -eq 0 ]; then
+#  eval $ENVCONFIG
+#fi
+_UNAME_OUT=$(uname -s)
+case "${_UNAME_OUT}" in
+    Linux*)     _MY_OS=linux;;
+    Darwin*)    _MY_OS=darwin;;
+    *)          echo "${_UNAME_OUT} is unsupported."
+                exit 1;;
+esac
+echo "Local OS is ${_MY_OS}"
+if [ "$_MY_OS" = "linux" ]; then
+  export _REGISTRY_IP=10.192.0.1
+else
+  export _REGISTRY_IP=`ipconfig getifaddr en0`
+fi
 
 echo "Airflow directory $AIRFLOW_ROOT"
 echo "Airflow Docker directory $DIRNAME"
@@ -34,5 +47,7 @@ cd $AIRFLOW_ROOT
 python setup.py sdist -q
 echo "Copy distro $AIRFLOW_ROOT/dist/*.tar.gz ${DIRNAME}/airflow.tar.gz"
 cp $AIRFLOW_ROOT/dist/*.tar.gz ${DIRNAME}/airflow.tar.gz
-cd $DIRNAME && docker build --pull $DIRNAME --tag=${IMAGE}:${TAG}
+cd $DIRNAME && docker build --pull $DIRNAME --tag=${_REGISTRY_IP}:5000/${IMAGE}:${TAG}
+docker push ${_REGISTRY_IP}:5000/${IMAGE}:${TAG}
+docker exec kube-node-1 docker pull ${_REGISTRY_IP}:5000/${IMAGE}:${TAG}
 rm $DIRNAME/airflow.tar.gz
diff --git a/scripts/ci/kubernetes/kube/airflow.yaml b/scripts/ci/kubernetes/kube/airflow.yaml.template
similarity index 97%
rename from scripts/ci/kubernetes/kube/airflow.yaml
rename to scripts/ci/kubernetes/kube/airflow.yaml.template
index 4f451ba44a687..97969601b94f0 100644
--- a/scripts/ci/kubernetes/kube/airflow.yaml
+++ b/scripts/ci/kubernetes/kube/airflow.yaml.template
@@ -43,7 +43,7 @@ spec:
     spec:
       initContainers:
       - name: "init"
-        image: airflow
+        image: {REG_IP}:5000/airflow
         imagePullPolicy: IfNotPresent
         volumeMounts:
         - name: airflow-configmap
@@ -65,8 +65,8 @@ spec:
         - "-cx"
         - "./tmp/airflow-test-env-init.sh"
       containers:
       - name: webserver
-        image: airflow
+        image: {REG_IP}:5000/airflow
         imagePullPolicy: IfNotPresent
         ports:
         - name: webserver
@@ -105,7 +105,7 @@ spec:
 #          path: /login
 #          port: 8080
       - name: scheduler
-        image: airflow
+        image: {REG_IP}:5000/airflow
         imagePullPolicy: IfNotPresent
         args: ["scheduler"]
         env:
diff --git a/scripts/ci/kubernetes/kube/deploy.sh b/scripts/ci/kubernetes/kube/deploy.sh
index a9a42a7a12d12..ca2ad2607c461 100755
--- a/scripts/ci/kubernetes/kube/deploy.sh
+++ b/scripts/ci/kubernetes/kube/deploy.sh
@@ -16,10 +16,27 @@
 #  KIND, either express or implied.  See the License for the    *
 #  specific language governing permissions and limitations      *
 #  under the License.                                            *
+_UNAME_OUT=$(uname -s)
+case "${_UNAME_OUT}" in
+    Linux*)     _MY_OS=linux;;
+    Darwin*)    _MY_OS=darwin;;
+    *)          echo "${_UNAME_OUT} is unsupported."
+                exit 1;;
+esac
+echo "Local OS is ${_MY_OS}"
+
+if [ "$_MY_OS" = "linux" ]; then
+  export _REGISTRY_IP=10.192.0.1
+else
+  export _REGISTRY_IP=`ipconfig getifaddr en0`
+fi
 
 IMAGE=${1:-airflow/ci}
 TAG=${2:-latest}
 DIRNAME=$(cd "$(dirname "$0")"; pwd)
+rm -f $DIRNAME/airflow.yaml
+sed -e s/{REG_IP}/$_REGISTRY_IP/g $DIRNAME/airflow.yaml.template > $DIRNAME/airflow.yaml
+cat $DIRNAME/airflow.yaml
 
 kubectl delete -f $DIRNAME/postgres.yaml
 kubectl delete -f $DIRNAME/airflow.yaml
diff --git a/scripts/ci/kubernetes/kube/volumes.yaml b/scripts/ci/kubernetes/kube/volumes.yaml
index b5488e7c7a711..2e08278953dac 100644
--- a/scripts/ci/kubernetes/kube/volumes.yaml
+++ b/scripts/ci/kubernetes/kube/volumes.yaml
@@ -35,7 +35,7 @@ metadata:
   name: airflow-dags
 spec:
   accessModes:
-    - ReadWriteMany
+    - ReadWriteOnce
   resources:
     requests:
       storage: 2Gi
@@ -81,7 +81,7 @@ metadata:
   name: test-volume
 spec:
   accessModes:
-    - ReadWriteMany
+    - ReadWriteOnce
   resources:
     requests:
       storage: 2Gi
diff --git a/scripts/ci/kubernetes/minikube/start_kubeadm.sh b/scripts/ci/kubernetes/minikube/start_kubeadm.sh
new file mode 100755
index 0000000000000..ee160cfb455a7
--- /dev/null
+++ b/scripts/ci/kubernetes/minikube/start_kubeadm.sh
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+_MY_SCRIPT="${BASH_SOURCE[0]}"
+_MY_DIR=$(cd "$(dirname "$_MY_SCRIPT")" && pwd)
+_UNAME_OUT=$(uname -s)
+case "${_UNAME_OUT}" in
+    Linux*)     _MY_OS=linux;;
+    Darwin*)    _MY_OS=darwin;;
+    *)          echo "${_UNAME_OUT} is unsupported."
+                exit 1;;
+esac
+echo "Local OS is ${_MY_OS}"
+_KUBERNETES_VERSION="${KUBERNETES_VERSION}"
+if [ "$_MY_OS" = "linux" ]; then
+  export _REGISTRY_IP="10.192.0.1"
+else
+  export _REGISTRY_IP=`ipconfig getifaddr en0`
+fi
+
+cd $_MY_DIR
+rm -rf tmp
+mkdir -p bin tmp
+
+rm -f $_MY_DIR/dind-cluster-*
+wget https://cdn.rawgit.com/kubernetes-sigs/kubeadm-dind-cluster/master/fixed/dind-cluster-v1.10.sh
+
+chmod +x $_MY_DIR/dind-cluster-v1.10.sh
+$_MY_DIR/dind-cluster-v1.10.sh clean
+echo "registries are "[\"${_REGISTRY_IP}:5000\"]""
+DIND_INSECURE_REGISTRIES="[\"${_REGISTRY_IP}:5000\"]" DIND_SUBNET="10.192.0.0" DIND_SUBNET_SIZE=16 $_MY_DIR/dind-cluster-v1.10.sh up
+export PATH="$HOME/.kubeadm-dind-cluster:$PATH"
+
+if [[ ! -x /usr/local/bin/kubectl ]]; then
+  echo Downloading kubectl, which is required to interact with the cluster.
+  curl -Lo bin/kubectl \
+    https://storage.googleapis.com/kubernetes-release/release/${_KUBERNETES_VERSION}/bin/${_MY_OS}/amd64/kubectl
+  chmod +x bin/kubectl
+  sudo mv bin/kubectl /usr/local/bin/kubectl
+fi
+
+REG=`docker ps -f name=registry -q`
+
+if [ -n "$REG" ]; then
+  docker stop $REG; docker rm $REG
+fi
+
+if [ "$_MY_OS" = "linux" ]; then
+  docker run -d -p :5000:5000 --restart=always --name registry registry:2
+else
+  docker run -d -p 5000:5000 --restart=always --name registry registry:2
+fi
diff --git a/scripts/ci/kubernetes/setup_kubernetes.sh b/scripts/ci/kubernetes/setup_kubernetes.sh
index ea559a02aa997..b38a0c1db1815 100755
--- a/scripts/ci/kubernetes/setup_kubernetes.sh
+++ b/scripts/ci/kubernetes/setup_kubernetes.sh
@@ -22,8 +22,7 @@ echo "This script downloads minikube, starts a driver=None minikube cluster, bui
 echo "For development, start minikube yourself (ie: minikube start) then run this script as you probably do not want a driver=None minikube cluster"
 
 DIRNAME=$(cd "$(dirname "$0")"; pwd)
-
-$DIRNAME/minikube/start_minikube.sh
+$DIRNAME/minikube/start_kubeadm.sh
 
 $DIRNAME/docker/build.sh
 $DIRNAME/kube/deploy.sh
diff --git a/scripts/ci/run-ci-docker.sh b/scripts/ci/run-ci-docker.sh
new file mode 100755
index 0000000000000..05a2357a8f10b
--- /dev/null
+++ b/scripts/ci/run-ci-docker.sh
@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x -e
+
+DIRNAME=$(cd "$(dirname "$0")"; pwd)
+AIRFLOW_ROOT="$DIRNAME/../.."
+
+# Fix file permissions
+sudo chown -R airflow.airflow . $HOME/.wheelhouse/ $HOME/.cache/pip
+
+if [[ $PYTHON_VERSION == '3' ]]; then
+    PIP=pip3
+else
+    PIP=pip
+fi
+
+sudo $PIP install --upgrade pip
+sudo $PIP install tox
+
+cd $AIRFLOW_ROOT && $PIP --version && tox --version
+
+tox -e $TOX_ENV
diff --git a/scripts/ci/run-ci-kubernetes.sh b/scripts/ci/run-ci-kubernetes.sh
new file mode 100755
index 0000000000000..655c0f4420ce9
--- /dev/null
+++ b/scripts/ci/run-ci-kubernetes.sh
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x -e
+
+DIRNAME=$(cd "$(dirname "$0")"; pwd)
+AIRFLOW_ROOT="$DIRNAME/../.."
+
+# Fix file permissions
+sudo chown -R travis.travis . $HOME/.wheelhouse/ $HOME/.cache/pip
+sudo apt-get install python3-pip
+
+if [[ $PYTHON_VERSION == '3' ]]; then
+    PIP=pip3
+else
+    PIP=pip
+fi
+
+sudo $PIP install --upgrade pip
+sudo $PIP install tox
+
+KUBERNETES_VERSION=${KUBERNETES_VERSION} $DIRNAME/kubernetes/setup_kubernetes.sh && \
+tox -e $TOX_ENV -- tests.contrib.minikube \
+    --with-coverage \
+    --cover-erase \
+    --cover-html \
+    --cover-package=airflow \
+    --cover-html-dir=airflow/www/static/coverage \
+    --with-ignore-docstrings \
+    --rednose \
+    --with-timer \
+    -v \
+    --logging-level=DEBUG
diff --git a/scripts/ci/run-ci.sh b/scripts/ci/run-ci.sh
index f2815bbd95979..c7cb0e42e6202 100755
--- a/scripts/ci/run-ci.sh
+++ b/scripts/ci/run-ci.sh
@@ -19,38 +19,11 @@
 # under the License.
 
 set -x
-
 DIRNAME=$(cd "$(dirname "$0")"; pwd)
-AIRFLOW_ROOT="$DIRNAME/../.."
-
-# Fix file permissions
-sudo chown -R airflow.airflow . $HOME/.wheelhouse/ $HOME/.cache/pip
-
-if [[ $PYTHON_VERSION == '3' ]]; then
-    PIP=pip3
-else
-    PIP=pip
-fi
-
-sudo $PIP install --upgrade pip
-sudo $PIP install tox
-
-cd $AIRFLOW_ROOT && $PIP --version && tox --version
 
 if [ -z "$KUBERNETES_VERSION" ]; then
-    tox -e $TOX_ENV
+    docker-compose --log-level ERROR -f scripts/ci/docker-compose.yml run airflow-testing /app/scripts/ci/run-ci-docker.sh
 else
-    KUBERNETES_VERSION=${KUBERNETES_VERSION} $DIRNAME/kubernetes/setup_kubernetes.sh && \
-    tox -e $TOX_ENV -- tests.contrib.minikube \
-        --with-coverage \
-        --cover-erase \
-        --cover-html \
-        --cover-package=airflow \
-        --cover-html-dir=airflow/www/static/coverage \
-        --with-ignore-docstrings \
-        --rednose \
-        --with-timer \
-        -v \
-        --logging-level=DEBUG
+    $DIRNAME/run-ci-kubernetes.sh
 fi
diff --git a/tests/contrib/minikube/test_kubernetes_executor.py b/tests/contrib/minikube/test_kubernetes_executor.py
index 45d4124d07973..375cf62ff7566 100644
--- a/tests/contrib/minikube/test_kubernetes_executor.py
+++ b/tests/contrib/minikube/test_kubernetes_executor.py
@@ -34,7 +34,11 @@
 
 
 def get_minikube_host():
-    host_ip = check_output(['minikube', 'ip'])
+    host_ip = check_output(['docker',
+                            'inspect',
+                            '-f',
+                            '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}',
+                            'kube-node-1'])
     if six.PY3:
         host_ip = host_ip.decode('UTF-8')
     host = '{}:30809'.format(host_ip.strip())
@@ -64,10 +68,10 @@ def monitor_task(self, host, execution_date, dag_id, task_id, expected_final_sta
             result = requests.get(
                 'http://{host}/api/experimental/dags/{dag_id}/'
                 'dag_runs/{execution_date}/tasks/{task_id}'
-                .format(host=host,
-                        dag_id=dag_id,
-                        execution_date=execution_date,
-                        task_id=task_id)
+                    .format(host=host,
+                            dag_id=dag_id,
+                            execution_date=execution_date,
+                            task_id=task_id)
             )
             self.assertEqual(result.status_code, 200, "Could not get the status")
             result_json = result.json()
@@ -99,9 +103,9 @@ def ensure_dag_expected_state(self, host, execution_date, dag_id,
         result = requests.get(
             'http://{host}/api/experimental/dags/{dag_id}/'
             'dag_runs/{execution_date}'
-            .format(host=host,
-                    dag_id=dag_id,
-                    execution_date=execution_date)
+                .format(host=host,
+                        dag_id=dag_id,
+                        execution_date=execution_date)
         )
         print(result)
         self.assertEqual(result.status_code, 200, "Could not get the status")
diff --git a/tests/operators/__init__.py b/tests/operators/__init__.py
index b94e5a9b6ac4a..4e19c28ec97a1 100644
--- a/tests/operators/__init__.py
+++ b/tests/operators/__init__.py
@@ -7,21 +7,26 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 # http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
+from subprocess import check_call
 from .docker_operator import *
 from .subdag_operator import *
 from .operators import *
 from .hive_operator import *
-from .s3_to_hive_operator import *
+try:
+    check_call(["kubectl", "get", "pods"])
+except Exception as e:
+    print(e)
+    from .s3_to_hive_operator import *
+    pass
 from .python_operator import *
 from .latest_only_operator import *
diff --git a/tests/operators/s3_to_hive_operator.py b/tests/operators/s3_to_hive_operator.py
index 3e41454395fa3..0a14a6e4e0a83 100644
--- a/tests/operators/s3_to_hive_operator.py
+++ b/tests/operators/s3_to_hive_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 # http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -43,7 +43,6 @@
 except ImportError:
     mock_s3 = None
 
-
 class S3ToHiveTransferTest(unittest.TestCase):
 
     def setUp(self):
diff --git a/tox.ini b/tox.ini
index c4b74a1e55345..1c1658af91fe1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -17,7 +17,7 @@
 # under the License.
 
 [tox]
-envlist = flake8,{py27,py35}-backend_{mysql,sqlite,postgres}
+envlist = flake8,{py27,py35}-backend_{mysql,sqlite,postgres}-env_{docker,kubernetes}
 skipsdist = True
 
 [global]
@@ -37,7 +37,7 @@ deps =
 
 basepython =
     py27: python2.7
-    py35: python3.5
+    py35: python3
 
 setenv =
     HADOOP_DISTRO=cdh
@@ -49,6 +49,7 @@ setenv =
     KRB5_KTNAME=/etc/airflow.keytab
     backend_mysql: AIRFLOW__CORE__SQL_ALCHEMY_CONN=mysql://root@mysql/airflow
     backend_postgres: AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgresql+psycopg2://postgres:airflow@postgres/airflow
+    backend_postgres_k8s: AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgresql+psycopg2://postgres@localhost/airflow
    backend_sqlite: AIRFLOW__CORE__SQL_ALCHEMY_CONN=sqlite:///{homedir}/airflow.db
     backend_sqlite: AIRFLOW__CORE__EXECUTOR=SequentialExecutor
 
@@ -57,10 +58,12 @@ passenv = *
 commands =
     pip wheel --progress-bar off -w {homedir}/.wheelhouse -f {homedir}/.wheelhouse -e .[devel_ci]
     pip install --progress-bar off --find-links={homedir}/.wheelhouse --no-index -e .[devel_ci]
+    env_kubernetes: pip install boto3
+    env_kubernetes: pip install moto
     {toxinidir}/scripts/ci/1-setup-env.sh
-    {toxinidir}/scripts/ci/2-setup-kdc.sh
-    {toxinidir}/scripts/ci/3-setup-databases.sh
-    {toxinidir}/scripts/ci/4-load-data.sh
+    env_docker: {toxinidir}/scripts/ci/2-setup-kdc.sh
+    env_docker: {toxinidir}/scripts/ci/3-setup-databases.sh
+    env_docker: {toxinidir}/scripts/ci/4-load-data.sh
     {toxinidir}/scripts/ci/5-run-tests.sh []
     {toxinidir}/scripts/ci/6-check-license.sh
     codecov -e TOXENV
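
Usage sketch (not part of the patch): how the reworked entrypoint is expected to be driven locally, based on the .travis.yml matrix and run-ci.sh above. The TOX_ENV and KUBERNETES_VERSION values are simply the ones listed in the matrix; a working local Docker daemon (and, for the Kubernetes case, the insecure registry and kubeadm-dind prerequisites set up by the scripts in this patch) is assumed.

# Docker-backed run: run-ci.sh sees no KUBERNETES_VERSION and dispatches to
# docker-compose, which executes run-ci-docker.sh inside the airflow-testing container.
TOX_ENV=py35-backend_postgres-env_docker PYTHON_VERSION=3 ./scripts/ci/run-ci.sh

# Kubernetes-backed run: KUBERNETES_VERSION is set, so run-ci.sh calls
# run-ci-kubernetes.sh, which brings up the kubeadm-dind cluster via
# setup_kubernetes.sh and then runs tests.contrib.minikube under tox.
TOX_ENV=py35-backend_postgres_k8s-env_kubernetes KUBERNETES_VERSION=v1.10.0 PYTHON_VERSION=3 ./scripts/ci/run-ci.sh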