diff --git a/CI.rst b/CI.rst
index 5c3217457841c..43ea218a69a7e 100644
--- a/CI.rst
+++ b/CI.rst
@@ -197,6 +197,16 @@ You can use those variables when you try to reproduce the build locally.
 +-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+
 | ``COMMIT_SHA``                          |             | GITHUB_SHA   | GITHUB_SHA | SHA of the commit of the build is run           |
 +-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+
+| Initialization                                                                                                                      |
++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+
+| ``SKIP_ENVIRONMENT_INITIALIZATION``     | false\*     | false\*      | false\*    | Skip initialization of test environment         |
+|                                         |             |              |            |                                                 |
+|                                         |             |              |            | \* set to true in pre-commits                   |
++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+
+| ``SKIP_SSH_SETUP``                      | false\*     | false\*      | false\*    | Skip setting up SSH server for tests.           |
+|                                         |             |              |            |                                                 |
+|                                         |             |              |            | \* set to true in GitHub CodeSpaces             |
++-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+
 | Verbosity variables                                                                                                                 |
 +-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+
 | ``PRINT_INFO_FROM_SCRIPTS``             | true\*      | true\*       | true\*     | Allows to print output to terminal from running |
diff --git a/scripts/ci/docker-compose/_docker.env b/scripts/ci/docker-compose/_docker.env
index 341d85cef281f..b9271c063adbd 100644
--- a/scripts/ci/docker-compose/_docker.env
+++ b/scripts/ci/docker-compose/_docker.env
@@ -58,7 +58,9 @@ RUN_TESTS
 LIST_OF_INTEGRATION_TESTS_TO_RUN
 RUN_SYSTEM_TESTS
 START_AIRFLOW
+SKIP_ENVIRONMENT_INITIALIZATION
 SKIP_TWINE_CHECK
+SKIP_SSH_SETUP
 TEST_TYPE
 UPGRADE_TO_NEWER_DEPENDENCIES
 VERBOSE
diff --git a/scripts/ci/docker-compose/base.yml b/scripts/ci/docker-compose/base.yml
index 2cdc177df8a3b..3833e8807a35e 100644
--- a/scripts/ci/docker-compose/base.yml
+++ b/scripts/ci/docker-compose/base.yml
@@ -71,7 +71,9 @@ services:
       - LIST_OF_INTEGRATION_TESTS_TO_RUN=${LIST_OF_INTEGRATION_TESTS_TO_RUN}
       - RUN_SYSTEM_TESTS=${RUN_SYSTEM_TESTS}
       - START_AIRFLOW=${START_AIRFLOW}
+      - SKIP_ENVIRONMENT_INITIALIZATION=${SKIP_ENVIRONMENT_INITIALIZATION}
       - SKIP_TWINE_CHECK=${SKIP_TWINE_CHECK}
+      - SKIP_SSH_SETUP=${SKIP_SSH_SETUP}
       - TEST_TYPE=${TEST_TYPE}
       - UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES}
       - VERBOSE=${VERBOSE}
diff --git a/scripts/ci/docker-compose/devcontainer.env b/scripts/ci/docker-compose/devcontainer.env
index 132bb4b140588..c07135d47b2c1 100644
--- a/scripts/ci/docker-compose/devcontainer.env
+++ b/scripts/ci/docker-compose/devcontainer.env
@@ -42,6 +42,7 @@ RUN_SYSTEM_TESTS=""
 START_AIRFLOW="false"
 SKIP_TWINE_CHECK="false"
 SKIP_SSH_SETUP="true"
+SKIP_ENVIRONMENT_INITIALIZATION="false"
 TEST_TYPE=
 UPGRADE_TO_NEWER_DEPENDENCIES="false"
 VERBOSE="false"
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index b690ed5bfbb23..be3410603adb6 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -422,6 +422,9 @@ function initialization::initialize_image_build_variables() {
     SKIP_SSH_SETUP=${SKIP_SSH_SETUP:="false"}
     export SKIP_SSH_SETUP
 
+    SKIP_ENVIRONMENT_INITIALIZATION=${SKIP_ENVIRONMENT_INITIALIZATION:="false"}
+    export SKIP_ENVIRONMENT_INITIALIZATION
+
     export INSTALLED_EXTRAS="async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,imap,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
 
     AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION:="22.0.4"}
diff --git a/scripts/ci/libraries/_runs.sh b/scripts/ci/libraries/_runs.sh
index 194dc21e08d65..344c52e03781c 100644
--- a/scripts/ci/libraries/_runs.sh
+++ b/scripts/ci/libraries/_runs.sh
@@ -21,6 +21,7 @@ function runs::run_docs() {
     start_end::group_start "Run build docs"
     docker_v run "${EXTRA_DOCKER_FLAGS[@]}" -t \
         -e "GITHUB_ACTIONS=${GITHUB_ACTIONS="false"}" \
+        -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
         --pull never \
         "${AIRFLOW_CI_IMAGE_WITH_TAG}" \
         "/opt/airflow/scripts/in_container/run_docs_build.sh" "${@}"
@@ -31,6 +32,7 @@ function runs::run_generate_constraints() {
 
     start_end::group_start "Run generate constraints"
     docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \
+        -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
         --pull never \
         "${AIRFLOW_CI_IMAGE_WITH_TAG}" \
         "/opt/airflow/scripts/in_container/run_generate_constraints.sh"
@@ -43,6 +45,7 @@ function runs::run_prepare_airflow_packages() {
     docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \
         -t \
         -v "${AIRFLOW_SOURCES}:/opt/airflow" \
+        -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
         --pull never \
         "${AIRFLOW_CI_IMAGE_WITH_TAG}" \
         "/opt/airflow/scripts/in_container/run_prepare_airflow_packages.sh"
@@ -56,6 +59,7 @@ function runs::run_prepare_provider_packages() {
     docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \
         -t \
         -v "${AIRFLOW_SOURCES}:/opt/airflow" \
+        -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
         --pull never \
         "${AIRFLOW_CI_IMAGE_WITH_TAG}" \
         "/opt/airflow/scripts/in_container/run_prepare_provider_packages.sh" "${@}"
@@ -72,6 +76,7 @@ function runs::run_prepare_provider_documentation() {
         "${term_flag}" \
         -v "${AIRFLOW_SOURCES}:/opt/airflow" \
         -e "NON_INTERACTIVE" \
+        -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
         -e "GENERATE_PROVIDERS_ISSUE" \
         -e "GITHUB_TOKEN" \
         --pull never \
diff --git a/scripts/ci/pre_commit/pre_commit_check_license.sh b/scripts/ci/pre_commit/pre_commit_check_license.sh
index 909ad691a2e92..cf0ac104dbb04 100755
--- a/scripts/ci/pre_commit/pre_commit_check_license.sh
+++ b/scripts/ci/pre_commit/pre_commit_check_license.sh
@@ -38,6 +38,7 @@ function run_check_license() {
     if ! docker_v run -v "${AIRFLOW_SOURCES}:/opt/airflow" -t \
         --user "$(id -ur):$(id -gr)" \
         --rm --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
+        -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
         ghcr.io/apache/airflow-apache-rat:0.13-2021.07.04 \
         --exclude-file /opt/airflow/.rat-excludes \
         --d /opt/airflow | tee "${AIRFLOW_SOURCES}/logs/rat-results.txt" ; then
diff --git a/scripts/ci/pre_commit/pre_commit_flake8.sh b/scripts/ci/pre_commit/pre_commit_flake8.sh
index 408d593fb7501..aed0bbce8c731 100755
--- a/scripts/ci/pre_commit/pre_commit_flake8.sh
+++ b/scripts/ci/pre_commit/pre_commit_flake8.sh
@@ -25,9 +25,11 @@ export PRINT_INFO_FROM_SCRIPTS="false"
 function run_flake8() {
     if [[ "${#@}" == "0" ]]; then
         docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \
+            -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
             "${AIRFLOW_CI_IMAGE}" "/opt/airflow/scripts/in_container/run_flake8.sh"
     else
         docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \
+            -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
             "${AIRFLOW_CI_IMAGE}" "/opt/airflow/scripts/in_container/run_flake8.sh" "${@}"
     fi
 }
diff --git a/scripts/ci/pre_commit/pre_commit_migration_reference.sh b/scripts/ci/pre_commit/pre_commit_migration_reference.sh
index 2d02fc3584a3c..997aad9543486 100755
--- a/scripts/ci/pre_commit/pre_commit_migration_reference.sh
+++ b/scripts/ci/pre_commit/pre_commit_migration_reference.sh
@@ -24,10 +24,12 @@ export PRINT_INFO_FROM_SCRIPTS="false"
 function migration_reference() {
     if [[ "${#@}" == "0" ]]; then
         docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \
+            -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
             "${AIRFLOW_CI_IMAGE}" \
             "/opt/airflow/scripts/in_container/run_migration_reference.sh"
     else
         docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \
+            -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
             "${AIRFLOW_CI_IMAGE}" \
             "/opt/airflow/scripts/in_container/run_migration_reference.sh" "${@}"
     fi
diff --git a/scripts/ci/pre_commit/pre_commit_mypy.sh b/scripts/ci/pre_commit/pre_commit_mypy.sh
index 60d1671fae034..90c17b96c158e 100755
--- a/scripts/ci/pre_commit/pre_commit_mypy.sh
+++ b/scripts/ci/pre_commit/pre_commit_mypy.sh
@@ -32,6 +32,7 @@ function run_mypy() {
 
     docker_v run "${EXTRA_DOCKER_FLAGS[@]}" -t \
         "-v" "${AIRFLOW_SOURCES}/.mypy_cache:/opt/airflow/.mypy_cache" \
+        -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
         "${AIRFLOW_CI_IMAGE_WITH_TAG}" \
         "/opt/airflow/scripts/in_container/run_mypy.sh" "${files[@]}"
 }
diff --git a/scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh b/scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh
index cf9c3832dd143..2c5a0b4e72599 100755
--- a/scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh
+++ b/scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh
@@ -37,6 +37,7 @@ function run_test_package_import_all_classes() {
         -v "${AIRFLOW_SOURCES}/empty:/opt/airflow/airflow:cached" \
         -v "${AIRFLOW_SOURCES}/scripts/in_container:/opt/airflow/scripts/in_container:cached" \
         -v "${AIRFLOW_SOURCES}/dev/import_all_classes.py:/opt/airflow/dev/import_all_classes.py:cached" \
+        -e "SKIP_ENVIRONMENT_INITIALIZATION=true" \
         "${AIRFLOW_CI_IMAGE_WITH_TAG}" \
         "/opt/airflow/scripts/in_container/run_install_and_test_provider_packages.sh"
 }
diff --git a/scripts/in_container/entrypoint_ci.sh b/scripts/in_container/entrypoint_ci.sh
index 77a5ca92bc613..1cc866046542c 100755
--- a/scripts/in_container/entrypoint_ci.sh
+++ b/scripts/in_container/entrypoint_ci.sh
@@ -52,154 +52,157 @@ export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}}
 
 : "${AIRFLOW_SOURCES:?"ERROR: AIRFLOW_SOURCES not set !!!!"}"
 
-echo
-echo "Airflow home: ${AIRFLOW_HOME}" -echo "Airflow sources: ${AIRFLOW_SOURCES}" -echo "Airflow core SQL connection: ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}" - -echo - -RUN_TESTS=${RUN_TESTS:="false"} -CI=${CI:="false"} -USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION:=""}" +if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then -if [[ ${USE_AIRFLOW_VERSION} == "" ]]; then - export PYTHONPATH=${AIRFLOW_SOURCES} - echo - echo "Using already installed airflow version" - echo - if [[ -d "${AIRFLOW_SOURCES}/airflow/www/" ]]; then - pushd "${AIRFLOW_SOURCES}/airflow/www/" >/dev/null - ./ask_for_recompile_assets_if_needed.sh - popd >/dev/null - fi - # Cleanup the logs, tmp when entering the environment - sudo rm -rf "${AIRFLOW_SOURCES}"/logs/* - sudo rm -rf "${AIRFLOW_SOURCES}"/tmp/* - mkdir -p "${AIRFLOW_SOURCES}"/logs/ - mkdir -p "${AIRFLOW_SOURCES}"/tmp/ -elif [[ ${USE_AIRFLOW_VERSION} == "none" ]]; then - echo - echo "Skip installing airflow - only install wheel/tar.gz packages that are present locally" - echo - uninstall_airflow_and_providers -elif [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then - echo - echo "Install airflow from wheel package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers." - echo - uninstall_airflow_and_providers - install_airflow_from_wheel "[${AIRFLOW_EXTRAS}]" - uninstall_providers -elif [[ ${USE_AIRFLOW_VERSION} == "sdist" ]]; then - echo - echo "Install airflow from sdist package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers." - echo - uninstall_airflow_and_providers - install_airflow_from_sdist "[${AIRFLOW_EXTRAS}]" - uninstall_providers -else - echo - echo "Install airflow from PyPI without extras" echo - install_released_airflow_version "${USE_AIRFLOW_VERSION}" -fi -if [[ ${USE_PACKAGES_FROM_DIST=} == "true" ]]; then + echo "Airflow home: ${AIRFLOW_HOME}" + echo "Airflow sources: ${AIRFLOW_SOURCES}" + echo "Airflow core SQL connection: ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}" + echo - echo "Install all packages from dist folder" - if [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then - echo "(except apache-airflow)" - fi - if [[ ${PACKAGE_FORMAT} == "both" ]]; then + + RUN_TESTS=${RUN_TESTS:="false"} + CI=${CI:="false"} + USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION:=""}" + + if [[ ${USE_AIRFLOW_VERSION} == "" ]]; then + export PYTHONPATH=${AIRFLOW_SOURCES} echo - echo "${COLOR_RED}ERROR:You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'${COLOR_RESET}" + echo "Using already installed airflow version" echo - exit 1 + if [[ -d "${AIRFLOW_SOURCES}/airflow/www/" ]]; then + pushd "${AIRFLOW_SOURCES}/airflow/www/" >/dev/null + ./ask_for_recompile_assets_if_needed.sh + popd >/dev/null + fi + # Cleanup the logs, tmp when entering the environment + sudo rm -rf "${AIRFLOW_SOURCES}"/logs/* + sudo rm -rf "${AIRFLOW_SOURCES}"/tmp/* + mkdir -p "${AIRFLOW_SOURCES}"/logs/ + mkdir -p "${AIRFLOW_SOURCES}"/tmp/ + elif [[ ${USE_AIRFLOW_VERSION} == "none" ]]; then + echo + echo "Skip installing airflow - only install wheel/tar.gz packages that are present locally" + echo + uninstall_airflow_and_providers + elif [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then + echo + echo "Install airflow from wheel package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers." + echo + uninstall_airflow_and_providers + install_airflow_from_wheel "[${AIRFLOW_EXTRAS}]" + uninstall_providers + elif [[ ${USE_AIRFLOW_VERSION} == "sdist" ]]; then + echo + echo "Install airflow from sdist package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers." 
+        echo
+        uninstall_airflow_and_providers
+        install_airflow_from_sdist "[${AIRFLOW_EXTRAS}]"
+        uninstall_providers
+    else
+        echo
+        echo "Install airflow from PyPI without extras"
+        echo
+        install_released_airflow_version "${USE_AIRFLOW_VERSION}"
     fi
-    echo
-    installable_files=()
-    for file in /dist/*.{whl,tar.gz}
-    do
-        if [[ ${USE_AIRFLOW_VERSION} == "wheel" && ${file} == "apache?airflow-[0-9]"* ]]; then
-            # Skip Apache Airflow package - it's just been installed above with extras
-            echo "Skipping ${file}"
-            continue
+    if [[ ${USE_PACKAGES_FROM_DIST=} == "true" ]]; then
+        echo
+        echo "Install all packages from dist folder"
+        if [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then
+            echo "(except apache-airflow)"
         fi
-        if [[ ${PACKAGE_FORMAT} == "wheel" && ${file} == *".whl" ]]; then
-            echo "Adding ${file} to install"
-            installable_files+=( "${file}" )
+        if [[ ${PACKAGE_FORMAT} == "both" ]]; then
+            echo
+            echo "${COLOR_RED}ERROR:You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'${COLOR_RESET}"
+            echo
+            exit 1
         fi
-        if [[ ${PACKAGE_FORMAT} == "sdist" && ${file} == *".tar.gz" ]]; then
-            echo "Adding ${file} to install"
-            installable_files+=( "${file}" )
+        echo
+        installable_files=()
+        for file in /dist/*.{whl,tar.gz}
+        do
+            if [[ ${USE_AIRFLOW_VERSION} == "wheel" && ${file} == "apache?airflow-[0-9]"* ]]; then
+                # Skip Apache Airflow package - it's just been installed above with extras
+                echo "Skipping ${file}"
+                continue
+            fi
+            if [[ ${PACKAGE_FORMAT} == "wheel" && ${file} == *".whl" ]]; then
+                echo "Adding ${file} to install"
+                installable_files+=( "${file}" )
+            fi
+            if [[ ${PACKAGE_FORMAT} == "sdist" && ${file} == *".tar.gz" ]]; then
+                echo "Adding ${file} to install"
+                installable_files+=( "${file}" )
+            fi
+        done
+        if (( ${#installable_files[@]} )); then
+            pip install "${installable_files[@]}" --no-deps
         fi
-    done
-    if (( ${#installable_files[@]} )); then
-        pip install "${installable_files[@]}" --no-deps
     fi
-fi
-
-# Added to have run-tests on path
-export PATH=${PATH}:${AIRFLOW_SOURCES}
-
-# This is now set in conftest.py - only for pytest tests
-unset AIRFLOW__CORE__UNIT_TEST_MODE
 
-mkdir -pv "${AIRFLOW_HOME}/logs/"
-cp -f "${IN_CONTAINER_DIR}/airflow_ci.cfg" "${AIRFLOW_HOME}/unittests.cfg"
+    # Added to have run-tests on path
+    export PATH=${PATH}:${AIRFLOW_SOURCES}
 
-# Change the default worker_concurrency for tests
-export AIRFLOW__CELERY__WORKER_CONCURRENCY=8
+    # This is now set in conftest.py - only for pytest tests
+    unset AIRFLOW__CORE__UNIT_TEST_MODE
 
-set +e
-"${IN_CONTAINER_DIR}/check_environment.sh"
-ENVIRONMENT_EXIT_CODE=$?
-set -e
-if [[ ${ENVIRONMENT_EXIT_CODE} != 0 ]]; then
-    echo
-    echo "Error: check_environment returned ${ENVIRONMENT_EXIT_CODE}. Exiting."
-    echo
-    exit ${ENVIRONMENT_EXIT_CODE}
-fi
-
-# Create symbolic link to fix possible issues with kubectl config cmd-path
-mkdir -p /usr/lib/google-cloud-sdk/bin
-touch /usr/lib/google-cloud-sdk/bin/gcloud
-ln -s -f /usr/bin/gcloud /usr/lib/google-cloud-sdk/bin/gcloud
+    mkdir -pv "${AIRFLOW_HOME}/logs/"
+    cp -f "${IN_CONTAINER_DIR}/airflow_ci.cfg" "${AIRFLOW_HOME}/unittests.cfg"
 
-if [[ ${SKIP_SSH_SETUP="false"} == "false" ]]; then
-    # Set up ssh keys
-    echo 'yes' | ssh-keygen -t rsa -C your_email@youremail.com -m PEM -P '' -f ~/.ssh/id_rsa \
-        >"${AIRFLOW_HOME}/logs/ssh-keygen.log" 2>&1
+    # Change the default worker_concurrency for tests
+    export AIRFLOW__CELERY__WORKER_CONCURRENCY=8
 
-    cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
-    ln -s -f ~/.ssh/authorized_keys ~/.ssh/authorized_keys2
-    chmod 600 ~/.ssh/*
-
-    # SSH Service
-    sudo service ssh restart >/dev/null 2>&1
-
-    # Sometimes the server is not quick enough to load the keys!
-    while [[ $(ssh-keyscan -H localhost 2>/dev/null | wc -l) != "3" ]] ; do
-        echo "Not all keys yet loaded by the server"
-        sleep 0.05
-    done
+    set +e
 
-    ssh-keyscan -H localhost >> ~/.ssh/known_hosts 2>/dev/null
-fi
+    "${IN_CONTAINER_DIR}/check_environment.sh"
+    ENVIRONMENT_EXIT_CODE=$?
+    set -e
+    if [[ ${ENVIRONMENT_EXIT_CODE} != 0 ]]; then
+        echo
+        echo "Error: check_environment returned ${ENVIRONMENT_EXIT_CODE}. Exiting."
+        echo
+        exit ${ENVIRONMENT_EXIT_CODE}
+    fi
+    # Create symbolic link to fix possible issues with kubectl config cmd-path
+    mkdir -p /usr/lib/google-cloud-sdk/bin
+    touch /usr/lib/google-cloud-sdk/bin/gcloud
+    ln -s -f /usr/bin/gcloud /usr/lib/google-cloud-sdk/bin/gcloud
+
+    if [[ ${SKIP_SSH_SETUP="false"} == "false" ]]; then
+        # Set up ssh keys
+        echo 'yes' | ssh-keygen -t rsa -C your_email@youremail.com -m PEM -P '' -f ~/.ssh/id_rsa \
+            >"${AIRFLOW_HOME}/logs/ssh-keygen.log" 2>&1
+
+        cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
+        ln -s -f ~/.ssh/authorized_keys ~/.ssh/authorized_keys2
+        chmod 600 ~/.ssh/*
+
+        # SSH Service
+        sudo service ssh restart >/dev/null 2>&1
+
+        # Sometimes the server is not quick enough to load the keys!
+        while [[ $(ssh-keyscan -H localhost 2>/dev/null | wc -l) != "3" ]] ; do
+            echo "Not all keys yet loaded by the server"
+            sleep 0.05
+        done
+
+        ssh-keyscan -H localhost >> ~/.ssh/known_hosts 2>/dev/null
+    fi
 
-# shellcheck source=scripts/in_container/configure_environment.sh
-. "${IN_CONTAINER_DIR}/configure_environment.sh"
+    # shellcheck source=scripts/in_container/configure_environment.sh
+    . "${IN_CONTAINER_DIR}/configure_environment.sh"
 
-# shellcheck source=scripts/in_container/run_init_script.sh
-. "${IN_CONTAINER_DIR}/run_init_script.sh"
+    # shellcheck source=scripts/in_container/run_init_script.sh
+    . "${IN_CONTAINER_DIR}/run_init_script.sh"
 
-cd "${AIRFLOW_SOURCES}"
+    cd "${AIRFLOW_SOURCES}"
 
-if [[ ${START_AIRFLOW:="false"} == "true" ]]; then
-    export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS}
-    export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES}
-    # shellcheck source=scripts/in_container/bin/run_tmux
-    exec run_tmux
+    if [[ ${START_AIRFLOW:="false"} == "true" ]]; then
+        export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS}
+        export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES}
+        # shellcheck source=scripts/in_container/bin/run_tmux
+        exec run_tmux
+    fi
 fi
 set +u