From e8681c8c0daaafe54e9be3d5138a05812b0c1f99 Mon Sep 17 00:00:00 2001 From: blag Date: Mon, 9 Sep 2019 12:48:11 -0600 Subject: [PATCH 1/2] Fix indentation in prepare-integration.sh --- scripts/travis/prepare-integration.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/travis/prepare-integration.sh b/scripts/travis/prepare-integration.sh index deb77d620f..9edfc74003 100755 --- a/scripts/travis/prepare-integration.sh +++ b/scripts/travis/prepare-integration.sh @@ -2,8 +2,8 @@ set -e if [ "$(whoami)" != 'root' ]; then - echo 'Please run with sudo' - exit 2 + echo 'Please run with sudo' + exit 2 fi UBUNTU_VERSION=`lsb_release -a 2>&1 | grep Codename | grep -v "LSB" | awk '{print $2}'` @@ -28,6 +28,6 @@ chmod 777 logs/* # code and runs tests under a different system user). # NOTE: We need to pass "--exe" flag to nosetests when using this workaround. if [ "${UBUNTU_VERSION}" == "xenial" ]; then - echo "Applying workaround for stanley user permissions issue to /home/travis on Xenial" - chmod 777 -R /home/travis + echo "Applying workaround for stanley user permissions issue to /home/travis on Xenial" + chmod 777 -R /home/travis fi From 5e5796bcc729154cbd4c83164ed789d0badde1ff Mon Sep 17 00:00:00 2001 From: blag Date: Thu, 29 Aug 2019 15:00:18 -0600 Subject: [PATCH 2/2] Convert root Makefile to use invoke --- .travis.yml | 13 +- Makefile | 1012 ++----------------------- scripts/travis/prepare-integration.sh | 11 +- scripts/travis/setup-mistral.sh | 2 +- tasks/__init__.py | 407 ++++++++++ tasks/build.py | 69 ++ tasks/check.py | 232 ++++++ tasks/ci.py | 122 +++ tasks/clean.py | 57 ++ tasks/generate.py | 72 ++ tasks/git_tasks/__init__.py | 2 + tasks/git_tasks/submodule.py | 13 + tasks/lint.py | 104 +++ tasks/requirements/__init__.py | 80 ++ tasks/requirements/fixate.py | 176 +++++ tasks/requirements/install.py | 71 ++ tasks/test.py | 197 +++++ tasks/travis.py | 35 + tools/launchdev.sh | 4 +- 19 files changed, 1702 insertions(+), 977 deletions(-) create mode 100644 tasks/__init__.py create mode 100644 tasks/build.py create mode 100644 tasks/check.py create mode 100644 tasks/ci.py create mode 100644 tasks/clean.py create mode 100644 tasks/generate.py create mode 100644 tasks/git_tasks/__init__.py create mode 100644 tasks/git_tasks/submodule.py create mode 100644 tasks/lint.py create mode 100644 tasks/requirements/__init__.py create mode 100644 tasks/requirements/fixate.py create mode 100644 tasks/requirements/install.py create mode 100644 tasks/test.py create mode 100644 tasks/travis.py diff --git a/.travis.yml b/.travis.yml index 60617b9dc1..17b4875f12 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,10 +32,10 @@ matrix: # job which also includes "make requirements" and other steps # "make requirements" can take substantially lower if the cache is purged # and this would cause too many intermediate failures / false positives - - env: TASK=ci-unit CACHE_NAME=py2 COMMAND_THRESHOLD=700 + - env: TASK=ci-unit CACHE_NAME=py2 COMMAND_THRESHOLD=1400 python: 2.7 name: "Unit Tests (Python 2.7 MongoDB 3.4)" - #- env: TASK=ci-unit CACHE_NAME=py2 COMMAND_THRESHOLD=700 + #- env: TASK=ci-unit CACHE_NAME=py2 COMMAND_THRESHOLD=1400 #python: 2.7 #name: "Unit Tests (Python 2.7 MongoDB 3.6)" #addons: @@ -49,16 +49,16 @@ matrix: # - mongodb-org-server # - mongodb-org-shell # - git - - env: TASK=ci-integration CACHE_NAME=py2 COMMAND_THRESHOLD=700 + - env: TASK=ci-integration CACHE_NAME=py2 COMMAND_THRESHOLD=1400 python: 2.7 name: "Integration Tests (Python 2.7)" - - env: TASK="ci-checks 
ci-packs-tests" CACHE_NAME=py2 COMMAND_THRESHOLD=280 + - env: TASK="ci-checks ci-packs-tests" CACHE_NAME=py2 COMMAND_THRESHOLD=560 python: 2.7 name: "Lint Checks, Packs Tests (Python 2.7)" - - env: TASK="compilepy3 ci-py3-unit" CACHE_NAME=py3 COMMAND_THRESHOLD=680 + - env: TASK="compilepy3 ci-py3-unit" CACHE_NAME=py3 COMMAND_THRESHOLD=1360 python: 3.6 name: "Unit Tests, Pack Tests (Python 3.6)" - - env: TASK="ci-py3-integration" CACHE_NAME=py3 COMMAND_THRESHOLD=310 + - env: TASK="ci-py3-integration" CACHE_NAME=py3 COMMAND_THRESHOLD=620 python: 3.6 name: "Integration Tests (Python 3.6)" @@ -97,6 +97,7 @@ cache: before_install: - pip install --upgrade "pip>=19.0,<20.0" - sudo pip install --upgrade "virtualenv==16.6.0" + - sudo pip install --upgrade invoke install: - ./scripts/travis/install-requirements.sh diff --git a/Makefile b/Makefile index d1f28a3aed..61c7b0c70c 100644 --- a/Makefile +++ b/Makefile @@ -1,430 +1,32 @@ ROOT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST)))) -SHELL := /bin/bash -TOX_DIR := .tox OS := $(shell uname) # We separate the OSX X and Linux virtualenvs so we can run in a Docker # container (st2devbox) while doing things on our host Mac machine ifeq ($(OS),Darwin) VIRTUALENV_DIR ?= virtualenv-osx - VIRTUALENV_ST2CLIENT_DIR ?= virtualenv-st2client-osx - VIRTUALENV_COMPONENTS_DIR ?= virtualenv-components-osx else VIRTUALENV_DIR ?= virtualenv - VIRTUALENV_ST2CLIENT_DIR ?= virtualenv-st2client - VIRTUALENV_COMPONENTS_DIR ?= virtualenv-components endif PYTHON_VERSION ?= python2.7 -BINARIES := bin - -# All components are prefixed by st2 and not .egg-info. COMPONENTS := $(shell ls -a | grep ^st2 | grep -v .egg-info) COMPONENTS_RUNNERS := $(wildcard contrib/runners/*) -COMPONENTS_WITHOUT_ST2TESTS := $(shell ls -a | grep ^st2 | grep -v .egg-info | grep -v st2tests | grep -v st2exporter) - COMPONENTS_WITH_RUNNERS := $(COMPONENTS) $(COMPONENTS_RUNNERS) -COMPONENTS_WITH_RUNNERS_WITHOUT_MISTRAL_RUNNER := $(foreach component,$(filter-out contrib/runners/mistral_v2,$(COMPONENTS_WITH_RUNNERS)),$(component)) - -COMPONENTS_TEST_DIRS := $(wildcard st2*/tests) $(wildcard contrib/runners/*/tests) - -# Components that implement a component-controlled test-runner. These components provide an -# in-component Makefile. (Temporary fix until I can generalize the pecan unittest setup. -mar) -# Note: We also want to ignore egg-info dir created during build -COMPONENT_SPECIFIC_TESTS := st2tests st2client.egg-info -# nasty hack to get a space into a variable -colon := : -comma := , -dot := . 
-slash := / space_char := space_char += COMPONENT_PYTHONPATH = $(subst $(space_char),:,$(realpath $(COMPONENTS_WITH_RUNNERS))) -COMPONENTS_TEST := $(foreach component,$(filter-out $(COMPONENT_SPECIFIC_TESTS),$(COMPONENTS_WITH_RUNNERS)),$(component)) -COMPONENTS_TEST_WITHOUT_MISTRAL_RUNNER := $(foreach component,$(filter-out $(COMPONENT_SPECIFIC_TESTS),$(COMPONENTS_WITH_RUNNERS_WITHOUT_MISTRAL_RUNNER)),$(component)) -COMPONENTS_TEST_COMMA := $(subst $(slash),$(dot),$(subst $(space_char),$(comma),$(COMPONENTS_TEST))) -COMPONENTS_TEST_MODULES := $(subst $(slash),$(dot),$(COMPONENTS_TEST_DIRS)) -COMPONENTS_TEST_MODULES_COMMA := $(subst $(space_char),$(comma),$(COMPONENTS_TEST_MODULES)) - -COVERAGE_GLOBS := .coverage.unit.* .coverage.integration.* .coverage.mistral.* -COVERAGE_GLOBS_QUOTED := $(foreach glob,$(COVERAGE_GLOBS),'$(glob)') - -REQUIREMENTS := test-requirements.txt requirements.txt -PIP_OPTIONS := $(ST2_PIP_OPTIONS) - -ifndef PYLINT_CONCURRENCY - PYLINT_CONCURRENCY := 1 -endif - -NOSE_OPTS := --rednose --immediate --with-parallel - -ifndef NOSE_TIME - NOSE_TIME := yes -endif - -ifeq ($(NOSE_TIME),yes) - NOSE_OPTS := --rednose --immediate --with-parallel --with-timer - NOSE_WITH_TIMER := 1 -endif - -ifndef PIP_OPTIONS - PIP_OPTIONS := -endif - -# NOTE: We only run coverage on master and version branches and not on pull requests since -# it has a big performance overhead and is very slow. -ifeq ($(ENABLE_COVERAGE),yes) - NOSE_COVERAGE_FLAGS := --with-coverage --cover-branches --cover-erase - NOSE_COVERAGE_PACKAGES := --cover-package=$(COMPONENTS_TEST_COMMA) -else - INCLUDE_TESTS_IN_COVERAGE := -endif - -# If we aren't running test coverage, don't try to include tests in coverage -# results -ifdef INCLUDE_TESTS_IN_COVERAGE - NOSE_COVERAGE_FLAGS += --cover-tests - NOSE_COVERAGE_PACKAGES := $(NOSE_COVERAGE_PACKAGES),$(COMPONENTS_TEST_MODULES_COMMA) -endif .PHONY: all -all: requirements configgen check tests - -.PHONY: .coverage_globs -.coverage_globs: - @for coverage_result in $$( \ - for coverage_glob in $(COVERAGE_GLOBS_QUOTED); do \ - compgen -G $${coverage_glob}; \ - done; \ - ); do \ - echo $${coverage_result}; \ - done - -# Target for debugging Makefile variable assembly -.PHONY: play -play: - @echo COVERAGE_GLOBS=$(COVERAGE_GLOBS_QUOTED) - @echo - @echo COMPONENTS=$(COMPONENTS) - @echo - @echo COMPONENTS_WITH_RUNNERS=$(COMPONENTS_WITH_RUNNERS) - @echo - @echo COMPONENTS_WITH_RUNNERS_WITHOUT_MISTRAL_RUNNER=$(COMPONENTS_WITH_RUNNERS_WITHOUT_MISTRAL_RUNNER) - @echo - @echo COMPONENTS_TEST=$(COMPONENTS_TEST) - @echo - @echo COMPONENTS_TEST_COMMA=$(COMPONENTS_TEST_COMMA) - @echo - @echo COMPONENTS_TEST_DIRS=$(COMPONENTS_TEST_DIRS) - @echo - @echo COMPONENTS_TEST_MODULES=$(COMPONENTS_TEST_MODULES) - @echo - @echo COMPONENTS_TEST_MODULES_COMMA=$(COMPONENTS_TEST_MODULES_COMMA) - @echo - @echo COMPONENTS_TEST_WITHOUT_MISTRAL_RUNNER=$(COMPONENTS_TEST_WITHOUT_MISTRAL_RUNNER) - @echo - @echo COMPONENT_PYTHONPATH=$(COMPONENT_PYTHONPATH) - @echo - @echo TRAVIS_PULL_REQUEST=$(TRAVIS_PULL_REQUEST) - @echo - @echo TRAVIS_EVENT_TYPE=$(TRAVIS_EVENT_TYPE) - @echo - @echo NOSE_OPTS=$(NOSE_OPTS) - @echo - @echo ENABLE_COVERAGE=$(ENABLE_COVERAGE) - @echo - @echo NOSE_COVERAGE_FLAGS=$(NOSE_COVERAGE_FLAGS) - @echo - @echo NOSE_COVERAGE_PACKAGES=$(NOSE_COVERAGE_PACKAGES) - @echo - @echo INCLUDE_TESTS_IN_COVERAGE=$(INCLUDE_TESTS_IN_COVERAGE) - @echo - -.PHONY: check -check: check-requirements flake8 checklogs - -# NOTE: We pass --no-deps to the script so we don't install all the -# package dependencies which 
are already installed as part of "requirements" -# make targets. This speeds up the build -.PHONY: install-runners -install-runners: - - @echo "" - @echo "================== INSTALL RUNNERS ====================" - @echo "" - @for component in $(COMPONENTS_RUNNERS); do \ - echo "==========================================================="; \ - echo "Installing runner:" $$component; \ - echo "==========================================================="; \ - (. $(VIRTUALENV_DIR)/bin/activate; cd $$component; python setup.py develop --no-deps); \ - done - -.PHONY: check-requirements -check-requirements: requirements - @echo - @echo "============== CHECKING REQUIREMENTS ==============" - @echo - # Update requirements and then make sure no files were changed - git status -- *requirements.txt */*requirements.txt | grep -q "nothing to commit" - @echo "All requirements files up-to-date!" - -.PHONY: check-python-packages -check-python-packages: - # Make target which verifies all the components Python packages are valid - @echo "" - @echo "================== CHECK PYTHON PACKAGES ====================" - @echo "" - - test -f $(VIRTUALENV_COMPONENTS_DIR)/bin/activate || virtualenv --python=$(PYTHON_VERSION) --no-site-packages $(VIRTUALENV_COMPONENTS_DIR) --no-download - @for component in $(COMPONENTS_WITHOUT_ST2TESTS); do \ - echo "==========================================================="; \ - echo "Checking component:" $$component; \ - echo "==========================================================="; \ - (set -e; cd $$component; ../$(VIRTUALENV_COMPONENTS_DIR)/bin/python setup.py --version) || exit 1; \ - done - -.PHONY: check-python-packages-nightly -check-python-packages-nightly: - # NOTE: This is subset of check-python-packages target. - # We run more extensive and slower tests as part of the nightly build to speed up PR builds - @echo "" - @echo "================== CHECK PYTHON PACKAGES ====================" - @echo "" - - test -f $(VIRTUALENV_COMPONENTS_DIR)/bin/activate || virtualenv --python=$(PYTHON_VERSION) --no-site-packages $(VIRTUALENV_COMPONENTS_DIR) --no-download - @for component in $(COMPONENTS_WITHOUT_ST2TESTS); do \ - echo "==========================================================="; \ - echo "Checking component:" $$component; \ - echo "==========================================================="; \ - (set -e; cd $$component; ../$(VIRTUALENV_COMPONENTS_DIR)/bin/python setup.py --version) || exit 1; \ - (set -e; cd $$component; ../$(VIRTUALENV_COMPONENTS_DIR)/bin/python setup.py sdist bdist_wheel) || exit 1; \ - (set -e; cd $$component; ../$(VIRTUALENV_COMPONENTS_DIR)/bin/python setup.py develop --no-deps) || exit 1; \ - ($(VIRTUALENV_COMPONENTS_DIR)/bin/python -c "import $$component") || exit 1; \ - (set -e; cd $$component; rm -rf dist/; rm -rf $$component.egg-info) || exit 1; \ - done - -.PHONY: ci-checks-nightly -ci-checks-nightly: check-python-packages-nightly - -.PHONY: checklogs -checklogs: - @echo - @echo "================== LOG WATCHER ====================" - @echo - . 
$(VIRTUALENV_DIR)/bin/activate; ./tools/log_watcher.py 10 - -.PHONY: pylint -pylint: requirements .pylint - -.PHONY: configgen -configgen: requirements .configgen - -.PHONY: .configgen -.configgen: - @echo - @echo "================== config gen ====================" - @echo - echo "# Sample config which contains all the available options which the corresponding descriptions" > conf/st2.conf.sample; - echo "# Note: This file is automatically generated using tools/config_gen.py - DO NOT UPDATE MANUALLY" >> conf/st2.conf.sample - echo "" >> conf/st2.conf.sample - . $(VIRTUALENV_DIR)/bin/activate; python ./tools/config_gen.py >> conf/st2.conf.sample; - -.PHONY: .pylint -.pylint: - @echo - @echo "================== pylint ====================" - @echo - # Lint st2 components - @for component in $(COMPONENTS); do\ - echo "==========================================================="; \ - echo "Running pylint on" $$component; \ - echo "==========================================================="; \ - . $(VIRTUALENV_DIR)/bin/activate ; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models --load-plugins=pylint_plugins.db_models $$component/$$component || exit 1; \ - done - # Lint runner modules and packages - @for component in $(COMPONENTS_RUNNERS); do\ - echo "==========================================================="; \ - echo "Running pylint on" $$component; \ - echo "==========================================================="; \ - . $(VIRTUALENV_DIR)/bin/activate ; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models --load-plugins=pylint_plugins.db_models $$component/*.py || exit 1; \ - done - # Lint Python pack management actions - . $(VIRTUALENV_DIR)/bin/activate; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models contrib/packs/actions/*.py || exit 1; - . $(VIRTUALENV_DIR)/bin/activate; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models contrib/packs/actions/*/*.py || exit 1; - # Lint other packs - . $(VIRTUALENV_DIR)/bin/activate; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models contrib/linux/*/*.py || exit 1; - . $(VIRTUALENV_DIR)/bin/activate; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models contrib/chatops/*/*.py || exit 1; - # Lint Python scripts - . $(VIRTUALENV_DIR)/bin/activate; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models scripts/*.py || exit 1; - . $(VIRTUALENV_DIR)/bin/activate; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models tools/*.py || exit 1; - . $(VIRTUALENV_DIR)/bin/activate; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc pylint_plugins/*.py || exit 1; - -.PHONY: lint-api-spec -lint-api-spec: requirements .lint-api-spec - -.PHONY: .lint-api-spec -.lint-api-spec: - @echo - @echo "================== Lint API spec ====================" - @echo - . 
$(VIRTUALENV_DIR)/bin/activate; st2common/bin/st2-validate-api-spec --config-file conf/st2.dev.conf - -.PHONY: generate-api-spec -generate-api-spec: requirements .generate-api-spec - -.PHONY: .generate-api-spec -.generate-api-spec: .lint-api-spec - @echo - @echo "================== Generate openapi.yaml file ====================" - @echo - echo "# NOTE: This file is auto-generated - DO NOT EDIT MANUALLY" > st2common/st2common/openapi.yaml - echo "# Edit st2common/st2common/openapi.yaml.j2 and then run" >> st2common/st2common/openapi.yaml - echo "# make .generate-api-spec" >> st2common/st2common/openapi.yaml - echo "# to generate the final spec file" >> st2common/st2common/openapi.yaml - . $(VIRTUALENV_DIR)/bin/activate; st2common/bin/st2-generate-api-spec --config-file conf/st2.dev.conf >> st2common/st2common/openapi.yaml - -.PHONY: circle-lint-api-spec -circle-lint-api-spec: - @echo - @echo "================== Lint API spec ====================" - @echo - . $(VIRTUALENV_DIR)/bin/activate; st2common/bin/st2-validate-api-spec --config-file conf/st2.dev.conf || echo "Open API spec lint failed." - -.PHONY: flake8 -flake8: requirements .flake8 - -.PHONY: .flake8 -.flake8: - @echo - @echo "==================== flake ====================" - @echo - . $(VIRTUALENV_DIR)/bin/activate; flake8 --config ./lint-configs/python/.flake8 $(COMPONENTS) - . $(VIRTUALENV_DIR)/bin/activate; flake8 --config ./lint-configs/python/.flake8 $(COMPONENTS_RUNNERS) - . $(VIRTUALENV_DIR)/bin/activate; flake8 --config ./lint-configs/python/.flake8 contrib/packs/actions/ - . $(VIRTUALENV_DIR)/bin/activate; flake8 --config ./lint-configs/python/.flake8 contrib/linux - . $(VIRTUALENV_DIR)/bin/activate; flake8 --config ./lint-configs/python/.flake8 contrib/chatops/ - . $(VIRTUALENV_DIR)/bin/activate; flake8 --config ./lint-configs/python/.flake8 scripts/ - . $(VIRTUALENV_DIR)/bin/activate; flake8 --config ./lint-configs/python/.flake8 tools/ - . $(VIRTUALENV_DIR)/bin/activate; flake8 --config ./lint-configs/python/.flake8 pylint_plugins/ - -# Make task which verifies st2client installs and works fine -.PHONY: .st2client-install-check -.st2client-install-check: - @echo - @echo "==================== st2client install check ====================" - @echo - test -f $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate || virtualenv --python=$(PYTHON_VERSION) --no-site-packages $(VIRTUALENV_ST2CLIENT_DIR) --no-download - - # Setup PYTHONPATH in bash activate script... - # Delete existing entries (if any) - sed -i '/_OLD_PYTHONPATHp/d' $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate - sed -i '/PYTHONPATH=/d' $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate - sed -i '/export PYTHONPATH/d' $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate - - echo '_OLD_PYTHONPATH=$$PYTHONPATH' >> $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate - echo 'PYTHONPATH=${ROOT_DIR}:$(COMPONENT_PYTHONPATH)' >> $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate - echo 'export PYTHONPATH' >> $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate - touch $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate - chmod +x $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate - - $(VIRTUALENV_ST2CLIENT_DIR)/bin/pip install --upgrade "pip>=9.0,<9.1" - # NOTE We need to upgrade setuptools to avoid bug with dependency resolving in old versions - $(VIRTUALENV_ST2CLIENT_DIR)/bin/pip install --upgrade "setuptools==41.0.1" - $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate; cd st2client ; ../$(VIRTUALENV_ST2CLIENT_DIR)/bin/python setup.py install ; cd .. 
- $(VIRTUALENV_ST2CLIENT_DIR)/bin/st2 --version - $(VIRTUALENV_ST2CLIENT_DIR)/bin/python -c "import st2client" - -.PHONY: bandit -bandit: requirements .bandit - -.PHONY: .bandit -.bandit: - @echo - @echo "==================== bandit ====================" - @echo - . $(VIRTUALENV_DIR)/bin/activate; bandit -r $(COMPONENTS_WITH_RUNNERS) -lll -x build,dist - -.PHONY: lint -lint: requirements .lint - -.PHONY: .lint -.lint: .generate-api-spec .flake8 .pylint .st2client-dependencies-check .st2common-circular-dependencies-check .rst-check .st2client-install-check +all: invoke + @$(VIRTUALENV_DIR)/bin/invoke all .PHONY: clean -clean: .cleanpycs - -.PHONY: compile -compile: - @echo "======================= compile ========================" - @echo "------- Compile all .py files (syntax check test - Python 2) ------" - @if python -c 'import compileall,re; compileall.compile_dir(".", rx=re.compile(r"/virtualenv|virtualenv-osx|virtualenv-py3|.tox|.git|.venv-st2devbox"), quiet=True)' | grep .; then exit 1; else exit 0; fi - -.PHONY: compilepy3 -compilepy3: - @echo "======================= compile ========================" - @echo "------- Compile all .py files (syntax check test - Python 3) ------" - @if python3 -c 'import compileall,re; compileall.compile_dir(".", rx=re.compile(r"/virtualenv|virtualenv-osx|virtualenv-py3|.tox|.git|.venv-st2devbox|./st2tests/st2tests/fixtures/packs/test"), quiet=True)' | grep .; then exit 1; else exit 0; fi - -.PHONY: .cleanpycs -.cleanpycs: +clean: @echo "Removing all .pyc files" - find $(COMPONENTS_WITH_RUNNERS) -name \*.pyc -type f -print0 | xargs -0 -I {} rm {} - -.PHONY: .st2client-dependencies-check -.st2client-dependencies-check: - @echo "Checking for st2common imports inside st2client" - find ${ROOT_DIR}/st2client/st2client/ -name \*.py -type f -print0 | xargs -0 cat | grep st2common ; test $$? -eq 1 - -.PHONY: .st2common-circular-dependencies-check -.st2common-circular-dependencies-check: - @echo "Checking st2common for circular dependencies" - find ${ROOT_DIR}/st2common/st2common/ -name \*.py -type f -print0 | xargs -0 cat | grep st2reactor ; test $$? -eq 1 - find ${ROOT_DIR}/st2common/st2common/ \( -name \*.py ! -name runnersregistrar\.py -name \*.py ! -name compat\.py | -name inquiry\.py \) -type f -print0 | xargs -0 cat | grep st2actions ; test $$? -eq 1 - find ${ROOT_DIR}/st2common/st2common/ -name \*.py -type f -print0 | xargs -0 cat | grep st2api ; test $$? -eq 1 - find ${ROOT_DIR}/st2common/st2common/ -name \*.py -type f -print0 | xargs -0 cat | grep st2auth ; test $$? -eq 1 - find ${ROOT_DIR}/st2common/st2common/ -name \*.py -type f -print0 | xargs -0 cat | grep st2debug; test $$? -eq 1 - find ${ROOT_DIR}/st2common/st2common/ \( -name \*.py ! -name router\.py -name \*.py \) -type f -print0 | xargs -0 cat | grep st2stream; test $$? -eq 1 - find ${ROOT_DIR}/st2common/st2common/ -name \*.py -type f -print0 | xargs -0 cat | grep st2exporter; test $$? 
-eq 1 - -.PHONY: .cleanmongodb -.cleanmongodb: - @echo "==================== cleanmongodb ====================" - @echo "----- Dropping all MongoDB databases -----" - @sudo pkill -9 mongod - @sudo rm -rf /var/lib/mongodb/* - @sudo chown -R mongodb:mongodb /var/lib/mongodb/ - @sudo service mongodb start - @sleep 15 - @mongo --eval "rs.initiate()" - @sleep 15 - -.PHONY: .cleanmysql -.cleanmysql: - @echo "==================== cleanmysql ====================" - @echo "----- Dropping all Mistral MYSQL databases -----" - @mysql -uroot -pStackStorm -e "DROP DATABASE IF EXISTS mistral" - @mysql -uroot -pStackStorm -e "CREATE DATABASE mistral" - @mysql -uroot -pStackStorm -e "GRANT ALL PRIVILEGES ON mistral.* TO 'mistral'@'127.0.0.1' IDENTIFIED BY 'StackStorm'" - @mysql -uroot -pStackStorm -e "FLUSH PRIVILEGES" - @/opt/openstack/mistral/.venv/bin/python /opt/openstack/mistral/tools/sync_db.py --config-file /etc/mistral/mistral.conf - -.PHONY: .cleanrabbitmq -.cleanrabbitmq: - @echo "==================== cleanrabbitmq ====================" - @echo "Deleting all RabbitMQ queue and exchanges" - @sudo rabbitmqctl stop_app - @sudo rabbitmqctl reset - @sudo rabbitmqctl start_app - -.PHONY: .cleancoverage -.cleancoverage: - @echo "==================== cleancoverage ====================" - @echo "Removing all coverage results directories" - @echo - rm -rf .coverage $(COVERAGE_GLOBS) \ - .coverage.unit .coverage.integration .coverage.mistral + find . -name \*.pyc -type f -print0 | xargs -0 -I {} rm {} .PHONY: distclean distclean: clean @@ -432,9 +34,23 @@ distclean: clean @echo "==================== distclean ====================" @echo rm -rf $(VIRTUALENV_DIR) + if [ -d virtualenv-st2client ]; then rm -rf virtualenv-st2client; fi + if [ -d virtualenv-components ]; then rm -rf virtualenv-components; fi + +# Optional virtualenv wrapper +ifneq ($(VIRTUALENV_DIR),virtualenv) +.PHONY: virtualenv +virtualenv: $(VIRTUALENV_DIR) +endif + +.PHONY: $(VIRTUALENV_DIR) + # Note: We always want to update virtualenv/bin/activate file to make sure + # PYTHONPATH is up to date and to avoid caching issues on Travis +$(VIRTUALENV_DIR): + # Note: We pass --no-download flag to make sure version of pip which we install (9.0.1) is used + # instead of latest version being downloaded from PyPi + test -f $(VIRTUALENV_DIR)/bin/activate || virtualenv --python=$(PYTHON_VERSION) --no-site-packages $(VIRTUALENV_DIR) --no-download -.PHONY: requirements -requirements: virtualenv .sdist-requirements install-runners @echo @echo "==================== requirements ====================" @echo @@ -443,75 +59,16 @@ requirements: virtualenv .sdist-requirements install-runners $(VIRTUALENV_DIR)/bin/pip install --upgrade "pip>=19.0,<20.0" $(VIRTUALENV_DIR)/bin/pip install --upgrade "virtualenv==16.6.0" # Required for packs.install in dev envs - # Generate all requirements to support current CI pipeline. 
- $(VIRTUALENV_DIR)/bin/python scripts/fixate-requirements.py --skip=virtualenv,virtualenv-osx -s st2*/in-requirements.txt contrib/runners/*/in-requirements.txt -f fixed-requirements.txt -o requirements.txt - - # Generate finall requirements.txt file for each component - @for component in $(COMPONENTS_WITH_RUNNERS); do\ - echo "==========================================================="; \ - echo "Generating requirements.txt for" $$component; \ - echo "==========================================================="; \ - $(VIRTUALENV_DIR)/bin/python scripts/fixate-requirements.py --skip=virtualenv,virtualenv-osx -s $$component/in-requirements.txt -f fixed-requirements.txt -o $$component/requirements.txt; \ - done - - # Fix for Travis CI race - $(VIRTUALENV_DIR)/bin/pip install "six==1.12.0" - - # Fix for Travis CI caching issue - if [[ "$(TRAVIS_EVENT_TYPE)" != "" ]]; then\ - $(VIRTUALENV_DIR)/bin/pip uninstall -y "pytz" || echo "not installed"; \ - $(VIRTUALENV_DIR)/bin/pip uninstall -y "python-dateutil" || echo "not installed"; \ - $(VIRTUALENV_DIR)/bin/pip uninstall -y "orquesta" || echo "not installed"; \ - fi - - # Install requirements - # - for req in $(REQUIREMENTS); do \ - echo "Installing $$req..." ; \ - $(VIRTUALENV_DIR)/bin/pip install $(PIP_OPTIONS) -r $$req ; \ - done - - # Install st2common package to load drivers defined in st2common setup.py - # NOTE: We pass --no-deps to the script so we don't install all the - # package dependencies which are already installed as part of "requirements" - # make targets. This speeds up the build - (cd st2common; ${ROOT_DIR}/$(VIRTUALENV_DIR)/bin/python setup.py develop --no-deps) - - # Note: We install prance here and not as part of any component - # requirements.txt because it has a conflict with our dependency (requires - # new version of requests) which we cant resolve at this moment - $(VIRTUALENV_DIR)/bin/pip install "prance==0.15.0" - - # Install st2common to register metrics drivers - # NOTE: We pass --no-deps to the script so we don't install all the - # package dependencies which are already installed as part of "requirements" - # make targets. This speeds up the build - (cd ${ROOT_DIR}/st2common; ${ROOT_DIR}/$(VIRTUALENV_DIR)/bin/python setup.py develop --no-deps) - - # Some of the tests rely on submodule so we need to make sure submodules are check out - git submodule update --recursive --remote - -.PHONY: virtualenv - # Note: We always want to update virtualenv/bin/activate file to make sure - # PYTHONPATH is up to date and to avoid caching issues on Travis -virtualenv: - @echo - @echo "==================== virtualenv ====================" - @echo - # Note: We pass --no-download flag to make sure version of pip which we install (9.0.1) is used - # instead of latest version being downloaded from PyPi - test -f $(VIRTUALENV_DIR)/bin/activate || virtualenv --python=$(PYTHON_VERSION) --no-site-packages $(VIRTUALENV_DIR) --no-download - # Setup PYTHONPATH in bash activate script... # Delete existing entries (if any) ifeq ($(OS),Darwin) echo 'Setting up virtualenv on $(OS)...' - sed -i '' '/_OLD_PYTHONPATHp/d' $(VIRTUALENV_DIR)/bin/activate + sed -i '' '/_OLD_PYTHONPATH/d' $(VIRTUALENV_DIR)/bin/activate sed -i '' '/PYTHONPATH=/d' $(VIRTUALENV_DIR)/bin/activate sed -i '' '/export PYTHONPATH/d' $(VIRTUALENV_DIR)/bin/activate else echo 'Setting up virtualenv on $(OS)...' 
- sed -i '/_OLD_PYTHONPATHp/d' $(VIRTUALENV_DIR)/bin/activate + sed -i '/_OLD_PYTHONPATH/d' $(VIRTUALENV_DIR)/bin/activate sed -i '/PYTHONPATH=/d' $(VIRTUALENV_DIR)/bin/activate sed -i '/export PYTHONPATH/d' $(VIRTUALENV_DIR)/bin/activate endif @@ -522,504 +79,27 @@ endif echo 'export PYTHONPATH' >> $(VIRTUALENV_DIR)/bin/activate touch $(VIRTUALENV_DIR)/bin/activate - # Setup PYTHONPATH in fish activate script... - #echo '' >> $(VIRTUALENV_DIR)/bin/activate.fish - #echo 'set -gx _OLD_PYTHONPATH $$PYTHONPATH' >> $(VIRTUALENV_DIR)/bin/activate.fish - #echo 'set -gx PYTHONPATH $$_OLD_PYTHONPATH $(COMPONENT_PYTHONPATH)' >> $(VIRTUALENV_DIR)/bin/activate.fish - #echo 'functions -c deactivate old_deactivate' >> $(VIRTUALENV_DIR)/bin/activate.fish - #echo 'function deactivate' >> $(VIRTUALENV_DIR)/bin/activate.fish - #echo ' if test -n $$_OLD_PYTHONPATH' >> $(VIRTUALENV_DIR)/bin/activate.fish - #echo ' set -gx PYTHONPATH $$_OLD_PYTHONPATH' >> $(VIRTUALENV_DIR)/bin/activate.fish - #echo ' set -e _OLD_PYTHONPATH' >> $(VIRTUALENV_DIR)/bin/activate.fish - #echo ' end' >> $(VIRTUALENV_DIR)/bin/activate.fish - #echo ' old_deactivate' >> $(VIRTUALENV_DIR)/bin/activate.fish - #echo ' functions -e old_deactivate' >> $(VIRTUALENV_DIR)/bin/activate.fish - #echo 'end' >> $(VIRTUALENV_DIR)/bin/activate.fish - #touch $(VIRTUALENV_DIR)/bin/activate.fish - -.PHONY: tests -tests: pytests - -.PHONY: pytests -pytests: compile requirements .flake8 .pylint .pytests-coverage - -.PHONY: .pytests -.pytests: compile .configgen .generate-api-spec .unit-tests clean - -.PHONY: .pytests-coverage -.pytests-coverage: .unit-tests-coverage-html clean - -.PHONY: unit-tests -unit-tests: requirements .unit-tests - -.PHONY: .unit-tests -.unit-tests: - @echo - @echo "==================== tests ====================" - @echo - @echo "----- Dropping st2-test db -----" - @mongo st2-test --eval "db.dropDatabase();" - @for component in $(COMPONENTS_TEST); do\ - echo "==========================================================="; \ - echo "Running tests in" $$component; \ - echo "-----------------------------------------------------------"; \ - . $(VIRTUALENV_DIR)/bin/activate; \ - nosetests $(NOSE_OPTS) -s -v \ - $$component/tests/unit || exit 1; \ - echo "-----------------------------------------------------------"; \ - echo "Done running tests in" $$component; \ - echo "==========================================================="; \ - done - -.PHONY: .run-unit-tests-coverage -ifdef INCLUDE_TESTS_IN_COVERAGE -.run-unit-tests-coverage: NOSE_COVERAGE_PACKAGES := $(NOSE_COVERAGE_PACKAGES),tests.unit -endif -.run-unit-tests-coverage: - @echo - @echo "==================== unit tests with coverage ====================" - @echo - @echo "----- Dropping st2-test db -----" - @mongo st2-test --eval "db.dropDatabase();" - for component in $(COMPONENTS_TEST_WITHOUT_MISTRAL_RUNNER); do\ - echo "==========================================================="; \ - echo "Running tests in" $$component; \ - echo "-----------------------------------------------------------"; \ - . 
$(VIRTUALENV_DIR)/bin/activate; \ - COVERAGE_FILE=.coverage.unit.$$(echo $$component | tr '/' '.') \ - nosetests $(NOSE_OPTS) -s -v $(NOSE_COVERAGE_FLAGS) \ - $(NOSE_COVERAGE_PACKAGES) \ - $$component/tests/unit || exit 1; \ - echo "-----------------------------------------------------------"; \ - echo "Done running tests in" $$component; \ - echo "==========================================================="; \ - done - -.PHONY: .combine-unit-tests-coverage -.combine-unit-tests-coverage: .run-unit-tests-coverage - @if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \ - . $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.unit \ - coverage combine .coverage.unit.*; \ - fi - -.coverage.unit: - @if compgen -G '.coverage.unit.*'; then \ - for coverage_result in $$(compgen -G '.coverage.unit.*'); do \ - echo "Combining data from $${coverage_result}"; \ - . $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.unit \ - coverage combine $${coverage_result}; \ - done; \ - else \ - echo "Running unit tests"; \ - make .combine-unit-tests-coverage; \ - fi - -.PHONY: .report-unit-tests-coverage -.report-unit-tests-coverage: .coverage.unit - @if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \ - . $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.unit \ - coverage report; \ - fi - -.PHONY: .unit-tests-coverage-html -.unit-tests-coverage-html: .coverage.unit - @if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \ - . $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.unit \ - coverage html; \ - fi - -.PHONY: itests -itests: requirements .itests - -.PHONY: .itests -.itests: - @echo - @echo "==================== integration tests ====================" - @echo - @echo "----- Dropping st2-test db -----" - @mongo st2-test --eval "db.dropDatabase();" - @for component in $(COMPONENTS_TEST); do\ - echo "==========================================================="; \ - echo "Running tests in" $$component; \ - echo "-----------------------------------------------------------"; \ - . $(VIRTUALENV_DIR)/bin/activate; \ - nosetests $(NOSE_OPTS) -s -v \ - $$component/tests/integration || exit 1; \ - echo "-----------------------------------------------------------"; \ - echo "Done running tests in" $$component; \ - echo "==========================================================="; \ - done - -.PHONY: .run-integration-tests-coverage -ifdef INCLUDE_TESTS_IN_COVERAGE -.run-integration-tests-coverage: NOSE_COVERAGE_PACKAGES := $(NOSE_COVERAGE_PACKAGES),tests.integration -endif -.run-integration-tests-coverage: - @echo - @echo "================ integration tests with coverage ================" - @echo - @echo "----- Dropping st2-test db -----" - @mongo st2-test --eval "db.dropDatabase();" - @for component in $(COMPONENTS_TEST); do\ - echo "==========================================================="; \ - echo "Running tests in" $$component; \ - echo "-----------------------------------------------------------"; \ - . $(VIRTUALENV_DIR)/bin/activate; \ - COVERAGE_FILE=.coverage.integration.$$(echo $$component | tr '/' '.') \ - nosetests $(NOSE_OPTS) -s -v --exe $(NOSE_COVERAGE_FLAGS) \ - $(NOSE_COVERAGE_PACKAGES) \ - $$component/tests/integration || exit 1; \ - echo "-----------------------------------------------------------"; \ - echo "Done running tests in" $$component; \ - echo "==========================================================="; \ - done - @echo - @echo "============== runners integration tests with coverage ==============" - @echo - @echo "The tests assume st2 is running on 127.0.0.1." 
- @for component in $(COMPONENTS_RUNNERS); do\ - echo "==========================================================="; \ - echo "Running tests in" $$component; \ - echo "==========================================================="; \ - . $(VIRTUALENV_DIR)/bin/activate; \ - COVERAGE_FILE=.coverage.integration.$$(echo $$component | tr '/' '.') \ - nosetests $(NOSE_OPTS) -s -v \ - $(NOSE_COVERAGE_FLAGS) $(NOSE_COVERAGE_PACKAGES) $$component/tests/integration || exit 1; \ - done - @echo - @echo "==================== Orquesta integration tests with coverage (HTML reports) ====================" - @echo "The tests assume st2 is running on 127.0.0.1." - @echo - . $(VIRTUALENV_DIR)/bin/activate; \ - COVERAGE_FILE=.coverage.integration.orquesta \ - nosetests $(NOSE_OPTS) -s -v \ - $(NOSE_COVERAGE_FLAGS) $(NOSE_COVERAGE_PACKAGES) st2tests/integration/orquesta || exit 1; \ - - -.PHONY: .combine-integration-tests-coverage -.combine-integration-tests-coverage: .run-integration-tests-coverage - @if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \ - . $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.integration \ - coverage combine .coverage.integration.*; \ - fi - -.coverage.integration: - @if compgen -G '.coverage.integration.*'; then \ - for coverage_result in $$(compgen -G '.coverage.integration.*'); do \ - echo "Combining data from $${coverage_result}"; \ - . $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.integration \ - coverage combine $${coverage_result}; \ - done; \ - else \ - echo "Running integration tests"; \ - make .combine-integration-tests-coverage; \ - fi - -.PHONY: .report-integration-tests-coverage -.report-integration-tests-coverage: .coverage.integration - @if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \ - . $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.integration \ - coverage report; \ - fi - -.PHONY: .integration-tests-coverage-html -.integration-tests-coverage-html: .coverage.integration - @if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \ - . $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.integration \ - coverage html; \ - fi - -.PHONY: .itests-coverage-html -.itests-coverage-html: .integration-tests-coverage-html - -.PHONY: mistral-itests -mistral-itests: requirements .mistral-itests - -.PHONY: .mistral-itests -.mistral-itests: - @echo - @echo "==================== MISTRAL integration tests ====================" - @echo "The tests assume both st2 and mistral are running on 127.0.0.1." - @echo - . $(VIRTUALENV_DIR)/bin/activate; nosetests $(NOSE_OPTS) -s -v st2tests/integration/mistral || exit 1; - -.PHONY: .run-mistral-itests-coverage -ifdef INCLUDE_TESTS_IN_COVERAGE -.run-mistral-itests-coverage: NOSE_COVERAGE_PACKAGES := $(NOSE_COVERAGE_PACKAGES),st2tests.mistral.integration -endif -.run-mistral-itests-coverage: - @echo - @echo "==================== MISTRAL integration tests with coverage ====================" - @echo "The tests assume both st2 and mistral are running on 127.0.0.1." - @echo - . $(VIRTUALENV_DIR)/bin/activate; \ - COVERAGE_FILE=.coverage.mistral.integration \ - nosetests $(NOSE_OPTS) -s -v $(NOSE_COVERAGE_FLAGS) \ - $(NOSE_COVERAGE_PACKAGES) \ - st2tests/integration/mistral || exit 1; - -.coverage.mistral.integration: - if [ ! -e .coverage.mistral.integration ]; then \ - make .run-mistral-itests-coverage; \ - fi - -.PHONY: .mistral-itests-coverage-html -.mistral-itests-coverage-html: .coverage.mistral.integration - . 
$(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.mistral.integration coverage html - -.PHONY: .coverage-combine -.coverage-combine: .run-unit-tests-coverage .run-integration-tests-coverage .run-mistral-itests-coverage - . $(VIRTUALENV_DIR)/bin/activate; coverage combine $(COVERAGE_GLOBS) - -# This is a real target, but we need to do our own make trickery in case some -# but not all of the prerequisites are available -.coverage: - @NUM_COVERAGE_RESULTS=0; \ - for coverage_result in $$( \ - for coverage_glob in $(COVERAGE_GLOBS_QUOTED); do \ - compgen -G $${coverage_glob}; \ - done; \ - ); do \ - NUM_COVERAGE_RESULTS=$$(( NUM_COVERAGE_RESULTS+1 )); \ - echo "Combining $${coverage_result}: $$NUM_COVERAGE_RESULTS"; \ - . $(VIRTUALENV_DIR)/bin/activate; coverage combine $${coverage_result}; \ - done; \ - if [ $${NUM_COVERAGE_RESULTS} -eq 0 ]; then \ - make .coverage-combine; \ - fi - -# @for coverage_result in $(COVERAGE_GLOBS); do \ -# [ -e $${coverage_result} ] || echo "$${coverage_result} does not exist." && continue; \ -# echo "Combining data from $${coverage_result}"; \ -# . $(VIRTUALENV_DIR)/bin/activate; coverage combine $${coverage_result}; \ -# done || \ -# (echo "Running .coverage-combine"; make .coverage-combine) - -.PHONY: .coverage-report -.coverage-report: .coverage - . $(VIRTUALENV_DIR)/bin/activate; coverage report - -.PHONY: .coverage-html -.coverage-html: .coverage - . $(VIRTUALENV_DIR)/bin/activate; coverage html - -.PHONY: orquesta-itests -orquesta-itests: requirements .orquesta-itests - -.PHONY: .orquesta-itests -.orquesta-itests: - @echo - @echo "==================== Orquesta integration tests ====================" - @echo "The tests assume st2 is running on 127.0.0.1." - @echo - . $(VIRTUALENV_DIR)/bin/activate; nosetests $(NOSE_OPTS) -s -v st2tests/integration/orquesta || exit 1; - -.PHONY: .orquesta-itests-coverage-html -.orquesta-itests-coverage-html: - @echo - @echo "==================== Orquesta integration tests with coverage (HTML reports) ====================" - @echo "The tests assume st2 is running on 127.0.0.1." - @echo - . $(VIRTUALENV_DIR)/bin/activate; nosetests $(NOSE_OPTS) -s -v --with-coverage \ - --cover-inclusive --cover-html st2tests/integration/orquesta || exit 1; - -.PHONY: packs-tests -packs-tests: requirements .packs-tests - -.PHONY: .packs-tests -.packs-tests: - @echo - @echo "==================== packs-tests ====================" - @echo - # Install st2common to register metrics drivers - (cd ${ROOT_DIR}/st2common; ${ROOT_DIR}/$(VIRTUALENV_DIR)/bin/python setup.py develop --no-deps) - . $(VIRTUALENV_DIR)/bin/activate; find ${ROOT_DIR}/contrib/* -maxdepth 0 -type d -print0 | xargs -0 -I FILENAME ./st2common/bin/st2-run-pack-tests -c -t -x -p FILENAME - - -.PHONY: runners-tests -runners-tests: requirements .runners-tests - -.PHONY: .runners-tests -.runners-tests: - @echo - @echo "==================== runners-tests ====================" - @echo - @echo "----- Dropping st2-test db -----" - @mongo st2-test --eval "db.dropDatabase();" - @for component in $(COMPONENTS_RUNNERS); do\ - echo "==========================================================="; \ - echo "Running tests in" $$component; \ - echo "==========================================================="; \ - . 
$(VIRTUALENV_DIR)/bin/activate; nosetests $(NOSE_OPTS) -s -v $$component/tests/unit || exit 1; \ - done - -.PHONY: runners-itests -runners-itests: requirements .runners-itests - -.PHONY: .runners-itests -.runners-itests: - @echo - @echo "==================== runners-itests ====================" - @echo - @echo "----- Dropping st2-test db -----" - @for component in $(COMPONENTS_RUNNERS); do\ - echo "==========================================================="; \ - echo "Running tests in" $$component; \ - echo "==========================================================="; \ - . $(VIRTUALENV_DIR)/bin/activate; nosetests $(NOSE_OPTS) -s -v $$component/tests/integration || exit 1; \ - done - -.PHONY: .runners-itests-coverage-html -.runners-itests-coverage-html: - @echo - @echo "============== runners-itests-coverage-html ==============" - @echo - @echo "The tests assume st2 is running on 127.0.0.1." - @for component in $(COMPONENTS_RUNNERS); do\ - echo "==========================================================="; \ - echo "Running tests in" $$component; \ - echo "==========================================================="; \ - . $(VIRTUALENV_DIR)/bin/activate; nosetests $(NOSE_OPTS) -s -v --with-coverage \ - --cover-inclusive --cover-html $$component/tests/integration || exit 1; \ - done - -.PHONY: cli -cli: - @echo - @echo "=================== Building st2 client ===================" - @echo - pushd $(CURDIR) && cd st2client && ((python setup.py develop || printf "\n\n!!! ERROR: BUILD FAILED !!!\n") || popd) - -.PHONY: rpms -rpms: - @echo - @echo "==================== rpm ====================" - @echo - rm -Rf ~/rpmbuild - $(foreach COM,$(COMPONENTS), pushd $(COM); make rpm; popd;) - pushd st2client && make rpm && popd - -rhel-rpms: - @echo - @echo "==================== rpm ====================" - @echo - rm -Rf ~/rpmbuild - $(foreach COM,$(COMPONENTS), pushd $(COM); make rhel-rpm; popd;) - pushd st2client && make rhel-rpm && popd - -.PHONY: debs -debs: - @echo - @echo "==================== deb ====================" - @echo - rm -Rf ~/debbuild - $(foreach COM,$(COMPONENTS), pushd $(COM); make deb; popd;) - pushd st2client && make deb && popd - -# >>>> -.PHONY: .sdist-requirements -.sdist-requirements: - # Copy over shared dist utils module which is needed by setup.py - @for component in $(COMPONENTS_WITH_RUNNERS); do\ - cp -f ./scripts/dist_utils.py $$component/dist_utils.py;\ - scripts/write-headers.sh $$component/dist_utils.py || break;\ - done - - # Copy over CHANGELOG.RST, CONTRIBUTING.RST and LICENSE file to each component directory - #@for component in $(COMPONENTS_TEST); do\ - # test -s $$component/README.rst || cp -f README.rst $$component/; \ - # cp -f CONTRIBUTING.rst $$component/; \ - # cp -f LICENSE $$component/; \ - #done - - -.PHONY: ci -ci: ci-checks ci-unit ci-integration ci-mistral ci-packs-tests - -.PHONY: ci-checks -ci-checks: compile .generated-files-check .pylint .flake8 check-requirements .st2client-dependencies-check .st2common-circular-dependencies-check circle-lint-api-spec .rst-check .st2client-install-check check-python-packages - -.PHONY: ci-py3-unit -ci-py3-unit: - @echo - @echo "==================== ci-py3-unit ====================" - @echo - NOSE_WITH_TIMER=$(NOSE_WITH_TIMER) tox -e py36-unit -vv - NOSE_WITH_TIMER=$(NOSE_WITH_TIMER) tox -e py36-packs -vv - -.PHONY: ci-py3-unit-nightly -ci-py3-unit-nightly: - @echo - @echo "==================== ci-py3-unit ====================" - @echo - NOSE_WITH_TIMER=$(NOSE_WITH_TIMER) tox -e py36-unit-nightly -vv - -.PHONY: 
ci-py3-integration -ci-py3-integration: requirements .ci-prepare-integration .ci-py3-integration - -.PHONY: .ci-py3-integration -.ci-py3-integration: - @echo - @echo "==================== ci-py3-integration ====================" - @echo - NOSE_WITH_TIMER=$(NOSE_WITH_TIMER) tox -e py36-integration -vv - -.PHONY: .rst-check -.rst-check: - @echo - @echo "==================== rst-check ====================" - @echo - . $(VIRTUALENV_DIR)/bin/activate; rstcheck --report warning CHANGELOG.rst - -.PHONY: .generated-files-check -.generated-files-check: - # Verify that all the files which are automatically generated have indeed been re-generated and - # committed - @echo "==================== generated-files-check ====================" - - # 1. Sample config - conf/st2.conf.sample - cp conf/st2.conf.sample /tmp/st2.conf.sample.upstream - make .configgen - diff conf/st2.conf.sample /tmp/st2.conf.sample.upstream || (echo "conf/st2.conf.sample hasn't been re-generated and committed. Please run \"make configgen\" and include and commit the generated file." && exit 1) - # 2. OpenAPI definition file - st2common/st2common/openapi.yaml (generated from - # st2common/st2common/openapi.yaml.j2) - cp st2common/st2common/openapi.yaml /tmp/openapi.yaml.upstream - make .generate-api-spec - diff st2common/st2common/openapi.yaml /tmp/openapi.yaml.upstream || (echo "st2common/st2common/openapi.yaml hasn't been re-generated and committed. Please run \"make generate-api-spec\" and include and commit the generated file." && exit 1) - - @echo "All automatically generated files are up to date." - -.PHONY: ci-unit -ci-unit: .unit-tests-coverage-html - -.PHONY: ci-unit-nightly -ci-unit-nightly: - # NOTE: We run mistral runner checks only as part of a nightly build to speed up - # non nightly builds (Mistral will be deprecated in the future) - @echo - @echo "============== ci-unit-nightly ==============" - @echo - . $(VIRTUALENV_DIR)/bin/activate; nosetests $(NOSE_OPTS) -s -v contrib/runners/mistral_v2/tests/unit - -.PHONY: .ci-prepare-integration -.ci-prepare-integration: - sudo -E ./scripts/travis/prepare-integration.sh - -.PHONY: ci-integration -ci-integration: .ci-prepare-integration .itests-coverage-html - -.PHONY: ci-runners -ci-runners: .ci-prepare-integration .runners-itests-coverage-html - -.PHONY: .ci-prepare-mistral -.ci-prepare-mistral: - sudo -E ./scripts/travis/setup-mistral.sh - -.PHONY: ci-mistral -ci-mistral: .ci-prepare-integration .ci-prepare-mistral .mistral-itests-coverage-html - -.PHONY: ci-orquesta -ci-orquesta: .ci-prepare-integration .orquesta-itests-coverage-html - -.PHONY: ci-packs-tests -ci-packs-tests: .packs-tests +virtualenv-components: + virtualenv --python=$(PYTHON_VERSION) --no-site-packages $@ --no-download + +virtualenv-st2client: + virtualenv --python=$(PYTHON_VERSION) --no-site-packages $@ --no-download + +$(VIRTUALENV_DIR)/bin/invoke: $(VIRTUALENV_DIR) + . $(VIRTUALENV_DIR)/bin/activate && pip install invoke + +.PHONY: invoke +invoke: $(VIRTUALENV_DIR)/bin/invoke + +# https://stackoverflow.com/a/33018558 +# Workaround to support all previous make targets +# This default target simply passes all targets on to invoke +# We can't add invoke as a make dependency for the .DEFAULT target since the +# dependency will get overridden by whatever target is passed in +.DEFAULT: + @# Manually make virtualenv target + if [ ! -d $(VIRTUALENV_DIR) ]; then make virtualenv; fi + @# Manually make invoke target + if [ ! -e $(VIRTUALENV_DIR)/bin/invoke ]; then make invoke; fi + . 
$(VIRTUALENV_DIR)/bin/activate && invoke $@ + @#. $(VIRTUALENV_DIR)/bin/activate && echo $$PYTHONPATH diff --git a/scripts/travis/prepare-integration.sh b/scripts/travis/prepare-integration.sh index 9edfc74003..d4cdfe1e1e 100755 --- a/scripts/travis/prepare-integration.sh +++ b/scripts/travis/prepare-integration.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -set -e +set -x if [ "$(whoami)" != 'root' ]; then echo 'Please run with sudo' @@ -20,8 +20,13 @@ st2 --version # This script runs as root on Travis which means other processes which don't run # as root can't write to logs/ directory and tests fail -chmod 777 logs/ -chmod 777 logs/* +# This _seems_ to only be used by Mistral, which we are in the process of +# removing, so we either need to create the directory here if it doesn't exist, +# or we need to not bother with this if they don't already exist. +if [[ -d logs ]]; then + chmod 777 logs/ + chmod 777 logs/* +fi # Workaround for Travis on Ubuntu Xenial so local runner integration tests work # when executing them under user "stanley" (by default Travis checks out the diff --git a/scripts/travis/setup-mistral.sh b/scripts/travis/setup-mistral.sh index 66fe35c1ae..b926ba58df 100755 --- a/scripts/travis/setup-mistral.sh +++ b/scripts/travis/setup-mistral.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -set -e +set -ex if [ "$(whoami)" != 'root' ]; then echo 'Please run with sudo' diff --git a/tasks/__init__.py b/tasks/__init__.py new file mode 100644 index 0000000000..4edd7f3091 --- /dev/null +++ b/tasks/__init__.py @@ -0,0 +1,407 @@ +import os + +from invoke import Collection, task, run + +import build +import check as check_tasks +import ci as ci_tasks +import clean as clean_tasks +import generate +import git_tasks +import lint as lint_tasks +import requirements as requirements_tasks +import test + + + +# All tasks are implemented in submodules of this package +# All tasks in this module are only for reverse compatibility with the original +# Makefile + +# This task aliases a Python built-in +@task(requirements_tasks.requirements, generate.config, check_tasks.check, test.pytests) +def all_(ctx): + pass + + +@task +def play(ctx): + ''' + Print out environment variables used by invoke + ''' + # Since invoke tasks can accept arguments, this isn't as necessary as it + # was in the Makefile. However, we still use it in Travis tests, so we + # replicate it here as well, but we only print out environment variables + # that we use. Most environment variables used in the Makefile. 
+ print('TRAVIS_PULL_REQUEST: {}'.format(os.environ.get('TRAVIS_PULL_REQUEST'))) + print('') + print('TRAVIS_EVENT_TYPE: {}'.format(os.environ.get('TRAVIS_EVENT_TYPE'))) + print('') + print('NOSE_OPTS: {}'.format(os.environ.get('NOSE_OPTS'))) + print('') + print('NOSE_COVERAGE_FLAGS: {}'.format(os.environ.get('NOSE_COVERAGE_FLAGS'))) + print('') + print('NOSE_COVERAGE_PACKAGES: {}'.format(os.environ.get('NOSE_COVERAGE_PACKAGES'))) + print('') + print('ST2_PIP_OPTIONS: {}'.format(os.environ.get('ST2_PIP_OPTIONS'))) + print('') + print('PYLINT_CONCURRENCY: {}'.format(os.environ.get('PYLINT_CONCURRENCY'), '1')) + print('') + + +@task(check_tasks.check) +def check(ctx): + pass + + +@task(requirements_tasks.install.runners) +def install_runners(ctx): + pass + + +@task(check_tasks.requirements) +def check_requirements(ctx): + pass + + +@task(check_tasks.python_packages) +def check_python_packages(ctx): + pass + + +@task(check_tasks.python_packages_nightly) +def check_python_packages_nightly(ctx): + pass + + +@task(ci_tasks.checks_nightly) +def ci_checks_nightly(ctx): + pass + + +@task(check_tasks.logs) +def checklogs(ctx): + pass + + +@task(generate.config, aliases=('.configgen',)) +def configgen(ctx): + pass + + +@task(lint_tasks.pylint, aliases=('.pylint',)) +def pylint(ctx): + pass + + +@task(lint_tasks.api_spec, aliases=('.lint_api_spec',)) +def lint_api_spec(ctx): + pass + + +@task(generate.api_spec, aliases=('.generate_api_spec',)) +def generate_api_spec(ctx): + pass + + +@task(lint_tasks.circle_api_spec) +def circle_lint_api_spec(ctx): + pass + + +@task(lint_tasks.flake8, aliases=('.flake8',)) +def flake8(ctx): + pass + + +@task(lint_tasks.lint, aliases=('.lint',)) +def lint(ctx): + pass + + +@task(check_tasks.st2client_install, aliases=('.st2client_install_check',)) +def st2client_install_check(ctx): + pass + + +@task(check_tasks.bandit, aliases=('.bandit',)) +def bandit(ctx): + pass + + +@task(clean_tasks.pycs, aliases=('.cleanpycs',)) +def cleanpycs(ctx): + pass + + +@task(clean_tasks.clean) +def clean(ctx): + pass + + +@task(check_tasks.compile_) +def compile_(ctx): + pass + + +@task(check_tasks.compilepy3) +def compilepy3(ctx): + pass + + +@task(check_tasks.st2client_dependencies, aliases=('.st2client_dependencies_check',)) +def st2client_dependencies_check(ctx): + pass + + +@task(check_tasks.st2common_circular_dependencies, aliases=('.st2common_circular_dependencies_check',)) +def st2common_circular_dependencies(ctx): + pass + + +@task(clean_tasks.mongodb, aliases=('.cleanmongodb',)) +def cleanmongodb(ctx): + pass + + +@task(clean_tasks.mysql, aliases=('.cleanmysql',)) +def cleanmysql(ctx): + pass + + +@task(clean_tasks.rabbitmq, aliases=('.cleanrabbitmq',)) +def cleanrabbitmq(ctx): + pass + + +@task(clean_tasks.coverage, aliases=('.cleancoverage',)) +def cleancoverage(ctx): + pass + + +@task(requirements_tasks.requirements) +def requirements(ctx): + pass + + +@task(test.pytests, aliases=('.pytests',)) +def pytests(ctx): + pass + + +@task(test.pytests) +def tests(ctx): + pass + + +@task(test.unit, aliases=('.unit_tests',)) +def unit_tests(ctx, coverage=False, nose_opts=None): + pass + + +@task(test.integration, aliases=('itests', '.itests',)) +def integration_tests(ctx, coverage=False, nose_opts=None): + pass + + +@task(test.mistral, aliases=('.mistral-itests',)) +def mistral_itests(ctx, coverage=False, nose_opts=None): + pass + + +@task(test.orquesta, aliases=('.orquesta-itests',)) +def orquesta_itests(ctx, coverage=False, nose_opts=None): + pass + + +@task(test.packs, 
aliases=('.packs-tests',)) +def packs_tests(ctx, coverage=False, nose_opts=None): + pass + + +@task(test.runners_unit, aliases=('.runners-tests',)) +def runners_tests(ctx, coverage=False, nose_opts=None): + pass + + +@task(test.runners_integration, aliases=('.runners-itests',)) +def runners_itests(ctx, coverage=False, nose_opts=None): + pass + + +@task(build.cli) +def cli(ctx): + pass + + +@task(build.rpms) +def rpms(ctx): + pass + + +@task(build.rhel_rpms) +def rhel_rpms(ctx): + pass + + +@task(build.debs) +def debs(ctx): + pass + + +@task(requirements_tasks.sdist, aliases=('.sdist-requirements',)) +def sdist_requirements(ctx): + pass + + +@task(ci_tasks.ci) +def ci(ctx): + pass + + +@task(ci_tasks.checks) +def ci_checks(ctx): + pass + + +@task(ci_tasks.py3_unit) +def ci_py3_unit(ctx): + pass + + +@task(ci_tasks.py3_unit_nightly) +def ci_py3_unit_nightly(ctx): + pass + + +@task(ci_tasks.py3_integration, aliases=('.ci_py3_integration',)) +def ci_py3_integration(ctx): + pass + + +@task(check_tasks.rst, aliases=('.rst_check',)) +def rst_check(ctx): + pass + + +@task(check_tasks.generated_files, aliases=('.generated_files_check',)) +def generated_files_check(ctx): + pass + + +@task(ci_tasks.unit) +def ci_unit(ctx): + pass + + +@task(ci_tasks.unit_nightly) +def ci_unit_nightly(ctx, coverage=False, nose_opts=None): + pass + + +@task(ci_tasks.prepare_integration, aliases=('.ci_prepare_integration',)) +def ci_prepare_integration(ctx, coverage=False, nose_opts=None): + pass + + +@task(ci_tasks.integration) +def ci_integration(ctx, coverage=False, nose_opts=None): + pass + + +@task(ci_tasks.runners) +def ci_runners(ctx, coverage=False, nose_opts=None): + pass + + +@task(ci_tasks.prepare_mistral, aliases=('.ci-prepare-mistral',)) +def ci_prepare_mistral(ctx): + pass + + +@task(ci_tasks.mistral) +def ci_mistral(ctx, coverage=False, nose_opts=None): + pass + + +@task(ci_tasks.orquesta) +def ci_orquesta(ctx, coverage=False, nose_opts=None): + pass + + +@task(ci_tasks.packs_tests) +def ci_packs_tests(ctx): + pass + + +namespace = Collection() + +namespace.add_task(all_, name='all') +namespace.add_task(play) +namespace.add_task(check) +namespace.add_task(install_runners) +namespace.add_task(check_requirements) +namespace.add_task(check_python_packages) +namespace.add_task(check_python_packages_nightly) +namespace.add_task(ci_checks_nightly) +namespace.add_task(checklogs) +namespace.add_task(configgen) +namespace.add_task(pylint) +namespace.add_task(lint_api_spec) +namespace.add_task(generate_api_spec) +namespace.add_task(circle_lint_api_spec) +namespace.add_task(flake8) +namespace.add_task(lint) +namespace.add_task(st2client_install_check) +namespace.add_task(bandit) +namespace.add_task(cleanpycs) +namespace.add_task(clean) +namespace.add_task(compile_, name='compile') +namespace.add_task(compilepy3) +namespace.add_task(st2client_dependencies_check) +namespace.add_task(st2common_circular_dependencies) +namespace.add_task(cleanmongodb) +namespace.add_task(cleanmysql) +namespace.add_task(cleanrabbitmq) +namespace.add_task(cleancoverage) +namespace.add_task(requirements) +namespace.add_task(pytests) +namespace.add_task(tests) +namespace.add_task(unit_tests) +namespace.add_task(integration_tests) +namespace.add_task(mistral_itests) +namespace.add_task(orquesta_itests) +namespace.add_task(packs_tests) +namespace.add_task(runners_tests) +namespace.add_task(runners_itests) +namespace.add_task(cli) +namespace.add_task(rpms) +namespace.add_task(rhel_rpms) +namespace.add_task(debs) 
+namespace.add_task(sdist_requirements) +namespace.add_task(ci) +namespace.add_task(ci_checks) +namespace.add_task(ci_py3_unit) +namespace.add_task(ci_py3_unit_nightly) +namespace.add_task(ci_py3_integration) +namespace.add_task(rst_check) +namespace.add_task(generated_files_check) +namespace.add_task(ci_unit) +namespace.add_task(ci_unit_nightly) +namespace.add_task(ci_prepare_integration) +namespace.add_task(ci_integration) +namespace.add_task(ci_runners) +namespace.add_task(ci_prepare_mistral) +namespace.add_task(ci_mistral) +namespace.add_task(ci_orquesta) +namespace.add_task(ci_packs_tests) + +# Once we transition to invoke, we can switch to calling tasks directly +# namespace.add_collection(build) +# namespace.add_collection(check_tasks) +# namespace.add_collection(ci_tasks) +# namespace.add_collection(clean_tasks) +# namespace.add_collection(generate) +# namespace.add_collection(lint_tasks) +# namespace.add_collection(test) +# namespace.add_collection(requirements_tasks, name='requirements') diff --git a/tasks/build.py b/tasks/build.py new file mode 100644 index 0000000000..ef07b0a1ba --- /dev/null +++ b/tasks/build.py @@ -0,0 +1,69 @@ +from __future__ import print_function + +import glob +import sys + +from invoke import exceptions, run, task + + +@task +def cli(ctx): + print("") + print("=================== Building st2 client ===================") + print("") + with ctx.cd('st2client'): + try: + ctx.run("python setup.py develop") + except exceptions.Failure as e: + print("!!! ERROR: BUILD FAILED !!!\n", file=sys.stderr) + + +@task +def rpms(ctx): + print("") + print("==================== rpm ====================") + print("") + run("rm -Rf ~/rpmbuild") + for component in list(set(glob.glob("st2*")) - set(glob.glob("*.egg-info")) - set(['st2tests', 'st2exporter'])): + with ctx.cd(component): + try: + ctx.run("make rpm") + except exceptions.Failure as e: + raise e + break + with ctx.cd("st2client"): + run("make rpm") + + +@task +def rhel_rpms(ctx): + print("") + print("==================== rpm ====================") + print("") + run("rm -Rf ~/rpmbuild") + for component in list(set(glob.glob("st2*")) - set(glob.glob("*.egg-info")) - set(['st2tests', 'st2exporter'])): + with ctx.cd(component): + try: + ctx.run("make rhel-rpm") + except exceptions.Failure as e: + raise e + break + with ctx.cd("st2client"): + ctx.run("make rhel-rpm") + + +@task +def debs(ctx): + print("") + print("==================== deb ====================") + print("") + run("rm -Rf ~/debbuild") + for component in list(set(glob.glob("st2*")) - set(glob.glob("*.egg-info")) - set(['st2tests', 'st2exporter'])): + with ctx.cd(component): + try: + ctx.run("make deb") + except exceptions.Failure as e: + raise e + break + with ctx.cd('st2client'): + ctx.run("make deb") diff --git a/tasks/check.py b/tasks/check.py new file mode 100644 index 0000000000..ced793feda --- /dev/null +++ b/tasks/check.py @@ -0,0 +1,232 @@ +from __future__ import print_function + +import compileall +import fnmatch +import glob +import modulefinder +import os +import re +import sys + +from invoke import exceptions, run, task + +import generate +import requirements as requirements_tasks + + +@task(requirements_tasks.requirements) +def requirements(ctx): + ''' + Update requirements and then make sure no files were changed + ''' + print("") + print("============== CHECKING REQUIREMENTS ==============") + print("") + # Update requirements and then make sure no files were changed + # run("git status -- *requirements.txt */*requirements.txt | grep -q 
\"nothing to commit\"") + run("git status -- requirements.txt test-requirements.txt */*requirements.txt | grep -q \"nothing to commit\"") + print("All requirements files up-to-date!") + + +@task +def logs(ctx): + ''' + Summarize statistics for ST2 logs + ''' + print("") + print("================== LOG WATCHER ====================") + print("") + from tools import log_watcher + log_watcher.main(['tools/log_watcher.py', '10']) + + +# The original make target also depended upon lint.flake8, but that causes +# an import cycle, so we skip it +@task(requirements, logs, default=True) +def check(ctx): + pass + + +@task(requirements_tasks.requirements) +def bandit(ctx): + print("") + print("==================== bandit ====================") + print("") + for component in list((set(glob.glob("st2*")) + | set(glob.glob("contrib/runners/*"))) + - set(glob.glob("*.egg-info")) + - set(['st2tests', 'st2exporter'])): + run("bandit -r {component} -lll -x build,dist".format(component=component)) + + +@task(requirements_tasks.requirements) +def compile_(ctx): + print("") + print("======================= compile ========================") + print("") + print("------- Compile all .py files (syntax check test - Python 2) ------") + compileall.compile_dir(".", rx=re.compile(r"/virtualenv|virtualenv-osx|virtualenv-py3|.tox|.git|.venv-st2devbox"), quiet=True) + + +@task +def compilepy3(ctx): + print("") + print("======================= compile ========================") + print("") + print("------- Compile all .py files (syntax check test - Python 3) ------") + run("python3 -c 'import compileall,re; compileall.compile_dir(\".\", force=True, quiet=1, rx=re.compile(r\"/virtualenv|.tox|.git|.venv-st2devbox|./st2tests/st2tests/fixtures/packs/test\"), workers=8)'", echo=True) + + +@task(requirements_tasks.requirements) +def python_packages(ctx): + ''' + Make target which verifies all the components Python packages are valid + ''' + print("") + print("================== CHECK PYTHON PACKAGES ====================") + print("") + + run("make virtualenv-components") + for component in list(set(glob.glob("st2*")) - set(glob.glob("*.egg-info")) - set(['st2tests', 'st2exporter'])): + print("===========================================================") + print("Checking component: {component}".format(component=component)) + print("===========================================================") + run("virtualenv-components/bin/python {component}/setup.py --version".format(component=component)) + + +@task +def python_packages_nightly(ctx): + ''' + Make target which verifies all the components Python packages are valid + NOTE: This is superset of check-python-packages target. 
+ We run more extensive and slower tests as part of the nightly build to speed up PR builds + ''' + print("") + print("================== CHECK PYTHON PACKAGES ====================") + print("") + + run("make virtualenv-components") + for component in list(set(glob.glob("st2*")) - set(glob.glob("*.egg-info")) - set(['st2tests', 'st2exporter'])): + print("===========================================================") + print("Checking component: {component}".format(component=component)) + print("===========================================================") + with ctx.cd(component): + ctx.run("../virtualenv-components/bin/python setup.py --version") + ctx.run("../virtualenv-components/bin/python setup.py sdist bdist_wheel") + ctx.run("../virtualenv-components/bin/python setup.py develop --no-deps") + ctx.run("virtualenv-components/bin/python -c \"import {component}\"".format(component=component)) + ctx.run("rm -rf {component}/dist/; rm -rf {component}/{component}.egg-info".format(component=component)) + +@task +def st2client_install(ctx): + print("") + print("==================== st2client install check ====================") + print("") + + run("make virtualenv-st2client") + + # COMPONENTS := $(shell ls -a | grep ^st2 | grep -v .egg-info) + # COMPONENTS_RUNNERS := $(wildcard contrib/runners/*) + # COMPONENTS_WITH_RUNNERS := $(COMPONENTS) $(COMPONENTS_RUNNERS) + # COMPONENTS_WITH_RUNNERS_WITHOUT_MISTRAL_RUNNER := $(foreach component,$(filter-out contrib/runners/mistral_v2,$(COMPONENTS_WITH_RUNNERS)),$(component)) + # COMPONENT_PYTHONPATH = $(subst $(space_char),:,$(realpath $(COMPONENTS_WITH_RUNNERS))) + + # Setup PYTHONPATH in bash activate script... + components = list(set(glob.glob("st2*") + glob.glob("contrib/runners/*")) - set(glob.glob("*.egg-info") + ['contrib/runners/mistral_v2'])) + print(components) + # We need to add each component path to the PYTHONPATH for st2client + with open('virtualenv-st2client/bin/activate', 'r') as f: + lines = f.readlines() + + # Write out each line, except for the lines that we don't want + with open('virtualenv-st2client/bin/activate', 'w+') as f: + for line in lines: + # Skip writing out lines that start with... 
+ if any([line.startswith('_OLD_PYTHONPATH'), + line.startswith('PYTHONPATH='), + line.startswith('export PYTHONPATH')]): + continue + f.write(line) + + # Now rewrite those lines + f.write('_OLD_PYTHONPATH=$PYTHONPATH\n') + f.write('PYTHONPATH={root_dir}:{components}\n'.format( + root_dir='.', + components=':'.join(components))) + f.write('export PYTHONPATH\n') + + ctx.run("touch virtualenv-st2client/bin/activate") + ctx.run("chmod +x virtualenv-st2client/bin/activate") + + run("virtualenv-st2client/bin/pip install --upgrade \"pip>=9.0,<9.1\"") + run("virtualenv-st2client/bin/pip install --upgrade \"setuptools==41.0.1\"") + with ctx.prefix('../virtualenv-st2client/bin/activate'): + with ctx.cd('st2client'): + ctx.run("which python", echo=True) + ctx.run("python setup.py install", echo=True) + run("st2 --version") + run("python -c \"import st2client\"") + + +@task +def st2client_dependencies(ctx): + finder = modulefinder.ModuleFinder() + for root, dirnames, filenames in os.walk('st2client/st2client'): + for filename in fnmatch.filter(filenames, '*.py'): + # fname = os.path.join(root, filename) + # finder.run_script(fname) + # assert 'st2common' not in finder.modules.keys() + run("grep -qE 'st2common.*import|import.*st2common' {f} && " + "exit -1 || exit 0".format(f=os.path.join(root, filename))) + + +@task +def st2common_circular_dependencies(ctx): + dont_import_modules = ['st2api', 'st2auth', 'st2debug', 'st2exporter', 'st2reactor'] + modstring = '|'.join(dont_import_modules) + for root, dirnames, filenames in os.walk('st2common/st2common'): + for filename in fnmatch.filter(filenames, '*.py'): + # Note: The jinja asyncsupport.py module is a Python 3-only module, because it + # contains uses the async keyword, which is a syntax error in Python 2. + # Due to this, we cannot simply import it, not can we use ModuleFinder + # like we do in st2client_dependencies. + # So instead, we just wrap grep. + run("grep -qE '({mods}).*import|import.*({mods})' {f} && " + "exit -1 || exit 0".format(mods=modstring, f=os.path.join(root, filename))) + + +@task +def generated_files(ctx): + ''' + Verify that all the files which are automatically generated have indeed been re-generated and + committed + ''' + print("==================== generated-files-check ====================") + + # 1. Sample config - conf/st2.conf.sample + run("cp conf/st2.conf.sample /tmp/st2.conf.sample.upstream") + generate.config(ctx) + try: + run("diff conf/st2.conf.sample /tmp/st2.conf.sample.upstream") + except exceptions.Failure as e: + print("conf/st2.conf.sample hasn't been re-generated and committed. Please run \"make configgen\" and include and commit the generated file.", file=sys.stderr) + raise e + # 2. OpenAPI definition file - st2common/st2common/openapi.yaml (generated from + # st2common/st2common/openapi.yaml.j2) + run("cp st2common/st2common/openapi.yaml /tmp/openapi.yaml.upstream") + generate.api_spec(ctx) + try: + run("diff st2common/st2common/openapi.yaml /tmp/openapi.yaml.upstream") + except exceptions.Failure as e: + print("st2common/st2common/openapi.yaml hasn't been re-generated and committed. 
Please run \"make generate.api-spec\" and include and commit the generated file.", file=sys.stderr) + raise e + + print("All automatically generated files are up to date.") + + +@task +def rst(ctx): + print("") + print("==================== rst-check ====================") + print("") + run("rstcheck --report warning CHANGELOG.rst") diff --git a/tasks/ci.py b/tasks/ci.py new file mode 100644 index 0000000000..b23a93cbe6 --- /dev/null +++ b/tasks/ci.py @@ -0,0 +1,122 @@ +import glob + +from invoke import exceptions, run, task + +import check +import lint +import test + + +@task +def py3_unit(ctx): + print("") + print("==================== ci-py3-unit ====================") + print("") + run("NOSE_WITH_TIMER=1 tox -e py36-unit -vv") + run("NOSE_WITH_TIMER=1 tox -e py36-packs -vv") + + +@task +def py3_unit_nightly(ctx): + print("") + print("==================== ci-py3-unit ====================") + print("") + run("NOSE_WITH_TIMER=1 tox -e py36-unit-nightly -vv") + + +@task +def prepare_integration(ctx): + run("sudo -E scripts/travis/prepare-integration.sh") + + +@task(prepare_integration) +def py3_integration(ctx): + print("") + print("==================== ci-py3-integration ====================") + print("") + run("NOSE_WITH_TIMER=1 tox -e py36-integration -vv") + + +@task(check.compile_, check.generated_files, lint.pylint, lint.flake8, check.requirements, + check.st2client_dependencies, check.st2common_circular_dependencies, lint.circle_api_spec, + check.rst, check.st2client_install, check.python_packages) +def checks(ctx): + pass + + +@task(checks, test.unit, test.integration, test.mistral, test.packs, default=True) +def ci(ctx, coverage=False, nose_opts=None): + if coverage: + run("coverage combine") + run("coverage report") + + +@task(check.python_packages_nightly) +def checks_nightly(ctx): + pass + + +@task(test.unit) +def unit(ctx, coverage=False, nose_opts=None): + pass + + +@task +def unit_nightly(ctx, coverage=False, nose_opts=None): + # NOTE: We run mistral runner checks only as part of a nightly build to speed up + # non nightly builds (Mistral will be deprecated in the future) + print("") + print("============== ci-unit-nightly ==============") + print("") + + opts = { + 'rednose': True, + 'immediate': True, + 'with-parallel': True, + } + + if coverage: + components = list(set(glob.glob("st2*")) - set(['st2tests']) - set(glob.glob('*.egg-info'))) + ['contrib/runners/mistral_v2'] + opts['with-coverage'] = True + opts['cover-branches'] = True + opts['cover-package'] = ','.join(components) + + if nose_opts: + opts.update(nose_opts) + + opts_list = [ + '--{key}'.format(key=key) if value is True else '--{key}={value}'.format(key=key, value=value) + for key, value in opts.items() + ] + + run("nosetests {opts} -s -v contrib/runners/mistral_v2/tests/unit".format(opts=' '.join(opts_list))) + + +@task(prepare_integration, test.integration) +def integration(ctx, coverage=False, nose_opts=None): + pass + + +@task(prepare_integration, test.runners_integration) +def runners(ctx, coverage=False, nose_opts=None): + pass + + +@task +def prepare_mistral(ctx): + run("sudo -E ./scripts/travis/setup-mistral.sh") + + +@task(prepare_integration, prepare_mistral, test.mistral) +def mistral(ctx, coverage=False, nose_opts=None): + pass + + +@task(prepare_integration, test.orquesta) +def orquesta(ctx, coverage=False, nose_opts=None): + pass + + +@task(test.packs) +def packs_tests(ctx): + pass diff --git a/tasks/clean.py b/tasks/clean.py new file mode 100644 index 0000000000..9ac8938bfc --- /dev/null +++ 
b/tasks/clean.py @@ -0,0 +1,57 @@ +import glob +import os + +from invoke import task + + +@task +def pycs(ctx): + print("Removing all .pyc files") + for pycfile in glob.glob('**/*.pyc', recursive=True): + os.remove(pycfile) + + +@task +def mongodb(ctx): + print("==================== cleanmongodb ====================") + print("----- Dropping all MongoDB databases -----") + ctx.sudo("pkill -9 mongod") + ctx.sudo("rm -rf /var/lib/mongodb/*") + ctx.sudo("chown -R mongodb:mongodb /var/lib/mongodb/") + ctx.sudo("service mongodb start") + run("sleep 15") + run("mongo --eval \"rs.initiate()\"") + run("sleep 15") + + +@task +def mysql(ctx): + print("==================== cleanmysql ====================") + print("----- Dropping all Mistral MYSQL databases -----") + run("mysql -uroot -pStackStorm -e \"DROP DATABASE IF EXISTS mistral\"") + run("mysql -uroot -pStackStorm -e \"CREATE DATABASE mistral\"") + run("mysql -uroot -pStackStorm -e \"GRANT ALL PRIVILEGES ON mistral.* TO 'mistral'@'127.0.0.1' IDENTIFIED BY 'StackStorm'\"") + run("mysql -uroot -pStackStorm -e \"FLUSH PRIVILEGES\"") + run("/opt/openstack/mistral/.venv/bin/python /opt/openstack/mistral/tools/sync_db.py --config-file /etc/mistral/mistral.conf") + + +@task +def rabbitmq(ctx): + print("==================== cleanrabbitmq ====================") + print("Deleting all RabbitMQ queue and exchanges") + ctx.sudo("rabbitmqctl stop_app") + ctx.sudo("rabbitmqctl reset") + ctx.sudo("rabbitmqctl start_app") + + +@task +def coverage(ctx): + print("==================== cleancoverage ====================") + print("Removing all coverage results directories") + print("") + run("rm -rf .coverage") + + +@task(pycs, default=True) +def clean(ctx): + pass diff --git a/tasks/generate.py b/tasks/generate.py new file mode 100644 index 0000000000..30c4129b99 --- /dev/null +++ b/tasks/generate.py @@ -0,0 +1,72 @@ +try: + # Python 2 + from StringIO import StringIO +except ImportError: + # Python 3 + # See https://stackoverflow.com/a/40984270 for a better Python3 only implementation + from io import StringIO +import sys + +from invoke import exceptions, run, task + +import requirements + + +class Capturing(list): + ''' + >>> with Capturing() as output: + ... print("Hello world!") + >>> assert output == ["Hello world!"] + >>> with Capturing(output) as output: + ... 
print("Foobar") + >>> assert output == ["Hello world!", "Foobar"] + ''' + def __enter__(self): + self._stdout = sys.stdout + sys.stdout = self._stringio = StringIO() + return self + def __exit__(self, *args): + self.extend(self._stringio.getvalue().splitlines()) + del self._stringio # free up some memory + sys.stdout = self._stdout + + +@task(requirements.install.fixed_requirements) +def config(ctx): + # Don't eagerly import this module, because config_gen requires olso_config, which isn't + # isn't installed if the requirements tasks haven't run yet + # from tools import config_gen + + print("") + print("================== config gen ====================") + print("") + with open('conf/st2.conf.sample', 'w+') as f: + f.write('# Sample config which contains all the available options which the corresponding descriptions\n') + f.write('# Note: This file is automatically generated using tools/config_gen.py - DO NOT UPDATE MANUALLY\n') + f.write('\n') + run("python tools/config_gen.py >> conf/st2.conf.sample") + # with Capturing() as output: + # config_gen.main(['tools/config_gen.py']) + # for line in output: + # f.write(line) + + +@task +def api_spec(ctx): + # Break an import cycle + import lint + lint.api_spec + print("") + print("================== Generate openapi.yaml file ====================") + print("") + with open('st2common/st2common/openapi.yaml', 'w+') as f: + f.write('# NOTE: This file is auto-generated - DO NOT EDIT MANUALLY\n') + f.write('# Edit st2common/st2common/openapi.yaml.j2 and then run\n') + f.write('# make .generate-api-spec\n') + f.write('# to generate the final spec file\n') + run("st2common/bin/st2-generate-api-spec --config-file conf/st2.dev.conf >> st2common/st2common/openapi.yaml") + + +@task(config, api_spec, default=True) +def generate(ctx): + pass diff --git a/tasks/git_tasks/__init__.py b/tasks/git_tasks/__init__.py new file mode 100644 index 0000000000..c1f1ff449a --- /dev/null +++ b/tasks/git_tasks/__init__.py @@ -0,0 +1,2 @@ + +import submodule diff --git a/tasks/git_tasks/submodule.py b/tasks/git_tasks/submodule.py new file mode 100644 index 0000000000..7a5033635b --- /dev/null +++ b/tasks/git_tasks/submodule.py @@ -0,0 +1,13 @@ +from invoke import run, task + + +@task +def update(ctx, recursive=True): + try: + from git import Repo + except ImportError: + # Some of the tests rely on submodule so we need to make sure submodules are check out + run("git submodule update --recursive --remote") + else: + repo = Repo() + repo.submodule_update(recursive=recursive) diff --git a/tasks/lint.py b/tasks/lint.py new file mode 100644 index 0000000000..43bfedc82f --- /dev/null +++ b/tasks/lint.py @@ -0,0 +1,104 @@ +import glob +import os + +from invoke import run, task + +import check +import generate +import requirements + + +@task +def api_spec(ctx): + print("") + print("================== Lint API spec ====================") + print("") + run("st2common/bin/st2-validate-api-spec --config-file conf/st2.dev.conf") + + +@task +def circle_api_spec(ctx): + print("") + print("================== Lint API spec ====================") + print("") + try: + run("st2common/bin/st2-validate-api-spec --config-file conf/st2.dev.conf") + except Exception as e: + print("Open API spec lint failed") + raise e + + +@task(requirements.install.test_requirements) +def flake8(ctx): + print("") + print("================== flake8 ====================") + print("") + run("flake8 --config ./lint-configs/python/.flake8 {components}".format( + components=' '.join(glob.glob("st2*")))) + 
run("flake8 --config ./lint-configs/python/.flake8 {runners}".format( + runners=' '.join(glob.glob("contrib/runners/*")))) + run("flake8 --config ./lint-configs/python/.flake8 contrib/packs/actions/") + run("flake8 --config ./lint-configs/python/.flake8 contrib/linux") + run("flake8 --config ./lint-configs/python/.flake8 contrib/chatops/") + run("flake8 --config ./lint-configs/python/.flake8 scripts/") + run("flake8 --config ./lint-configs/python/.flake8 tools/") + run("flake8 --config ./lint-configs/python/.flake8 pylint_plugins/") + + +@task +def pylint(ctx): + print("") + print("================== pylint ====================") + print("") + # Lint st2 components + for component in list(set(glob.glob("st2*")) - set(glob.glob("*.egg-info")) - set(['st2tests', 'st2exporter'])): + print("===========================================================") + print("Running pylint on {component}".format(component=component)) + print("===========================================================") + run("pylint -j {pylint_concurrency} -E " + "--rcfile=./lint-configs/python/.pylintrc " + "--load-plugins=pylint_plugins.api_models " + "--load-plugins=pylint_plugins.db_models {component}/{component}".format( + pylint_concurrency=int(os.environ.get('PYLINT_CONCURRENCY', '1')), + component=component)) + + # Lint runner modules and packages + for component in glob.glob("contrib/runners/*"): + print("===========================================================") + print("Running pylint on {component}".format(component=component)) + print("===========================================================") + run("pylint -j {pylint_concurrency} -E " + "--rcfile=./lint-configs/python/.pylintrc " + "--load-plugins=pylint_plugins.api_models " + "--load-plugins=pylint_plugins.db_models " + "{component}/*.py".format( + pylint_concurrency=int(os.environ.get('PYLINT_CONCURRENCY', '1')), + component=component)) + + # Lint Python pack management actions + run("pylint -j {pylint_concurrency} -E " + "--rcfile=./lint-configs/python/.pylintrc " + "--load-plugins=pylint_plugins.api_models " + "contrib/packs/actions/*.py " + "contrib/packs/actions/*/*.py".format(pylint_concurrency=int(os.environ.get('PYLINT_CONCURRENCY', '1')))) + # Lint other packs + run("pylint -j {pylint_concurrency} -E " + "--rcfile=./lint-configs/python/.pylintrc " + "--load-plugins=pylint_plugins.api_models " + "contrib/linux/*/*.py " + "contrib/chatops/*/*.py".format(pylint_concurrency=int(os.environ.get('PYLINT_CONCURRENCY', '1')))) + # Lint Python scripts + run("pylint -j {pylint_concurrency} -E " + "--rcfile=./lint-configs/python/.pylintrc " + "--load-plugins=pylint_plugins.api_models " + "scripts/*.py " + "tools/*.py".format(pylint_concurrency=int(os.environ.get('PYLINT_CONCURRENCY', '1')))) + run("pylint -j {pylint_concurrency} -E " + "--rcfile=./lint-configs/python/.pylintrc " + "pylint_plugins/*.py".format(pylint_concurrency=int(os.environ.get('PYLINT_CONCURRENCY', '1')))) + + +@task(generate.api_spec, flake8, pylint, check.st2client_dependencies, + check.st2common_circular_dependencies, check.rst, check.st2client_install, default=True) +def lint(ctx): + pass diff --git a/tasks/requirements/__init__.py b/tasks/requirements/__init__.py new file mode 100644 index 0000000000..c839be696d --- /dev/null +++ b/tasks/requirements/__init__.py @@ -0,0 +1,80 @@ +import glob +import os + +from invoke import call, Collection, exceptions, run, task + +import fixate +from .. import git_tasks +import install +from .. 
import travis + + +# COMPONENTS := $(shell ls -a | grep ^st2 | grep -v .egg-info) +# COMPONENTS_RUNNERS := $(wildcard contrib/runners/*) +# COMPONENTS_WITH_RUNNERS := $(COMPONENTS) $(COMPONENTS_RUNNERS) +# COMPONENT_SPECIFIC_TESTS := st2tests *.egg-info +# COMPONENTS_TEST := $(foreach component,$(filter-out $(COMPONENT_SPECIFIC_TESTS),$(COMPONENTS_WITH_RUNNERS)),$(component)) +@task +def sdist(ctx): + # Copy over shared dist utils modules which is needed by setup.py + for component in glob.glob("contrib/runners/*"): + run("cp -f scripts/dist_utils.py {component}/dist_utils.py".format(component=component)) + try: + run("scripts/write-headers.sh {component}/dist_utils.py".format(component=component)) + except exceptions.Failure: + break + + # Copy over CHANGELOG.RST, CONTRIBUTING.RST and LICENSE file to each component directory + #@for component in $(COMPONENTS_TEST); do\ + # test -s $$component/README.rst || cp -f README.rst $$component/; \ + # cp -f CONTRIBUTING.rst $$component/; \ + # cp -f LICENSE $$component/; \ + #done + + +@task(pre=[ + sdist, + install.runners, + ], + post=[ + # Generate all requirements to support current CI pipeline. + fixate.requirements, + # Fix for Travis CI race + travis.fix_race, + # Fix for Travis CI caching issue + travis.bust_cache, + # Install requirements + install.requirements, + # Install st2common package to load drivers defined in st2common setup.py + # NOTE: We pass --no-deps to the script so we don't install all the + # package dependencies which are already installed as part of "requirements" + # make targets. This speeds up the build + call(install.st2common_develop, dummy=1), + # Note: We install prance here and not as part of any component + # requirements.txt because it has a conflict with our dependency (requires + # new version of requests) which we cant resolve at this moment + install.prance, + # Install st2common to register metrics drivers + # NOTE: We pass --no-deps to the script so we don't install all the + # package dependencies which are already installed as part of "requirements" + # make targets. 
This speeds up the build + call(install.st2common_develop, dummy=2), # Deduplicate call from previous call + # Some of the tests rely on submodule so we need to make sure submodules are checked out + git_tasks.submodule.update, + ], + default=True) +def requirements(ctx): + print('') + print('==================== requirements ====================') + print('') + # Make sure we use the latest version of pip, which is 19 + run("pip --version") + run("pip install --upgrade \"pip>=19.0,<20.0\"") + run("pip install --upgrade \"virtualenv==16.6.0\"") # Required for packs.install in dev envs + + +namespace = Collection() +namespace.add_task(sdist) +namespace.add_task(requirements) +namespace.add_collection(fixate) +namespace.add_collection(install) diff --git a/tasks/requirements/fixate.py b/tasks/requirements/fixate.py new file mode 100644 index 0000000000..c821a97000 --- /dev/null +++ b/tasks/requirements/fixate.py @@ -0,0 +1,176 @@ +from __future__ import absolute_import, print_function + +import glob +import os +import os.path +import sys + +from distutils.version import StrictVersion + +from invoke import run, task + +# NOTE: This script can't rely on any 3rd party dependency so we need to use this code here +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +if PY3: + text_type = str +else: + text_type = unicode + +OSCWD = os.path.abspath(os.curdir) +GET_PIP = ' curl https://bootstrap.pypa.io/get-pip.py | python' + +try: + import pip + from pip import __version__ as pip_version +except ImportError as e: + print('Failed to import pip: %s' % (text_type(e))) + print('') + print('Download pip:\n%s' % (GET_PIP)) + sys.exit(1) + +try: + # pip < 10.0 + from pip.req import parse_requirements +except ImportError: + # pip >= 10.0 + + try: + from pip._internal.req.req_file import parse_requirements + except ImportError as e: + print('Failed to import parse_requirements from pip: %s' % (text_type(e))) + print('Using pip: %s' % (str(pip_version))) + sys.exit(1) + + +# Lifted straight from fixate-requirements.py +def load_requirements(file_path): + return tuple((r for r in parse_requirements(file_path, session=False))) + + +def locate_file(path, must_exist=False): + if not os.path.isabs(path): + path = os.path.join(OSCWD, path) + if must_exist and not os.path.isfile(path): + print("Error: couldn't locate file `{0}'".format(path)) + return path + + +def merge_source_requirements(sources): + """ + Read requirements source files and merge it's content. + """ + projects = set() + merged_requirements = [] + for infile_path in (locate_file(p, must_exist=True) for p in sources): + for req in load_requirements(infile_path): + # Requirements starting with project name "project ..." + if req.req: + # Skip already added project name + if req.name in projects: + continue + projects.add(req.name) + merged_requirements.append(req) + + # Requirements lines like "vcs+proto://url" + elif req.link: + merged_requirements.append(req) + else: + raise RuntimeError('Unexpected requirement {0}'.format(req)) + + return merged_requirements + + +def write_requirements(sources=None, fixed_requirements=None, output_file=None, + skip=None): + """ + Write resulting requirements taking versions from the fixed_requirements. 
+ """ + skip = skip or [] + + requirements = merge_source_requirements(sources) + fixed = load_requirements(locate_file(fixed_requirements, must_exist=True)) + + # Make sure there are no duplicate / conflicting definitions + fixedreq_hash = {} + for req in fixed: + project_name = req.name + + if not req.req: + continue + + if project_name in fixedreq_hash: + raise ValueError('Duplicate definition for dependency "%s"' % (project_name)) + + fixedreq_hash[project_name] = req + + lines_to_write = [] + links = set() + for req in requirements: + if req.name in skip: + continue + + # we don't have any idea how to process links, so just add them + if req.link and req.link not in links: + links.add(req.link) + rline = str(req.link) + + if req.editable: + rline = '-e %s' % (rline) + elif req.req: + project = req.name + if project in fixedreq_hash: + rline = str(fixedreq_hash[project].req) + else: + rline = str(req.req) + + lines_to_write.append(rline) + + # Sort the lines to guarantee a stable order + lines_to_write = sorted(lines_to_write) + data = '\n'.join(lines_to_write) + '\n' + with open(output_file, 'w') as fp: + fp.write('# Don\'t edit this file. It\'s generated automatically!\n') + fp.write('# If you want to update global dependencies, modify fixed-requirements.txt\n') + fp.write('# and then run \'make requirements\' to update requirements.txt for all\n') + fp.write('# components.\n') + fp.write('# If you want to update depdencies for a single component, modify the\n') + fp.write('# in-requirements.txt for that component and then run \'make requirements\' to\n') + fp.write('# update the component requirements.txt\n') + fp.write(data) + + print('Requirements written to: {0}'.format(output_file)) + + +@task +def root_requirements(ctx): + # Generate all requirements to support current CI pipeline. + ignore_virtualenvs = list(glob.glob('virtualenv*')) + component_requirements = list(glob.glob('st2*/in-requirements.txt')) + runner_requirements = list(glob.glob('contrib/runners/*/in-requirements.txt')) + write_requirements(skip=ignore_virtualenvs, + sources=component_requirements+runner_requirements, + fixed_requirements='fixed-requirements.txt', + output_file='requirements.txt') + + +@task +def component_requirements(ctx): + ignore_virtualenvs = list(glob.glob('virtualenv*')) + # Generate all requirements to support current CI pipeline. 
+ for component in list((set(glob.glob("st2*")) | set(glob.glob("contrib/runners/*"))) - set(glob.glob('*.egg-info'))): + print("===========================================================\n" + "Generating requirements for {component}\n" + "===========================================================" + .format(component=component)) + write_requirements(skip=ignore_virtualenvs, + sources=['{component}/in-requirements.txt'.format(component=component)], + fixed_requirements='fixed-requirements.txt', + output_file='{component}/requirements.txt'.format(component=component)) + print("") + + +@task(root_requirements, component_requirements, default=True) +def requirements(ctx, default=True): + pass diff --git a/tasks/requirements/install.py b/tasks/requirements/install.py new file mode 100644 index 0000000000..82ecda533a --- /dev/null +++ b/tasks/requirements/install.py @@ -0,0 +1,71 @@ +import glob +import os + +from invoke import Collection, run, task + + +@task +def test_requirements(ctx): + run("pip install {pip_options} -r test-requirements.txt".format( + pip_options=os.environ.get('ST2_PIP_OPTIONS', '')), echo=True) + + +@task +def root_requirements(ctx): + run("pip install {pip_options} -r requirements.txt".format( + pip_options=os.environ.get('ST2_PIP_OPTIONS', '')), echo=True) + + +@task +def fixed_requirements(ctx): + run("pip install {pip_options} -r fixed-requirements.txt".format( + pip_options=os.environ.get('ST2_PIP_OPTIONS', '')), echo=True) + + +@task(test_requirements, root_requirements, fixed_requirements, default=True) +def requirements(ctx): + pass + + +# dummy_kwargs is used to deduplicate executions of this task - it is not used +# in any way +@task +def st2common_develop(ctx, **dummy_kwargs): + # Install st2common package to load drivers defined in st2common setup.py, + # and also to register metrics drivers + # NOTE: We pass --no-deps to the script so we don't install all the + # package dependencies which are already installed as part of "requirements" + # make targets. This speeds up the build + with ctx.cd('st2common'): + ctx.run("python setup.py develop --no-deps") + + +@task +def flake8(ctx): + # Manually install flake8 + run("pip install flake8") + + +@task +def prance(ctx): + # Note: We install prance here and not as part of any component + # requirements.txt because it has a conflict with our dependency (requires + # new version of requests) which we cant resolve at this moment + run("pip install \"prance==0.15.0\"") + + +@task +# NOTE: We pass --no-deps to the script so we don't install all the +# package dependencies which are already installed as part of "requirements" +# make targets. 
This speeds up the build +def runners(ctx): + print("") + print("================== INSTALL RUNNERS ====================") + print("") + for component in glob.glob("contrib/runners/*"): + print("===========================================================") + print("Installing runner: {component}".format(component=component)) + print("===========================================================") + with ctx.cd(component): + ctx.run("python setup.py develop --no-deps") + print("============== DONE INSTALLING RUNNERS ================") diff --git a/tasks/test.py b/tasks/test.py new file mode 100644 index 0000000000..8dc511efb7 --- /dev/null +++ b/tasks/test.py @@ -0,0 +1,197 @@ +import glob + +from invoke import call, exceptions, run, task + +import check +import lint +import requirements +import travis + + +@task +def drop_db(ctx): + print("----- Dropping st2-test db -----") + run("mongo st2-test --eval \"db.dropDatabase();\"") + + +@task(requirements.requirements) +def components(ctx, what='unit', coverage=False, nose_opts=None): + print("") + print("==================== {what} tests{with_coverage} ====================".format( + what=what, + with_coverage=' with coverage' if coverage else '')) + print("") + + components = list(set(glob.glob("st2*")) - set(['st2tests']) - set(glob.glob('*.egg-info'))) + + opts = { + 'rednose': True, + 'immediate': True, + 'with-parallel': True, + } + + if coverage: + opts['with-coverage'] = True + opts['cover-branches'] = True + opts['cover-package'] = ','.join(components) + + if nose_opts: + opts.update(nose_opts) + + opts_list = [ + '--{key}'.format(key=key) if value is True else '--{key}={value}'.format(key=key, value=value) + for key, value in opts.items() + ] + + for component in components: + print("===========================================================") + print("Running tests in {component}".format(component=component)) + print("-----------------------------------------------------------") + print("") + ctx.run("nosetests {opts} -s -v {component}/tests/{what}".format( + opts=' '.join(opts_list), + component=component, + what=what)) + print("") + print("-----------------------------------------------------------") + print("Done running tests in {component}".format(component=component)) + print("===========================================================") + + +@task(pre=[drop_db]) +def unit(ctx, coverage=False, nose_opts=None): + components(ctx, what='unit', coverage=coverage, nose_opts=nose_opts) + + +@task(pre=[drop_db, travis.prepare_integration]) +def integration(ctx, coverage=False, nose_opts=None): + components(ctx, what='integration', coverage=coverage, nose_opts=nose_opts) + + +@task(pre=[travis.prepare_integration, travis.setup_mistral]) +def mistral(ctx, coverage=False, nose_opts=None): + print("") + print("==================== MISTRAL integration tests ====================") + print("The tests assume both st2 and mistral are running on 127.0.0.1.") + print("") + + opts = { + 'rednose': True, + 'immediate': True, + 'with-parallel': True, + } + + if coverage: + opts['with-coverage'] = True + opts['cover-branches'] = True + opts['cover-package'] = ','.join(component + runners) + + if nose_opts: + opts.update(nose_opts) + + opts_list = [ + '--{key}'.format(key=key) if value is True else '--{key}={value}'.format(key=key, value=value) + for key, value in opts.items() + ] + + run("nosetests {opts} -s -v st2tests/integration/mistral".format(opts=' '.join(opts_list))) + + +@task +def orquesta(ctx, coverage=False, nose_opts=None): + print("") + 
print("==================== Orquesta integration tests ====================") + print("The tests assume st2 is running on 127.0.0.1.") + print("") + + opts = { + 'rednose': True, + 'immediate': True, + 'with-parallel': True, + } + + if coverage: + opts['with-coverage'] = True + opts['cover-branches'] = True + opts['cover-package'] = ','.join(component + runners) + + if nose_opts: + opts.update(nose_opts) + + opts_list = [ + '--{key}'.format(key=key) if value is True else '--{key}={value}'.format(key=key, value=value) + for key, value in opts.items() + ] + + run("nosetests {opts} -s -v st2tests/integration/orquesta".format(opts=' '.join(opts_list))) + + +@task +def packs(ctx): + print("") + print("==================== packs-tests ====================") + print("") + # Install st2common to register metrics drivers + with ctx.cd('st2common'): + ctx.run("python setup.py develop --no-deps") + # If the search pattern to glob.glob ends in a slash, it only searches for + # directories (eg: packs). Otherwise, it also includes README.md, which + # causes the st2-run-pack-tests command to fail. + contrib_packs = glob.glob("contrib/*/") + for pack in contrib_packs: + run("st2common/bin/st2-run-pack-tests -c -t -x -p {filename}".format(filename=pack)) + + +def runners(ctx, what='unit', coverage=False, nose_opts=None): + print("") + print("====================== runners-tests ======================") + print("") + + opts = { + 'rednose': True, + 'immediate': True, + 'with-parallel': True, + } + + if coverage: + opts['with-coverage'] = True + opts['cover-branches'] = True + opts['cover-package'] = ','.join(component + runners) + + if nose_opts: + opts.update(nose_opts) + + opts_list = [ + '--{key}'.format(key=key) if value is True else '--{key}={value}'.format(key=key, value=value) + for key, value in opts.items() + ] + runners = glob.glob("contrib/runners/*") + + for runner in runners: + print("") + print("===========================================================") + print("") + print("Running tests in {runner}".format(runner=runner)) + print("") + print("===========================================================") + run("nosetests {opts} -s -v {runner}/tests/{what}".format(opts=' '.join(opts_list), runner=runner, what=what)) + + +@task(drop_db) +def runners_unit(ctx, coverage=False, nose_opts=None): + runners(ctx, what='unit', coverage=coverage, nose_opts=nose_opts) + + +@task(drop_db) +def runners_integration(ctx, coverage=False, nose_opts=None): + runners(ctx, what='integration', coverage=coverage, nose_opts=nose_opts) + + +@task +def pytests_coverage(ctx): + unit(ctx, coverage=True) + + +@task(check.compile_, lint.flake8, lint.pylint, unit, default=True) +def pytests(ctx): + pass diff --git a/tasks/travis.py b/tasks/travis.py new file mode 100644 index 0000000000..648ab1de69 --- /dev/null +++ b/tasks/travis.py @@ -0,0 +1,35 @@ +import os +import subprocess + +from invoke import run, task + + +@task +def prepare_integration(ctx): + # run("sudo -E scripts/travis/prepare-integration.sh") + subprocess.Popen('sudo -E ./scripts/travis/prepare-integration.sh', + env=os.environ.copy(), + shell=True) + + +@task +def setup_mistral(ctx): + # run("sudo -E scripts/travis/setup-mistral.sh") + subprocess.Popen('sudo -E ./scripts/travis/setup-mistral.sh', + env=os.environ.copy(), + shell=True) + + +@task +def fix_race(ctx): + # Fix for Travis CI race + run("pip install \"six==1.12.0\"") + + +@task +def bust_cache(ctx): + # Fix for Travis CI caching issue + if os.environ.get('TRAVIS_EVENT_TYPE'): + run("pip 
uninstall --yes \"pytz\" || echo \"pytz not installed\"") + run("pip uninstall --yes \"python-dateutil\" || echo \"python-dateutil not installed\"") + run("pip uninstall --yes \"orquesta\" || echo \"orquesta not installed\"") diff --git a/tools/launchdev.sh b/tools/launchdev.sh index 5cc6b13899..a8a97544fc 100755 --- a/tools/launchdev.sh +++ b/tools/launchdev.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -x function usage() { echo "Usage: $0 [start|stop|restart|startclean] [-r runner_count] [-s scheduler_count] [-w workflow_engine_count] [-g] [-x] [-c] [-6] [-m]" >&2 @@ -428,7 +429,8 @@ function st2start(){ fi if [ "$copy_test_packs" = true ]; then - st2 run packs.setup_virtualenv packs=fixtures + which st2 + st2 --config-file $ST2_CONF --debug run packs.setup_virtualenv packs=fixtures if [ $? != 0 ]; then echo "Warning: Unable to setup virtualenv for the \"tests\" pack. Please setup virtualenv for the \"tests\" pack before running integration tests" fi