16 changes: 8 additions & 8 deletions .travis.yml
@@ -16,16 +16,22 @@ matrix:
include:
- env: TASK=ci-unit NODE_INDEX=0 NODE_TOTAL=2
python: 2.7
name: "Unit Tests (Python 2.7) - 1"
- env: TASK=ci-unit NODE_INDEX=1 NODE_TOTAL=2
python: 2.7
name: "Unit Tests (Python 2.7) - 2"
- env: TASK=ci-integration
python: 2.7
name: "Integration Tests (Python 2.7)"
- env: TASK="ci-checks ci-packs-tests"
python: 2.7
name: "Lint Checks, Packs Tests (Python 2.7)"
- env: TASK="compilepy3 ci-py3-unit" CACHE_NAME=py3
python: 3.6
name: "Unit Tests (Python 3.6)"
- env: TASK="ci-py3-integration" CACHE_NAME=py3
python: 3.6
name: "Integration Tests (Python 3.6)"
addons:
apt:
sources:
@@ -82,11 +88,5 @@ before_cache:
- if [ ${TRAVIS_PULL_REQUEST} = 'false' ]; then rm -rf virtualenv/; fi

after_success:
- if [ ${TASK} = 'ci-unit' ] || [ ${TASK} = 'ci-integration' ]; then codecov; fi

# https://docs.travis-ci.com/user/notifications/#Webhooks-Delivery-Format
#notifications:
# webhooks:
# #- https://ci-webhooks.stackstorm.net/webhooks/build/events
# # See: webhook.site/#/06fde88c-1610-4c85-ba4e-d9bcacfefe4c
# - http://webhook.site/06fde88c-1610-4c85-ba4e-d9bcacfefe4c
# NOTE: We only generate and submit coverage report for master and version branches
- if [ ${TASK} = 'ci-unit' ] || [ ${TASK} = 'ci-integration' ] && [ ${TRAVIS_PULL_REQUEST} = 'false' ]; then codecov; fi
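
Side note on the shell here: "&&" and "||" have equal precedence in bash and are evaluated left to right, so the new after_success line groups as "(TASK is ci-unit or ci-integration) and TRAVIS_PULL_REQUEST is 'false'", which matches the NOTE above. A minimal Python sketch of that decision (illustrative only, not part of the change; the helper name is invented):

import os

def should_submit_coverage(env):
    # Coverage is only generated and submitted for master/version branch builds,
    # never for pull request builds.
    task = env.get("TASK", "")
    is_pull_request = env.get("TRAVIS_PULL_REQUEST", "false") != "false"
    return task in ("ci-unit", "ci-integration") and not is_pull_request

if __name__ == "__main__":
    if should_submit_coverage(os.environ):
        print("would run: codecov")
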
95 changes: 64 additions & 31 deletions Makefile
@@ -49,8 +49,6 @@ PIP_OPTIONS := $(ST2_PIP_OPTIONS)

NOSE_OPTS := --rednose --immediate --with-parallel
NOSE_TIME := $(NOSE_TIME)
NOSE_COVERAGE_FLAGS := --with-coverage --cover-branches --cover-erase
NOSE_COVERAGE_PACKAGES := --cover-package=$(COMPONENTS_TEST_COMMA)

ifdef NOSE_TIME
NOSE_OPTS := --rednose --immediate --with-parallel --with-timer
@@ -60,7 +58,22 @@ ifndef PIP_OPTIONS
PIP_OPTIONS :=
endif

ifneq ($(INCLUDE_TESTS_IN_COVERAGE),)
# NOTE: We only run coverage on master and version branches and not on pull requests since
# it has a big performance overhead and is very slow.
ifeq ($(TRAVIS_PULL_REQUEST),false)
ENABLE_COVERAGE := yes
endif

ifdef ENABLE_COVERAGE
NOSE_COVERAGE_FLAGS := --with-coverage --cover-branches --cover-erase
NOSE_COVERAGE_PACKAGES := --cover-package=$(COMPONENTS_TEST_COMMA)
else
INCLUDE_TESTS_IN_COVERAGE :=
endif

# If we aren't running test coverage, don't try to include tests in coverage
# results
ifdef INCLUDE_TESTS_IN_COVERAGE
NOSE_COVERAGE_FLAGS += --cover-tests
NOSE_COVERAGE_PACKAGES := $(NOSE_COVERAGE_PACKAGES),$(COMPONENTS_TEST_MODULES_COMMA)
endif
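
To make the interplay of these variables explicit, here is a hedged Python sketch of the same decision logic (not part of the Makefile; the function and its arguments are invented for illustration):

def nose_coverage_options(travis_pull_request, include_tests_in_coverage,
                          test_packages, test_module_packages):
    # Pull request builds skip coverage entirely since it adds a large runtime overhead.
    if travis_pull_request != "false":
        return []
    flags = ["--with-coverage", "--cover-branches", "--cover-erase"]
    packages = list(test_packages)
    if include_tests_in_coverage:
        # Including tests in coverage only makes sense when coverage is enabled.
        flags.append("--cover-tests")
        packages.extend(test_module_packages)
    flags.append("--cover-package=" + ",".join(packages))
    return flags

print(nose_coverage_options("false", True, ["st2common", "st2api"], ["tests.unit"]))
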
@@ -99,10 +112,16 @@ play:
@echo
@echo COMPONENT_PYTHONPATH=$(COMPONENT_PYTHONPATH)
@echo
@echo TRAVIS_PULL_REQUEST=$(TRAVIS_PULL_REQUEST)
@echo
@echo ENABLE_COVERAGE=$(ENABLE_COVERAGE)
@echo
@echo NOSE_COVERAGE_FLAGS=$(NOSE_COVERAGE_FLAGS)
@echo
@echo NOSE_COVERAGE_PACKAGES=$(NOSE_COVERAGE_PACKAGES)
@echo
@echo INCLUDE_TESTS_IN_COVERAGE=$(INCLUDE_TESTS_IN_COVERAGE)
@echo

.PHONY: check
check: requirements flake8 checklogs
@@ -182,7 +201,7 @@ generate-api-spec: requirements .generate-api-spec
echo "# Edit st2common/st2common/openapi.yaml.j2 and then run" >> st2common/st2common/openapi.yaml
echo "# make .generate-api-spec" >> st2common/st2common/openapi.yaml
echo "# to generate the final spec file" >> st2common/st2common/openapi.yaml
. virtualenv/bin/activate; st2common/bin/st2-generate-api-spec --config-file conf/st2.dev.conf >> st2common/st2common/openapi.yaml
. $(VIRTUALENV_DIR)/bin/activate; st2common/bin/st2-generate-api-spec --config-file conf/st2.dev.conf >> st2common/st2common/openapi.yaml

.PHONY: circle-lint-api-spec
circle-lint-api-spec:
@@ -231,13 +250,13 @@ clean: .cleanpycs
compile:
@echo "======================= compile ========================"
@echo "------- Compile all .py files (syntax check test - Python 2) ------"
@if python -c 'import compileall,re; compileall.compile_dir(".", rx=re.compile(r"/virtualenv|.tox"), quiet=True)' | grep .; then exit 1; else exit 0; fi
@if python -c 'import compileall,re; compileall.compile_dir(".", rx=re.compile(r"/virtualenv|virtualenv-osx|.tox"), quiet=True)' | grep .; then exit 1; else exit 0; fi

.PHONY: compilepy3
compilepy3:
@echo "======================= compile ========================"
@echo "------- Compile all .py files (syntax check test - Python 3) ------"
@if python3 -c 'import compileall,re; compileall.compile_dir(".", rx=re.compile(r"/virtualenv|.tox|./st2tests/st2tests/fixtures/packs/test"), quiet=True)' | grep .; then exit 1; else exit 0; fi
@if python3 -c 'import compileall,re; compileall.compile_dir(".", rx=re.compile(r"/virtualenv|virtualenv-osx|.tox|./st2tests/st2tests/fixtures/packs/test"), quiet=True)' | grep .; then exit 1; else exit 0; fi

.PHONY: .cleanpycs
.cleanpycs:
@@ -316,14 +335,14 @@ requirements: virtualenv .sdist-requirements
$(VIRTUALENV_DIR)/bin/pip install --upgrade "virtualenv==15.1.0" # Required for packs.install in dev envs.

# Generate all requirements to support current CI pipeline.
$(VIRTUALENV_DIR)/bin/python scripts/fixate-requirements.py --skip=virtualenv -s st2*/in-requirements.txt contrib/runners/*/in-requirements.txt -f fixed-requirements.txt -o requirements.txt
$(VIRTUALENV_DIR)/bin/python scripts/fixate-requirements.py --skip=virtualenv,virtualenv-osx -s st2*/in-requirements.txt contrib/runners/*/in-requirements.txt -f fixed-requirements.txt -o requirements.txt

# Generate final requirements.txt file for each component
@for component in $(COMPONENTS_WITH_RUNNERS); do\
echo "==========================================================="; \
echo "Generating requirements.txt for" $$component; \
echo "==========================================================="; \
$(VIRTUALENV_DIR)/bin/python scripts/fixate-requirements.py --skip=virtualenv -s $$component/in-requirements.txt -f fixed-requirements.txt -o $$component/requirements.txt; \
$(VIRTUALENV_DIR)/bin/python scripts/fixate-requirements.py --skip=virtualenv,virtualenv-osx -s $$component/in-requirements.txt -f fixed-requirements.txt -o $$component/requirements.txt; \
done

# Fix for Travis CI race
@@ -424,7 +443,7 @@ unit-tests: requirements .unit-tests
done

.PHONY: .run-unit-tests-coverage
ifneq ($(INCLUDE_TESTS_IN_COVERAGE),)
ifdef INCLUDE_TESTS_IN_COVERAGE
.run-unit-tests-coverage: NOSE_COVERAGE_PACKAGES := $(NOSE_COVERAGE_PACKAGES),tests.unit
endif
.run-unit-tests-coverage:
@@ -449,29 +468,36 @@ endif

.PHONY: .combine-unit-tests-coverage
.combine-unit-tests-coverage: .run-unit-tests-coverage
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.unit \
coverage combine .coverage.unit.*
@if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.unit \
coverage combine .coverage.unit.*; \
fi

.coverage.unit:
@compgen -G '.coverage.unit.*' && \
@if compgen -G '.coverage.unit.*'; then \
for coverage_result in $$(compgen -G '.coverage.unit.*'); do \
echo "Combining data from $${coverage_result}"; \
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.unit \
coverage combine $${coverage_result}; \
done \
|| \
done; \
else \
echo "Running unit tests"; \
make .combine-unit-tests-coverage
make .combine-unit-tests-coverage; \
fi

.PHONY: .report-unit-tests-coverage
.report-unit-tests-coverage: .coverage.unit
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.unit \
coverage report
@if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.unit \
coverage report; \
fi

.PHONY: .unit-tests-coverage-html
.unit-tests-coverage-html: .coverage.unit
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.unit \
coverage html
@if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.unit \
coverage html; \
fi

.PHONY: itests
itests: requirements .itests
@@ -496,7 +522,7 @@ itests: requirements .itests
done

.PHONY: .run-integration-tests-coverage
ifneq ($(INCLUDE_TESTS_IN_COVERAGE),)
ifdef INCLUDE_TESTS_IN_COVERAGE
.run-integration-tests-coverage: NOSE_COVERAGE_PACKAGES := $(NOSE_COVERAGE_PACKAGES),tests.integration
endif
.run-integration-tests-coverage:
@@ -521,29 +547,36 @@ endif

.PHONY: .combine-integration-tests-coverage
.combine-integration-tests-coverage: .run-integration-tests-coverage
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.integration \
coverage combine .coverage.integration.*
@if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.integration \
coverage combine .coverage.integration.*; \
Review comment on this hunk from arm4b (Member), Jul 24, 2018:
BTW, combine itself looks suspicious, because codecov already combines coverage natively, sent from different jobs within one build: https://docs.codecov.io/docs/merging-reports

Reply from a contributor:
That's true, but I also wanted to combine the tests locally. I can remove that if we want to.
fi

.coverage.integration:
@compgen -G '.coverage.integration.*' && \
@if compgen -G '.coverage.integration.*'; then \
for coverage_result in $$(compgen -G '.coverage.integration.*'); do \
echo "Combining data from $${coverage_result}"; \
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.integration \
coverage combine $${coverage_result}; \
done \
|| \
done; \
else \
echo "Running integration tests"; \
make .combine-integration-tests-coverage
make .combine-integration-tests-coverage; \
fi

.PHONY: .report-integration-tests-coverage
.report-integration-tests-coverage: .coverage.integration
@. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.integration \
coverage report
@if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.integration \
coverage report; \
fi

.PHONY: .integration-tests-coverage-html
.integration-tests-coverage-html: .coverage.integration
@. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.integration \
coverage html
@if [ -n "$(NOSE_COVERAGE_FLAGS)" ]; then \
. $(VIRTUALENV_DIR)/bin/activate; COVERAGE_FILE=.coverage.integration \
coverage html; \
fi

.PHONY: .itests-coverage-html
.itests-coverage-html: .integration-tests-coverage-html
@@ -560,7 +593,7 @@ mistral-itests: requirements .mistral-itests
. $(VIRTUALENV_DIR)/bin/activate; nosetests $(NOSE_OPTS) -s -v st2tests/integration/mistral || exit 1;

.PHONY: .run-mistral-itests-coverage
ifneq ($(INCLUDE_TESTS_IN_COVERAGE),)
ifdef INCLUDE_TESTS_IN_COVERAGE
.run-mistral-itests-coverage: NOSE_COVERAGE_PACKAGES := $(NOSE_COVERAGE_PACKAGES),st2tests.mistral.integration
endif
.run-mistral-itests-coverage:
2 changes: 1 addition & 1 deletion requirements.txt
@@ -7,7 +7,7 @@ cryptography==2.2.2
eventlet==0.23.0
flex==6.13.1
git+https://github.com/Kami/logshipper.git@stackstorm_patched#egg=logshipper
git+https://github.com/StackStorm/orchestra.git#egg=orchestra
git+https://github.com/StackStorm/orchestra.git@master#egg=orchestra
git+https://github.com/StackStorm/python-mistralclient.git#egg=python-mistralclient
git+https://github.com/StackStorm/st2-auth-backend-flat-file.git@master#egg=st2-auth-backend-flat-file
gitpython==2.1.10
2 changes: 1 addition & 1 deletion st2common/in-requirements.txt
@@ -9,7 +9,7 @@ jsonschema
kombu
mongoengine
networkx
git+https://github.com/StackStorm/orchestra.git#egg=orchestra
git+https://github.com/StackStorm/orchestra.git@master#egg=orchestra
oslo.config
paramiko
pyyaml
2 changes: 1 addition & 1 deletion st2common/requirements.txt
@@ -3,7 +3,7 @@ apscheduler==3.5.1
cryptography==2.2.2
eventlet==0.23.0
flex==6.13.1
git+https://github.com/StackStorm/orchestra.git#egg=orchestra
git+https://github.com/StackStorm/orchestra.git@master#egg=orchestra
greenlet==0.4.13
ipaddr
jinja2
1 change: 1 addition & 0 deletions st2common/tests/unit/test_db.py
@@ -170,6 +170,7 @@ def test_db_setup_connecting_info_logging(self, mock_log, mock_mongoengine):


class DbCleanupTest(DbTestCase):
ensure_indexes = True

def test_cleanup(self):
"""
35 changes: 28 additions & 7 deletions st2tests/st2tests/base.py
@@ -14,6 +14,8 @@
# limitations under the License.

from __future__ import absolute_import
from __future__ import print_function

try:
import simplejson as json
except ImportError:
@@ -176,6 +178,10 @@ def tearDownClass(cls):


class BaseDbTestCase(BaseTestCase):
# True to synchronously ensure indexes after db_setup is called - NOTE: This is only needed
# with older MongoDB versions. With recent versions this is not needed for the tests anymore
# and offers significant test speed ups.
ensure_indexes = False

# Set to True to enable printing of all the log messages to the console
DISPLAY_LOG_MESSAGES = False
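
With the new default in place, a test case that genuinely depends on indexes being present opts back in per class, along the lines of this hypothetical example (mirroring the DbCleanupTest change earlier in the diff):

from st2tests.base import DbTestCase

class IndexDependentTest(DbTestCase):
    # Opt back into synchronous index creation; only needed for tests that rely on
    # indexes existing (e.g. unique-constraint behaviour or index cleanup), since
    # ensuring indexes noticeably slows the test suite down.
    ensure_indexes = True

    def test_needs_indexes(self):
        # Hypothetical placeholder test body.
        pass
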
@@ -196,30 +202,45 @@ def _establish_connection_and_re_create_db(cls):
cls.db_connection = db_setup(
cfg.CONF.database.db_name, cfg.CONF.database.host, cfg.CONF.database.port,
username=username, password=password, ensure_indexes=False)

cls._drop_collections()
cls.db_connection.drop_database(cfg.CONF.database.db_name)

# Explicity ensure indexes after we re-create the DB otherwise ensure_indexes could failure
# inside db_setup if test inserted invalid data
db_ensure_indexes()
# Explicitly ensure indexes after we re-create the DB, otherwise ensure_indexes could fail
# inside db_setup if test inserted invalid data.
# NOTE: This is only needed in distributed scenarios (production deployments) where
# multiple services can start up at the same time and race conditions are possible.
if cls.ensure_indexes:
msg = ('Ensuring indexes for all the models, this could significantly slow down the '
'tests')
print('#' * len(msg), file=sys.stderr)
print(msg, file=sys.stderr)
print('#' * len(msg), file=sys.stderr)
db_ensure_indexes()

@classmethod
def _drop_db(cls):
cls._drop_collections()

if cls.db_connection is not None:
cls.db_connection.drop_database(cfg.CONF.database.db_name)

db_teardown()
cls.db_connection = None

@classmethod
def _drop_collections(cls):
# XXX: Explicitly drop all the collection. Otherwise, artifacts are left over in
# XXX: Explicitly drop all the collections. Otherwise, artifacts are left over in
# subsequent tests.
# See: https://github.com/MongoEngine/mongoengine/issues/566
# See: https://github.com/MongoEngine/mongoengine/issues/565
global ALL_MODELS
for model in ALL_MODELS:
model.drop_collection()

# NOTE: In older MongoDB versions you needed to drop all the collections prior to dropping
# the database - that's not needed anymore with the WiredTiger engine

# for model in ALL_MODELS:
# model.drop_collection()
return


class DbTestCase(BaseDbTestCase):
Expand Down