From 4950ff29c551c6e62e0e059a917d4e7ea4439474 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 18 Jul 2022 17:12:21 -0500 Subject: [PATCH 1/7] rm st2exporter/ --- st2exporter/MANIFEST.in | 10 - st2exporter/Makefile | 52 ----- st2exporter/README.md | 3 - st2exporter/bin/st2exporter | 8 - st2exporter/conf/logging.exporter.conf | 44 ---- st2exporter/conf/st2exporter.dev.conf | 9 - st2exporter/conf/syslog.exporter.conf | 22 -- st2exporter/dist_utils.py | 172 --------------- st2exporter/in-requirements.txt | 5 - st2exporter/requirements.txt | 11 - st2exporter/setup.py | 49 ----- st2exporter/st2exporter/__init__.py | 0 st2exporter/st2exporter/cmd/__init__.py | 0 .../st2exporter/cmd/st2exporter_starter.py | 72 ------- st2exporter/st2exporter/config.py | 75 ------- st2exporter/st2exporter/exporter/__init__.py | 0 st2exporter/st2exporter/exporter/dumper.py | 195 ----------------- .../st2exporter/exporter/file_writer.py | 48 ----- .../st2exporter/exporter/json_converter.py | 26 --- st2exporter/st2exporter/worker.py | 135 ------------ st2exporter/tests/integration/__init__.py | 0 .../integration/test_dumper_integration.py | 111 ---------- .../tests/integration/test_export_worker.py | 121 ----------- st2exporter/tests/unit/__init__.py | 0 st2exporter/tests/unit/test_dumper.py | 202 ------------------ st2exporter/tests/unit/test_json_converter.py | 62 ------ 26 files changed, 1432 deletions(-) delete mode 100644 st2exporter/MANIFEST.in delete mode 100644 st2exporter/Makefile delete mode 100644 st2exporter/README.md delete mode 100755 st2exporter/bin/st2exporter delete mode 100644 st2exporter/conf/logging.exporter.conf delete mode 100644 st2exporter/conf/st2exporter.dev.conf delete mode 100644 st2exporter/conf/syslog.exporter.conf delete mode 100644 st2exporter/dist_utils.py delete mode 100644 st2exporter/in-requirements.txt delete mode 100644 st2exporter/requirements.txt delete mode 100644 st2exporter/setup.py delete mode 100644 st2exporter/st2exporter/__init__.py delete 
mode 100644 st2exporter/st2exporter/cmd/__init__.py delete mode 100644 st2exporter/st2exporter/cmd/st2exporter_starter.py delete mode 100644 st2exporter/st2exporter/config.py delete mode 100644 st2exporter/st2exporter/exporter/__init__.py delete mode 100644 st2exporter/st2exporter/exporter/dumper.py delete mode 100644 st2exporter/st2exporter/exporter/file_writer.py delete mode 100644 st2exporter/st2exporter/exporter/json_converter.py delete mode 100644 st2exporter/st2exporter/worker.py delete mode 100644 st2exporter/tests/integration/__init__.py delete mode 100644 st2exporter/tests/integration/test_dumper_integration.py delete mode 100644 st2exporter/tests/integration/test_export_worker.py delete mode 100644 st2exporter/tests/unit/__init__.py delete mode 100644 st2exporter/tests/unit/test_dumper.py delete mode 100644 st2exporter/tests/unit/test_json_converter.py diff --git a/st2exporter/MANIFEST.in b/st2exporter/MANIFEST.in deleted file mode 100644 index 0f98c7b8ea..0000000000 --- a/st2exporter/MANIFEST.in +++ /dev/null @@ -1,10 +0,0 @@ -# https://docs.python.org/2/distutils/sourcedist.html#commands -# Include all files under the source tree by default. -# Another behaviour can be used in the future though. 
-recursive-include st2exporter *.* * -include dist_utils.py -include requirements.txt -include README.rst -include CHANGELOG.rst -include LICENSE -global-exclude *.pyc diff --git a/st2exporter/Makefile b/st2exporter/Makefile deleted file mode 100644 index f28c4c4cf7..0000000000 --- a/st2exporter/Makefile +++ /dev/null @@ -1,52 +0,0 @@ -WHEELDIR ?= /tmp/wheelhouse -ST2_COMPONENT := $(notdir $(CURDIR)) -ST2PKG_RELEASE ?= 1 -ST2PKG_VERSION ?= $(shell python -c "from $(ST2_COMPONENT) import __version__; print __version__,") - -ifneq (,$(wildcard /etc/debian_version)) - DEBIAN := 1 - DESTDIR ?= $(CURDIR)/debian/$(ST2_COMPONENT) -else - REDHAT := 1 -endif - -.PHONY: all install wheelhouse -all: install - -install: wheelhouse injectdeps changelog - -post_install: bdist_wheel - # post_install is triggered debian/rules file, don't call it from install target! - # We don't want anything like below to be postinst - sed -i -r "/args\s*=\s*/s%logs%/var/log/st2%g" $(DESTDIR)/etc/st2/logging.*conf - -populate_version: .stamp-populate_version -.stamp-populate_version: - # populate version should be run before any pip/setup.py works - sh ../scripts/populate-version.sh - touch $@ - -requirements: - python ../scripts/fixate-requirements.py -s in-requirements.txt -f ../fixed-requirements.txt - -changelog: populate_version -ifeq ($(DEBIAN),1) - debchange -v $(ST2PKG_VERSION)-$(ST2PKG_RELEASE) -M "automated build version: $(ST2PKG_VERSION)" -endif - -wheelhouse: .stamp-wheelhouse -.stamp-wheelhouse: populate_version requirements - # Install wheels into shared location - pip wheel --wheel-dir=$(WHEELDIR) -r requirements.txt - touch $@ - -injectdeps: .stamp-injectdeps -.stamp-injectdeps: - # We can modify requirements ONLY AFTER wheelhouse has been built - @echo "st2common" >> requirements.txt - touch $@ - -bdist_wheel: .stamp-bdist_wheel -.stamp-bdist_wheel: populate_version - python setup.py bdist_wheel -d $(WHEELDIR) - touch $@ diff --git a/st2exporter/README.md b/st2exporter/README.md 
deleted file mode 100644 index 190d05140b..0000000000 --- a/st2exporter/README.md +++ /dev/null @@ -1,3 +0,0 @@ -## St2 exporter - -Listens to execution updates from RabbitMQ and creates JSON files out of them. diff --git a/st2exporter/bin/st2exporter b/st2exporter/bin/st2exporter deleted file mode 100755 index f729ed37f3..0000000000 --- a/st2exporter/bin/st2exporter +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python3 - -import sys -from st2exporter.cmd import st2exporter_starter - - -if __name__ == "__main__": - sys.exit(st2exporter_starter.main()) diff --git a/st2exporter/conf/logging.exporter.conf b/st2exporter/conf/logging.exporter.conf deleted file mode 100644 index 3e32b2f3d6..0000000000 --- a/st2exporter/conf/logging.exporter.conf +++ /dev/null @@ -1,44 +0,0 @@ -[loggers] -keys=root - -[handlers] -keys=consoleHandler, fileHandler, auditHandler - -[formatters] -keys=simpleConsoleFormatter, verboseConsoleFormatter, gelfFormatter - -[logger_root] -level=DEBUG -handlers=consoleHandler, fileHandler, auditHandler - -[handler_consoleHandler] -class=StreamHandler -level=INFO -formatter=simpleConsoleFormatter -args=(sys.stdout,) - -[handler_fileHandler] -class=st2common.log.FormatNamedFileHandler -level=DEBUG -formatter=verboseConsoleFormatter -args=('logs/st2exporter.log',) - -[handler_auditHandler] -class=st2common.log.FormatNamedFileHandler -level=AUDIT -formatter=gelfFormatter -args=('logs/st2exporter.audit.log',) - -[formatter_simpleConsoleFormatter] -class=st2common.logging.formatters.ConsoleLogFormatter -format=%(asctime)s %(levelname)s [-] %(message)s -datefmt= - -[formatter_verboseConsoleFormatter] -class=st2common.logging.formatters.ConsoleLogFormatter -format=%(asctime)s %(thread)s %(levelname)s %(module)s [-] %(message)s -datefmt= - -[formatter_gelfFormatter] -class=st2common.logging.formatters.GelfLogFormatter -format=%(message)s diff --git a/st2exporter/conf/st2exporter.dev.conf b/st2exporter/conf/st2exporter.dev.conf deleted file mode 100644 index 
b764118f2d..0000000000 --- a/st2exporter/conf/st2exporter.dev.conf +++ /dev/null @@ -1,9 +0,0 @@ -[exporter] -logging = st2actions/conf/logging.notifier.conf - -[messaging] -url = amqp://guest:guest@127.0.0.1:5672/ - -[log] -excludes = requests,paramiko -redirect_stderr = False diff --git a/st2exporter/conf/syslog.exporter.conf b/st2exporter/conf/syslog.exporter.conf deleted file mode 100644 index 3a503027ff..0000000000 --- a/st2exporter/conf/syslog.exporter.conf +++ /dev/null @@ -1,22 +0,0 @@ -[loggers] -keys=root - -[handlers] -keys=syslogHandler - -[formatters] -keys=syslogVerboseFormatter - -[logger_root] -level=DEBUG -handlers=syslogHandler - -[handler_syslogHandler] -class=st2common.log.ConfigurableSyslogHandler -level=DEBUG -formatter=syslogVerboseFormatter -args=() - -[formatter_syslogVerboseFormatter] -format=st2notifier[%(process)d]: %(levelname)s %(thread)s %(module)s [-] %(message)s -datefmt= diff --git a/st2exporter/dist_utils.py b/st2exporter/dist_utils.py deleted file mode 100644 index 2f2043cf29..0000000000 --- a/st2exporter/dist_utils.py +++ /dev/null @@ -1,172 +0,0 @@ -# -*- coding: utf-8 -*- -# NOTE: This file is auto-generated - DO NOT EDIT MANUALLY -# Instead modify scripts/dist_utils.py and run 'make .sdist-requirements' to -# update dist_utils.py files for all components - -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import - -import os -import re -import sys - -from distutils.version import StrictVersion - -# NOTE: This script can't rely on any 3rd party dependency so we need to use this code here -# -# TODO: Why can't this script rely on 3rd party dependencies? Is it because it has to import -# from pip? -# -# TODO: Dear future developer, if you are back here fixing a bug with how we parse -# requirements files, please look into using the packaging package on PyPI: -# https://packaging.pypa.io/en/latest/requirements/ -# and specifying that in the `setup_requires` argument to `setuptools.setup()` -# for subpackages. -# At the very least we can vendorize some of their code instead of reimplementing -# each piece of their code every time our parsing breaks. -PY3 = sys.version_info[0] == 3 - -if PY3: - text_type = str -else: - text_type = unicode # noqa # pylint: disable=E0602 - -GET_PIP = "curl https://bootstrap.pypa.io/get-pip.py | python" - -__all__ = [ - "check_pip_is_installed", - "check_pip_version", - "fetch_requirements", - "apply_vagrant_workaround", - "get_version_string", - "parse_version_string", -] - - -def check_pip_is_installed(): - """ - Ensure that pip is installed. - """ - try: - import pip # NOQA - except ImportError as e: - print("Failed to import pip: %s" % (text_type(e))) - print("") - print("Download pip:\n%s" % (GET_PIP)) - sys.exit(1) - - return True - - -def check_pip_version(min_version="6.0.0"): - """ - Ensure that a minimum supported version of pip is installed. - """ - check_pip_is_installed() - - import pip - - if StrictVersion(pip.__version__) < StrictVersion(min_version): - print( - "Upgrade pip, your version '{0}' " - "is outdated. Minimum required version is '{1}':\n{2}".format( - pip.__version__, min_version, GET_PIP - ) - ) - sys.exit(1) - - return True - - -def fetch_requirements(requirements_file_path): - """ - Return a list of requirements and links by parsing the provided requirements file. 
- """ - links = [] - reqs = [] - - def _get_link(line): - vcs_prefixes = ["git+", "svn+", "hg+", "bzr+"] - - for vcs_prefix in vcs_prefixes: - if line.startswith(vcs_prefix) or line.startswith("-e %s" % (vcs_prefix)): - req_name = re.findall(".*#egg=(.+)([&|@]).*$", line) - - if not req_name: - req_name = re.findall(".*#egg=(.+?)$", line) - else: - req_name = req_name[0] - - if not req_name: - raise ValueError( - 'Line "%s" is missing "#egg="' % (line) - ) - - link = line.replace("-e ", "").strip() - return link, req_name[0] - - return None, None - - with open(requirements_file_path, "r") as fp: - for line in fp.readlines(): - line = line.strip() - - if line.startswith("#") or not line: - continue - - link, req_name = _get_link(line=line) - - if link: - links.append(link) - else: - req_name = line - - if ";" in req_name: - req_name = req_name.split(";")[0].strip() - - reqs.append(req_name) - - return (reqs, links) - - -def apply_vagrant_workaround(): - """ - Function which detects if the script is being executed inside vagrant and if it is, it deletes - "os.link" attribute. - Note: Without this workaround, setup.py sdist will fail when running inside a shared directory - (nfs / virtualbox shared folders). - """ - if os.environ.get("USER", None) == "vagrant": - del os.link - - -def get_version_string(init_file): - """ - Read __version__ string for an init file. - """ - - with open(init_file, "r") as fp: - content = fp.read() - version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", content, re.M) - if version_match: - return version_match.group(1) - - raise RuntimeError("Unable to find version string in %s." 
% (init_file)) - - -# alias for get_version_string -parse_version_string = get_version_string diff --git a/st2exporter/in-requirements.txt b/st2exporter/in-requirements.txt deleted file mode 100644 index 62dbc998c7..0000000000 --- a/st2exporter/in-requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -# Remember to list implicit packages here, otherwise version won't be fixated! -six -eventlet -kombu -oslo.config diff --git a/st2exporter/requirements.txt b/st2exporter/requirements.txt deleted file mode 100644 index 9e00c8a9b0..0000000000 --- a/st2exporter/requirements.txt +++ /dev/null @@ -1,11 +0,0 @@ -# Don't edit this file. It's generated automatically! -# If you want to update global dependencies, modify fixed-requirements.txt -# and then run 'make requirements' to update requirements.txt for all -# components. -# If you want to update depdencies for a single component, modify the -# in-requirements.txt for that component and then run 'make requirements' to -# update the component requirements.txt -eventlet==0.30.2 -kombu==5.0.2 -oslo.config>=1.12.1,<1.13 -six==1.13.0 diff --git a/st2exporter/setup.py b/st2exporter/setup.py deleted file mode 100644 index afaae79cac..0000000000 --- a/st2exporter/setup.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os.path - -from setuptools import setup, find_packages - -from dist_utils import fetch_requirements -from dist_utils import apply_vagrant_workaround -from st2exporter import __version__ - -ST2_COMPONENT = "st2exporter" -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) -REQUIREMENTS_FILE = os.path.join(BASE_DIR, "requirements.txt") - -install_reqs, dep_links = fetch_requirements(REQUIREMENTS_FILE) - -apply_vagrant_workaround() -setup( - name=ST2_COMPONENT, - version=__version__, - description="{} StackStorm event-driven automation platform component".format( - ST2_COMPONENT - ), - author="StackStorm", - author_email="info@stackstorm.com", - license="Apache License (2.0)", - url="https://stackstorm.com/", - install_requires=install_reqs, - dependency_links=dep_links, - test_suite=ST2_COMPONENT, - zip_safe=False, - include_package_data=True, - packages=find_packages(exclude=["setuptools", "tests"]), - scripts=["bin/st2exporter"], -) diff --git a/st2exporter/st2exporter/__init__.py b/st2exporter/st2exporter/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/st2exporter/st2exporter/cmd/__init__.py b/st2exporter/st2exporter/cmd/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/st2exporter/st2exporter/cmd/st2exporter_starter.py b/st2exporter/st2exporter/cmd/st2exporter_starter.py deleted file mode 100644 index 2b86ef2707..0000000000 --- a/st2exporter/st2exporter/cmd/st2exporter_starter.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from st2common.util.monkey_patch import monkey_patch - -monkey_patch() - -import os -import sys - -from st2common import log as logging -from st2common.service_setup import setup as common_setup -from st2common.service_setup import teardown as common_teardown -from st2exporter import config -from st2exporter import worker - -__all__ = ["main"] - - -LOG = logging.getLogger(__name__) - - -def _setup(): - common_setup( - service="exporter", - config=config, - setup_db=True, - register_mq_exchanges=True, - register_signal_handlers=True, - ) - - -def _run_worker(): - LOG.info("(PID=%s) Exporter started.", os.getpid()) - export_worker = worker.get_worker() - try: - export_worker.start(wait=True) - except (KeyboardInterrupt, SystemExit): - LOG.info("(PID=%s) Exporter stopped.", os.getpid()) - export_worker.shutdown() - except: - return 1 - return 0 - - -def _teardown(): - common_teardown() - - -def main(): - try: - _setup() - return _run_worker() - except SystemExit as exit_code: - sys.exit(exit_code) - except: - LOG.exception("(PID=%s) Exporter quit due to exception.", os.getpid()) - return 1 - finally: - _teardown() diff --git a/st2exporter/st2exporter/config.py b/st2exporter/st2exporter/config.py deleted file mode 100644 index 4b43550ef2..0000000000 --- a/st2exporter/st2exporter/config.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Configuration options registration and useful routines. -""" - -from __future__ import absolute_import - -from oslo_config import cfg - -import st2common.config as common_config -from st2common.constants.system import VERSION_STRING -from st2common.constants.system import DEFAULT_CONFIG_FILE_PATH - -CONF = cfg.CONF - - -def parse_args(args=None): - cfg.CONF( - args=args, - version=VERSION_STRING, - default_config_files=[DEFAULT_CONFIG_FILE_PATH], - ) - - -def get_logging_config_path(): - return cfg.CONF.exporter.logging - - -def register_opts(ignore_errors=False): - _register_common_opts(ignore_errors=ignore_errors) - _register_app_opts(ignore_errors=ignore_errors) - - -def _register_common_opts(ignore_errors=False): - common_config.register_opts(ignore_errors=ignore_errors) - - -def _register_app_opts(ignore_errors=False): - dump_opts = [ - cfg.StrOpt( - "dump_dir", - default="/opt/stackstorm/exports/", - help="Directory to dump data to.", - ) - ] - - common_config.do_register_opts( - dump_opts, group="exporter", ignore_errors=ignore_errors - ) - - logging_opts = [ - cfg.StrOpt( - "logging", - default="/etc/st2/logging.exporter.conf", - help="location of the logging.exporter.conf file", - ) - ] - - common_config.do_register_opts( - logging_opts, group="exporter", ignore_errors=ignore_errors - ) diff --git a/st2exporter/st2exporter/exporter/__init__.py b/st2exporter/st2exporter/exporter/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/st2exporter/st2exporter/exporter/dumper.py 
b/st2exporter/st2exporter/exporter/dumper.py deleted file mode 100644 index 12fbeb4f83..0000000000 --- a/st2exporter/st2exporter/exporter/dumper.py +++ /dev/null @@ -1,195 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import eventlet -from six.moves import queue - -from st2common import log as logging -from st2exporter.exporter.file_writer import TextFileWriter -from st2exporter.exporter.json_converter import JsonConverter -from st2common.models.db.marker import DumperMarkerDB -from st2common.persistence.marker import DumperMarker -from st2common.util import date as date_utils -from st2common.util import isotime - -__all__ = ["Dumper"] - -ALLOWED_EXTENSIONS = ["json"] - -CONVERTERS = {"json": JsonConverter} - -LOG = logging.getLogger(__name__) - - -class Dumper(object): - def __init__( - self, - queue, - export_dir, - file_format="json", - file_prefix="st2-executions-", - batch_size=1000, - sleep_interval=60, - max_files_per_sleep=5, - file_writer=None, - ): - if not queue: - raise Exception("Need a queue to consume data from.") - - if not export_dir: - raise Exception("Export dir needed to dump files to.") - - self._export_dir = export_dir - if not os.path.exists(self._export_dir): - raise Exception( - "Dir path %s does not exist. Create one before using exporter." 
- % self._export_dir - ) - - self._file_format = file_format.lower() - if self._file_format not in ALLOWED_EXTENSIONS: - raise ValueError("Disallowed extension %s." % file_format) - - self._file_prefix = file_prefix - self._batch_size = batch_size - self._max_files_per_sleep = max_files_per_sleep - self._queue = queue - self._flush_thread = None - self._sleep_interval = sleep_interval - self._converter = CONVERTERS[self._file_format]() - self._shutdown = False - self._persisted_marker = None - - if not file_writer: - self._file_writer = TextFileWriter() - - def start(self, wait=False): - self._flush_thread = eventlet.spawn(self._flush) - if wait: - self.wait() - - def wait(self): - self._flush_thread.wait() - - def stop(self): - self._shutdown = True - return eventlet.kill(self._flush_thread) - - def _get_batch(self): - if self._queue.empty(): - return None - - executions_to_write = [] - for _ in range(self._batch_size): - try: - item = self._queue.get(block=False) - except queue.Empty: - break - else: - executions_to_write.append(item) - - LOG.debug("Returning %d items in batch.", len(executions_to_write)) - LOG.debug("Remaining items in queue: %d", self._queue.qsize()) - return executions_to_write - - def _flush(self): - while not self._shutdown: - while self._queue.empty(): - eventlet.sleep(self._sleep_interval) - - try: - self._write_to_disk() - except: - LOG.error("Failed writing data to disk.") - - def _write_to_disk(self): - count = 0 - self._create_date_folder() - - for _ in range(self._max_files_per_sleep): - batch = self._get_batch() - - if not batch: - return count - - try: - self._write_batch_to_disk(batch) - self._update_marker(batch) - count += 1 - except: - LOG.exception("Writing batch to disk failed.") - return count - - def _create_date_folder(self): - folder_name = self._get_date_folder() - folder_path = os.path.join(self._export_dir, folder_name) - - if not os.path.exists(folder_path): - try: - os.makedirs(folder_path) - except: - 
LOG.exception("Unable to create sub-folder %s for export.", folder_name) - raise - - def _write_batch_to_disk(self, batch): - doc_to_write = self._converter.convert(batch) - self._file_writer.write_text(doc_to_write, self._get_file_name()) - - def _get_file_name(self): - timestring = date_utils.get_datetime_utc_now().strftime("%Y-%m-%dT%H:%M:%S.%fZ") - file_name = self._file_prefix + timestring + "." + self._file_format - file_name = os.path.join(self._export_dir, self._get_date_folder(), file_name) - return file_name - - def _get_date_folder(self): - return date_utils.get_datetime_utc_now().strftime("%Y-%m-%d") - - def _update_marker(self, batch): - timestamps = [isotime.parse(item.end_timestamp) for item in batch] - new_marker = max(timestamps) - - if self._persisted_marker and self._persisted_marker > new_marker: - LOG.warn( - "Older executions are being exported. Perhaps out of order messages." - ) - - try: - self._write_marker_to_db(new_marker) - except: - LOG.exception("Failed persisting dumper marker to db.") - else: - self._persisted_marker = new_marker - - return self._persisted_marker - - def _write_marker_to_db(self, new_marker): - LOG.info("Updating marker in db to: %s", new_marker) - markers = DumperMarker.get_all() - - if len(markers) > 1: - LOG.exception("More than one dumper marker found. Using first found one.") - - marker = isotime.format(new_marker, offset=False) - updated_at = date_utils.get_datetime_utc_now() - - if markers: - marker_id = markers[0]["id"] - else: - marker_id = None - - marker_db = DumperMarkerDB(id=marker_id, marker=marker, updated_at=updated_at) - return DumperMarker.add_or_update(marker_db) diff --git a/st2exporter/st2exporter/exporter/file_writer.py b/st2exporter/st2exporter/exporter/file_writer.py deleted file mode 100644 index 49b5b4d63a..0000000000 --- a/st2exporter/st2exporter/exporter/file_writer.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import abc -import six - -__all__ = ["FileWriter", "TextFileWriter"] - - -@six.add_metaclass(abc.ABCMeta) -class FileWriter(object): - @abc.abstractmethod - def write(self, data, file_path, replace=False): - """ - Write data to file_path. - """ - pass - - -class TextFileWriter(FileWriter): - # XXX: Should support compression at some point. - - def write_text(self, text_data, file_path, replace=False, compressed=False): - if compressed: - return Exception("Compression not supported.") - - self.write(text_data, file_path, replace=replace) - - def write(self, data, file_path, replace=False): - if os.path.exists(file_path) and not replace: - raise Exception("File %s already exists." % file_path) - - with open(file_path, "w") as f: - f.write(data) diff --git a/st2exporter/st2exporter/exporter/json_converter.py b/st2exporter/st2exporter/exporter/json_converter.py deleted file mode 100644 index ba7e95c0a5..0000000000 --- a/st2exporter/st2exporter/exporter/json_converter.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from st2common.util.jsonify import json_encode - -__all__ = ["JsonConverter"] - - -class JsonConverter(object): - def convert(self, items_list): - if not isinstance(items_list, list): - raise ValueError("Items to be converted should be a list.") - json_doc = json_encode(items_list) - return json_doc diff --git a/st2exporter/st2exporter/worker.py b/st2exporter/st2exporter/worker.py deleted file mode 100644 index a5557ee41f..0000000000 --- a/st2exporter/st2exporter/worker.py +++ /dev/null @@ -1,135 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import eventlet -from six.moves import queue -from oslo_config import cfg - -from st2common import log as logging -from st2common.constants.action import ( - LIVEACTION_STATUS_SUCCEEDED, - LIVEACTION_STATUS_FAILED, - LIVEACTION_STATUS_CANCELED, -) -from st2common.models.api.execution import ActionExecutionAPI -from st2common.models.db.execution import ActionExecutionDB -from st2common.persistence.execution import ActionExecution -from st2common.persistence.marker import DumperMarker -from st2common.transport import consumers -from st2common.transport import utils as transport_utils -from st2common.util import isotime -from st2exporter.exporter.dumper import Dumper -from st2common.transport.queues import EXPORTER_WORK_QUEUE - -__all__ = ["ExecutionsExporter", "get_worker"] - -COMPLETION_STATUSES = [ - LIVEACTION_STATUS_SUCCEEDED, - LIVEACTION_STATUS_FAILED, - LIVEACTION_STATUS_CANCELED, -] -LOG = logging.getLogger(__name__) - - -class ExecutionsExporter(consumers.MessageHandler): - message_type = ActionExecutionDB - - def __init__(self, connection, queues): - super(ExecutionsExporter, self).__init__(connection, queues) - self.pending_executions = queue.Queue() - self._dumper = Dumper( - queue=self.pending_executions, export_dir=cfg.CONF.exporter.dump_dir - ) - self._consumer_thread = None - - def start(self, wait=False): - LOG.info("Bootstrapping executions from db...") - try: - self._bootstrap() - except: - LOG.exception("Unable to bootstrap executions from db. 
Aborting.") - raise - self._consumer_thread = eventlet.spawn( - super(ExecutionsExporter, self).start, wait=True - ) - self._dumper.start() - if wait: - self.wait() - - def wait(self): - self._consumer_thread.wait() - self._dumper.wait() - - def shutdown(self): - self._dumper.stop() - super(ExecutionsExporter, self).shutdown() - - def process(self, execution): - LOG.debug("Got execution from queue: %s", execution) - if execution.status not in COMPLETION_STATUSES: - return - execution_api = ActionExecutionAPI.from_model(execution, mask_secrets=True) - self.pending_executions.put_nowait(execution_api) - LOG.debug("Added execution to queue.") - - def _bootstrap(self): - marker = self._get_export_marker_from_db() - LOG.info("Using marker %s..." % marker) - missed_executions = self._get_missed_executions_from_db(export_marker=marker) - LOG.info("Found %d executions not exported yet...", len(missed_executions)) - - for missed_execution in missed_executions: - if missed_execution.status not in COMPLETION_STATUSES: - continue - execution_api = ActionExecutionAPI.from_model( - missed_execution, mask_secrets=True - ) - try: - LOG.debug("Missed execution %s", execution_api) - self.pending_executions.put_nowait(execution_api) - except: - LOG.exception("Failed adding execution to in-memory queue.") - continue - LOG.info("Bootstrapped executions...") - - def _get_export_marker_from_db(self): - try: - markers = DumperMarker.get_all() - except: - return None - else: - if len(markers) >= 1: - marker = markers[0] - return isotime.parse(marker.marker) - else: - return None - - def _get_missed_executions_from_db(self, export_marker=None): - if not export_marker: - return self._get_all_executions_from_db() - - # XXX: Should adapt this query to get only executions with status - # in COMPLETION_STATUSES. 
- filters = {"end_timestamp__gt": export_marker} - LOG.info("Querying for executions with filters: %s", filters) - return ActionExecution.query(**filters) - - def _get_all_executions_from_db(self): - return ActionExecution.get_all() # XXX: Paginated call. - - -def get_worker(): - with transport_utils.get_connection() as conn: - return ExecutionsExporter(conn, [EXPORTER_WORK_QUEUE]) diff --git a/st2exporter/tests/integration/__init__.py b/st2exporter/tests/integration/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/st2exporter/tests/integration/test_dumper_integration.py b/st2exporter/tests/integration/test_dumper_integration.py deleted file mode 100644 index 0de7b91ed0..0000000000 --- a/st2exporter/tests/integration/test_dumper_integration.py +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import datetime -import os - -import mock -import six - -from six.moves import queue - -from st2common.models.api.execution import ActionExecutionAPI -from st2common.persistence.marker import DumperMarker -from st2common.util import isotime -from st2exporter.exporter.dumper import Dumper -from st2tests.base import DbTestCase -from st2tests.fixturesloader import FixturesLoader - -DESCENDANTS_PACK = "descendants" - -DESCENDANTS_FIXTURES = { - "executions": [ - "root_execution.yaml", - "child1_level1.yaml", - "child2_level1.yaml", - "child1_level2.yaml", - "child2_level2.yaml", - "child3_level2.yaml", - "child1_level3.yaml", - "child2_level3.yaml", - "child3_level3.yaml", - ] -} - - -class TestDumper(DbTestCase): - - fixtures_loader = FixturesLoader() - loaded_fixtures = fixtures_loader.load_fixtures( - fixtures_pack=DESCENDANTS_PACK, fixtures_dict=DESCENDANTS_FIXTURES - ) - loaded_executions = loaded_fixtures["executions"] - execution_apis = [] - for execution in loaded_executions.values(): - execution_apis.append(ActionExecutionAPI(**execution)) - - def get_queue(self): - executions_queue = queue.Queue() - - for execution in self.execution_apis: - executions_queue.put(execution) - return executions_queue - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def test_write_marker_to_db(self): - executions_queue = self.get_queue() - dumper = Dumper( - queue=executions_queue, - export_dir="/tmp", - batch_size=5, - max_files_per_sleep=1, - file_prefix="st2-stuff-", - file_format="json", - ) - timestamps = [ - isotime.parse(execution.end_timestamp) for execution in self.execution_apis - ] - max_timestamp = max(timestamps) - marker_db = dumper._write_marker_to_db(max_timestamp) - persisted_marker = marker_db.marker - self.assertIsInstance(persisted_marker, six.string_types) - self.assertEqual(isotime.parse(persisted_marker), max_timestamp) - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def 
test_write_marker_to_db_marker_exists(self): - executions_queue = self.get_queue() - dumper = Dumper( - queue=executions_queue, - export_dir="/tmp", - batch_size=5, - max_files_per_sleep=1, - file_prefix="st2-stuff-", - file_format="json", - ) - timestamps = [ - isotime.parse(execution.end_timestamp) for execution in self.execution_apis - ] - max_timestamp = max(timestamps) - first_marker_db = dumper._write_marker_to_db(max_timestamp) - second_marker_db = dumper._write_marker_to_db( - max_timestamp + datetime.timedelta(hours=1) - ) - markers = DumperMarker.get_all() - self.assertEqual(len(markers), 1) - final_marker_id = markers[0].id - self.assertEqual(first_marker_db.id, final_marker_id) - self.assertEqual(second_marker_db.id, final_marker_id) - self.assertEqual(markers[0].marker, second_marker_db.marker) - self.assertTrue(second_marker_db.updated_at > first_marker_db.updated_at) diff --git a/st2exporter/tests/integration/test_export_worker.py b/st2exporter/tests/integration/test_export_worker.py deleted file mode 100644 index 9237aab0e8..0000000000 --- a/st2exporter/tests/integration/test_export_worker.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import datetime -import os - -import mock - -from st2common.models.api.execution import ActionExecutionAPI -from st2common.models.db.marker import DumperMarkerDB -from st2common.persistence.marker import DumperMarker -from st2common.util import isotime -from st2common.util import date as date_utils -from st2exporter.worker import ExecutionsExporter -from st2tests.base import DbTestCase -from st2tests.fixturesloader import FixturesLoader -import st2tests.config as tests_config - -tests_config.parse_args() - -DESCENDANTS_PACK = "descendants" - -DESCENDANTS_FIXTURES = { - "executions": [ - "root_execution.yaml", - "child1_level1.yaml", - "child2_level1.yaml", - "child1_level2.yaml", - "child2_level2.yaml", - "child3_level2.yaml", - "child1_level3.yaml", - "child2_level3.yaml", - "child3_level3.yaml", - ] -} - - -class TestExportWorker(DbTestCase): - @classmethod - def setUpClass(cls): - super(TestExportWorker, cls).setUpClass() - fixtures_loader = FixturesLoader() - loaded_fixtures = fixtures_loader.save_fixtures_to_db( - fixtures_pack=DESCENDANTS_PACK, fixtures_dict=DESCENDANTS_FIXTURES - ) - TestExportWorker.saved_executions = loaded_fixtures["executions"] - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def test_get_marker_from_db(self): - marker_dt = date_utils.get_datetime_utc_now() - datetime.timedelta(minutes=5) - marker_db = DumperMarkerDB( - marker=isotime.format(marker_dt, offset=False), - updated_at=date_utils.get_datetime_utc_now(), - ) - DumperMarker.add_or_update(marker_db) - exec_exporter = ExecutionsExporter(None, None) - export_marker = exec_exporter._get_export_marker_from_db() - self.assertEqual(export_marker, date_utils.add_utc_tz(marker_dt)) - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def test_get_missed_executions_from_db_no_marker(self): - exec_exporter = ExecutionsExporter(None, None) - all_execs = exec_exporter._get_missed_executions_from_db(export_marker=None) - 
self.assertEqual(len(all_execs), len(self.saved_executions.values())) - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def test_get_missed_executions_from_db_with_marker(self): - exec_exporter = ExecutionsExporter(None, None) - all_execs = exec_exporter._get_missed_executions_from_db(export_marker=None) - min_timestamp = min([item.end_timestamp for item in all_execs]) - marker = min_timestamp + datetime.timedelta(seconds=1) - execs_greater_than_marker = [ - item for item in all_execs if item.end_timestamp > marker - ] - all_execs = exec_exporter._get_missed_executions_from_db(export_marker=marker) - self.assertTrue(len(all_execs) > 0) - self.assertTrue(len(all_execs) == len(execs_greater_than_marker)) - for item in all_execs: - self.assertTrue(item.end_timestamp > marker) - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def test_bootstrap(self): - exec_exporter = ExecutionsExporter(None, None) - exec_exporter._bootstrap() - self.assertEqual( - exec_exporter.pending_executions.qsize(), len(self.saved_executions) - ) - - count = 0 - while count < exec_exporter.pending_executions.qsize(): - self.assertIsInstance( - exec_exporter.pending_executions.get(), ActionExecutionAPI - ) - count += 1 - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def test_process(self): - some_execution = list(self.saved_executions.values())[5] - exec_exporter = ExecutionsExporter(None, None) - self.assertEqual(exec_exporter.pending_executions.qsize(), 0) - exec_exporter.process(some_execution) - self.assertEqual(exec_exporter.pending_executions.qsize(), 1) - some_execution.status = "scheduled" - exec_exporter.process(some_execution) - self.assertEqual(exec_exporter.pending_executions.qsize(), 1) - - @classmethod - def tearDownClass(cls): - super(TestExportWorker, cls).tearDownClass() diff --git a/st2exporter/tests/unit/__init__.py b/st2exporter/tests/unit/__init__.py deleted file mode 100644 index 
e69de29bb2..0000000000 diff --git a/st2exporter/tests/unit/test_dumper.py b/st2exporter/tests/unit/test_dumper.py deleted file mode 100644 index 0ddec72e3b..0000000000 --- a/st2exporter/tests/unit/test_dumper.py +++ /dev/null @@ -1,202 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import os - -import eventlet -import mock -from six.moves import queue - -from st2common.models.api.execution import ActionExecutionAPI -from st2common.util import isotime -from st2exporter.exporter.dumper import Dumper -from st2exporter.exporter.file_writer import TextFileWriter -from st2tests.base import EventletTestCase -from st2tests.fixturesloader import FixturesLoader -from st2common.util import date as date_utils - -DESCENDANTS_PACK = "descendants" - -DESCENDANTS_FIXTURES = { - "executions": [ - "root_execution.yaml", - "child1_level1.yaml", - "child2_level1.yaml", - "child1_level2.yaml", - "child2_level2.yaml", - "child3_level2.yaml", - "child1_level3.yaml", - "child2_level3.yaml", - "child3_level3.yaml", - ] -} - - -class TestDumper(EventletTestCase): - - fixtures_loader = FixturesLoader() - loaded_fixtures = fixtures_loader.load_fixtures( - fixtures_pack=DESCENDANTS_PACK, fixtures_dict=DESCENDANTS_FIXTURES - ) - loaded_executions = loaded_fixtures["executions"] - execution_apis = [] - for execution in loaded_executions.values(): - 
execution_apis.append(ActionExecutionAPI(**execution)) - - def get_queue(self): - executions_queue = queue.Queue() - - for execution in self.execution_apis: - executions_queue.put(execution) - return executions_queue - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def test_get_batch_batch_size_greater_than_actual(self): - executions_queue = self.get_queue() - qsize = executions_queue.qsize() - self.assertTrue(qsize > 0) - dumper = Dumper(queue=executions_queue, batch_size=2 * qsize, export_dir="/tmp") - batch = dumper._get_batch() - self.assertEqual(len(batch), qsize) - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def test_get_batch_batch_size_lesser_than_actual(self): - executions_queue = self.get_queue() - qsize = executions_queue.qsize() - self.assertTrue(qsize > 0) - expected_batch_size = int(qsize / 2) - dumper = Dumper( - queue=executions_queue, batch_size=expected_batch_size, export_dir="/tmp" - ) - batch = dumper._get_batch() - self.assertEqual(len(batch), expected_batch_size) - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def test_get_file_name(self): - dumper = Dumper( - queue=self.get_queue(), - export_dir="/tmp", - file_prefix="st2-stuff-", - file_format="json", - ) - file_name = dumper._get_file_name() - export_date = date_utils.get_datetime_utc_now().strftime("%Y-%m-%d") - self.assertTrue(file_name.startswith("/tmp/" + export_date + "/st2-stuff-")) - self.assertTrue(file_name.endswith("json")) - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def test_write_to_disk_empty_queue(self): - dumper = Dumper( - queue=queue.Queue(), - export_dir="/tmp", - file_prefix="st2-stuff-", - file_format="json", - ) - # We just make sure this doesn't blow up. 
- ret = dumper._write_to_disk() - self.assertEqual(ret, 0) - - @mock.patch.object(TextFileWriter, "write_text", mock.MagicMock(return_value=True)) - @mock.patch.object(Dumper, "_update_marker", mock.MagicMock(return_value=None)) - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - def test_write_to_disk(self): - executions_queue = self.get_queue() - max_files_per_sleep = 5 - dumper = Dumper( - queue=executions_queue, - export_dir="/tmp", - batch_size=1, - max_files_per_sleep=max_files_per_sleep, - file_prefix="st2-stuff-", - file_format="json", - ) - # We just make sure this doesn't blow up. - ret = dumper._write_to_disk() - self.assertEqual(ret, max_files_per_sleep) - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - @mock.patch.object(TextFileWriter, "write_text", mock.MagicMock(return_value=True)) - def test_start_stop_dumper(self): - executions_queue = self.get_queue() - sleep_interval = 0.01 - dumper = Dumper( - queue=executions_queue, - sleep_interval=sleep_interval, - export_dir="/tmp", - batch_size=1, - max_files_per_sleep=5, - file_prefix="st2-stuff-", - file_format="json", - ) - dumper.start() - # Call stop after at least one batch was written to disk. 
- eventlet.sleep(10 * sleep_interval) - dumper.stop() - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - @mock.patch.object(Dumper, "_write_marker_to_db", mock.MagicMock(return_value=True)) - def test_update_marker(self): - executions_queue = self.get_queue() - dumper = Dumper( - queue=executions_queue, - export_dir="/tmp", - batch_size=5, - max_files_per_sleep=1, - file_prefix="st2-stuff-", - file_format="json", - ) - # Batch 1 - batch = self.execution_apis[0:5] - new_marker = dumper._update_marker(batch) - self.assertIsNotNone(new_marker) - timestamps = [isotime.parse(execution.end_timestamp) for execution in batch] - max_timestamp = max(timestamps) - self.assertEqual(new_marker, max_timestamp) - - # Batch 2 - batch = self.execution_apis[0:5] - new_marker = dumper._update_marker(batch) - timestamps = [isotime.parse(execution.end_timestamp) for execution in batch] - max_timestamp = max(timestamps) - self.assertEqual(new_marker, max_timestamp) - dumper._write_marker_to_db.assert_called_with(new_marker) - - @mock.patch.object(os.path, "exists", mock.MagicMock(return_value=True)) - @mock.patch.object(Dumper, "_write_marker_to_db", mock.MagicMock(return_value=True)) - def test_update_marker_out_of_order_batch(self): - executions_queue = self.get_queue() - dumper = Dumper( - queue=executions_queue, - export_dir="/tmp", - batch_size=5, - max_files_per_sleep=1, - file_prefix="st2-stuff-", - file_format="json", - ) - timestamps = [ - isotime.parse(execution.end_timestamp) for execution in self.execution_apis - ] - max_timestamp = max(timestamps) - - # set dumper persisted timestamp to something less than min timestamp in the batch - test_timestamp = max_timestamp + datetime.timedelta(hours=1) - dumper._persisted_marker = test_timestamp - new_marker = dumper._update_marker(self.execution_apis) - self.assertTrue(new_marker < test_timestamp) - # Assert we rolled back the marker. 
- self.assertEqual(dumper._persisted_marker, max_timestamp) - self.assertEqual(new_marker, max_timestamp) - dumper._write_marker_to_db.assert_called_with(new_marker) diff --git a/st2exporter/tests/unit/test_json_converter.py b/st2exporter/tests/unit/test_json_converter.py deleted file mode 100644 index 07f82a8bf0..0000000000 --- a/st2exporter/tests/unit/test_json_converter.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import json - -import unittest2 - -from st2tests.fixturesloader import FixturesLoader -from st2exporter.exporter.json_converter import JsonConverter - -DESCENDANTS_PACK = "descendants" - -DESCENDANTS_FIXTURES = { - "executions": [ - "root_execution.yaml", - "child1_level1.yaml", - "child2_level1.yaml", - "child1_level2.yaml", - "child2_level2.yaml", - "child3_level2.yaml", - "child1_level3.yaml", - "child2_level3.yaml", - "child3_level3.yaml", - ] -} - - -class TestJsonConverter(unittest2.TestCase): - - fixtures_loader = FixturesLoader() - loaded_fixtures = fixtures_loader.load_fixtures( - fixtures_pack=DESCENDANTS_PACK, fixtures_dict=DESCENDANTS_FIXTURES - ) - - def test_convert(self): - executions_list = list(self.loaded_fixtures["executions"].values()) - converter = JsonConverter() - converted_doc = converter.convert(executions_list) - self.assertTrue(type(converted_doc), "string") - reversed_doc = json.loads(converted_doc) - self.assertListEqual(executions_list, reversed_doc) - - def test_convert_non_list(self): - executions_dict = self.loaded_fixtures["executions"] - converter = JsonConverter() - try: - converter.convert(executions_dict) - self.fail("Should have thrown exception.") - except ValueError: - pass From 176abb6e65516161f583c31813eb315694ee8e8e Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 18 Jul 2022 17:12:48 -0500 Subject: [PATCH 2/7] remove st2exporter from conf files --- conf/st2.dev.conf | 3 --- conf/st2.tests.conf | 3 --- conf/st2.tests1.conf | 2 -- conf/st2.tests2.conf | 3 --- st2tests/st2tests/config.py | 13 ------------- .../conf/st2.tests.api.audit_log_level.conf | 3 --- .../conf/st2.tests.api.debug_log_level.conf | 3 --- .../fixtures/conf/st2.tests.api.info_log_level.conf | 3 --- .../conf/st2.tests.api.system_debug_true.conf | 3 --- ...2.tests.api.system_debug_true_logging_debug.conf | 3 --- st2tests/st2tests/fixtures/conf/st2.tests.conf | 3 --- tools/config_gen.py | 1 - 12 files changed, 43 deletions(-) diff --git 
a/conf/st2.dev.conf b/conf/st2.dev.conf index b4191d60a3..cf2b5b6596 100644 --- a/conf/st2.dev.conf +++ b/conf/st2.dev.conf @@ -121,9 +121,6 @@ logging = st2actions/conf/logging.scheduler.conf [notifier] logging = st2actions/conf/logging.notifier.conf -[exporter] -logging = st2exporter/conf/logging.exporter.conf - [workflow_engine] logging = st2actions/conf/logging.workflowengine.conf diff --git a/conf/st2.tests.conf b/conf/st2.tests.conf index 0e9afeb288..d4301d0433 100644 --- a/conf/st2.tests.conf +++ b/conf/st2.tests.conf @@ -87,6 +87,3 @@ remote_dir = /tmp [notifier] logging = st2actions/conf/logging.notifier.conf -[exporter] -logging = st2exporter/conf/logging.exporter.conf - diff --git a/conf/st2.tests1.conf b/conf/st2.tests1.conf index 53ae62e5ae..9c0b0f54a2 100644 --- a/conf/st2.tests1.conf +++ b/conf/st2.tests1.conf @@ -70,5 +70,3 @@ remote_dir = /tmp [notifier] logging = st2actions/conf/logging.notifier.conf -[exporter] -logging = st2exporter/conf/logging.exporter.conf diff --git a/conf/st2.tests2.conf b/conf/st2.tests2.conf index cb7be38a71..a45948b346 100644 --- a/conf/st2.tests2.conf +++ b/conf/st2.tests2.conf @@ -92,6 +92,3 @@ remote_dir = /tmp [notifier] logging = st2actions/conf/logging.notifier.conf -[exporter] -logging = st2exporter/conf/logging.exporter.conf - diff --git a/st2tests/st2tests/config.py b/st2tests/st2tests/config.py index 61e4417414..0061185ceb 100644 --- a/st2tests/st2tests/config.py +++ b/st2tests/st2tests/config.py @@ -80,7 +80,6 @@ def _register_config_opts(): _register_action_sensor_opts() _register_ssh_runner_opts() _register_scheduler_opts() - _register_exporter_opts() _register_sensor_container_opts() _register_garbage_collector_opts() @@ -390,18 +389,6 @@ def _register_scheduler_opts(): _register_opts(scheduler_opts, group="scheduler") -def _register_exporter_opts(): - exporter_opts = [ - cfg.StrOpt( - "dump_dir", - default="/opt/stackstorm/exports/", - help="Directory to dump data to.", - ) - ] - - 
_register_opts(exporter_opts, group="exporter") - - def _register_sensor_container_opts(): partition_opts = [ cfg.StrOpt( diff --git a/st2tests/st2tests/fixtures/conf/st2.tests.api.audit_log_level.conf b/st2tests/st2tests/fixtures/conf/st2.tests.api.audit_log_level.conf index a07773bc48..9b20a3b57c 100644 --- a/st2tests/st2tests/fixtures/conf/st2.tests.api.audit_log_level.conf +++ b/st2tests/st2tests/fixtures/conf/st2.tests.api.audit_log_level.conf @@ -88,6 +88,3 @@ remote_dir = /tmp [notifier] logging = st2actions/conf/logging.notifier.conf -[exporter] -logging = st2exporter/conf/logging.exporter.conf - diff --git a/st2tests/st2tests/fixtures/conf/st2.tests.api.debug_log_level.conf b/st2tests/st2tests/fixtures/conf/st2.tests.api.debug_log_level.conf index 0b435df722..9448bdc934 100644 --- a/st2tests/st2tests/fixtures/conf/st2.tests.api.debug_log_level.conf +++ b/st2tests/st2tests/fixtures/conf/st2.tests.api.debug_log_level.conf @@ -88,6 +88,3 @@ remote_dir = /tmp [notifier] logging = st2actions/conf/logging.notifier.conf -[exporter] -logging = st2exporter/conf/logging.exporter.conf - diff --git a/st2tests/st2tests/fixtures/conf/st2.tests.api.info_log_level.conf b/st2tests/st2tests/fixtures/conf/st2.tests.api.info_log_level.conf index 45bf5f364c..e3207d4591 100644 --- a/st2tests/st2tests/fixtures/conf/st2.tests.api.info_log_level.conf +++ b/st2tests/st2tests/fixtures/conf/st2.tests.api.info_log_level.conf @@ -88,6 +88,3 @@ remote_dir = /tmp [notifier] logging = st2actions/conf/logging.notifier.conf -[exporter] -logging = st2exporter/conf/logging.exporter.conf - diff --git a/st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true.conf b/st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true.conf index ee8b2c9e07..a77d7e87dd 100644 --- a/st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true.conf +++ b/st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true.conf @@ -88,6 +88,3 @@ remote_dir = /tmp [notifier] logging = 
st2actions/conf/logging.notifier.conf -[exporter] -logging = st2exporter/conf/logging.exporter.conf - diff --git a/st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true_logging_debug.conf b/st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true_logging_debug.conf index caaf4bbfba..eb9fedd9a4 100644 --- a/st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true_logging_debug.conf +++ b/st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true_logging_debug.conf @@ -88,6 +88,3 @@ remote_dir = /tmp [notifier] logging = st2actions/conf/logging.notifier.conf -[exporter] -logging = st2exporter/conf/logging.exporter.conf - diff --git a/st2tests/st2tests/fixtures/conf/st2.tests.conf b/st2tests/st2tests/fixtures/conf/st2.tests.conf index da7386382a..69642eaf31 100644 --- a/st2tests/st2tests/fixtures/conf/st2.tests.conf +++ b/st2tests/st2tests/fixtures/conf/st2.tests.conf @@ -89,6 +89,3 @@ remote_dir = /tmp [notifier] logging = st2actions/conf/logging.notifier.conf -[exporter] -logging = st2exporter/conf/logging.exporter.conf - diff --git a/tools/config_gen.py b/tools/config_gen.py index 68a514ee74..aeb792e045 100755 --- a/tools/config_gen.py +++ b/tools/config_gen.py @@ -33,7 +33,6 @@ "st2stream.config", "st2auth.config", "st2common.config", - "st2exporter.config", "st2reactor.rules.config", "st2reactor.sensor.config", "st2reactor.timer.config", From 6b08e0a020005a8219af97c4179b4053a3634095 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 18 Jul 2022 17:14:42 -0500 Subject: [PATCH 3/7] drop st2exporter from launchdev.sh --- tools/launchdev.sh | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/tools/launchdev.sh b/tools/launchdev.sh index 3d4a97b26e..d0c777f996 100755 --- a/tools/launchdev.sh +++ b/tools/launchdev.sh @@ -288,17 +288,6 @@ function st2start(){ --config-file $ST2_CONF fi - # Start Exporter - if [ -n "$ST2_EXPORTER" ]; then - EXPORTS_DIR=$(exportsdir) - sudo mkdir -p $EXPORTS_DIR - sudo chown -R 
${CURRENT_USER}:${CURRENT_USER_GROUP} $EXPORTS_DIR - echo 'Starting screen session st2-exporter...' - screen -L -d -m -S st2-exporter ${VIRTUALENV}/bin/python \ - ./st2exporter/bin/st2exporter \ - --config-file $ST2_CONF - fi - # Check whether screen sessions are started SCREENS=( "st2-api" From 44e3da98758666bf5ab56bf1569f27355c0f98b6 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 18 Jul 2022 17:15:10 -0500 Subject: [PATCH 4/7] drop st2exporter db marker model and queue --- st2common/st2common/models/db/marker.py | 55 ---------------------- st2common/st2common/persistence/marker.py | 40 ---------------- st2common/st2common/transport/queues.py | 7 --- st2common/tests/unit/test_db_marker.py | 56 ----------------------- 4 files changed, 158 deletions(-) delete mode 100644 st2common/st2common/models/db/marker.py delete mode 100644 st2common/st2common/persistence/marker.py delete mode 100644 st2common/tests/unit/test_db_marker.py diff --git a/st2common/st2common/models/db/marker.py b/st2common/st2common/models/db/marker.py deleted file mode 100644 index 7a053e5490..0000000000 --- a/st2common/st2common/models/db/marker.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import -import mongoengine as me - -from st2common.fields import ComplexDateTimeField -from st2common.models.db import stormbase -from st2common.util import date as date_utils - -__all__ = ["MarkerDB", "DumperMarkerDB"] - - -class MarkerDB(stormbase.StormFoundationDB): - """ - Abstract model for storing marker (or cursor) in db. This is typically used when doing - iteration. - - :param marker: Cursor string. - :type marker: ``str`` - - :param updated_at: Timestamp when marker was updated. - :type updated_at: ``datetime.datetime`` - """ - - marker = me.StringField(required=True) - updated_at = ComplexDateTimeField( - default=date_utils.get_datetime_utc_now, - help_text="The timestamp when the liveaction was created.", - ) - - meta = {"abstract": True} - - -class DumperMarkerDB(MarkerDB): - """ - Marker model used by Dumper (in exporter). - """ - - pass - - -MODELS = [MarkerDB, DumperMarkerDB] diff --git a/st2common/st2common/persistence/marker.py b/st2common/st2common/persistence/marker.py deleted file mode 100644 index 6be08a25ec..0000000000 --- a/st2common/st2common/persistence/marker.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import -from st2common.models.db import MongoDBAccess -from st2common.models.db.marker import MarkerDB -from st2common.models.db.marker import DumperMarkerDB -from st2common.persistence.base import Access - -__all__ = ["Marker"] - - -class Marker(Access): - impl = MongoDBAccess(MarkerDB) - publisher = None - - @classmethod - def _get_impl(cls): - return cls.impl - - -class DumperMarker(Access): - impl = MongoDBAccess(DumperMarkerDB) - publisher = None - - @classmethod - def _get_impl(cls): - return cls.impl diff --git a/st2common/st2common/transport/queues.py b/st2common/st2common/transport/queues.py index f6f9bcb4ef..0b19c0ff1d 100644 --- a/st2common/st2common/transport/queues.py +++ b/st2common/st2common/transport/queues.py @@ -39,7 +39,6 @@ "ACTIONRUNNER_CANCEL_QUEUE", "ACTIONRUNNER_PAUSE_QUEUE", "ACTIONRUNNER_RESUME_QUEUE", - "EXPORTER_WORK_QUEUE", "NOTIFIER_ACTIONUPDATE_WORK_QUEUE", "RESULTSTRACKER_ACTIONSTATE_WORK_QUEUE", "RULESENGINE_WORK_QUEUE", @@ -76,12 +75,6 @@ ) -# Used by the exporter service -EXPORTER_WORK_QUEUE = execution.get_queue( - "st2.exporter.work", routing_key=publishers.UPDATE_RK -) - - # Used by the notifier service NOTIFIER_ACTIONUPDATE_WORK_QUEUE = execution.get_queue( "st2.notifiers.execution.work", routing_key=publishers.UPDATE_RK diff --git a/st2common/tests/unit/test_db_marker.py b/st2common/tests/unit/test_db_marker.py deleted file mode 100644 index b9cd879ea3..0000000000 --- a/st2common/tests/unit/test_db_marker.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2020 The StackStorm Authors. -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -from st2common.models.db.marker import DumperMarkerDB -from st2common.persistence.marker import DumperMarker -from st2common.exceptions.db import StackStormDBObjectNotFoundError -from st2common.util import date as date_utils - -from st2tests import DbTestCase - - -class DumperMarkerModelTest(DbTestCase): - def test_dumper_marker_crud(self): - saved = DumperMarkerModelTest._create_save_dumper_marker() - retrieved = DumperMarker.get_by_id(saved.id) - self.assertEqual( - saved.marker, retrieved.marker, "Same marker was not returned." - ) - # test update - time_now = date_utils.get_datetime_utc_now() - retrieved.updated_at = time_now - saved = DumperMarker.add_or_update(retrieved) - retrieved = DumperMarker.get_by_id(saved.id) - self.assertEqual(retrieved.updated_at, time_now, "Update to marker failed.") - # cleanup - DumperMarkerModelTest._delete([retrieved]) - try: - retrieved = DumperMarker.get_by_id(saved.id) - except StackStormDBObjectNotFoundError: - retrieved = None - self.assertIsNone(retrieved, "managed to retrieve after failure.") - - @staticmethod - def _create_save_dumper_marker(): - created = DumperMarkerDB() - created.marker = "2015-06-11T00:35:15.260439Z" - created.updated_at = date_utils.get_datetime_utc_now() - return DumperMarker.add_or_update(created) - - @staticmethod - def _delete(model_objects): - for model_object in model_objects: - model_object.delete() From b3c10a6187c7787cbe406e2705364110677f6689 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 18 Jul 2022 17:22:09 -0500 Subject: [PATCH 
5/7] add changelog entry --- CHANGELOG.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 440c65b4a9..3ace5ffb3e 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -64,6 +64,15 @@ Changed Contributed by @cognifloyd +Removed +~~~~~~~ + +* Removed st2exporter service. It is unmaintained and does not get installed. It was + originally meant to help with analytics by exporting executions as json files that + could be imported into something like elasticsearch. Our code is now instrumented + to make a wider variety of stats available to metrics drivers. #5676 + Contributed by @cognifloyd + 3.7.0 - May 05, 2022 -------------------- From 24669fd855b61b93e6f7adebeff42228c445f5e3 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 18 Jul 2022 18:12:20 -0500 Subject: [PATCH 6/7] drop st2exporter in more conf files --- conf/HA/st2.conf.sample | 3 --- conf/st2.conf.sample | 6 ------ conf/st2.package.conf | 3 --- 3 files changed, 12 deletions(-) diff --git a/conf/HA/st2.conf.sample b/conf/HA/st2.conf.sample index 2efe8e696b..7ca032711d 100644 --- a/conf/HA/st2.conf.sample +++ b/conf/HA/st2.conf.sample @@ -26,9 +26,6 @@ virtualenv_opts = --always-copy [notifier] logging = /etc/st2/logging.notifier.conf -[exporter] -logging = /etc/st2/logging.exporter.conf - [garbagecollector] logging = /etc/st2/logging.garbagecollector.conf diff --git a/conf/st2.conf.sample b/conf/st2.conf.sample index d860951268..8076b12d9d 100644 --- a/conf/st2.conf.sample +++ b/conf/st2.conf.sample @@ -160,12 +160,6 @@ username = None # Compression level when compressors is set to zlib. Valid values are -1 to 9. Defaults to 6. zlib_compression_level = -[exporter] -# Directory to dump data to. 
-dump_dir = /opt/stackstorm/exports/ -# location of the logging.exporter.conf file -logging = /etc/st2/logging.exporter.conf - [garbagecollector] # Action execution output objects (ones generated by action output streaming) older than this value (days) will be automatically deleted. Defaults to 7. action_executions_output_ttl = 7 diff --git a/conf/st2.package.conf b/conf/st2.package.conf index 10c241b3ad..581d383bd0 100644 --- a/conf/st2.package.conf +++ b/conf/st2.package.conf @@ -25,9 +25,6 @@ virtualenv_opts = --always-copy [notifier] logging = /etc/st2/logging.notifier.conf -[exporter] -logging = /etc/st2/logging.exporter.conf - [garbagecollector] logging = /etc/st2/logging.garbagecollector.conf From fbb52396091b782102abcbaca8e7afddfd32783e Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 19 Jul 2022 11:03:42 -0500 Subject: [PATCH 7/7] add migration to delete marker collections --- .../st2-drop-st2exporter-marker-collections | 71 +++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 st2common/bin/migrations/v3.8/st2-drop-st2exporter-marker-collections diff --git a/st2common/bin/migrations/v3.8/st2-drop-st2exporter-marker-collections b/st2common/bin/migrations/v3.8/st2-drop-st2exporter-marker-collections new file mode 100644 index 0000000000..569cb1037a --- /dev/null +++ b/st2common/bin/migrations/v3.8/st2-drop-st2exporter-marker-collections @@ -0,0 +1,71 @@ +#!/usr/bin/env python +# Copyright 2022 The StackStorm Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Migration which deletes the marker collections now that st2exporter has been removed.
+Only st2exporter used the marker collections.
+
+NB: Most people will not have these collections because st2exporter was optional,
+and the collections were not configured to be created automatically.
+"""
+
+import sys
+import traceback
+
+import mongoengine as me
+
+from mongoengine.connection import get_db
+from oslo_config import cfg
+
+from st2common import config
+from st2common.service_setup import db_setup
+from st2common.service_setup import db_teardown
+
+
+MARKER_COLLECTION = "marker_d_b"
+DUMPER_MARKER_COLLECTION = "dumper_marker_d_b"
+
+
+def delete_marker_collections():
+    db = get_db()
+    collections = db.list_collection_names()
+
+    if MARKER_COLLECTION in collections:
+        print(f"Dropping {MARKER_COLLECTION} collection...")
+        db[MARKER_COLLECTION].drop()
+
+    if DUMPER_MARKER_COLLECTION in collections:
+        print(f"Dropping {DUMPER_MARKER_COLLECTION} collection...")
+        db[DUMPER_MARKER_COLLECTION].drop()
+
+
+def main():
+    config.parse_args()
+    db_setup()
+
+    try:
+        delete_marker_collections()
+        exit_code = 0
+    except Exception as e:
+        print("ABORTED: Collection deletion aborted on first failure: %s" % (str(e)))
+        traceback.print_exc()
+        exit_code = 1
+
+    db_teardown()
+    sys.exit(exit_code)
+
+
+if __name__ == "__main__":
+    main()