From eb778c9c0f4b92e171f31b10552d6c669458c718 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Thu, 3 Jan 2019 17:50:15 +0100 Subject: [PATCH 001/105] Update affected tests. --- st2api/tests/unit/controllers/v1/test_packs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/st2api/tests/unit/controllers/v1/test_packs.py b/st2api/tests/unit/controllers/v1/test_packs.py index 1ccd4ea439..8c5d9f338c 100644 --- a/st2api/tests/unit/controllers/v1/test_packs.py +++ b/st2api/tests/unit/controllers/v1/test_packs.py @@ -530,14 +530,14 @@ def test_packs_register_endpoint(self, mock_get_packs): {'packs': ['dummy_pack_1'], 'types': ['action']}) self.assertEqual(resp.status_int, 200) - self.assertEqual(resp.json, {'actions': 1, 'runners': 18}) + self.assertEqual(resp.json, {'actions': 1, 'runners': 15}) # Verify that plural name form also works resp = self.app.post_json('/v1/packs/register', {'packs': ['dummy_pack_1'], 'types': ['actions']}) self.assertEqual(resp.status_int, 200) - self.assertEqual(resp.json, {'actions': 1, 'runners': 18}) + self.assertEqual(resp.json, {'actions': 1, 'runners': 15}) # Register single resource from a single pack specified multiple times - verify that # resources from the same pack are only registered once From 6452f4d21a19fe3d66a586ff31675f6652467063 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 7 Jan 2019 14:15:01 +0100 Subject: [PATCH 002/105] Fix two issues in FileWatchSensor: 1. Mitigate / avoid race when calling add_file() before tailer has been initialized 2. 
Fix a bug with failing to dispatch a trigger due to code using invalid trigger type reference (it uses trigger ref instead of trigger type ref) --- contrib/linux/sensors/file_watch_sensor.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/contrib/linux/sensors/file_watch_sensor.py b/contrib/linux/sensors/file_watch_sensor.py index d0a74c71a9..547d4306ef 100644 --- a/contrib/linux/sensors/file_watch_sensor.py +++ b/contrib/linux/sensors/file_watch_sensor.py @@ -1,5 +1,7 @@ import os +import eventlet + from logshipper.tail import Tail from st2reactor.sensor.base import Sensor @@ -10,6 +12,7 @@ def __init__(self, sensor_service, config=None): super(FileWatchSensor, self).__init__(sensor_service=sensor_service, config=config) self._trigger = None + self._trigger_type_ref = 'linux.file_watch.line' self._logger = self._sensor_service.get_logger(__name__) self._tail = None @@ -42,6 +45,9 @@ def add_trigger(self, trigger): if not self._trigger: raise Exception('Trigger %s did not contain a ref.' % trigger) + # Wait a bit to avoid initialization race in logshipper library + eventlet.sleep(1.0) + self._tail.add_file(filename=file_path) self._logger.info('Added file "%s"' % (file_path)) @@ -61,7 +67,7 @@ def remove_trigger(self, trigger): self._logger.info('Removed file "%s"' % (file_path)) def _handle_line(self, file_path, line): - trigger = self._trigger + trigger = self._trigger_type_ref payload = { 'file_path': file_path, 'file_name': os.path.basename(file_path), From 106dd08f00f14aa6f168c570597410c908cc8466 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 7 Jan 2019 14:23:53 +0100 Subject: [PATCH 003/105] Fix validate_trigger_payload() method so it also works if a trigger reference and not trigger type reference is passed to it. 
--- contrib/linux/sensors/file_watch_sensor.py | 3 +-- st2common/st2common/validators/api/reactor.py | 13 ++++++++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/contrib/linux/sensors/file_watch_sensor.py b/contrib/linux/sensors/file_watch_sensor.py index 547d4306ef..3768e7f4a6 100644 --- a/contrib/linux/sensors/file_watch_sensor.py +++ b/contrib/linux/sensors/file_watch_sensor.py @@ -12,7 +12,6 @@ def __init__(self, sensor_service, config=None): super(FileWatchSensor, self).__init__(sensor_service=sensor_service, config=config) self._trigger = None - self._trigger_type_ref = 'linux.file_watch.line' self._logger = self._sensor_service.get_logger(__name__) self._tail = None @@ -67,7 +66,7 @@ def remove_trigger(self, trigger): self._logger.info('Removed file "%s"' % (file_path)) def _handle_line(self, file_path, line): - trigger = self._trigger_type_ref + trigger = self._trigger payload = { 'file_path': file_path, 'file_name': os.path.basename(file_path), diff --git a/st2common/st2common/validators/api/reactor.py b/st2common/st2common/validators/api/reactor.py index 0e0f4397bc..875417b51d 100644 --- a/st2common/st2common/validators/api/reactor.py +++ b/st2common/st2common/validators/api/reactor.py @@ -113,7 +113,7 @@ def validate_trigger_payload(trigger_type_ref, payload, throw_on_inexistent_trig """ This function validates trigger payload parameters for system and user-defined triggers. - :param trigger_type_ref: Reference of a trigger type or a trigger dictionary object. + :param trigger_type_ref: Reference of a trigger type / trigger / trigger dictionary object. :type trigger_type_ref: ``str`` :param payload: Trigger payload. @@ -144,9 +144,20 @@ def validate_trigger_payload(trigger_type_ref, payload, throw_on_inexistent_trig # System trigger payload_schema = SYSTEM_TRIGGER_TYPES[trigger_type_ref]['payload_schema'] else: + # 1. 
First assume we received TriggerType reference trigger_type_db = triggers.get_trigger_type_db(trigger_type_ref) + if not trigger_type_db: + # 2. If TriggerType was not found, assume we received a Trigger reference # Trigger doesn't exist in the database + trigger_db = triggers.get_trigger_db_by_ref(trigger_type_ref) + + if trigger_db: + trigger_type_db = triggers.get_trigger_type_db(trigger_db.type) + else: + trigger_type_db = None + + if not trigger_type_db: if throw_on_inexistent_trigger: msg = ('Trigger type with reference "%s" doesn\'t exist in the database' % (trigger_type_ref)) From 1f34a184aa7f415a19df66c437cb342c8a57b3b9 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 7 Jan 2019 14:34:57 +0100 Subject: [PATCH 004/105] Update inaccurate comment. --- st2common/st2common/services/trigger_dispatcher.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/st2common/st2common/services/trigger_dispatcher.py b/st2common/st2common/services/trigger_dispatcher.py index 14850a595d..92a46825ae 100644 --- a/st2common/st2common/services/trigger_dispatcher.py +++ b/st2common/st2common/services/trigger_dispatcher.py @@ -40,7 +40,7 @@ def dispatch(self, trigger, payload=None, trace_tag=None, throw_on_validation_er """ Method which dispatches the trigger. - :param trigger: Reference to the TriggerType (.). + :param trigger: Reference to the TriggerTypeDB (.) or TriggerDB object. :type trigger: ``str`` :param payload: Trigger payload. @@ -64,7 +64,7 @@ def dispatch_with_context(self, trigger, payload=None, trace_context=None, """ Method which dispatches the trigger. - :param trigger: Reference to the TriggerType (.). + :param trigger: Reference to the TriggerTypeDB (.) or TriggerDB object. :type trigger: ``str`` :param payload: Trigger payload. 
From ab2e3562fe0947750a595975c04ff6a2a5ea810a Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 7 Jan 2019 15:59:32 +0100 Subject: [PATCH 005/105] Determine if the trigger_type_ref is actually a trigger ref based if the value is a valid UUID4. --- st2common/st2common/validators/api/reactor.py | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/st2common/st2common/validators/api/reactor.py b/st2common/st2common/validators/api/reactor.py index 875417b51d..35cbcb9d69 100644 --- a/st2common/st2common/validators/api/reactor.py +++ b/st2common/st2common/validators/api/reactor.py @@ -14,8 +14,9 @@ # limitations under the License. from __future__ import absolute_import -import six +import six +import uuid from oslo_config import cfg from apscheduler.triggers.cron import CronTrigger @@ -144,20 +145,25 @@ def validate_trigger_payload(trigger_type_ref, payload, throw_on_inexistent_trig # System trigger payload_schema = SYSTEM_TRIGGER_TYPES[trigger_type_ref]['payload_schema'] else: - # 1. First assume we received TriggerType reference - trigger_type_db = triggers.get_trigger_type_db(trigger_type_ref) + # We assume Trigger ref and not TriggerType ref is passed in if second + # part (trigger name) is a valid UUID version 4 + try: + trigger_uuid = uuid.UUID(trigger_type_ref.split('.')[-1]) + except ValueError: + is_trigger_db = False + else: + is_trigger_db = (trigger_uuid.version == 4) - if not trigger_type_db: - # 2. 
If TriggerType was not found, assume we received a Trigger reference - # Trigger doesn't exist in the database + if is_trigger_db: trigger_db = triggers.get_trigger_db_by_ref(trigger_type_ref) if trigger_db: - trigger_type_db = triggers.get_trigger_type_db(trigger_db.type) - else: - trigger_type_db = None + trigger_type_ref = trigger_db.type + + trigger_type_db = triggers.get_trigger_type_db(trigger_type_ref) if not trigger_type_db: + # Trigger doesn't exist in the database if throw_on_inexistent_trigger: msg = ('Trigger type with reference "%s" doesn\'t exist in the database' % (trigger_type_ref)) From 35244de8e2ea1a2e89e2ecce8b489b91b4329bb0 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 7 Jan 2019 16:04:00 +0100 Subject: [PATCH 006/105] Add a test case for it. --- st2reactor/tests/unit/test_sensor_service.py | 53 ++++++++++++++++---- 1 file changed, 42 insertions(+), 11 deletions(-) diff --git a/st2reactor/tests/unit/test_sensor_service.py b/st2reactor/tests/unit/test_sensor_service.py index d3058b77f0..214b28dc69 100644 --- a/st2reactor/tests/unit/test_sensor_service.py +++ b/st2reactor/tests/unit/test_sensor_service.py @@ -28,9 +28,14 @@ } -class TriggerTypeMock(object): - def __init__(self, schema={}): - self.payload_schema = schema +class TriggerTypeDBMock(object): + def __init__(self, schema=None): + self.payload_schema = schema or {} + + +class TriggerDBMock(object): + def __init__(self, type=None): + self.type = type class SensorServiceTestCase(unittest2.TestCase): @@ -54,7 +59,7 @@ def tearDown(self): cfg.CONF.system.validate_trigger_payload = self.validate_trigger_payload @mock.patch('st2common.services.triggers.get_trigger_type_db', - mock.MagicMock(return_value=TriggerTypeMock(TEST_SCHEMA))) + mock.MagicMock(return_value=TriggerTypeDBMock(TEST_SCHEMA))) def test_dispatch_success_valid_payload_validation_enabled(self): cfg.CONF.system.validate_trigger_payload = True @@ -75,7 +80,33 @@ def 
test_dispatch_success_valid_payload_validation_enabled(self): self.assertEqual(self._dispatched_count, 1) @mock.patch('st2common.services.triggers.get_trigger_type_db', - mock.MagicMock(return_value=TriggerTypeMock(TEST_SCHEMA))) + mock.MagicMock(return_value=TriggerTypeDBMock(TEST_SCHEMA))) + @mock.patch('st2common.services.triggers.get_trigger_db_by_ref', + mock.MagicMock(return_value=TriggerDBMock(type='trigger-type-ref'))) + def test_dispatch_success_with_validation_enabled_trigger_reference(self): + # Test a scenario where a Trigger ref and not TriggerType ref is provided + cfg.CONF.system.validate_trigger_payload = True + + # define a valid payload + payload = { + 'name': 'John Doe', + 'age': 25, + 'career': ['foo, Inc.', 'bar, Inc.'], + 'married': True, + 'awards': {'2016': ['hoge prize', 'fuga prize']}, + 'income': 50000 + } + + self.assertEqual(self._dispatched_count, 0) + + # dispatching a trigger + self.sensor_service.dispatch('pack.86582f21-1fbc-44ea-88cb-0cd2b610e93b', payload) + + # This assumed that the target tirgger dispatched + self.assertEqual(self._dispatched_count, 1) + + @mock.patch('st2common.services.triggers.get_trigger_type_db', + mock.MagicMock(return_value=TriggerTypeDBMock(TEST_SCHEMA))) def test_dispatch_success_with_validation_disabled_and_invalid_payload(self): """ Tests that an invalid payload still results in dispatch success with default config @@ -108,7 +139,7 @@ def test_dispatch_success_with_validation_disabled_and_invalid_payload(self): self.assertEqual(self._dispatched_count, 1) @mock.patch('st2common.services.triggers.get_trigger_type_db', - mock.MagicMock(return_value=TriggerTypeMock(TEST_SCHEMA))) + mock.MagicMock(return_value=TriggerTypeDBMock(TEST_SCHEMA))) def test_dispatch_failure_caused_by_incorrect_type(self): # define a invalid payload (the type of 'age' is incorrect) payload = { @@ -131,7 +162,7 @@ def test_dispatch_failure_caused_by_incorrect_type(self): self.assertEqual(self._dispatched_count, 1) 
@mock.patch('st2common.services.triggers.get_trigger_type_db', - mock.MagicMock(return_value=TriggerTypeMock(TEST_SCHEMA))) + mock.MagicMock(return_value=TriggerTypeDBMock(TEST_SCHEMA))) def test_dispatch_failure_caused_by_lack_of_required_parameter(self): # define a invalid payload (lack of required property) payload = { @@ -149,7 +180,7 @@ def test_dispatch_failure_caused_by_lack_of_required_parameter(self): self.assertEqual(self._dispatched_count, 1) @mock.patch('st2common.services.triggers.get_trigger_type_db', - mock.MagicMock(return_value=TriggerTypeMock(TEST_SCHEMA))) + mock.MagicMock(return_value=TriggerTypeDBMock(TEST_SCHEMA))) def test_dispatch_failure_caused_by_extra_parameter(self): # define a invalid payload ('hobby' is extra) payload = { @@ -162,7 +193,7 @@ def test_dispatch_failure_caused_by_extra_parameter(self): self.assertEqual(self._dispatched_count, 0) @mock.patch('st2common.services.triggers.get_trigger_type_db', - mock.MagicMock(return_value=TriggerTypeMock(TEST_SCHEMA))) + mock.MagicMock(return_value=TriggerTypeDBMock(TEST_SCHEMA))) def test_dispatch_success_with_multiple_type_value(self): payload = { 'name': 'John Doe', @@ -180,7 +211,7 @@ def test_dispatch_success_with_multiple_type_value(self): self.assertEqual(self._dispatched_count, 2) @mock.patch('st2common.services.triggers.get_trigger_type_db', - mock.MagicMock(return_value=TriggerTypeMock(TEST_SCHEMA))) + mock.MagicMock(return_value=TriggerTypeDBMock(TEST_SCHEMA))) def test_dispatch_success_with_null(self): payload = { 'name': 'John Doe', @@ -193,7 +224,7 @@ def test_dispatch_success_with_null(self): self.assertEqual(self._dispatched_count, 1) @mock.patch('st2common.services.triggers.get_trigger_type_db', - mock.MagicMock(return_value=TriggerTypeMock())) + mock.MagicMock(return_value=TriggerTypeDBMock())) def test_dispatch_success_without_payload_schema(self): # the case trigger has no property self.sensor_service.dispatch('trigger-name', {}) From 
7f6e602615143f9bf2a8be34b74204ae9579b6ff Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 22 Jan 2019 13:01:50 +0100 Subject: [PATCH 007/105] Fix RabbitMQ connection retry wrapper so it doesn't block for indefinetly if connection to RabbitMQ cannot be established. Also add errback which logs an actual error when trying to re-establish a connection. --- .../transport/connection_retry_wrapper.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/st2common/st2common/transport/connection_retry_wrapper.py b/st2common/st2common/transport/connection_retry_wrapper.py index a657ed2afd..5508e9c0bb 100644 --- a/st2common/st2common/transport/connection_retry_wrapper.py +++ b/st2common/st2common/transport/connection_retry_wrapper.py @@ -127,6 +127,8 @@ def run(self, connection, wrapped_callback): if should_stop: raise # -1, 0 and 1+ are handled properly by eventlet.sleep + self._logger.debug('Received RabbitMQ server error, sleeping for %s seconds ' + 'before retrying: %s' % (wait, str(e))) eventlet.sleep(wait) connection.close() @@ -135,10 +137,21 @@ def run(self, connection, wrapped_callback): # entire ConnectionPool simultaneously but that would require writing our own # ConnectionPool. If a server recovers it could happen that the same process # ends up talking to separate nodes in a cluster. 
- connection.ensure_connection() + def log_error_on_conn_failure(exc, interval): + self._logger.debug('Failed to re-establish connection to RabbitMQ server: %s' % + (str(e))) + + try: + # NOTE: This function blocks and tries to restablish a connection for + # indefinetly if "max_retries" argument is not specified + connection.ensure_connection(max_retries=5, errback=log_error_on_conn_failure) + except Exception: + self._logger.exception('Connections to RabbitMQ cannot be re-established: %s', + str(e)) + raise except Exception as e: - self._logger.exception('Connections to rabbitmq cannot be re-established: %s', + self._logger.exception('Connections to RabbitMQ cannot be re-established: %s', str(e)) # Not being able to publish a message could be a significant issue for an app. raise From 3be3891a3044bac8d33420f5f53b991b4bee44eb Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 22 Jan 2019 13:34:29 +0100 Subject: [PATCH 008/105] Make max retries configurable, also log sleep interval. 
--- .../st2common/transport/connection_retry_wrapper.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/st2common/st2common/transport/connection_retry_wrapper.py b/st2common/st2common/transport/connection_retry_wrapper.py index 5508e9c0bb..79ff7f60fa 100644 --- a/st2common/st2common/transport/connection_retry_wrapper.py +++ b/st2common/st2common/transport/connection_retry_wrapper.py @@ -91,9 +91,12 @@ def wrapped_callback(connection, channel): retry_wrapper.run(connection=connection, wrapped_callback=wrapped_callback) """ - def __init__(self, cluster_size, logger): + def __init__(self, cluster_size, logger, ensure_max_retries=3): self._retry_context = ClusterRetryContext(cluster_size=cluster_size) self._logger = logger + # How many times to try to retrying establishing a connection in a place where we are + # calling connection.ensure_connection + self._ensure_max_retries = ensure_max_retries def errback(self, exc, interval): self._logger.error('Rabbitmq connection error: %s', exc.message) @@ -139,13 +142,14 @@ def run(self, connection, wrapped_callback): # ends up talking to separate nodes in a cluster. 
def log_error_on_conn_failure(exc, interval): - self._logger.debug('Failed to re-establish connection to RabbitMQ server: %s' % - (str(e))) + self._logger.debug('Failed to re-establish connection to RabbitMQ server, ' + 'retrying in %s seconds: %s' % (interval, str(e))) try: # NOTE: This function blocks and tries to restablish a connection for # indefinetly if "max_retries" argument is not specified - connection.ensure_connection(max_retries=5, errback=log_error_on_conn_failure) + connection.ensure_connection(max_retries=self._ensure_max_retries, + errback=log_error_on_conn_failure) except Exception: self._logger.exception('Connections to RabbitMQ cannot be re-established: %s', str(e)) From 7b92b68c5184f0c8aec452a46b4af0b4053121ad Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 22 Jan 2019 13:45:55 +0100 Subject: [PATCH 009/105] Remove debug code. From 5535d133ca2ce591b689a7b30c05da7927a84b36 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 22 Jan 2019 18:44:42 +0100 Subject: [PATCH 010/105] Pin pyyaml to latest version. NOTE: Security vulnerability doesn't affect our code because we already use yaml.safe_load everywhere, but it's still good to update. 
--- fixed-requirements.txt | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/fixed-requirements.txt b/fixed-requirements.txt index 89ee714048..a395e7c89e 100644 --- a/fixed-requirements.txt +++ b/fixed-requirements.txt @@ -9,12 +9,11 @@ kombu==4.2.1 # Note: amqp is used by kombu amqp==2.3.2 # NOTE: Recent version substantially affect the performance and add big import time overhead -# See https://github.com/StackStorm/st2/issues/4160#issuecomment-394386433 for -details +# See https://github.com/StackStorm/st2/issues/4160#issuecomment-394386433 for details oslo.config>=1.12.1,<1.13 oslo.utils>=3.36.2,<=3.37.0 -six==1.11.0 -pyyaml==3.13 +six==1.12.0 +pyyaml==4.2b4 requests[security]<2.15,>=2.14.1 apscheduler==3.5.3 gitpython==2.1.11 From ba36fc2ac2525814c0c190a5463c22d0f3bd793a Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 22 Jan 2019 18:48:28 +0100 Subject: [PATCH 011/105] Re-generate requirements file and use our flex work which doesn't pin pyyaml to old version. 
--- fixed-requirements.txt | 2 +- requirements.txt | 4 ++-- st2client/requirements.txt | 2 +- st2common/in-requirements.txt | 2 +- st2common/requirements.txt | 4 ++-- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/fixed-requirements.txt b/fixed-requirements.txt index a395e7c89e..64d1cb3254 100644 --- a/fixed-requirements.txt +++ b/fixed-requirements.txt @@ -42,7 +42,7 @@ tooz==1.63.1 zake==0.2.2 routes==2.4.1 flex==6.13.2 -webob==1.8.2 +webob==1.8.4 prance==0.9.0 pywinrm==0.3.0 # test requirements below diff --git a/requirements.txt b/requirements.txt index ef3c110a5d..011d23491e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,8 +5,8 @@ argcomplete bcrypt cryptography==2.4.1 eventlet==0.24.1 -flex==6.13.2 git+https://github.com/Kami/logshipper.git@stackstorm_patched#egg=logshipper +git+https://github.com/StackStorm/flex.git@pyyaml_pin_fix#egg=flex git+https://github.com/StackStorm/orquesta.git@v0.3#egg=orquesta git+https://github.com/StackStorm/python-mistralclient.git@st2-2.10.1#egg=python-mistralclient git+https://github.com/StackStorm/st2-auth-backend-flat-file.git@master#egg=st2-auth-backend-flat-file @@ -43,7 +43,7 @@ python-json-logger python-statsd==2.1.0 pytz==2018.7 pywinrm==0.3.0 -pyyaml==3.13 +pyyaml==4.2b4 rednose requests[security]<2.15,>=2.14.1 retrying==1.3.3 diff --git a/st2client/requirements.txt b/st2client/requirements.txt index a7841f794c..fad6fb9b43 100644 --- a/st2client/requirements.txt +++ b/st2client/requirements.txt @@ -8,7 +8,7 @@ prompt-toolkit==1.0.15 python-dateutil==2.7.5 python-editor==1.0.3 pytz==2018.7 -pyyaml==3.13 +pyyaml==4.2b4 requests[security]<2.15,>=2.14.1 six==1.11.0 sseclient==0.0.19 diff --git a/st2common/in-requirements.txt b/st2common/in-requirements.txt index 93fd72847e..68798b6102 100644 --- a/st2common/in-requirements.txt +++ b/st2common/in-requirements.txt @@ -25,7 +25,7 @@ tooz zake ipaddr routes -flex +git+https://github.com/StackStorm/flex.git@pyyaml_pin_fix#egg=flex webob jsonpath-rw 
python-statsd diff --git a/st2common/requirements.txt b/st2common/requirements.txt index 91197ab059..a465362e54 100644 --- a/st2common/requirements.txt +++ b/st2common/requirements.txt @@ -2,7 +2,7 @@ apscheduler==3.5.3 cryptography==2.4.1 eventlet==0.24.1 -flex==6.13.2 +git+https://github.com/StackStorm/flex.git@pyyaml_pin_fix#egg=flex git+https://github.com/StackStorm/orquesta.git@v0.3#egg=orquesta greenlet==0.4.15 ipaddr @@ -18,7 +18,7 @@ prometheus_client==0.1.1 pymongo==3.7.2 python-dateutil==2.7.5 python-statsd==2.1.0 -pyyaml==3.13 +pyyaml==4.2b4 requests[security]<2.15,>=2.14.1 retrying==1.3.3 routes==2.4.1 From 4a6ace4ca03aba586b633e3aadad128c3d4954cf Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 22 Jan 2019 18:51:31 +0100 Subject: [PATCH 012/105] Also update pyyaml in test requirements. --- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index a5d5eba385..396c851f5c 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -18,7 +18,7 @@ nose-timer>=0.7.2,<0.8 # splitting tests run on a separate CI machines nose-parallel==0.3.1 # Required by st2client tests -pyyaml<4.0,>=3.12 +pyyaml==4.2b4 RandomWords gunicorn==19.7.1 psutil==5.4.5 From 205dbb2b33e8286fa744f4225a34d83dc0bdad87 Mon Sep 17 00:00:00 2001 From: blag Date: Tue, 29 Jan 2019 21:28:03 -0800 Subject: [PATCH 013/105] Update requirements to PyYAML >=4.2b,<5.2 and a possible bump to flex --- fixed-requirements.txt | 2 +- requirements.txt | 4 ++-- st2client/requirements.txt | 2 +- st2common/in-requirements.txt | 2 +- st2common/requirements.txt | 4 ++-- test-requirements.txt | 2 +- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/fixed-requirements.txt b/fixed-requirements.txt index 64d1cb3254..eece9a44ad 100644 --- a/fixed-requirements.txt +++ b/fixed-requirements.txt @@ -13,7 +13,7 @@ amqp==2.3.2 oslo.config>=1.12.1,<1.13 oslo.utils>=3.36.2,<=3.37.0 six==1.12.0 -pyyaml==4.2b4 
+pyyaml>=4.2b4,<5.2 requests[security]<2.15,>=2.14.1 apscheduler==3.5.3 gitpython==2.1.11 diff --git a/requirements.txt b/requirements.txt index 011d23491e..410709fd79 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,8 +5,8 @@ argcomplete bcrypt cryptography==2.4.1 eventlet==0.24.1 +flex>=6.13.2,<6.13.4 git+https://github.com/Kami/logshipper.git@stackstorm_patched#egg=logshipper -git+https://github.com/StackStorm/flex.git@pyyaml_pin_fix#egg=flex git+https://github.com/StackStorm/orquesta.git@v0.3#egg=orquesta git+https://github.com/StackStorm/python-mistralclient.git@st2-2.10.1#egg=python-mistralclient git+https://github.com/StackStorm/st2-auth-backend-flat-file.git@master#egg=st2-auth-backend-flat-file @@ -43,7 +43,7 @@ python-json-logger python-statsd==2.1.0 pytz==2018.7 pywinrm==0.3.0 -pyyaml==4.2b4 +pyyaml>=4.2b4,<5.2 rednose requests[security]<2.15,>=2.14.1 retrying==1.3.3 diff --git a/st2client/requirements.txt b/st2client/requirements.txt index fad6fb9b43..0d9a1c6443 100644 --- a/st2client/requirements.txt +++ b/st2client/requirements.txt @@ -8,7 +8,7 @@ prompt-toolkit==1.0.15 python-dateutil==2.7.5 python-editor==1.0.3 pytz==2018.7 -pyyaml==4.2b4 +pyyaml>=4.2b4,<5.2 requests[security]<2.15,>=2.14.1 six==1.11.0 sseclient==0.0.19 diff --git a/st2common/in-requirements.txt b/st2common/in-requirements.txt index 68798b6102..93fd72847e 100644 --- a/st2common/in-requirements.txt +++ b/st2common/in-requirements.txt @@ -25,7 +25,7 @@ tooz zake ipaddr routes -git+https://github.com/StackStorm/flex.git@pyyaml_pin_fix#egg=flex +flex webob jsonpath-rw python-statsd diff --git a/st2common/requirements.txt b/st2common/requirements.txt index a465362e54..3f1bee613b 100644 --- a/st2common/requirements.txt +++ b/st2common/requirements.txt @@ -2,7 +2,7 @@ apscheduler==3.5.3 cryptography==2.4.1 eventlet==0.24.1 -git+https://github.com/StackStorm/flex.git@pyyaml_pin_fix#egg=flex +flex>=6.13.2,<6.13.4 git+https://github.com/StackStorm/orquesta.git@v0.3#egg=orquesta 
greenlet==0.4.15 ipaddr @@ -18,7 +18,7 @@ prometheus_client==0.1.1 pymongo==3.7.2 python-dateutil==2.7.5 python-statsd==2.1.0 -pyyaml==4.2b4 +pyyaml>=4.2b4,<5.2 requests[security]<2.15,>=2.14.1 retrying==1.3.3 routes==2.4.1 diff --git a/test-requirements.txt b/test-requirements.txt index 396c851f5c..65f3f2789e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -18,7 +18,7 @@ nose-timer>=0.7.2,<0.8 # splitting tests run on a separate CI machines nose-parallel==0.3.1 # Required by st2client tests -pyyaml==4.2b4 +pyyaml>=4.2b4,<5.2 RandomWords gunicorn==19.7.1 psutil==5.4.5 From 5b4dedd9a61fd6ad62c999f39b9655b28805dfc5 Mon Sep 17 00:00:00 2001 From: blag Date: Tue, 12 Feb 2019 13:55:04 -0800 Subject: [PATCH 014/105] Use flex v6.14.0 --- requirements.txt | 2 +- st2common/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 410709fd79..094e716533 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ argcomplete bcrypt cryptography==2.4.1 eventlet==0.24.1 -flex>=6.13.2,<6.13.4 +flex>=6.13.2,<6.15.0 git+https://github.com/Kami/logshipper.git@stackstorm_patched#egg=logshipper git+https://github.com/StackStorm/orquesta.git@v0.3#egg=orquesta git+https://github.com/StackStorm/python-mistralclient.git@st2-2.10.1#egg=python-mistralclient diff --git a/st2common/requirements.txt b/st2common/requirements.txt index 3f1bee613b..657dca13c4 100644 --- a/st2common/requirements.txt +++ b/st2common/requirements.txt @@ -2,7 +2,7 @@ apscheduler==3.5.3 cryptography==2.4.1 eventlet==0.24.1 -flex>=6.13.2,<6.13.4 +flex>=6.13.2,<6.15.0 git+https://github.com/StackStorm/orquesta.git@v0.3#egg=orquesta greenlet==0.4.15 ipaddr From 88392ccb67389a6b9e2033974600fb4bd56a9984 Mon Sep 17 00:00:00 2001 From: Nick Maludy Date: Wed, 23 Jan 2019 08:10:52 -0500 Subject: [PATCH 015/105] Remove redundant action-aliases for pack install. 
Fix st2 action-alias match help text --- CHANGELOG.rst | 9 +++++++++ contrib/packs/aliases/pack_install.yaml | 5 +---- st2client/st2client/commands/action_alias.py | 6 +++--- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 1a8b431a8b..b317533768 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -15,6 +15,15 @@ Fixed in place which only allows users with an admin role to use ``?scope=all`` and retrieve / view datastore values for arbitrary system users. (security issue bug fix) +Fixed +~~~~~ + +* Fixed the ``packs.pack_install`` / ``!pack install {{ packs }}`` action-alias to not have + redundant patterns. Previously this prevented it from being executed via + ``st2 action-alias execute 'pack install xxx'``. #4511 + + Contributed by Nick Maludy (Encore Technologies) + 2.10.0 - December 13, 2018 -------------------------- diff --git a/contrib/packs/aliases/pack_install.yaml b/contrib/packs/aliases/pack_install.yaml index 2f267c8318..2705b67e6c 100644 --- a/contrib/packs/aliases/pack_install.yaml +++ b/contrib/packs/aliases/pack_install.yaml @@ -4,10 +4,7 @@ action_ref: "packs.install" pack: "packs" description: "Install/upgrade StackStorm packs." formats: - - display: "pack install [,]" - representation: - - "pack install {{ packs }}" - - display: "pack install [,]" + - display: "pack install [,]" representation: - "pack install {{ packs }}" ack: diff --git a/st2client/st2client/commands/action_alias.py b/st2client/st2client/commands/action_alias.py index 2a5dcd4594..b5ac76e862 100644 --- a/st2client/st2client/commands/action_alias.py +++ b/st2client/st2client/commands/action_alias.py @@ -63,13 +63,13 @@ class ActionAliasMatchCommand(resource.ResourceCommand): def __init__(self, resource, *args, **kwargs): super(ActionAliasMatchCommand, self).__init__( resource, 'match', - 'Get the list of %s that match the command text.' % - resource.get_plural_display_name().lower(), + 'Get the %s that match the command text.' 
% + resource.get_display_name().lower(), *args, **kwargs) self.parser.add_argument('match_text', metavar='command', - help=('Get the list of %s that match the command text.' % + help=('Get the %s that match the command text.' % resource.get_display_name().lower())) self.parser.add_argument('-a', '--attr', nargs='+', default=self.display_attributes, From bbf6cd753789b188263f2fe072f1ec026e4aa9a3 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Thu, 24 Jan 2019 13:32:46 +0100 Subject: [PATCH 016/105] Supress statsd library debug log messages in sensor container when DEBUG mode / log level is not enabled. --- st2reactor/st2reactor/container/sensor_wrapper.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/st2reactor/st2reactor/container/sensor_wrapper.py b/st2reactor/st2reactor/container/sensor_wrapper.py index d409886407..b477368ded 100644 --- a/st2reactor/st2reactor/container/sensor_wrapper.py +++ b/st2reactor/st2reactor/container/sensor_wrapper.py @@ -200,6 +200,10 @@ def __init__(self, pack, file_path, class_name, trigger_types, if '--debug' in parent_args: set_log_level_for_all_loggers() + else: + # NOTE: statsd logger logs everything by default under INFO so we ignore those log + # messages unless verbose / debug mode is used + logging.ignore_statsd_log_messages() self._sensor_instance = self._get_sensor_instance() From 3207e7f6457b8f06258cee5a3cd762ded8de5b52 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 18 Feb 2019 10:01:18 +0100 Subject: [PATCH 017/105] Pull in coverage change. 
--- test-requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index 65f3f2789e..2619b96004 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,4 +1,5 @@ -coverage +# NOTE: codecov only supports coverage==4.5.2 +coverage==4.5.2 pep8==1.7.1 flake8==3.6.0 astroid==1.6.5 From 7922447dec7e34889ed8969a845c2a0e7c56da67 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Thu, 24 Jan 2019 19:07:10 +0100 Subject: [PATCH 018/105] Update code so we don't submit metrics for any "get one" API endpoints since this can potentially result in too many unique metrics. --- st2common/st2common/middleware/instrumentation.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/st2common/st2common/middleware/instrumentation.py b/st2common/st2common/middleware/instrumentation.py index ea03b72df1..0523ceb3eb 100644 --- a/st2common/st2common/middleware/instrumentation.py +++ b/st2common/st2common/middleware/instrumentation.py @@ -55,7 +55,15 @@ def __call__(self, environ, start_response): # other endpoints because this would result in a lot of unique metrics which is an # anti-pattern and causes unnecessary load on the metrics server. submit_metrics = endpoint.get('x-submit-metrics', True) - if not submit_metrics: + operation_id = endpoint.get('operationId', None) + is_get_one_endpoint = (operation_id.endswith('.get') or operation_id.endswith('.get_one')) + + if is_get_one_endpoint: + # NOTE: We don't submit metrics for any get one API endpoint since this would result + # in potentially too many unique metrics + submit_metrics = False + + if not submit_metrics or (): LOG.debug('Not submitting request metrics for path: %s' % (request.path)) return self.app(environ, start_response) From a5c58ce4ee6687ffabfd8f9231a5f392eece7e71 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Thu, 24 Jan 2019 19:39:51 +0100 Subject: [PATCH 019/105] Make the if check more robust. 
--- st2common/st2common/middleware/instrumentation.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/st2common/st2common/middleware/instrumentation.py b/st2common/st2common/middleware/instrumentation.py index 0523ceb3eb..3906d7c13d 100644 --- a/st2common/st2common/middleware/instrumentation.py +++ b/st2common/st2common/middleware/instrumentation.py @@ -56,14 +56,15 @@ def __call__(self, environ, start_response): # anti-pattern and causes unnecessary load on the metrics server. submit_metrics = endpoint.get('x-submit-metrics', True) operation_id = endpoint.get('operationId', None) - is_get_one_endpoint = (operation_id.endswith('.get') or operation_id.endswith('.get_one')) + is_get_one_endpoint = bool(operation_id) and (operation_id.endswith('.get') or + operation_id.endswith('.get_one')) if is_get_one_endpoint: # NOTE: We don't submit metrics for any get one API endpoint since this would result # in potentially too many unique metrics submit_metrics = False - if not submit_metrics or (): + if not submit_metrics: LOG.debug('Not submitting request metrics for path: %s' % (request.path)) return self.app(environ, start_response) From 139f7423df2367e59b326b366a01f844bcce1799 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Thu, 24 Jan 2019 19:39:51 +0100 Subject: [PATCH 020/105] Make the if check more robust. From 9a87a69857ecf2ade07cecc38627d7d9ad89955c Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Thu, 24 Jan 2019 19:46:23 +0100 Subject: [PATCH 021/105] Fix assert in get_sandbox_python_path. Make sure it also works correctly when sys.prefix is relative (e.g. when calling ./tools/launchdev.sh script outside of tools directory or similar). 
--- st2common/st2common/util/sandboxing.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/st2common/st2common/util/sandboxing.py b/st2common/st2common/util/sandboxing.py index d5ceb109a6..321d4f0250 100644 --- a/st2common/st2common/util/sandboxing.py +++ b/st2common/st2common/util/sandboxing.py @@ -117,7 +117,10 @@ def get_sandbox_python_path(inherit_from_parent=True, inherit_parent_virtualenv= if inherit_parent_virtualenv and hasattr(sys, 'real_prefix'): # We are running inside virtualenv site_packages_dir = get_python_lib() - assert sys.prefix in site_packages_dir + + sys_prefix = os.path.abspath(sys.prefix) + assert sys_prefix in site_packages_dir + sandbox_python_path.append(site_packages_dir) sandbox_python_path = ':'.join(sandbox_python_path) From fbf4846c3ab5112a98c4a30123cea8d343ade8bd Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Sun, 27 Jan 2019 21:55:48 +0100 Subject: [PATCH 022/105] Upgrade python-gnupg dependency because of a security vulnerability (https://blog.hackeriet.no/cve-2019-6690-python-gnupg-vulnerability/). NOTE: This security vulnerability doesn't affect us because we only this library in an isolated scenario (encrypting debug tarball without using a passphrase). --- fixed-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fixed-requirements.txt b/fixed-requirements.txt index eece9a44ad..6535a680fb 100644 --- a/fixed-requirements.txt +++ b/fixed-requirements.txt @@ -22,7 +22,7 @@ pymongo==3.7.2 mongoengine==0.16.0 passlib==1.7.1 lockfile==0.12.2 -python-gnupg==0.4.3 +python-gnupg==0.4.4 jsonpath-rw==1.4.0 pyinotify==0.9.6 semver==2.8.1 From 13c39e98652f999e6f7e182c2fa4dd90018e0bfb Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Sun, 27 Jan 2019 22:00:16 +0100 Subject: [PATCH 023/105] Re-generate requirements files. 
--- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 094e716533..1343a7b96f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -38,7 +38,7 @@ pymongo==3.7.2 pyrabbit python-dateutil==2.7.5 python-editor==1.0.3 -python-gnupg==0.4.3 +python-gnupg==0.4.4 python-json-logger python-statsd==2.1.0 pytz==2018.7 From ed11c454a7cf027cb99b304fe80b4662bbce8079 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Thu, 31 Jan 2019 10:23:49 +0100 Subject: [PATCH 024/105] Fix "cryptography_symmetric_encrypt" function and make sure we correctly handle unicode (utf-8) data. Previously we didn't correctly convert string to bytes before padding data which resulted in incorrectly padded data (incorrect length) so the actual encryption step failed. Thanks to @dswebbthg for reporting this issue. Resolves #4513. --- st2common/st2common/util/crypto.py | 14 ++++++------ st2common/tests/unit/test_crypto_utils.py | 26 +++++++++++++++++++++++ 2 files changed, 34 insertions(+), 6 deletions(-) diff --git a/st2common/st2common/util/crypto.py b/st2common/st2common/util/crypto.py index e6b0ecfefd..834a6e88db 100644 --- a/st2common/st2common/util/crypto.py +++ b/st2common/st2common/util/crypto.py @@ -216,8 +216,14 @@ def cryptography_symmetric_encrypt(encrypt_key, plaintext): assert isinstance(aes_key_bytes, six.binary_type) assert isinstance(hmac_key_bytes, six.binary_type) + if isinstance(plaintext, (six.text_type, six.string_types)): + # Convert data to bytes + data = plaintext.encode('utf-8') + else: + data = plaintext + # Pad data - data = pkcs5_pad(plaintext) + data = pkcs5_pad(data) # Generate IV iv_bytes = os.urandom(KEYCZAR_AES_BLOCK_SIZE) @@ -230,10 +236,6 @@ def cryptography_symmetric_encrypt(encrypt_key, plaintext): # bytes) so we simply add 5 0's header_bytes = b'00000' - if isinstance(data, (six.text_type, six.string_types)): - # Convert data to bytes - data = data.encode('utf-8') - ciphertext_bytes = 
encryptor.update(data) + encryptor.finalize() msg_bytes = header_bytes + iv_bytes + ciphertext_bytes @@ -368,7 +370,7 @@ def pkcs5_pad(data): Pad data using PKCS5 """ pad = KEYCZAR_AES_BLOCK_SIZE - len(data) % KEYCZAR_AES_BLOCK_SIZE - data = data + pad * chr(pad) + data = data + pad * chr(pad).encode('utf-8') return data diff --git a/st2common/tests/unit/test_crypto_utils.py b/st2common/tests/unit/test_crypto_utils.py index 54ed22d9b3..2bc62835d8 100644 --- a/st2common/tests/unit/test_crypto_utils.py +++ b/st2common/tests/unit/test_crypto_utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. @@ -53,6 +54,31 @@ def setUpClass(cls): super(CryptoUtilsTestCase, cls).setUpClass() CryptoUtilsTestCase.test_crypto_key = AESKey.generate() + def test_symmetric_encrypt_decrypt_short_string_needs_to_be_padded(self): + original = u'a' + crypto = symmetric_encrypt(CryptoUtilsTestCase.test_crypto_key, original) + plain = symmetric_decrypt(CryptoUtilsTestCase.test_crypto_key, crypto) + self.assertEqual(plain, original) + + def test_symmetric_encrypt_decrypt_utf8_character(self): + values = [ + u'£', + u'£££', + u'££££££', + u'č š hello đ č p ž Ž', + u'hello 💩', + u'💩💩💩💩💩' + u'💩💩💩', + u'💩😁' + ] + + for index, original in enumerate(values): + crypto = symmetric_encrypt(CryptoUtilsTestCase.test_crypto_key, original) + plain = symmetric_decrypt(CryptoUtilsTestCase.test_crypto_key, crypto) + self.assertEqual(plain, original) + + self.assertEqual(index, (len(values) - 1)) + def test_symmetric_encrypt_decrypt(self): original = 'secret' crypto = symmetric_encrypt(CryptoUtilsTestCase.test_crypto_key, original) From 430943e89cbd20a55ad834290ed5671cecca2a6c Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Thu, 31 Jan 2019 10:34:47 +0100 Subject: [PATCH 025/105] Add changelog entry for 
#4528. --- CHANGELOG.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index b317533768..a6f4f92eb0 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -23,7 +23,11 @@ Fixed ``st2 action-alias execute 'pack install xxx'``. #4511 Contributed by Nick Maludy (Encore Technologies) - +* Fix datastore value encryption and make sure it also works correctly for unicode (non-ascii) + values. + + Reported by @dswebbthg, @nickbaum. (bug fix) #4513 #4527 #4528 + 2.10.0 - December 13, 2018 -------------------------- From 6684b96ea2ce06ffcb79c71b2f08feb3d1635a44 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 4 Feb 2019 12:38:22 +0100 Subject: [PATCH 026/105] Fix a bug in Orquesta runner and workflow engine and make sure we add "pack" attribute to the execution context. If we don't add that, {{ config_context }} won't work correctly for default parameter values for action executions which are scheduled as part of an Orquesta workflow. --- .../orquesta_runner/orquesta_runner/orquesta_runner.py | 3 ++- st2common/st2common/services/workflows.py | 7 ++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/contrib/runners/orquesta_runner/orquesta_runner/orquesta_runner.py b/contrib/runners/orquesta_runner/orquesta_runner/orquesta_runner.py index b241aad578..5152b212bb 100644 --- a/contrib/runners/orquesta_runner/orquesta_runner/orquesta_runner.py +++ b/contrib/runners/orquesta_runner/orquesta_runner/orquesta_runner.py @@ -68,7 +68,8 @@ def _construct_st2_context(self): 'st2': { 'action_execution_id': str(self.execution.id), 'api_url': api_util.get_full_public_api_url(), - 'user': self.execution.context.get('user', cfg.CONF.system_user.user) + 'user': self.execution.context.get('user', cfg.CONF.system_user.user), + 'pack': self.execution.context.get('pack', None) } } diff --git a/st2common/st2common/services/workflows.py b/st2common/st2common/services/workflows.py index ed98e9697b..65a2a21040 100644 --- 
a/st2common/st2common/services/workflows.py +++ b/st2common/st2common/services/workflows.py @@ -537,6 +537,7 @@ def request_action_execution(wf_ex_db, task_ex_db, st2_ctx, ac_ex_req, delay=Non # Set context for the action execution. ac_ex_ctx = { + 'pack': st2_ctx.get('pack'), 'user': st2_ctx.get('user'), 'parent': st2_ctx, 'orquesta': { @@ -887,7 +888,11 @@ def request_next_tasks(wf_ex_db, task_ex_id=None): # Pass down appropriate st2 context to the task and action execution(s). root_st2_ctx = wf_ex_db.context.get('st2', {}) - st2_ctx = {'execution_id': wf_ac_ex_id, 'user': root_st2_ctx.get('user')} + st2_ctx = { + 'execution_id': wf_ac_ex_id, + 'user': root_st2_ctx.get('user'), + 'pack': root_st2_ctx.get('pack') + } if root_st2_ctx.get('api_user'): st2_ctx['api_user'] = root_st2_ctx.get('api_user') From bf7763a686eb6c910fe44462e7156f6aa8cdd00e Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 4 Feb 2019 12:48:49 +0100 Subject: [PATCH 027/105] Make the log message more useful when debugging. --- st2common/st2common/util/config_loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/st2common/st2common/util/config_loader.py b/st2common/st2common/util/config_loader.py index b23f3832e0..71966a05b4 100644 --- a/st2common/st2common/util/config_loader.py +++ b/st2common/st2common/util/config_loader.py @@ -224,7 +224,7 @@ def _get_datastore_value_for_expression(self, key, value, config_schema_item=Non def get_config(pack, user): """Returns config for given pack and user. """ - LOG.debug('Attempting to get config') + LOG.debug('Attempting to get config for pack "%s" and user "%s"' % (pack, user)) if pack and user: LOG.debug('Pack and user found. 
Loading config.') config_loader = ContentPackConfigLoader( From 61cd45ab74186de838dba53c9df2861fed7afb2f Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 4 Feb 2019 15:42:21 +0100 Subject: [PATCH 028/105] For consistency between tests and actual end to end runner code, make sure also also add "pack" attribute to LiveActionDB.context attribute inside "create_request" function. We already add it in RunnerContainer.dispatch(), but that's not the best place since some tests don't call that method so that value won't be present. --- st2common/st2common/services/action.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/st2common/st2common/services/action.py b/st2common/st2common/services/action.py index c7ffc8508b..19dd95157e 100644 --- a/st2common/st2common/services/action.py +++ b/st2common/st2common/services/action.py @@ -76,11 +76,11 @@ def create_request(liveaction, action_db=None, runnertype_db=None): # Use the user context from the parent action execution. Subtasks in a workflow # action can be invoked by a system user and so we want to use the user context # from the original workflow action. - parent_context = executions.get_parent_context(liveaction) - if parent_context: - parent_user = parent_context.get('user', None) - if parent_user: - liveaction.context['user'] = parent_user + parent_context = executions.get_parent_context(liveaction) or {} + parent_user = parent_context.get('user', None) + + if parent_user: + liveaction.context['user'] = parent_user # Validate action if not action_db: @@ -97,6 +97,9 @@ def create_request(liveaction, action_db=None, runnertype_db=None): if not hasattr(liveaction, 'parameters'): liveaction.parameters = dict() + # For consistency add pack to the context here in addition to RunnerContainer.dispatch() method + liveaction.context['pack'] = action_db.pack + # Validate action parameters. 
schema = util_schema.get_schema_for_action_parameters(action_db, runnertype_db) validator = util_schema.get_validator() From c36b3b9173a1eda02d84e97dea9a28d990a12c2f Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 4 Feb 2019 15:53:43 +0100 Subject: [PATCH 029/105] Update orquesta tests to verify that pack is indeed present in the execution context. --- contrib/runners/orquesta_runner/tests/unit/test_basic.py | 6 +++++- contrib/runners/orquesta_runner/tests/unit/test_context.py | 3 ++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/contrib/runners/orquesta_runner/tests/unit/test_basic.py b/contrib/runners/orquesta_runner/tests/unit/test_basic.py index 1814588394..7855f5c112 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_basic.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_basic.py @@ -138,7 +138,11 @@ def test_run_workflow(self): 'workflow_execution_id': str(wf_ex_db.id), 'action_execution_id': str(ac_ex_db.id), 'api_url': 'http://127.0.0.1/v1', - 'user': username + 'user': username, + 'pack': 'orquesta_tests' + }, + 'parent': { + 'pack': 'orquesta_tests' } } diff --git a/contrib/runners/orquesta_runner/tests/unit/test_context.py b/contrib/runners/orquesta_runner/tests/unit/test_context.py index 9d8c3fa5d1..ed2a75822c 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_context.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_context.py @@ -115,7 +115,8 @@ def test_runtime_context(self): expected_st2_ctx = { 'action_execution_id': str(ac_ex_db.id), 'api_url': 'http://127.0.0.1/v1', - 'user': 'stanley' + 'user': 'stanley', + 'pack': 'orquesta_tests' } expected_st2_ctx_with_wf_ex_id = copy.deepcopy(expected_st2_ctx) From a2685d951fd0fb808f66f8296dc68ac524b80f7f Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 4 Feb 2019 16:17:52 +0100 Subject: [PATCH 030/105] Add orquesta test which verifies "{{ config_context }}" notation for default parameter values works for executions scheduled via Orquesta 
workflow. --- .../orquesta_runner/tests/unit/test_basic.py | 34 +++++++++++++++++++ .../actions/config-context-action.yaml | 9 +++++ .../actions/config-context.yaml | 7 ++++ .../actions/workflows/config-context.yaml | 13 +++++++ .../packs/orquesta_tests/config.schema.yaml | 6 ++++ 5 files changed, 69 insertions(+) create mode 100644 st2tests/st2tests/fixtures/packs/orquesta_tests/actions/config-context-action.yaml create mode 100644 st2tests/st2tests/fixtures/packs/orquesta_tests/actions/config-context.yaml create mode 100644 st2tests/st2tests/fixtures/packs/orquesta_tests/actions/workflows/config-context.yaml create mode 100644 st2tests/st2tests/fixtures/packs/orquesta_tests/config.schema.yaml diff --git a/contrib/runners/orquesta_runner/tests/unit/test_basic.py b/contrib/runners/orquesta_runner/tests/unit/test_basic.py index 7855f5c112..4f1febebfd 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_basic.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_basic.py @@ -298,6 +298,40 @@ def test_run_workflow_with_unicode_input(self): self.assertDictEqual(lv_ac_db.result, expected_result) self.assertDictEqual(ac_ex_db.result, expected_result) + def test_run_workflow_action_config_context(self): + wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, 'config-context.yaml') + wf_input = {} + lv_ac_db = lv_db_models.LiveActionDB(action=wf_meta['name'], parameters=wf_input) + lv_ac_db, ac_ex_db = ac_svc.request(lv_ac_db) + + # Assert action execution is running. + lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id)) + self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING, lv_ac_db.result) + wf_ex_db = wf_db_access.WorkflowExecution.query(action_execution=str(ac_ex_db.id))[0] + self.assertEqual(wf_ex_db.status, ac_const.LIVEACTION_STATUS_RUNNING) + + # Assert task1 is already completed. 
+ query_filters = {'workflow_execution': str(wf_ex_db.id), 'task_id': 'task1'} + tk1_ex_db = wf_db_access.TaskExecution.query(**query_filters)[0] + tk1_ac_ex_db = ex_db_access.ActionExecution.query(task_execution=str(tk1_ex_db.id))[0] + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction['id']) + self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) + self.assertTrue(wf_svc.is_action_execution_under_workflow_context(tk1_ac_ex_db)) + + # Manually handle action execution completion. + wf_svc.handle_action_execution_completion(tk1_ac_ex_db) + + # Assert workflow is completed. + wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id) + self.assertEqual(wf_ex_db.status, wf_states.SUCCEEDED) + lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id)) + self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) + ac_ex_db = ex_db_access.ActionExecution.get_by_id(str(ac_ex_db.id)) + self.assertEqual(ac_ex_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) + + # Verify config_context works + self.assertEqual(wf_ex_db.output, {'msg': 'value of config key a'}) + def test_run_workflow_with_action_less_tasks(self): wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, 'action-less-tasks.yaml') wf_input = {'name': 'Thanos'} diff --git a/st2tests/st2tests/fixtures/packs/orquesta_tests/actions/config-context-action.yaml b/st2tests/st2tests/fixtures/packs/orquesta_tests/actions/config-context-action.yaml new file mode 100644 index 0000000000..1e1dc53aa1 --- /dev/null +++ b/st2tests/st2tests/fixtures/packs/orquesta_tests/actions/config-context-action.yaml @@ -0,0 +1,9 @@ +--- + name: "config-context-action" + runner_type: "local-shell-cmd" + enabled: true + entry_point: "" + parameters: + cmd: + immutable: true + default: "echo \"{{ config_context.config_key_a }}\"" diff --git a/st2tests/st2tests/fixtures/packs/orquesta_tests/actions/config-context.yaml 
b/st2tests/st2tests/fixtures/packs/orquesta_tests/actions/config-context.yaml new file mode 100644 index 0000000000..8c1c5dca72 --- /dev/null +++ b/st2tests/st2tests/fixtures/packs/orquesta_tests/actions/config-context.yaml @@ -0,0 +1,7 @@ +--- +name: config-context +description: Workflow which tests {{ config_context.foo }} notation works default parameter values for workflow actions. +pack: orquesta_tests +runner_type: orquesta +entry_point: workflows/config-context.yaml +enabled: true diff --git a/st2tests/st2tests/fixtures/packs/orquesta_tests/actions/workflows/config-context.yaml b/st2tests/st2tests/fixtures/packs/orquesta_tests/actions/workflows/config-context.yaml new file mode 100644 index 0000000000..796b80e3ff --- /dev/null +++ b/st2tests/st2tests/fixtures/packs/orquesta_tests/actions/workflows/config-context.yaml @@ -0,0 +1,13 @@ +version: 1.0 + +description: Workflow which tests {{ config_context }} functionality. + +output: + - msg: <% ctx().message %> + +tasks: + task1: + action: orquesta_tests.config-context-action + next: + - when: <% succeeded() %> + publish: message=<% result().stdout %> diff --git a/st2tests/st2tests/fixtures/packs/orquesta_tests/config.schema.yaml b/st2tests/st2tests/fixtures/packs/orquesta_tests/config.schema.yaml new file mode 100644 index 0000000000..35f7289000 --- /dev/null +++ b/st2tests/st2tests/fixtures/packs/orquesta_tests/config.schema.yaml @@ -0,0 +1,6 @@ +--- +config_key_a: + description: "Sample config key." + type: "string" + default: "value of config key a" + required: true From 3e127c4e8e863f24d45153407a965daad7ebb6eb Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 4 Feb 2019 14:57:28 +0100 Subject: [PATCH 031/105] Fix a bug with "serialize_positional_argument" not correctly handling unicode values. This would cause actions which rely on positions arguments (e.g. core.sendmail) not to work when an unicode value was provided. Reported by @johandahlberg. 
--- st2common/st2common/util/action_db.py | 15 ++++++++++--- st2common/tests/unit/test_action_db_utils.py | 22 ++++++++++++++++++++ 2 files changed, 34 insertions(+), 3 deletions(-) diff --git a/st2common/st2common/util/action_db.py b/st2common/st2common/util/action_db.py index 7bd2c33a6c..bb5983374c 100644 --- a/st2common/st2common/util/action_db.py +++ b/st2common/st2common/util/action_db.py @@ -268,7 +268,16 @@ def serialize_positional_argument(argument_type, argument_value): serialized). """ if argument_type in ['string', 'number', 'float']: - argument_value = str(argument_value) if argument_value else '' + if isinstance(argument_value, (int, float)): + argument_value = str(argument_value) + + if not argument_value: + argument_value = '' + return argument_value + + if not isinstance(argument_value, six.text_type): + # cast string non-unicode values to unicode + argument_value = argument_value.decode('utf-8') elif argument_type == 'boolean': # Booleans are serialized as string "1" and "0" if argument_value is not None: @@ -285,8 +294,8 @@ def serialize_positional_argument(argument_type, argument_value): # None / null is serialized as en empty string argument_value = '' else: - # Other values are simply cast to strings - argument_value = str(argument_value) if argument_value else '' + # Other values are simply cast to unicode string + argument_value = six.text_type(argument_value) if argument_value else '' return argument_value diff --git a/st2common/tests/unit/test_action_db_utils.py b/st2common/tests/unit/test_action_db_utils.py index 3132a2ebd7..061709d61a 100644 --- a/st2common/tests/unit/test_action_db_utils.py +++ b/st2common/tests/unit/test_action_db_utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. 
@@ -399,6 +400,27 @@ def test_get_args(self): self.assertListEqual(pos_args, expected_pos_args, 'Positional args not parsed / serialized correctly.') + # Test unicode values + params = { + 'actionstr': 'bar č š hello đ č p ž Ž a 💩😁', + 'actionint': 20, + 'runnerint': 555 + } + expected_pos_args = [ + '20', + '', + u'bar č š hello đ č p ž Ž a 💩😁', + '', + '', + '', + '' + ] + pos_args, named_args = action_db_utils.get_args(params, ActionDBUtilsTestCase.action_db) + self.assertListEqual(pos_args, expected_pos_args, 'Positional args not parsed correctly.') + self.assertTrue('actionint' not in named_args) + self.assertTrue('actionstr' not in named_args) + self.assertEqual(named_args.get('runnerint'), 555) + @classmethod def _setup_test_models(cls): ActionDBUtilsTestCase.setup_runner() From 7700b2adf956a3438f0543c99938bffcae219ff5 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 4 Feb 2019 15:21:59 +0100 Subject: [PATCH 032/105] Add a changelog entry for #4533. --- CHANGELOG.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a6f4f92eb0..290cdcf493 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -27,6 +27,13 @@ Fixed values. Reported by @dswebbthg, @nickbaum. (bug fix) #4513 #4527 #4528 +* Fix a bug with action positional parameter serialization used in local and remote script runner + not working correctly with non-ascii (unicode) values. + + This would prevent actions such as ``core.sendmail`` which utilize positional parameters from + working correctly when a unicode value was provided. + + Reported by @johandahlberg (bug fix) #4533 2.10.0 - December 13, 2018 -------------------------- From 7298d8112a8c3c995699d517103724ca2e11f1dc Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 4 Feb 2019 15:24:01 +0100 Subject: [PATCH 033/105] Return early on None value. 
--- st2common/st2common/util/action_db.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/st2common/st2common/util/action_db.py b/st2common/st2common/util/action_db.py index bb5983374c..96969492e6 100644 --- a/st2common/st2common/util/action_db.py +++ b/st2common/st2common/util/action_db.py @@ -268,13 +268,13 @@ def serialize_positional_argument(argument_type, argument_value): serialized). """ if argument_type in ['string', 'number', 'float']: + if argument_value is None: + argument_value = six.text_type('') + return argument_value + if isinstance(argument_value, (int, float)): argument_value = str(argument_value) - if not argument_value: - argument_value = '' - return argument_value - if not isinstance(argument_value, six.text_type): # cast string non-unicode values to unicode argument_value = argument_value.decode('utf-8') From 5e834b57a44b766df8103b2edc61a7f50ac93b91 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 5 Feb 2019 11:12:16 +0100 Subject: [PATCH 034/105] Try forcing UTF-8 charset in the email. --- contrib/core/actions/send_mail/send_mail | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/core/actions/send_mail/send_mail b/contrib/core/actions/send_mail/send_mail index 1d9bdbdc14..3f45cac00f 100755 --- a/contrib/core/actions/send_mail/send_mail +++ b/contrib/core/actions/send_mail/send_mail @@ -92,7 +92,7 @@ EOF else cat < Date: Tue, 5 Feb 2019 11:21:53 +0100 Subject: [PATCH 035/105] Update the parameter documentation. 
--- contrib/core/actions/sendmail.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/core/actions/sendmail.yaml b/contrib/core/actions/sendmail.yaml index 9f1582d313..7ff588e8a9 100644 --- a/contrib/core/actions/sendmail.yaml +++ b/contrib/core/actions/sendmail.yaml @@ -29,7 +29,7 @@ parameters: default: True content_type: type: string - description: Content type of message to be sent + description: Content type of message to be sent without the charset (charset is set to UTF-8 inside the script). default: "text/html" position: 4 body: From 33c0f39afd75bc4fbc8fd20fd0ce5c157446b197 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 5 Feb 2019 11:22:56 +0100 Subject: [PATCH 036/105] Add changelog entry. --- CHANGELOG.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 290cdcf493..1a48853d93 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -33,6 +33,10 @@ Fixed This would prevent actions such as ``core.sendmail`` which utilize positional parameters from working correctly when a unicode value was provided. + Reported by @johandahlberg (bug fix) #4533 +* Fix ``core.sendmail`` action so it specifies ``charset=UTF-8`` in the ``Content-Type`` email + header. This way it works correctly when an email body contains unicode data. 
+ Reported by @johandahlberg (bug fix) #4533 2.10.0 - December 13, 2018 From 9bdd32fa4e9507b7758033a1b4021ed73a400ec4 Mon Sep 17 00:00:00 2001 From: Johan Dahlberg Date: Tue, 5 Feb 2019 13:45:21 +0100 Subject: [PATCH 037/105] UTF-8 encode both body and subject of email --- contrib/core/actions/send_mail/send_mail | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/contrib/core/actions/send_mail/send_mail b/contrib/core/actions/send_mail/send_mail index 3f45cac00f..c75478c6bc 100755 --- a/contrib/core/actions/send_mail/send_mail +++ b/contrib/core/actions/send_mail/send_mail @@ -52,7 +52,7 @@ if [[ -z $trimmed && $SEND_EMPTY_BODY -eq 1 ]] || [[ -n $trimmed ]]; then cat < Date: Tue, 5 Feb 2019 14:56:13 +0100 Subject: [PATCH 038/105] base64 encode subject --- contrib/core/actions/send_mail/send_mail | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/core/actions/send_mail/send_mail b/contrib/core/actions/send_mail/send_mail index c75478c6bc..c7e756b777 100755 --- a/contrib/core/actions/send_mail/send_mail +++ b/contrib/core/actions/send_mail/send_mail @@ -52,7 +52,7 @@ if [[ -z $trimmed && $SEND_EMPTY_BODY -eq 1 ]] || [[ -n $trimmed ]]; then cat < Date: Tue, 5 Feb 2019 15:01:59 +0100 Subject: [PATCH 039/105] Update changelog entry. --- CHANGELOG.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 1a48853d93..77314bae7e 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -35,9 +35,9 @@ Fixed Reported by @johandahlberg (bug fix) #4533 * Fix ``core.sendmail`` action so it specifies ``charset=UTF-8`` in the ``Content-Type`` email - header. This way it works correctly when an email body contains unicode data. - - Reported by @johandahlberg (bug fix) #4533 + header. This way it works correctly when an email subject and / or body contains unicode data. 
+ + Reported by @johandahlberg (bug fix) #4533 4534 2.10.0 - December 13, 2018 -------------------------- From e6a9e93897eb9cedd8b2507b018b6c08b0815ac3 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 5 Feb 2019 17:34:14 +0100 Subject: [PATCH 040/105] Add some unit / integration tests for sendmail action. --- contrib/core/actions/send_mail/send_mail | 25 +- contrib/core/actions/sendmail.yaml | 21 +- contrib/core/requirements-tests.txt | 1 + contrib/core/tests/test_action_sendmail.py | 271 +++++++++++++++++++++ 4 files changed, 305 insertions(+), 13 deletions(-) create mode 100644 contrib/core/requirements-tests.txt create mode 100644 contrib/core/tests/test_action_sendmail.py diff --git a/contrib/core/actions/send_mail/send_mail b/contrib/core/actions/send_mail/send_mail index c7e756b777..6011c43d49 100755 --- a/contrib/core/actions/send_mail/send_mail +++ b/contrib/core/actions/send_mail/send_mail @@ -3,14 +3,27 @@ HOSTNAME=$(hostname -f) LINE_BREAK="" -SENDMAIL=`which sendmail` -if [ $? -ne 0 ]; then - echo "Unable to find sendmail binary in PATH" >&2 - exit 2 +FOOTER="This message was generated by StackStorm action `basename $0` running on `hostname`" + +# Allow user to provide a custom sendmail binary for more flexibility and easier +# testing +SENDMAIL_BINARY=$1 + +if [ "${SENDMAIL_BINARY}" = "None" ]; then + # If path to the sendmail binary is not provided, try to find one in $PATH + SENDMAIL=`which sendmail` + + if [ $? 
-ne 0 ]; then + echo "Unable to find sendmail binary in PATH" >&2 + exit 2 + fi + + MAIL="$SENDMAIL -t" +else + MAIL="${SENDMAIL_BINARY}" fi +shift -MAIL="$SENDMAIL -t" -FOOTER="This message was generated by StackStorm action `basename $0` running on `hostname`" if [[ $1 =~ '@' ]]; then FROM=$1 else diff --git a/contrib/core/actions/sendmail.yaml b/contrib/core/actions/sendmail.yaml index 7ff588e8a9..3d67275983 100644 --- a/contrib/core/actions/sendmail.yaml +++ b/contrib/core/actions/sendmail.yaml @@ -1,29 +1,36 @@ --- name: sendmail +pack: core description: This sends an email entry_point: send_mail/send_mail runner_type: "local-shell-script" enabled: true parameters: + sendmail_binary: + description: "Optional path to the sendmail binary. If not provided, it uses a system default one." + position: 0 + required: false + type: "string" + default: None from: description: Sender email address. - position: 0 + position: 1 required: false type: string default: "stanley" to: description: Recipient email address. - position: 1 + position: 2 required: true type: string subject: description: Subject of the email. - position: 2 + position: 3 required: true type: string send_empty_body: description: Send a message even if the body is empty. - position: 3 + position: 4 required: false type: boolean default: True @@ -31,16 +38,16 @@ parameters: type: string description: Content type of message to be sent without the charset (charset is set to UTF-8 inside the script). default: "text/html" - position: 4 + position: 5 body: description: Body of the email. - position: 5 + position: 6 required: true type: string sudo: immutable: true attachments: description: Array of attachment file paths, comma-delimited. 
- position: 6 + position: 7 required: false type: "string" diff --git a/contrib/core/requirements-tests.txt b/contrib/core/requirements-tests.txt new file mode 100644 index 0000000000..1dfe969c80 --- /dev/null +++ b/contrib/core/requirements-tests.txt @@ -0,0 +1 @@ +mail-parser>=3.9.1,<4.0 diff --git a/contrib/core/tests/test_action_sendmail.py b/contrib/core/tests/test_action_sendmail.py new file mode 100644 index 0000000000..dac74ef7c9 --- /dev/null +++ b/contrib/core/tests/test_action_sendmail.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- +# Licensed to the StackStorm, Inc ('StackStorm') under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import uuid +import base64 +import tempfile + +import six +import mock +import mailparser + +from st2common.constants import action as action_constants + +from st2tests.fixturesloader import FixturesLoader +from st2tests.base import RunnerTestCase +from st2tests.base import CleanDbTestCase +from st2tests.base import CleanFilesTestCase + +from local_runner.local_shell_script_runner import LocalShellScriptRunner + +__all__ = [ + 'SendmailActionTestCase' +] + +MOCK_EXECUTION = mock.Mock() +MOCK_EXECUTION.id = '598dbf0c0640fd54bffc688b' + + +class SendmailActionTestCase(RunnerTestCase, CleanDbTestCase, CleanFilesTestCase): + fixtures_loader = FixturesLoader() + + def test_sendmail_default_text_html_content_type(self): + action_parameters = { + 'sendmail_binary': 'cat', + + 'from': 'from.user@example.tld1', + 'to': 'to.user@example.tld2', + 'subject': 'this is subject 1', + 'send_empty_body': False, + 'content_type': 'text/html', + 'body': 'Hello there html.', + 'attachments': '' + } + + expected_body = ('Hello there html.\n' + '

\n' + 'This message was generated by StackStorm action ' + 'send_mail running on localhost') + + status, _, email_data, message = self._run_action(action_parameters=action_parameters) + self.assertEquals(status, action_constants.LIVEACTION_STATUS_SUCCEEDED) + + # Verify subject contains utf-8 charset and is base64 encoded + self.assertTrue('SUBJECT: =?UTF-8?B?' in email_data) + + self.assertEqual(message.to[0][1], action_parameters['to']) + self.assertEqual(message.from_[0][1], action_parameters['from']) + self.assertEqual(message.subject, action_parameters['subject']) + self.assertEqual(message.body, expected_body) + self.assertEqual(message.content_type, 'text/html; charset=UTF-8') + + def test_sendmail_text_plain_content_type(self): + action_parameters = { + 'sendmail_binary': 'cat', + + 'from': 'from.user@example.tld1', + 'to': 'to.user@example.tld2', + 'subject': 'this is subject 2', + 'send_empty_body': False, + 'content_type': 'text/plain', + 'body': 'Hello there plain.', + 'attachments': '' + } + + expected_body = ('Hello there plain.\n\n' + 'This message was generated by StackStorm action ' + 'send_mail running on localhost') + + status, _, email_data, message = self._run_action(action_parameters=action_parameters) + self.assertEquals(status, action_constants.LIVEACTION_STATUS_SUCCEEDED) + + # Verify subject contains utf-8 charset and is base64 encoded + self.assertTrue('SUBJECT: =?UTF-8?B?' in email_data) + + self.assertEqual(message.to[0][1], action_parameters['to']) + self.assertEqual(message.from_[0][1], action_parameters['from']) + self.assertEqual(message.subject, action_parameters['subject']) + self.assertEqual(message.body, expected_body) + self.assertEqual(message.content_type, 'text/plain; charset=UTF-8') + + def test_sendmail_utf8_subject_and_body(self): + # 1. 
tex/html (typo: should be text/html) + action_parameters = { + 'sendmail_binary': 'cat', + + 'from': 'from.user@example.tld1', + 'to': 'to.user@example.tld2', + 'subject': u'Å unicode subject 😃😃', + 'send_empty_body': False, + 'content_type': 'text/html', + 'body': u'Hello there 😃😃.', + 'attachments': '' + } + + if six.PY2: + expected_body = (u'Hello there 😃😃.\n' + '

\n' + u'This message was generated by StackStorm action ' + u'send_mail running on localhost') + else: + expected_body = (u'Hello there \\U0001f603\\U0001f603.\n' + '

\n' + u'This message was generated by StackStorm action ' + u'send_mail running on localhost') + + status, _, email_data, message = self._run_action(action_parameters=action_parameters) + self.assertEquals(status, action_constants.LIVEACTION_STATUS_SUCCEEDED) + + # Verify subject contains utf-8 charset and is base64 encoded + self.assertTrue('SUBJECT: =?UTF-8?B?' in email_data) + + self.assertEqual(message.to[0][1], action_parameters['to']) + self.assertEqual(message.from_[0][1], action_parameters['from']) + self.assertEqual(message.subject, action_parameters['subject']) + self.assertEqual(message.body, expected_body) + self.assertEqual(message.content_type, 'text/html; charset=UTF-8') + + # 2. text/plain + action_parameters = { + 'sendmail_binary': 'cat', + + 'from': 'from.user@example.tld1', + 'to': 'to.user@example.tld2', + 'subject': u'Å unicode subject 😃😃', + 'send_empty_body': False, + 'content_type': 'text/plain', + 'body': u'Hello there 😃😃.', + 'attachments': '' + } + + if six.PY2: + expected_body = (u'Hello there 😃😃.\n\n' + u'This message was generated by StackStorm action ' + u'send_mail running on localhost') + else: + expected_body = (u'Hello there \\U0001f603\\U0001f603.\n\n' + u'This message was generated by StackStorm action ' + u'send_mail running on localhost') + + status, _, email_data, message = self._run_action(action_parameters=action_parameters) + self.assertEquals(status, action_constants.LIVEACTION_STATUS_SUCCEEDED) + + self.assertEqual(message.to[0][1], action_parameters['to']) + self.assertEqual(message.from_[0][1], action_parameters['from']) + self.assertEqual(message.subject, action_parameters['subject']) + self.assertEqual(message.body, expected_body) + self.assertEqual(message.content_type, 'text/plain; charset=UTF-8') + + def test_sendmail_with_attachments(self): + _, path_1 = tempfile.mkstemp() + _, path_2 = tempfile.mkstemp() + os.chmod(path_1, 0o755) + os.chmod(path_2, 0o755) + + self.to_delete_files.append(path_1) + 
self.to_delete_files.append(path_2) + + with open(path_1, 'w') as fp: + fp.write('content 1') + + with open(path_2, 'w') as fp: + fp.write('content 2') + + action_parameters = { + 'sendmail_binary': 'cat', + + 'from': 'from.user@example.tld1', + 'to': 'to.user@example.tld2', + 'subject': 'this is email with attachments', + 'send_empty_body': False, + 'content_type': 'text/plain', + 'body': 'Hello there plain.', + 'attachments': '%s,%s' % (path_1, path_2) + } + + expected_body = ('Hello there plain.\n\n' + 'This message was generated by StackStorm action ' + 'send_mail running on localhost') + + status, _, email_data, message = self._run_action(action_parameters=action_parameters) + self.assertEquals(status, action_constants.LIVEACTION_STATUS_SUCCEEDED) + + # Verify subject contains utf-8 charset and is base64 encoded + self.assertTrue('SUBJECT: =?UTF-8?B?' in email_data) + + self.assertEqual(message.to[0][1], action_parameters['to']) + self.assertEqual(message.from_[0][1], action_parameters['from']) + self.assertEqual(message.subject, action_parameters['subject']) + self.assertEqual(message.body, expected_body) + self.assertEqual(message.content_type, + 'multipart/mixed; boundary="ZZ_/afg6432dfgkl.94531q"') + + # There should be 3 message parts - 2 for attachments, one for body + self.assertEqual(email_data.count('--ZZ_/afg6432dfgkl.94531q'), 3) + + # There should be 2 attachments + self.assertEqual(email_data.count('Content-Transfer-Encoding: base64'), 2) + self.assertTrue(base64.b64encode(b'content 1').decode('utf-8') in email_data) + self.assertTrue(base64.b64encode(b'content 2').decode('utf-8') in email_data) + + def _run_action(self, action_parameters): + """ + Run action with the provided action parameters, return status output and + parse the output email data. 
+ """ + models = self.fixtures_loader.load_models( + fixtures_pack='packs/core', fixtures_dict={'actions': ['sendmail.yaml']}) + action_db = models['actions']['sendmail.yaml'] + entry_point = self.fixtures_loader.get_fixture_file_path_abs( + 'packs/core', 'actions', 'send_mail/send_mail') + + runner = self._get_runner(action_db, entry_point=entry_point) + runner.pre_run() + status, result, _ = runner.run(action_parameters) + runner.post_run(status, result) + + # Remove footer added by the action which is not part of raw email data and parse + # the message + if 'stdout' in result: + email_data = result['stdout'] + email_data = email_data.split('\n')[:-2] + email_data = '\n'.join(email_data) + + if six.PY2 and isinstance(email_data, six.text_type): + email_data = email_data.encode('utf-8') + + message = mailparser.parse_from_string(email_data) + else: + email_data = None + message = None + + return (status, result, email_data, message) + + def _get_runner(self, action_db, entry_point): + runner = LocalShellScriptRunner(uuid.uuid4().hex) + runner.execution = MOCK_EXECUTION + runner.action = action_db + runner.action_name = action_db.name + runner.liveaction_id = uuid.uuid4().hex + runner.entry_point = entry_point + runner.runner_parameters = {} + runner.context = dict() + runner.callback = dict() + runner.libs_dir_path = None + runner.auth_token = mock.Mock() + runner.auth_token.token = 'mock-token' + return runner From 26cd17ef12e53b168614d32ed95a46a5c63d8688 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 5 Feb 2019 18:19:50 +0100 Subject: [PATCH 041/105] We now also need stanley user for pack tests step. 
--- .travis.yml | 2 +- contrib/core/tests/test_action_sendmail.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index e4912a9e2a..c97a57ffb7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -88,7 +88,7 @@ before_install: install: - ./scripts/travis/install-requirements.sh - - if [ "${TASK}" = 'ci-unit' ] || [ "${TASK}" = 'ci-integration' ] || [ "${TASK}" = 'compilepy3 ci-py3-unit' ] || [ "${TASK}" = 'ci-py3-integration' ]; then sudo .circle/add-itest-user.sh; fi + - if [ "${TASK}" = 'ci-unit' ] || [ "${TASK}" = 'ci-integration' ] || [ "${TASK}" = 'ci-checks ci-packs-tests' ] || [ "${TASK}" = 'compilepy3 ci-py3-unit' ] || [ "${TASK}" = 'ci-py3-integration' ]; then sudo .circle/add-itest-user.sh; fi # Let's enable rabbitmqadmin # See https://github.com/messagebus/lapine/wiki/Testing-on-Travis. diff --git a/contrib/core/tests/test_action_sendmail.py b/contrib/core/tests/test_action_sendmail.py index dac74ef7c9..370056974e 100644 --- a/contrib/core/tests/test_action_sendmail.py +++ b/contrib/core/tests/test_action_sendmail.py @@ -41,6 +41,10 @@ class SendmailActionTestCase(RunnerTestCase, CleanDbTestCase, CleanFilesTestCase): + """ + NOTE: Those tests rely on stanley user being available on the system and having passwordless + sudo access. + """ fixtures_loader = FixturesLoader() def test_sendmail_default_text_html_content_type(self): From 6e50096d3a0f57f89311cabb719761ef52c8b68b Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 5 Feb 2019 18:22:25 +0100 Subject: [PATCH 042/105] Make test more robust and not depend on the static hostname value. 
--- contrib/core/tests/test_action_sendmail.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/contrib/core/tests/test_action_sendmail.py b/contrib/core/tests/test_action_sendmail.py index 370056974e..4d003aa9be 100644 --- a/contrib/core/tests/test_action_sendmail.py +++ b/contrib/core/tests/test_action_sendmail.py @@ -18,6 +18,7 @@ import uuid import base64 import tempfile +import socket import six import mock @@ -38,6 +39,7 @@ MOCK_EXECUTION = mock.Mock() MOCK_EXECUTION.id = '598dbf0c0640fd54bffc688b' +HOSTNAME = socket.gethostname() class SendmailActionTestCase(RunnerTestCase, CleanDbTestCase, CleanFilesTestCase): @@ -63,7 +65,7 @@ def test_sendmail_default_text_html_content_type(self): expected_body = ('Hello there html.\n' '

\n' 'This message was generated by StackStorm action ' - 'send_mail running on localhost') + 'send_mail running on %s' % (HOSTNAME)) status, _, email_data, message = self._run_action(action_parameters=action_parameters) self.assertEquals(status, action_constants.LIVEACTION_STATUS_SUCCEEDED) @@ -92,7 +94,7 @@ def test_sendmail_text_plain_content_type(self): expected_body = ('Hello there plain.\n\n' 'This message was generated by StackStorm action ' - 'send_mail running on localhost') + 'send_mail running on %s' % (HOSTNAME)) status, _, email_data, message = self._run_action(action_parameters=action_parameters) self.assertEquals(status, action_constants.LIVEACTION_STATUS_SUCCEEDED) @@ -122,14 +124,14 @@ def test_sendmail_utf8_subject_and_body(self): if six.PY2: expected_body = (u'Hello there 😃😃.\n' - '

\n' + u'

\n' u'This message was generated by StackStorm action ' - u'send_mail running on localhost') + u'send_mail running on %s' % (HOSTNAME)) else: expected_body = (u'Hello there \\U0001f603\\U0001f603.\n' - '

\n' + u'

\n' u'This message was generated by StackStorm action ' - u'send_mail running on localhost') + u'send_mail running on %s' % (HOSTNAME)) status, _, email_data, message = self._run_action(action_parameters=action_parameters) self.assertEquals(status, action_constants.LIVEACTION_STATUS_SUCCEEDED) @@ -159,11 +161,11 @@ def test_sendmail_utf8_subject_and_body(self): if six.PY2: expected_body = (u'Hello there 😃😃.\n\n' u'This message was generated by StackStorm action ' - u'send_mail running on localhost') + u'send_mail running on %s' % (HOSTNAME)) else: expected_body = (u'Hello there \\U0001f603\\U0001f603.\n\n' u'This message was generated by StackStorm action ' - u'send_mail running on localhost') + u'send_mail running on %s' % (HOSTNAME)) status, _, email_data, message = self._run_action(action_parameters=action_parameters) self.assertEquals(status, action_constants.LIVEACTION_STATUS_SUCCEEDED) @@ -203,7 +205,7 @@ def test_sendmail_with_attachments(self): expected_body = ('Hello there plain.\n\n' 'This message was generated by StackStorm action ' - 'send_mail running on localhost') + 'send_mail running on %s' % (HOSTNAME)) status, _, email_data, message = self._run_action(action_parameters=action_parameters) self.assertEquals(status, action_constants.LIVEACTION_STATUS_SUCCEEDED) From 4cdd07c7c5dec7685cfc7ab06928f8227bc118f9 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 5 Feb 2019 18:30:15 +0100 Subject: [PATCH 043/105] Fix out of date build job name. 
--- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index c97a57ffb7..1b5615860c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -55,7 +55,7 @@ matrix: name: "Lint Checks, Packs Tests (Python 2.7)" - env: TASK="compilepy3 ci-py3-unit" CACHE_NAME=py3 COMMAND_THRESHOLD=680 python: 3.6 - name: "Unit Tests (Python 3.6)" + name: "Unit Tests, Pack Tests (Python 3.6)" - env: TASK="ci-py3-integration" CACHE_NAME=py3 COMMAND_THRESHOLD=310 python: 3.6 name: "Integration Tests (Python 3.6)" From 4f3ab9ebea7802008c220989444fdcfdab5e8f0c Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 5 Feb 2019 19:57:24 +0100 Subject: [PATCH 044/105] Add some metric instrumentation to the notifer service code. --- st2actions/st2actions/notifier/notifier.py | 33 +++++++++++++++------- 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/st2actions/st2actions/notifier/notifier.py b/st2actions/st2actions/notifier/notifier.py index b41fb96d08..1743be3178 100644 --- a/st2actions/st2actions/notifier/notifier.py +++ b/st2actions/st2actions/notifier/notifier.py @@ -14,6 +14,7 @@ # limitations under the License. from __future__ import absolute_import + from datetime import datetime import json @@ -45,6 +46,7 @@ from st2common.constants.keyvalue import FULL_SYSTEM_SCOPE, SYSTEM_SCOPE, DATASTORE_PARENT_SCOPE from st2common.services.keyvalues import KeyValueLookup from st2common.transport.queues import NOTIFIER_ACTIONUPDATE_WORK_QUEUE +from st2common.metrics.base import CounterWithTimer __all__ = [ 'Notifier', @@ -73,6 +75,7 @@ def __init__(self, connection, queues, trigger_dispatcher=None): pack=ACTION_TRIGGER_TYPE['pack'], name=ACTION_TRIGGER_TYPE['name']) + @CounterWithTimer(key='notifier.action.executions') def process(self, execution_db): execution_id = str(execution_db.id) extra = {'execution': execution_db} @@ -86,12 +89,16 @@ def process(self, execution_db): # action execution will be applied by the workflow engine. 
A policy may affect the # final state of the action execution thereby impacting the state of the workflow. if not workflow_service.is_action_execution_under_workflow_context(execution_db): - policy_service.apply_post_run_policies(liveaction_db) + with CounterWithTimer(key='notifier.apply_post_run_policies'): + policy_service.apply_post_run_policies(liveaction_db) if liveaction_db.notify is not None: - self._post_notify_triggers(liveaction_db=liveaction_db, execution_db=execution_db) + with CounterWithTimer(key='notifier.notify_trigger.post'): + self._post_notify_triggers(liveaction_db=liveaction_db, + execution_db=execution_db) - self._post_generic_trigger(liveaction_db=liveaction_db, execution_db=execution_db) + with CounterWithTimer(key='notifier.generic_trigger.post'): + self._post_generic_trigger(liveaction_db=liveaction_db, execution_db=execution_db) def _get_execution_for_liveaction(self, liveaction): execution = ActionExecution.get(liveaction__id=str(liveaction.id)) @@ -142,13 +149,15 @@ def _post_notify_subsection_triggers(self, liveaction_db=None, execution_db=None ) try: - message = self._transform_message(message=message, - context=jinja_context) + with CounterWithTimer(key='notifier.transform_message'): + message = self._transform_message(message=message, + context=jinja_context) except: LOG.exception('Failed (Jinja) transforming `message`.') try: - data = self._transform_data(data=data, context=jinja_context) + with CounterWithTimer(key='notifier.transform_data'): + data = self._transform_data(data=data, context=jinja_context) except: LOG.exception('Failed (Jinja) transforming `data`.') @@ -187,8 +196,10 @@ def _post_notify_subsection_triggers(self, liveaction_db=None, execution_db=None payload['channel'] = route LOG.debug('POSTing %s for %s. 
Payload - %s.', NOTIFY_TRIGGER_TYPE['name'], liveaction_db.id, payload) - self._trigger_dispatcher.dispatch(self._notify_trigger, payload=payload, - trace_context=trace_context) + + with CounterWithTimer(key='notifier.notify_trigger.dispatch'): + self._trigger_dispatcher.dispatch(self._notify_trigger, payload=payload, + trace_context=trace_context) except: failed_routes.append(route) @@ -254,8 +265,10 @@ def _post_generic_trigger(self, liveaction_db=None, execution_db=None): trace_context = self._get_trace_context(execution_id=execution_id) LOG.debug('POSTing %s for %s. Payload - %s. TraceContext - %s', ACTION_TRIGGER_TYPE['name'], liveaction_db.id, payload, trace_context) - self._trigger_dispatcher.dispatch(self._action_trigger, payload=payload, - trace_context=trace_context) + + with CounterWithTimer(key='notifier.generic_trigger.dispatch'): + self._trigger_dispatcher.dispatch(self._action_trigger, payload=payload, + trace_context=trace_context) def _get_runner_ref(self, action_ref): """ From bb5120926b89f933a556bf4aa39f3e14973e0096 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 5 Feb 2019 20:03:30 +0100 Subject: [PATCH 045/105] Don't submit metric if we don't post generic notify trigger so we don't have useless / confusing data (aka those functions will complete immediately). 
--- st2actions/st2actions/notifier/notifier.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/st2actions/st2actions/notifier/notifier.py b/st2actions/st2actions/notifier/notifier.py index 1743be3178..c626a67d13 100644 --- a/st2actions/st2actions/notifier/notifier.py +++ b/st2actions/st2actions/notifier/notifier.py @@ -92,13 +92,15 @@ def process(self, execution_db): with CounterWithTimer(key='notifier.apply_post_run_policies'): policy_service.apply_post_run_policies(liveaction_db) - if liveaction_db.notify is not None: + if liveaction_db.notify: with CounterWithTimer(key='notifier.notify_trigger.post'): self._post_notify_triggers(liveaction_db=liveaction_db, execution_db=execution_db) - with CounterWithTimer(key='notifier.generic_trigger.post'): - self._post_generic_trigger(liveaction_db=liveaction_db, execution_db=execution_db) + if cfg.CONF.action_sensor.enable: + with CounterWithTimer(key='notifier.generic_trigger.post'): + self._post_generic_trigger(liveaction_db=liveaction_db, + execution_db=execution_db) def _get_execution_for_liveaction(self, liveaction): execution = ActionExecution.get(liveaction__id=str(liveaction.id)) From 313591cbdeb3a6d2f627ba1a7eb9ff390bca488f Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Wed, 6 Feb 2019 10:01:25 +0100 Subject: [PATCH 046/105] In those cases we are only interested in timing information. 
--- st2actions/st2actions/notifier/notifier.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/st2actions/st2actions/notifier/notifier.py b/st2actions/st2actions/notifier/notifier.py index c626a67d13..16128622ee 100644 --- a/st2actions/st2actions/notifier/notifier.py +++ b/st2actions/st2actions/notifier/notifier.py @@ -47,6 +47,7 @@ from st2common.services.keyvalues import KeyValueLookup from st2common.transport.queues import NOTIFIER_ACTIONUPDATE_WORK_QUEUE from st2common.metrics.base import CounterWithTimer +from st2common.metrics.base import Timer __all__ = [ 'Notifier', @@ -136,7 +137,7 @@ def _post_notify_subsection_triggers(self, liveaction_db=None, execution_db=None notify_subsection=None, default_message_suffix=None): routes = (getattr(notify_subsection, 'routes') or - getattr(notify_subsection, 'channels', None)) + getattr(notify_subsection, 'channels', [])) or [] execution_id = str(execution_db.id) @@ -151,14 +152,14 @@ def _post_notify_subsection_triggers(self, liveaction_db=None, execution_db=None ) try: - with CounterWithTimer(key='notifier.transform_message'): + with Timer(key='notifier.transform_message'): message = self._transform_message(message=message, context=jinja_context) except: LOG.exception('Failed (Jinja) transforming `message`.') try: - with CounterWithTimer(key='notifier.transform_data'): + with Timer(key='notifier.transform_data'): data = self._transform_data(data=data, context=jinja_context) except: LOG.exception('Failed (Jinja) transforming `data`.') From 056f9be008a159ba3b9be70f27c667ce93b43196 Mon Sep 17 00:00:00 2001 From: W Chan Date: Wed, 6 Feb 2019 00:54:13 +0000 Subject: [PATCH 047/105] Move lock for concurrency policies into scheduler Move the lock for coordinating concurrency policies into the scheduler. With the current approach, when there are more than one schedulers, there is a race in scheduling that results in failure to enforce the concurrency accurately. 
--- st2actions/st2actions/policies/concurrency.py | 44 +------- .../policies/concurrency_by_attr.py | 43 +------ st2actions/st2actions/scheduler/handler.py | 82 +++++++++++--- .../tests/unit/policies/test_concurrency.py | 40 +++++-- .../unit/policies/test_concurrency_by_attr.py | 54 ++++----- st2common/st2common/constants/policy.py | 25 +++++ st2common/st2common/policies/base.py | 9 -- st2common/st2common/services/policies.py | 14 +++ st2common/tests/unit/services/test_policy.py | 106 ++++++++++++++++++ 9 files changed, 275 insertions(+), 142 deletions(-) create mode 100644 st2common/st2common/constants/policy.py create mode 100644 st2common/tests/unit/services/test_policy.py diff --git a/st2actions/st2actions/policies/concurrency.py b/st2actions/st2actions/policies/concurrency.py index 43dbf287b7..a55c5cd0be 100644 --- a/st2actions/st2actions/policies/concurrency.py +++ b/st2actions/st2actions/policies/concurrency.py @@ -20,7 +20,6 @@ from st2common.persistence import action as action_access from st2common.policies.concurrency import BaseConcurrencyApplicator from st2common.services import action as action_service -from st2common.services import coordination __all__ = [ @@ -89,47 +88,6 @@ def apply_before(self, target): '"%s" cannot be applied. %s', self._policy_ref, target) return target - # Warn users that the coordination service is not configured. - if not coordination.configured(): - LOG.warn('Coordination service is not configured. Policy enforcement is best effort.') - - # Acquire a distributed lock before querying the database to make sure that only one - # scheduler is scheduling execution for this action. Even if the coordination service - # is not configured, the fake driver using zake or the file driver can still acquire - # a lock for the local process or server respectively. 
- lock_uid = self._get_lock_uid(target) - LOG.debug('%s is attempting to acquire lock "%s".', self.__class__.__name__, lock_uid) - with self.coordinator.get_lock(lock_uid): - target = self._apply_before(target) - - return target - - def _apply_after(self, target): - # Schedule the oldest delayed executions. - requests = action_access.LiveAction.query( - action=target.action, - status=action_constants.LIVEACTION_STATUS_DELAYED, - order_by=['start_timestamp'], - limit=1 - ) - - if requests: - action_service.update_status( - requests[0], - action_constants.LIVEACTION_STATUS_REQUESTED, - publish=True - ) - - def apply_after(self, target): - target = super(ConcurrencyApplicator, self).apply_after(target=target) - - # Acquire a distributed lock before querying the database to make sure that only one - # scheduler is scheduling execution for this action. Even if the coordination service - # is not configured, the fake driver using zake or the file driver can still acquire - # a lock for the local process or server respectively. - lock_uid = self._get_lock_uid(target) - LOG.debug('%s is attempting to acquire lock "%s".', self.__class__.__name__, lock_uid) - with self.coordinator.get_lock(lock_uid): - self._apply_after(target) + target = self._apply_before(target) return target diff --git a/st2actions/st2actions/policies/concurrency_by_attr.py b/st2actions/st2actions/policies/concurrency_by_attr.py index f9d4061147..b4cb160333 100644 --- a/st2actions/st2actions/policies/concurrency_by_attr.py +++ b/st2actions/st2actions/policies/concurrency_by_attr.py @@ -115,47 +115,6 @@ def apply_before(self, target): if not coordination.configured(): LOG.warn('Coordination service is not configured. Policy enforcement is best effort.') - # Acquire a distributed lock before querying the database to make sure that only one - # scheduler is scheduling execution for this action. 
Even if the coordination service - # is not configured, the fake driver using zake or the file driver can still acquire - # a lock for the local process or server respectively. - lock_uid = self._get_lock_uid(target) - LOG.debug('%s is attempting to acquire lock "%s".', self.__class__.__name__, lock_uid) - with self.coordinator.get_lock(lock_uid): - target = self._apply_before(target) - - return target - - def _apply_after(self, target): - # Schedule the oldest delayed executions. - filters = self._get_filters(target) - filters['status'] = action_constants.LIVEACTION_STATUS_DELAYED - - requests = action_access.LiveAction.query( - order_by=['start_timestamp'], - limit=1, - **filters - ) - - if requests: - action_service.update_status( - requests[0], - action_constants.LIVEACTION_STATUS_REQUESTED, - publish=True - ) - - def apply_after(self, target): - # Warn users that the coordination service is not configured. - if not coordination.configured(): - LOG.warn('Coordination service is not configured. Policy enforcement is best effort.') - - # Acquire a distributed lock before querying the database to make sure that only one - # scheduler is scheduling execution for this action. Even if the coordination service - # is not configured, the fake driver using zake or the file driver can still acquire - # a lock for the local process or server respectively. 
- lock_uid = self._get_lock_uid(target) - LOG.debug('%s is attempting to acquire lock "%s".', self.__class__.__name__, lock_uid) - with self.coordinator.get_lock(lock_uid): - self._apply_after(target) + target = self._apply_before(target) return target diff --git a/st2actions/st2actions/scheduler/handler.py b/st2actions/st2actions/scheduler/handler.py index fcd50a9292..e93adf6ec9 100644 --- a/st2actions/st2actions/scheduler/handler.py +++ b/st2actions/st2actions/scheduler/handler.py @@ -21,9 +21,11 @@ from st2common import log as logging from st2common.util import date from st2common.constants import action as action_constants +from st2common.constants import policy as policy_constants from st2common.exceptions.db import StackStormDBObjectNotFoundError from st2common.models.db.liveaction import LiveActionDB from st2common.services import action as action_service +from st2common.services import coordination as coordination_service from st2common.services import policies as policy_service from st2common.persistence.liveaction import LiveAction from st2common.persistence.execution_queue import ActionExecutionSchedulingQueue @@ -57,6 +59,7 @@ def __init__(self): self.message_type = LiveActionDB self._shutdown = False self._pool = eventlet.GreenPool(size=cfg.CONF.scheduler.pool_size) + self._coordinator = coordination_service.get_coordinator() def run(self): LOG.debug('Entering scheduler loop') @@ -161,16 +164,41 @@ def _handle_execution(self, execution_queue_item_db): ActionExecutionSchedulingQueue.delete(execution_queue_item_db) raise - liveaction_db = self._apply_pre_run(liveaction_db, execution_queue_item_db) + # Identify if the action has policies that require locking. + action_has_policies_require_lock = policy_service.has_policies( + liveaction_db, + policy_types=policy_constants.POLICY_TYPES_REQUIRING_LOCK + ) - if not liveaction_db: - return + # Acquire a distributed lock if the referenced action has specific policies attached. 
+ if action_has_policies_require_lock: + # Warn users that the coordination service is not configured. + if not coordination_service.configured(): + LOG.warn( + 'Coordination backend is not configured. ' + 'Policy enforcement is best effort.' + ) - if self._is_execution_queue_item_runnable(liveaction_db, execution_queue_item_db): - self._update_to_scheduled(liveaction_db, execution_queue_item_db) + # Acquire a distributed lock before querying the database to make sure that only one + # scheduler is scheduling execution for this action. Even if the coordination service + # is not configured, the fake driver using zake or the file driver can still acquire + # a lock for the local process or server respectively. + lock_uid = liveaction_db.action + LOG.debug('%s is attempting to acquire lock "%s".', self.__class__.__name__, lock_uid) + lock = self._coordinator.get_lock(lock_uid) - @staticmethod - def _apply_pre_run(liveaction_db, execution_queue_item_db): + try: + if lock.acquire(blocking=False): + self._regulate_and_schedule(liveaction_db, execution_queue_item_db) + else: + self._delay(liveaction_db, execution_queue_item_db) + finally: + lock.release() + else: + # Otherwise if there is no policy, then schedule away. + self._schedule(liveaction_db, execution_queue_item_db) + + def _regulate_and_schedule(self, liveaction_db, execution_queue_item_db): # Apply policies defined for the action. 
liveaction_db = policy_service.apply_pre_run_policies(liveaction_db) @@ -190,10 +218,13 @@ def _apply_pre_run(liveaction_db, execution_queue_item_db): liveaction_db = action_service.update_status( liveaction_db, action_constants.LIVEACTION_STATUS_DELAYED, publish=False ) + + execution_queue_item_db.handling = False execution_queue_item_db.scheduled_start_timestamp = date.append_milliseconds_to_time( date.get_datetime_utc_now(), POLICY_DELAYED_EXECUTION_RESCHEDULE_TIME_MS ) + try: ActionExecutionSchedulingQueue.add_or_update(execution_queue_item_db, publish=False) except db_exc.StackStormDBObjectWriteConflictError: @@ -202,16 +233,40 @@ def _apply_pre_run(liveaction_db, execution_queue_item_db): execution_queue_item_db.id ) - return None + return if (liveaction_db.status in action_constants.LIVEACTION_COMPLETED_STATES or liveaction_db.status in action_constants.LIVEACTION_CANCEL_STATES): ActionExecutionSchedulingQueue.delete(execution_queue_item_db) - return None + return + + self._schedule(liveaction_db, execution_queue_item_db) - return liveaction_db + def _delay(self, liveaction_db, execution_queue_item_db): + liveaction_db = action_service.update_status( + liveaction_db, action_constants.LIVEACTION_STATUS_DELAYED, publish=False + ) + + execution_queue_item_db.scheduled_start_timestamp = date.append_milliseconds_to_time( + date.get_datetime_utc_now(), + POLICY_DELAYED_EXECUTION_RESCHEDULE_TIME_MS + ) - def _is_execution_queue_item_runnable(self, liveaction_db, execution_queue_item_db): + try: + execution_queue_item_db.handling = False + ActionExecutionSchedulingQueue.add_or_update(execution_queue_item_db, publish=False) + except db_exc.StackStormDBObjectWriteConflictError: + LOG.warning( + 'Execution queue item update conflict during scheduling: %s', + execution_queue_item_db.id + ) + + def _schedule(self, liveaction_db, execution_queue_item_db): + if self._is_execution_queue_item_runnable(liveaction_db, execution_queue_item_db): + 
self._update_to_scheduled(liveaction_db, execution_queue_item_db) + + @staticmethod + def _is_execution_queue_item_runnable(liveaction_db, execution_queue_item_db): """ Return True if a particular execution request is runnable. @@ -228,13 +283,14 @@ def _is_execution_queue_item_runnable(self, liveaction_db, execution_queue_item_ return True LOG.info( - '%s is ignoring %s (id=%s) with "%s" status after policies are applied.', - self.__class__.__name__, + 'Scheduler is ignoring %s (id=%s) with "%s" status after policies are applied.', type(execution_queue_item_db), execution_queue_item_db.id, liveaction_db.status ) + ActionExecutionSchedulingQueue.delete(execution_queue_item_db) + return False @staticmethod diff --git a/st2actions/tests/unit/policies/test_concurrency.py b/st2actions/tests/unit/policies/test_concurrency.py index d8285ba69b..301fae2b7d 100644 --- a/st2actions/tests/unit/policies/test_concurrency.py +++ b/st2actions/tests/unit/policies/test_concurrency.py @@ -149,8 +149,9 @@ def test_over_threshold_delay_executions(self): # Execution is expected to be delayed since concurrency threshold is reached. liveaction = LiveActionDB(action='wolfpack.action-1', parameters={'actionstr': 'foo-last'}) liveaction, _ = action_service.request(liveaction) - expected_num_exec += 1 # This request is expected to be executed. + expected_num_pubs += 1 # Tally requested state. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) # Run the scheduler to schedule action executions. self._process_scheduling_queue() @@ -158,6 +159,11 @@ def test_over_threshold_delay_executions(self): # Since states are being processed async, wait for the liveaction to go into delayed state. liveaction = self._wait_on_status(liveaction, action_constants.LIVEACTION_STATUS_DELAYED) + expected_num_exec += 0 # This request will not be scheduled for execution. + expected_num_pubs += 0 # The delayed status change should not be published. 
+ self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) + self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) + # Mark one of the scheduled/running execution as completed. action_service.update_status( scheduled[0], @@ -165,14 +171,16 @@ def test_over_threshold_delay_executions(self): publish=True ) - expected_num_pubs += 1 # Tally requested state. - - # Once capacity freed up, the delayed execution is published as requested again. - expected_num_pubs += 3 # Tally requested, scheduled, and running state. + expected_num_pubs += 1 # Tally succeeded state. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) # Run the scheduler to schedule action executions. self._process_scheduling_queue() + # Once capacity freed up, the delayed execution is published as scheduled. + expected_num_exec += 1 # This request is expected to be executed. + expected_num_pubs += 2 # Tally scheduled and running state. + # Since states are being processed async, wait for the liveaction to be scheduled. liveaction = self._wait_on_statuses(liveaction, SCHEDULED_STATES) self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) @@ -212,8 +220,9 @@ def test_over_threshold_cancel_executions(self): # Execution is expected to be canceled since concurrency threshold is reached. liveaction = LiveActionDB(action='wolfpack.action-2', parameters={'actionstr': 'foo'}) liveaction, _ = action_service.request(liveaction) - expected_num_exec += 0 # This request will not be scheduled for execution. + expected_num_pubs += 1 # Tally requested state. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) # Run the scheduler to schedule action executions. 
self._process_scheduling_queue() @@ -222,6 +231,9 @@ def test_over_threshold_cancel_executions(self): calls = [call(liveaction, action_constants.LIVEACTION_STATUS_CANCELING)] LiveActionPublisher.publish_state.assert_has_calls(calls) expected_num_pubs += 2 # Tally canceling and canceled state changes. + expected_num_exec += 0 # This request will not be scheduled for execution. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) + self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) # Assert the action is canceled. liveaction = LiveAction.get_by_id(str(liveaction.id)) @@ -262,8 +274,9 @@ def test_on_cancellation(self): # Execution is expected to be delayed since concurrency threshold is reached. liveaction = LiveActionDB(action='wolfpack.action-1', parameters={'actionstr': 'foo'}) liveaction, _ = action_service.request(liveaction) - expected_num_exec += 1 # This request will be scheduled for execution. + expected_num_pubs += 1 # Tally requested state. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) # Run the scheduler to schedule action executions. self._process_scheduling_queue() @@ -271,16 +284,23 @@ def test_on_cancellation(self): # Since states are being processed async, wait for the liveaction to go into delayed state. liveaction = self._wait_on_status(liveaction, action_constants.LIVEACTION_STATUS_DELAYED) + expected_num_exec += 0 # This request will not be scheduled for execution. + expected_num_pubs += 0 # The delayed status change should not be published. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) + self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) + # Cancel execution. action_service.request_cancellation(scheduled[0], 'stanley') expected_num_pubs += 2 # Tally the canceling and canceled states. - - # Once capacity freed up, the delayed execution is published as requested again. 
- expected_num_pubs += 3 # Tally requested, scheduled, and running state. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) # Run the scheduler to schedule action executions. self._process_scheduling_queue() + # Once capacity freed up, the delayed execution is published as requested again. + expected_num_exec += 1 # This request is expected to be executed. + expected_num_pubs += 2 # Tally scheduled and running state. + # Execution is expected to be rescheduled. liveaction = LiveAction.get_by_id(str(liveaction.id)) self.assertIn(liveaction.status, SCHEDULED_STATES) diff --git a/st2actions/tests/unit/policies/test_concurrency_by_attr.py b/st2actions/tests/unit/policies/test_concurrency_by_attr.py index b38f4e7412..0056b33a4f 100644 --- a/st2actions/tests/unit/policies/test_concurrency_by_attr.py +++ b/st2actions/tests/unit/policies/test_concurrency_by_attr.py @@ -147,7 +147,9 @@ def test_over_threshold_delay_executions(self): # Execution is expected to be delayed since concurrency threshold is reached. liveaction = LiveActionDB(action='wolfpack.action-1', parameters={'actionstr': 'foo'}) liveaction, _ = action_service.request(liveaction) + expected_num_pubs += 1 # Tally requested state. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) # Run the scheduler to schedule action executions. self._process_scheduling_queue() @@ -156,9 +158,8 @@ def test_over_threshold_delay_executions(self): # liveaction to go into delayed state. liveaction = self._wait_on_status(liveaction, action_constants.LIVEACTION_STATUS_DELAYED) - # Assert the action is delayed. - delayed = liveaction - self.assertEqual(delayed.status, action_constants.LIVEACTION_STATUS_DELAYED) + expected_num_exec += 0 # This request will not be scheduled for execution. + expected_num_pubs += 0 # The delayed status change should not be published. 
self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) @@ -166,8 +167,6 @@ def test_over_threshold_delay_executions(self): # The execution with actionstr "fu" is over the threshold but actionstr "bar" is not. liveaction = LiveActionDB(action='wolfpack.action-1', parameters={'actionstr': 'bar'}) liveaction, _ = action_service.request(liveaction) - expected_num_exec += 1 # This request is expected to be executed. - expected_num_pubs += 3 # Tally requested, scheduled, and running states. # Run the scheduler to schedule action executions. self._process_scheduling_queue() @@ -175,6 +174,8 @@ def test_over_threshold_delay_executions(self): # Since states are being processed asynchronously, wait for the # liveaction to go into scheduled state. liveaction = self._wait_on_statuses(liveaction, SCHEDULED_STATES) + expected_num_exec += 1 # This request is expected to be executed. + expected_num_pubs += 3 # Tally requested, scheduled, and running state. self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) @@ -186,17 +187,15 @@ def test_over_threshold_delay_executions(self): ) expected_num_pubs += 1 # Tally succeeded state. - - # Once capacity freed up, the delayed execution is published as requested again. - expected_num_exec += 1 # The delayed request is expected to be executed. - expected_num_pubs += 3 # Tally requested, scheduled, and running state. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) # Run the scheduler to schedule action executions. self._process_scheduling_queue() - # Since states are being processed asynchronously, wait for the - # liveaction to go into scheduled state. + # Once capacity freed up, the delayed execution is published as requested again. 
liveaction = self._wait_on_statuses(liveaction, SCHEDULED_STATES) + expected_num_exec += 1 # The delayed request is expected to be executed. + expected_num_pubs += 2 # Tally scheduled and running state. self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) @@ -235,8 +234,9 @@ def test_over_threshold_cancel_executions(self): # Execution is expected to be delayed since concurrency threshold is reached. liveaction = LiveActionDB(action='wolfpack.action-2', parameters={'actionstr': 'foo'}) liveaction, _ = action_service.request(liveaction) - expected_num_exec += 0 # This request will not be scheduled for execution. + expected_num_pubs += 1 # Tally requested state. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) # Run the scheduler to schedule action executions. self._process_scheduling_queue() @@ -245,12 +245,13 @@ def test_over_threshold_cancel_executions(self): calls = [call(liveaction, action_constants.LIVEACTION_STATUS_CANCELING)] LiveActionPublisher.publish_state.assert_has_calls(calls) expected_num_pubs += 2 # Tally canceling and canceled state changes. + expected_num_exec += 0 # This request will not be scheduled for execution. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) + self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) # Assert the action is canceled. canceled = LiveAction.get_by_id(str(liveaction.id)) self.assertEqual(canceled.status, action_constants.LIVEACTION_STATUS_CANCELED) - self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) - self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) @mock.patch.object( runner.MockActionRunner, 'run', @@ -285,7 +286,9 @@ def test_on_cancellation(self): # Execution is expected to be delayed since concurrency threshold is reached. 
liveaction = LiveActionDB(action='wolfpack.action-1', parameters={'actionstr': 'foo'}) liveaction, _ = action_service.request(liveaction) + expected_num_pubs += 1 # Tally requested state. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) # Run the scheduler to schedule action executions. self._process_scheduling_queue() @@ -293,10 +296,10 @@ def test_on_cancellation(self): # Since states are being processed asynchronously, wait for the # liveaction to go into delayed state. liveaction = self._wait_on_status(liveaction, action_constants.LIVEACTION_STATUS_DELAYED) - - # Assert the action is delayed. delayed = liveaction - self.assertEqual(delayed.status, action_constants.LIVEACTION_STATUS_DELAYED) + + expected_num_exec += 0 # This request will not be scheduled for execution. + expected_num_pubs += 0 # The delayed status change should not be published. self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) @@ -304,8 +307,6 @@ def test_on_cancellation(self): # The execution with actionstr "fu" is over the threshold but actionstr "bar" is not. liveaction = LiveActionDB(action='wolfpack.action-1', parameters={'actionstr': 'bar'}) liveaction, _ = action_service.request(liveaction) - expected_num_exec += 1 # This request is expected to be executed. - expected_num_pubs += 3 # Tally requested, scheduled, and running states. # Run the scheduler to schedule action executions. self._process_scheduling_queue() @@ -313,23 +314,26 @@ def test_on_cancellation(self): # Since states are being processed asynchronously, wait for the # liveaction to go into scheduled state. liveaction = self._wait_on_statuses(liveaction, SCHEDULED_STATES) + expected_num_exec += 1 # This request is expected to be executed. + expected_num_pubs += 3 # Tally requested, scheduled, and running states. 
self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) # Cancel execution. action_service.request_cancellation(scheduled[0], 'stanley') expected_num_pubs += 2 # Tally the canceling and canceled states. - - # Once capacity freed up, the delayed execution is published as requested again. - expected_num_exec += 1 # The delayed request is expected to be executed. - expected_num_pubs += 3 # Tally requested, scheduled, and running state. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) # Run the scheduler to schedule action executions. self._process_scheduling_queue() + # Once capacity freed up, the delayed execution is published as requested again. + expected_num_exec += 1 # The delayed request is expected to be executed. + expected_num_pubs += 2 # Tally scheduled and running state. + self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) + self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) + # Since states are being processed asynchronously, wait for the # liveaction to go into scheduled state. liveaction = LiveAction.get_by_id(str(delayed.id)) liveaction = self._wait_on_statuses(liveaction, SCHEDULED_STATES) - self.assertEqual(expected_num_pubs, LiveActionPublisher.publish_state.call_count) - self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) diff --git a/st2common/st2common/constants/policy.py b/st2common/st2common/constants/policy.py new file mode 100644 index 0000000000..b1303ac2af --- /dev/null +++ b/st2common/st2common/constants/policy.py @@ -0,0 +1,25 @@ +# Licensed to the StackStorm, Inc ('StackStorm') under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +__all__ = [ + 'POLICY_TYPES_REQUIRING_LOCK' +] + +# Concurrency policies require scheduler to acquire a distributed lock to prevent race +# in scheduling when there are multiple scheduler instances. +POLICY_TYPES_REQUIRING_LOCK = [ + 'action.concurrency', + 'action.concurrency.attr' +] diff --git a/st2common/st2common/policies/base.py b/st2common/st2common/policies/base.py index 092d847b31..5528397fbe 100644 --- a/st2common/st2common/policies/base.py +++ b/st2common/st2common/policies/base.py @@ -21,7 +21,6 @@ from st2common import log as logging from st2common.persistence import policy as policy_access -from st2common.services import coordination LOG = logging.getLogger(__name__) @@ -48,10 +47,6 @@ def apply_before(self, target): :rtype: ``object`` """ - # Warn users that the coordination service is not configured - if not coordination.configured(): - LOG.warn('Coordination service is not configured. Policy enforcement is best effort.') - return target def apply_after(self, target): @@ -63,10 +58,6 @@ def apply_after(self, target): :rtype: ``object`` """ - # Warn users that the coordination service is not configured - if not coordination.configured(): - LOG.warn('Coordination service is not configured. 
Policy enforcement is best effort.') - return target def _get_lock_name(self, values): diff --git a/st2common/st2common/services/policies.py b/st2common/st2common/services/policies.py index cd8ce74280..ccb3274339 100644 --- a/st2common/st2common/services/policies.py +++ b/st2common/st2common/services/policies.py @@ -24,6 +24,20 @@ LOG = logging.getLogger(__name__) +def has_policies(lv_ac_db, policy_types=None): + query_params = { + 'resource_ref': lv_ac_db.action, + 'enabled': True + } + + if policy_types: + query_params['policy_type__in'] = policy_types + + policy_dbs = pc_db_access.Policy.query(**query_params) + + return len(policy_dbs) > 0 + + def apply_pre_run_policies(lv_ac_db): LOG.debug('Applying pre-run policies for liveaction "%s".' % str(lv_ac_db.id)) diff --git a/st2common/tests/unit/services/test_policy.py b/st2common/tests/unit/services/test_policy.py new file mode 100644 index 0000000000..274a4ff6e1 --- /dev/null +++ b/st2common/tests/unit/services/test_policy.py @@ -0,0 +1,106 @@ +# Licensed to the StackStorm, Inc ('StackStorm') under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +import st2tests.config as tests_config +tests_config.parse_args() + +import st2common + +from st2common.bootstrap import policiesregistrar as policies_registrar +from st2common.bootstrap import runnersregistrar as runners_registrar +from st2common.constants import action as action_constants +from st2common.constants import policy as policy_constants +from st2common.models.db import action as action_db_models +from st2common.services import action as action_service +from st2common.services import policies as policy_service + +import st2tests +from st2tests import fixturesloader as fixtures + + +PACK = 'generic' + +TEST_FIXTURES = { + 'actions': [ + 'action1.yaml', # wolfpack.action-1 + 'action2.yaml', # wolfpack.action-2 + 'local.yaml' # core.local + ], + 'policies': [ + 'policy_2.yaml', # mock policy on wolfpack.action-1 + 'policy_5.yaml' # concurrency policy on wolfpack.action-2 + ] +} + + +class PolicyServiceTestCase(st2tests.DbTestCase): + + @classmethod + def setUpClass(cls): + super(PolicyServiceTestCase, cls).setUpClass() + + # Register runners + runners_registrar.register_runners() + + # Register common policy types + policies_registrar.register_policy_types(st2common) + + loader = fixtures.FixturesLoader() + loader.save_fixtures_to_db(fixtures_pack=PACK, + fixtures_dict=TEST_FIXTURES) + + def setUp(self): + super(PolicyServiceTestCase, self).setUp() + + params = {'action': 'wolfpack.action-1', 'parameters': {'actionstr': 'foo-last'}} + self.lv_ac_db_1 = action_db_models.LiveActionDB(**params) + self.lv_ac_db_1, _ = action_service.request(self.lv_ac_db_1) + + params = {'action': 'wolfpack.action-2', 'parameters': {'actionstr': 'foo-last'}} + self.lv_ac_db_2 = action_db_models.LiveActionDB(**params) + self.lv_ac_db_2, _ = action_service.request(self.lv_ac_db_2) + + params = {'action': 'core.local', 'parameters': {'cmd': 'date'}} + self.lv_ac_db_3 = action_db_models.LiveActionDB(**params) + self.lv_ac_db_3, _ = 
action_service.request(self.lv_ac_db_3) + + def tearDown(self): + action_service.update_status(self.lv_ac_db_1, action_constants.LIVEACTION_STATUS_CANCELED) + action_service.update_status(self.lv_ac_db_2, action_constants.LIVEACTION_STATUS_CANCELED) + action_service.update_status(self.lv_ac_db_3, action_constants.LIVEACTION_STATUS_CANCELED) + + def test_action_has_policies(self): + self.assertTrue(policy_service.has_policies(self.lv_ac_db_1)) + + def test_action_does_not_have_policies(self): + self.assertFalse(policy_service.has_policies(self.lv_ac_db_3)) + + def test_action_has_specific_policies(self): + self.assertTrue( + policy_service.has_policies( + self.lv_ac_db_2, + policy_types=policy_constants.POLICY_TYPES_REQUIRING_LOCK + ) + ) + + def test_action_does_not_have_specific_policies(self): + self.assertFalse( + policy_service.has_policies( + self.lv_ac_db_1, + policy_types=policy_constants.POLICY_TYPES_REQUIRING_LOCK + ) + ) From d607c4fa3b4219943d224261e2dfb8672bf1d820 Mon Sep 17 00:00:00 2001 From: W Chan Date: Wed, 6 Feb 2019 19:35:01 +0000 Subject: [PATCH 048/105] Use count instead of len when querying if action has policies Use the count method instead of len so the querying is done server side at MongoDB. 
--- st2common/st2common/services/policies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/st2common/st2common/services/policies.py b/st2common/st2common/services/policies.py index ccb3274339..94da12444e 100644 --- a/st2common/st2common/services/policies.py +++ b/st2common/st2common/services/policies.py @@ -35,7 +35,7 @@ def has_policies(lv_ac_db, policy_types=None): policy_dbs = pc_db_access.Policy.query(**query_params) - return len(policy_dbs) > 0 + return policy_dbs.count() > 0 def apply_pre_run_policies(lv_ac_db): From 0fdef30d515b8e84ee15dcf5dbbf9d9e06f8e7bf Mon Sep 17 00:00:00 2001 From: W Chan Date: Wed, 6 Feb 2019 23:20:30 +0000 Subject: [PATCH 049/105] Remove commented out code from coordination service Clean up and remove commented out code from the coordination service. --- st2common/st2common/services/coordination.py | 43 -------------------- 1 file changed, 43 deletions(-) diff --git a/st2common/st2common/services/coordination.py b/st2common/st2common/services/coordination.py index 108dc62ca1..e7dcd41937 100644 --- a/st2common/st2common/services/coordination.py +++ b/st2common/st2common/services/coordination.py @@ -180,46 +180,3 @@ def get_coordinator(): COORDINATOR = coordinator_setup() return COORDINATOR - - -# class LockAcquireError(Exception): -# pass - - -# class lock(object): -# def __init__(self, name, timeout=5000): -# self._name = name -# self._lock = None -# self._timeout = timeout - -# def __call__(self, func): -# @wraps(func) -# def with_lock(*args, **kwds): -# with self: -# return func(*args, **kwds) -# return with_lock - -# def _setup(self): -# if COORDINATOR is None: -# get_coordinator() - -# if not self._lock: -# self._lock = COORDINATOR.get_lock(self._name) - -# if self._timeout <= 0: -# LOG.warning("Failed to secure lock for %s.", self._name) -# raise LockAcquireError("Could not acquire lock for %s" % self._name) - -# def __enter__(self): -# self._setup() - -# LOG.debug("Attempting to secure lock for: %s", 
self._name)
-# if not self._lock.acquire():
-# LOG.info("Could not secure lock for %s. Retrying.", self._name)
-# self._timeout -= 1
-# eventlet.sleep(.25)
-# self.__enter__()
-
-# def __exit__(self, *_args, **_kwargs):
-# LOG.debug("Releasing lock for: %s", self._name)
-# self._lock.release()

From 9d490ecaa5e4ae5d9e772769743b9c48679d415c Mon Sep 17 00:00:00 2001
From: Tomaz Muraus
Date: Wed, 6 Feb 2019 10:51:26 +0100
Subject: [PATCH 050/105] Update service_setup service entry / bootstrap code
 so we exclude log messages with log level "AUDIT" if log level is set to INFO
 or higher.

This way we avoid issues with duplicate AUDIT messages in production
deployments.

In production deployments default log level is set to INFO which means that
all service log files will also contain AUDIT log messages because AUDIT level
is the highest.

This is not desired, because we already log AUDIT log messages in a dedicated
AUDIT log file.

NOTE: Audit messages will still go in the service log file if log level is set
to DEBUG (that's desired during debugging).
---
 st2common/st2common/log.py | 3 +++
 st2common/st2common/service_setup.py | 16 ++++++++++++++++
 2 files changed, 19 insertions(+)

diff --git a/st2common/st2common/log.py b/st2common/st2common/log.py
index 1b9186fac3..46efd03bb9 100644
--- a/st2common/st2common/log.py
+++ b/st2common/st2common/log.py
@@ -46,6 +46,9 @@
 'ignore_statsd_log_messages'
 ]

+# NOTE: We set AUDIT to the highest log level which means AUDIT log messages will always be
+# included (e.g. also if log level is set to INFO). To avoid that, we need to explicitly filter
+# out AUDIT log level in service setup code.
logging.AUDIT = logging.CRITICAL + 10 logging.addLevelName(logging.AUDIT, 'AUDIT') diff --git a/st2common/st2common/service_setup.py b/st2common/st2common/service_setup.py index 61fbea6bce..bf82e00192 100644 --- a/st2common/st2common/service_setup.py +++ b/st2common/st2common/service_setup.py @@ -22,6 +22,7 @@ import os import sys import traceback +import logging as stdlib_logging from oslo_config import cfg @@ -35,6 +36,7 @@ from st2common.models.utils.profiling import enable_profiling from st2common import triggers from st2common.rbac.migrations import run_all as run_all_rbac_migrations +from st2common.logging.filters import LogLevelFilter # Note: This is here for backward compatibility. # Function has been moved in a standalone module to avoid expensive in-direct @@ -112,6 +114,20 @@ def setup(service, config, setup_db=True, register_mq_exchanges=True, else: raise e + exclude_log_levels = [stdlib_logging.AUDIT] + handlers = stdlib_logging.getLoggerClass().manager.root.handlers + + for handler in handlers: + # If log level is not set to DEBUG we filter out "AUDIT" log messages. This way we avoid + # duplicate "AUDIT" messages in production deployments where default service log level is + # set to "INFO" and we already log messages with level AUDIT to a special dedicated log + # file. 
+ ignore_audit_log_messages = (handler.level >= stdlib_logging.INFO and + handler.level < stdlib_logging.AUDIT) + if not is_debug_enabled and ignore_audit_log_messages: + LOG.debug('Excluding log messages with level "AUDIT" for handler "%s"' % (handler)) + handler.addFilter(LogLevelFilter(log_levels=exclude_log_levels)) + if not is_debug_enabled: # NOTE: statsd logger logs everything by default under INFO so we ignore those log # messages unless verbose / debug mode is used From cbf9289b2e49acc8fee9f14dd0767f494d64392e Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Wed, 6 Feb 2019 13:30:23 +0100 Subject: [PATCH 051/105] Add an integration test case which verifies that service setup log level based filtering works correctly. --- .../test_service_setup_log_level_filtering.py | 123 ++++++++++++++++++ .../fixtures/conf/logging.api.audit.conf | 44 +++++++ .../fixtures/conf/logging.api.debug.conf | 44 +++++++ .../fixtures/conf/logging.api.info.conf | 44 +++++++ .../conf/st2.tests.api.audit_log_level.conf | 99 ++++++++++++++ .../conf/st2.tests.api.debug_log_level.conf | 99 ++++++++++++++ .../conf/st2.tests.api.info_log_level.conf | 99 ++++++++++++++ .../conf/st2.tests.api.system_debug_true.conf | 99 ++++++++++++++ .../st2tests/fixtures/conf/st2.tests.conf | 100 ++++++++++++++ 9 files changed, 751 insertions(+) create mode 100644 st2common/tests/integration/test_service_setup_log_level_filtering.py create mode 100644 st2tests/st2tests/fixtures/conf/logging.api.audit.conf create mode 100644 st2tests/st2tests/fixtures/conf/logging.api.debug.conf create mode 100644 st2tests/st2tests/fixtures/conf/logging.api.info.conf create mode 100644 st2tests/st2tests/fixtures/conf/st2.tests.api.audit_log_level.conf create mode 100644 st2tests/st2tests/fixtures/conf/st2.tests.api.debug_log_level.conf create mode 100644 st2tests/st2tests/fixtures/conf/st2.tests.api.info_log_level.conf create mode 100644 st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true.conf create mode 
100644 st2tests/st2tests/fixtures/conf/st2.tests.conf diff --git a/st2common/tests/integration/test_service_setup_log_level_filtering.py b/st2common/tests/integration/test_service_setup_log_level_filtering.py new file mode 100644 index 0000000000..077d4a9f38 --- /dev/null +++ b/st2common/tests/integration/test_service_setup_log_level_filtering.py @@ -0,0 +1,123 @@ +# Licensed to the StackStorm, Inc ('StackStorm') under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +import os +import sys +import signal + +import eventlet +from eventlet.green import subprocess + +from st2tests.base import IntegrationTestCase +from st2tests.fixturesloader import get_fixtures_base_path + +__all__ = [ + 'ServiceSetupLogLevelFilteringTestCase' +] + +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) + +FIXTURES_DIR = get_fixtures_base_path() + +ST2_CONFIG_INFO_LL_PATH = os.path.join(FIXTURES_DIR, 'conf/st2.tests.api.info_log_level.conf') +ST2_CONFIG_INFO_LL_PATH = os.path.abspath(ST2_CONFIG_INFO_LL_PATH) + +ST2_CONFIG_DEBUG_LL_PATH = os.path.join(FIXTURES_DIR, 'conf/st2.tests.api.debug_log_level.conf') +ST2_CONFIG_DEBUG_LL_PATH = os.path.abspath(ST2_CONFIG_DEBUG_LL_PATH) + +ST2_CONFIG_AUDIT_LL_PATH = os.path.join(FIXTURES_DIR, 'conf/st2.tests.api.audit_log_level.conf') +ST2_CONFIG_AUDIT_LL_PATH = os.path.abspath(ST2_CONFIG_AUDIT_LL_PATH) + +ST2_CONFIG_SYSTEM_DEBUG_PATH = os.path.join(FIXTURES_DIR, + 'conf/st2.tests.api.system_debug_true.conf') +ST2_CONFIG_SYSTEM_DEBUG_PATH = os.path.abspath(ST2_CONFIG_SYSTEM_DEBUG_PATH) + +PYTHON_BINARY = sys.executable + +ST2API_BINARY = os.path.join(BASE_DIR, '../../../st2api/bin/st2api') +ST2API_BINARY = os.path.abspath(ST2API_BINARY) + +CMD = [PYTHON_BINARY, ST2API_BINARY, '--config-file'] + + +class ServiceSetupLogLevelFilteringTestCase(IntegrationTestCase): + def test_audit_log_level_is_filtered_if_log_level_is_not_debug_or_audit(self): + # 1. 
INFO log level - audit messages should not be included + process = self._start_process(config_path=ST2_CONFIG_INFO_LL_PATH) + self.add_process(process=process) + + # Give it some time to start up + eventlet.sleep(3) + process.send_signal(signal.SIGKILL) + + # First 3 log lines are debug messages about the environment which are always logged + stdout = '\n'.join(process.stdout.read().split('\n')[3:]) + + self.assertTrue('INFO [-]' in stdout) + self.assertTrue('DEBUG [-]' not in stdout) + self.assertTrue('AUDIT [-]' not in stdout) + + # 2. DEBUG log level - audit messages should be included + process = self._start_process(config_path=ST2_CONFIG_DEBUG_LL_PATH) + self.add_process(process=process) + + # Give it some time to start up + eventlet.sleep(3) + process.send_signal(signal.SIGKILL) + + # First 3 log lines are debug messages about the environment which are always logged + stdout = '\n'.join(process.stdout.read().split('\n')[3:]) + + self.assertTrue('INFO [-]' in stdout) + self.assertTrue('DEBUG [-]' in stdout) + self.assertTrue('AUDIT [-]' in stdout) + + # 3. AUDIT log level - audit messages should be included + process = self._start_process(config_path=ST2_CONFIG_AUDIT_LL_PATH) + self.add_process(process=process) + + # Give it some time to start up + eventlet.sleep(3) + process.send_signal(signal.SIGKILL) + + # First 3 log lines are debug messages about the environment which are always logged + stdout = '\n'.join(process.stdout.read().split('\n')[3:]) + + self.assertTrue('INFO [-]' not in stdout) + self.assertTrue('DEBUG [-]' not in stdout) + self.assertTrue('AUDIT [-]' in stdout) + + # 2. 
INFO log level but system.debug set to True + process = self._start_process(config_path=ST2_CONFIG_SYSTEM_DEBUG_PATH) + self.add_process(process=process) + + # Give it some time to start up + eventlet.sleep(3) + process.send_signal(signal.SIGKILL) + + # First 3 log lines are debug messages about the environment which are always logged + stdout = '\n'.join(process.stdout.read().split('\n')[3:]) + + self.assertTrue('INFO [-]' in stdout) + self.assertTrue('DEBUG [-]' in stdout) + self.assertTrue('AUDIT [-]' in stdout) + + def _start_process(self, config_path): + cmd = CMD + [config_path] + process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, + shell=False, preexec_fn=os.setsid) + return process diff --git a/st2tests/st2tests/fixtures/conf/logging.api.audit.conf b/st2tests/st2tests/fixtures/conf/logging.api.audit.conf new file mode 100644 index 0000000000..3b5f3005f8 --- /dev/null +++ b/st2tests/st2tests/fixtures/conf/logging.api.audit.conf @@ -0,0 +1,44 @@ +[loggers] +keys=root + +[handlers] +keys=consoleHandler, fileHandler, auditHandler + +[formatters] +keys=simpleConsoleFormatter, verboseConsoleFormatter, gelfFormatter + +[logger_root] +level=AUDIT +handlers=consoleHandler, fileHandler, auditHandler + +[handler_consoleHandler] +class=StreamHandler +level=AUDIT +formatter=simpleConsoleFormatter +args=(sys.stdout,) + +[handler_fileHandler] +class=st2common.log.FormatNamedFileHandler +level=AUDIT +formatter=verboseConsoleFormatter +args=("/tmp/st2api.{timestamp}.log",) + +[handler_auditHandler] +class=st2common.log.FormatNamedFileHandler +level=AUDIT +formatter=gelfFormatter +args=("/tmp/st2api.audit.{timestamp}.log",) + +[formatter_simpleConsoleFormatter] +class=st2common.logging.formatters.ConsoleLogFormatter +format=%(asctime)s %(levelname)s [-] %(message)s +datefmt= + +[formatter_verboseConsoleFormatter] +class=st2common.logging.formatters.ConsoleLogFormatter +format=%(asctime)s %(thread)s %(levelname)s %(module)s [-] %(message)s 
+datefmt= + +[formatter_gelfFormatter] +class=st2common.logging.formatters.GelfLogFormatter +format=%(message)s diff --git a/st2tests/st2tests/fixtures/conf/logging.api.debug.conf b/st2tests/st2tests/fixtures/conf/logging.api.debug.conf new file mode 100644 index 0000000000..1d7e8ca7ed --- /dev/null +++ b/st2tests/st2tests/fixtures/conf/logging.api.debug.conf @@ -0,0 +1,44 @@ +[loggers] +keys=root + +[handlers] +keys=consoleHandler, fileHandler, auditHandler + +[formatters] +keys=simpleConsoleFormatter, verboseConsoleFormatter, gelfFormatter + +[logger_root] +level=DEBUG +handlers=consoleHandler, fileHandler, auditHandler + +[handler_consoleHandler] +class=StreamHandler +level=DEBUG +formatter=simpleConsoleFormatter +args=(sys.stdout,) + +[handler_fileHandler] +class=st2common.log.FormatNamedFileHandler +level=DEBUG +formatter=verboseConsoleFormatter +args=("/tmp/st2api.{timestamp}.log",) + +[handler_auditHandler] +class=st2common.log.FormatNamedFileHandler +level=AUDIT +formatter=gelfFormatter +args=("/tmp/st2api.audit.{timestamp}.log",) + +[formatter_simpleConsoleFormatter] +class=st2common.logging.formatters.ConsoleLogFormatter +format=%(asctime)s %(levelname)s [-] %(message)s +datefmt= + +[formatter_verboseConsoleFormatter] +class=st2common.logging.formatters.ConsoleLogFormatter +format=%(asctime)s %(thread)s %(levelname)s %(module)s [-] %(message)s +datefmt= + +[formatter_gelfFormatter] +class=st2common.logging.formatters.GelfLogFormatter +format=%(message)s diff --git a/st2tests/st2tests/fixtures/conf/logging.api.info.conf b/st2tests/st2tests/fixtures/conf/logging.api.info.conf new file mode 100644 index 0000000000..f035bcdcb6 --- /dev/null +++ b/st2tests/st2tests/fixtures/conf/logging.api.info.conf @@ -0,0 +1,44 @@ +[loggers] +keys=root + +[handlers] +keys=consoleHandler, fileHandler, auditHandler + +[formatters] +keys=simpleConsoleFormatter, verboseConsoleFormatter, gelfFormatter + +[logger_root] +level=INFO +handlers=consoleHandler, fileHandler, 
auditHandler + +[handler_consoleHandler] +class=StreamHandler +level=INFO +formatter=simpleConsoleFormatter +args=(sys.stdout,) + +[handler_fileHandler] +class=st2common.log.FormatNamedFileHandler +level=INFO +formatter=verboseConsoleFormatter +args=("/tmp/st2api.{timestamp}.log",) + +[handler_auditHandler] +class=st2common.log.FormatNamedFileHandler +level=AUDIT +formatter=gelfFormatter +args=("/tmp/st2api.audit.{timestamp}.log",) + +[formatter_simpleConsoleFormatter] +class=st2common.logging.formatters.ConsoleLogFormatter +format=%(asctime)s %(levelname)s [-] %(message)s +datefmt= + +[formatter_verboseConsoleFormatter] +class=st2common.logging.formatters.ConsoleLogFormatter +format=%(asctime)s %(thread)s %(levelname)s %(module)s [-] %(message)s +datefmt= + +[formatter_gelfFormatter] +class=st2common.logging.formatters.GelfLogFormatter +format=%(message)s diff --git a/st2tests/st2tests/fixtures/conf/st2.tests.api.audit_log_level.conf b/st2tests/st2tests/fixtures/conf/st2.tests.api.audit_log_level.conf new file mode 100644 index 0000000000..dbe36c36db --- /dev/null +++ b/st2tests/st2tests/fixtures/conf/st2.tests.api.audit_log_level.conf @@ -0,0 +1,99 @@ +# Config file used by integration tests + +[database] +db_name = st2-test + +[api] +# Host and port to bind the API server. +host = 127.0.0.1 +port = 9101 +logging = st2tests/st2tests/fixtures/conf/logging.api.audit.conf +mask_secrets = False +# allow_origin is required for handling CORS in st2 web UI. 
+# allow_origin = http://myhost1.example.com:3000,http://myhost2.example.com:3000 + +[sensorcontainer] +logging = st2tests/conf/logging.sensorcontainer.conf +sensor_node_name = sensornode1 +partition_provider = name:default + +[rulesengine] +logging = st2reactor/conf/logging.rulesengine.conf + +[timersengine] +logging = st2reactor/conf/logging.timersengine.conf + +[actionrunner] +logging = st2actions/conf/logging.conf + +[auth] +host = 127.0.0.1 +port = 9100 +use_ssl = False +debug = False +enable = False +logging = st2tests/conf/logging.auth.conf + +mode = standalone +backend = flat_file +backend_kwargs = {"file_path": "st2auth/conf/htpasswd_dev"} + +# Base URL to the API endpoint excluding the version (e.g. http://myhost.net:9101/) +api_url = http://127.0.0.1:9101/ + +[system] +debug = False +# This way integration tests can write to this directory +base_path = /tmp + +[garbagecollector] +logging = st2reactor/conf/logging.garbagecollector.conf + +action_executions_ttl = 20 +action_executions_output_ttl = 10 +trigger_instances_ttl = 20 +purge_inquiries = True + +collection_interval = 1 +sleep_delay = 0.1 + +[content] +system_packs_base_path = +packs_base_paths = st2tests/st2tests/fixtures/packs/ + +[syslog] +host = 127.0.0.1 +port = 514 +facility = local7 +protocol = udp + +[webui] +# webui_base_url = https://mywebhost.domain + +[log] +excludes = requests,paramiko +redirect_stderr = False +mask_secrets = False + +[system_user] +user = stanley +ssh_key_file = /home/vagrant/.ssh/stanley_rsa + +[messaging] +url = amqp://guest:guest@127.0.0.1:5672/ + +[ssh_runner] +remote_dir = /tmp + +[resultstracker] +logging = st2actions/conf/logging.resultstracker.conf +query_interval = 0.1 + +[notifier] +logging = st2actions/conf/logging.notifier.conf + +[exporter] +logging = st2exporter/conf/logging.exporter.conf + +[mistral] +jitter_interval = 0 diff --git a/st2tests/st2tests/fixtures/conf/st2.tests.api.debug_log_level.conf 
b/st2tests/st2tests/fixtures/conf/st2.tests.api.debug_log_level.conf new file mode 100644 index 0000000000..caad395240 --- /dev/null +++ b/st2tests/st2tests/fixtures/conf/st2.tests.api.debug_log_level.conf @@ -0,0 +1,99 @@ +# Config file used by integration tests + +[database] +db_name = st2-test + +[api] +# Host and port to bind the API server. +host = 127.0.0.1 +port = 9101 +logging = st2tests/st2tests/fixtures/conf/logging.api.debug.conf +mask_secrets = False +# allow_origin is required for handling CORS in st2 web UI. +# allow_origin = http://myhost1.example.com:3000,http://myhost2.example.com:3000 + +[sensorcontainer] +logging = st2tests/conf/logging.sensorcontainer.conf +sensor_node_name = sensornode1 +partition_provider = name:default + +[rulesengine] +logging = st2reactor/conf/logging.rulesengine.conf + +[timersengine] +logging = st2reactor/conf/logging.timersengine.conf + +[actionrunner] +logging = st2actions/conf/logging.conf + +[auth] +host = 127.0.0.1 +port = 9100 +use_ssl = False +debug = False +enable = False +logging = st2tests/conf/logging.auth.conf + +mode = standalone +backend = flat_file +backend_kwargs = {"file_path": "st2auth/conf/htpasswd_dev"} + +# Base URL to the API endpoint excluding the version (e.g. 
http://myhost.net:9101/) +api_url = http://127.0.0.1:9101/ + +[system] +debug = False +# This way integration tests can write to this directory +base_path = /tmp + +[garbagecollector] +logging = st2reactor/conf/logging.garbagecollector.conf + +action_executions_ttl = 20 +action_executions_output_ttl = 10 +trigger_instances_ttl = 20 +purge_inquiries = True + +collection_interval = 1 +sleep_delay = 0.1 + +[content] +system_packs_base_path = +packs_base_paths = st2tests/st2tests/fixtures/packs/ + +[syslog] +host = 127.0.0.1 +port = 514 +facility = local7 +protocol = udp + +[webui] +# webui_base_url = https://mywebhost.domain + +[log] +excludes = requests,paramiko +redirect_stderr = False +mask_secrets = False + +[system_user] +user = stanley +ssh_key_file = /home/vagrant/.ssh/stanley_rsa + +[messaging] +url = amqp://guest:guest@127.0.0.1:5672/ + +[ssh_runner] +remote_dir = /tmp + +[resultstracker] +logging = st2actions/conf/logging.resultstracker.conf +query_interval = 0.1 + +[notifier] +logging = st2actions/conf/logging.notifier.conf + +[exporter] +logging = st2exporter/conf/logging.exporter.conf + +[mistral] +jitter_interval = 0 diff --git a/st2tests/st2tests/fixtures/conf/st2.tests.api.info_log_level.conf b/st2tests/st2tests/fixtures/conf/st2.tests.api.info_log_level.conf new file mode 100644 index 0000000000..5cd4e6cd33 --- /dev/null +++ b/st2tests/st2tests/fixtures/conf/st2.tests.api.info_log_level.conf @@ -0,0 +1,99 @@ +# Config file used by integration tests + +[database] +db_name = st2-test + +[api] +# Host and port to bind the API server. +host = 127.0.0.1 +port = 9101 +logging = st2tests/st2tests/fixtures/conf/logging.api.info.conf +mask_secrets = False +# allow_origin is required for handling CORS in st2 web UI. 
+# allow_origin = http://myhost1.example.com:3000,http://myhost2.example.com:3000 + +[sensorcontainer] +logging = st2tests/conf/logging.sensorcontainer.conf +sensor_node_name = sensornode1 +partition_provider = name:default + +[rulesengine] +logging = st2reactor/conf/logging.rulesengine.conf + +[timersengine] +logging = st2reactor/conf/logging.timersengine.conf + +[actionrunner] +logging = st2actions/conf/logging.conf + +[auth] +host = 127.0.0.1 +port = 9100 +use_ssl = False +debug = False +enable = False +logging = st2tests/conf/logging.auth.conf + +mode = standalone +backend = flat_file +backend_kwargs = {"file_path": "st2auth/conf/htpasswd_dev"} + +# Base URL to the API endpoint excluding the version (e.g. http://myhost.net:9101/) +api_url = http://127.0.0.1:9101/ + +[system] +debug = False +# This way integration tests can write to this directory +base_path = /tmp + +[garbagecollector] +logging = st2reactor/conf/logging.garbagecollector.conf + +action_executions_ttl = 20 +action_executions_output_ttl = 10 +trigger_instances_ttl = 20 +purge_inquiries = True + +collection_interval = 1 +sleep_delay = 0.1 + +[content] +system_packs_base_path = +packs_base_paths = st2tests/st2tests/fixtures/packs/ + +[syslog] +host = 127.0.0.1 +port = 514 +facility = local7 +protocol = udp + +[webui] +# webui_base_url = https://mywebhost.domain + +[log] +excludes = requests,paramiko +redirect_stderr = False +mask_secrets = False + +[system_user] +user = stanley +ssh_key_file = /home/vagrant/.ssh/stanley_rsa + +[messaging] +url = amqp://guest:guest@127.0.0.1:5672/ + +[ssh_runner] +remote_dir = /tmp + +[resultstracker] +logging = st2actions/conf/logging.resultstracker.conf +query_interval = 0.1 + +[notifier] +logging = st2actions/conf/logging.notifier.conf + +[exporter] +logging = st2exporter/conf/logging.exporter.conf + +[mistral] +jitter_interval = 0 diff --git a/st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true.conf 
b/st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true.conf new file mode 100644 index 0000000000..3317c11abf --- /dev/null +++ b/st2tests/st2tests/fixtures/conf/st2.tests.api.system_debug_true.conf @@ -0,0 +1,99 @@ +# Config file used by integration tests + +[database] +db_name = st2-test + +[api] +# Host and port to bind the API server. +host = 127.0.0.1 +port = 9101 +logging = st2tests/st2tests/fixtures/conf/logging.api.info.conf +mask_secrets = False +# allow_origin is required for handling CORS in st2 web UI. +# allow_origin = http://myhost1.example.com:3000,http://myhost2.example.com:3000 + +[sensorcontainer] +logging = st2tests/conf/logging.sensorcontainer.conf +sensor_node_name = sensornode1 +partition_provider = name:default + +[rulesengine] +logging = st2reactor/conf/logging.rulesengine.conf + +[timersengine] +logging = st2reactor/conf/logging.timersengine.conf + +[actionrunner] +logging = st2actions/conf/logging.conf + +[auth] +host = 127.0.0.1 +port = 9100 +use_ssl = False +debug = False +enable = False +logging = st2tests/conf/logging.auth.conf + +mode = standalone +backend = flat_file +backend_kwargs = {"file_path": "st2auth/conf/htpasswd_dev"} + +# Base URL to the API endpoint excluding the version (e.g. 
http://myhost.net:9101/) +api_url = http://127.0.0.1:9101/ + +[system] +debug = True +# This way integration tests can write to this directory +base_path = /tmp + +[garbagecollector] +logging = st2reactor/conf/logging.garbagecollector.conf + +action_executions_ttl = 20 +action_executions_output_ttl = 10 +trigger_instances_ttl = 20 +purge_inquiries = True + +collection_interval = 1 +sleep_delay = 0.1 + +[content] +system_packs_base_path = +packs_base_paths = st2tests/st2tests/fixtures/packs/ + +[syslog] +host = 127.0.0.1 +port = 514 +facility = local7 +protocol = udp + +[webui] +# webui_base_url = https://mywebhost.domain + +[log] +excludes = requests,paramiko +redirect_stderr = False +mask_secrets = False + +[system_user] +user = stanley +ssh_key_file = /home/vagrant/.ssh/stanley_rsa + +[messaging] +url = amqp://guest:guest@127.0.0.1:5672/ + +[ssh_runner] +remote_dir = /tmp + +[resultstracker] +logging = st2actions/conf/logging.resultstracker.conf +query_interval = 0.1 + +[notifier] +logging = st2actions/conf/logging.notifier.conf + +[exporter] +logging = st2exporter/conf/logging.exporter.conf + +[mistral] +jitter_interval = 0 diff --git a/st2tests/st2tests/fixtures/conf/st2.tests.conf b/st2tests/st2tests/fixtures/conf/st2.tests.conf new file mode 100644 index 0000000000..bb97039f03 --- /dev/null +++ b/st2tests/st2tests/fixtures/conf/st2.tests.conf @@ -0,0 +1,100 @@ +# Config file used by integration tests + +[database] +db_name = st2-test + +[api] +# Host and port to bind the API server. +host = 127.0.0.1 +port = 9101 +logging = st2tests/conf/logging.api.conf +mask_secrets = False +# allow_origin is required for handling CORS in st2 web UI. 
+# allow_origin = http://myhost1.example.com:3000,http://myhost2.example.com:3000 + +[sensorcontainer] +logging = st2tests/conf/logging.sensorcontainer.conf +sensor_node_name = sensornode1 +partition_provider = name:default + +[rulesengine] +logging = st2reactor/conf/logging.rulesengine.conf + +[timersengine] +logging = st2reactor/conf/logging.timersengine.conf + +[actionrunner] +logging = st2actions/conf/logging.conf + +[auth] +host = 127.0.0.1 +port = 9100 +use_ssl = False +debug = False +enable = False +logging = st2tests/conf/logging.auth.conf + +mode = standalone +backend = flat_file +backend_kwargs = {"file_path": "st2auth/conf/htpasswd_dev"} + +# Base URL to the API endpoint excluding the version (e.g. http://myhost.net:9101/) +api_url = http://127.0.0.1:9101/ + +[system] +debug = False + +# This way integration tests can write to this directory +base_path = /tmp + +[garbagecollector] +logging = st2reactor/conf/logging.garbagecollector.conf + +action_executions_ttl = 20 +action_executions_output_ttl = 10 +trigger_instances_ttl = 20 +purge_inquiries = True + +collection_interval = 1 +sleep_delay = 0.1 + +[content] +system_packs_base_path = +packs_base_paths = st2tests/st2tests/fixtures/packs/ + +[syslog] +host = 127.0.0.1 +port = 514 +facility = local7 +protocol = udp + +[webui] +# webui_base_url = https://mywebhost.domain + +[log] +excludes = requests,paramiko +redirect_stderr = False +mask_secrets = False + +[system_user] +user = stanley +ssh_key_file = /home/vagrant/.ssh/stanley_rsa + +[messaging] +url = amqp://guest:guest@127.0.0.1:5672/ + +[ssh_runner] +remote_dir = /tmp + +[resultstracker] +logging = st2actions/conf/logging.resultstracker.conf +query_interval = 0.1 + +[notifier] +logging = st2actions/conf/logging.notifier.conf + +[exporter] +logging = st2exporter/conf/logging.exporter.conf + +[mistral] +jitter_interval = 0 From dbfc25d3452f60bf2574b0c8ac42fd2a31dfdec5 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Wed, 6 Feb 2019 13:48:54 +0100 
Subject: [PATCH 052/105] Use longer sleep to avoid false negatives. --- .../integration/test_service_setup_log_level_filtering.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/st2common/tests/integration/test_service_setup_log_level_filtering.py b/st2common/tests/integration/test_service_setup_log_level_filtering.py index 077d4a9f38..1e177dec45 100644 --- a/st2common/tests/integration/test_service_setup_log_level_filtering.py +++ b/st2common/tests/integration/test_service_setup_log_level_filtering.py @@ -76,7 +76,7 @@ def test_audit_log_level_is_filtered_if_log_level_is_not_debug_or_audit(self): self.add_process(process=process) # Give it some time to start up - eventlet.sleep(3) + eventlet.sleep(5) process.send_signal(signal.SIGKILL) # First 3 log lines are debug messages about the environment which are always logged @@ -91,7 +91,7 @@ def test_audit_log_level_is_filtered_if_log_level_is_not_debug_or_audit(self): self.add_process(process=process) # Give it some time to start up - eventlet.sleep(3) + eventlet.sleep(5) process.send_signal(signal.SIGKILL) # First 3 log lines are debug messages about the environment which are always logged @@ -106,7 +106,7 @@ def test_audit_log_level_is_filtered_if_log_level_is_not_debug_or_audit(self): self.add_process(process=process) # Give it some time to start up - eventlet.sleep(3) + eventlet.sleep(5) process.send_signal(signal.SIGKILL) # First 3 log lines are debug messages about the environment which are always logged From ffd5fa05202a8d5a8f47854a3adb7794bb41149b Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Wed, 6 Feb 2019 13:51:55 +0100 Subject: [PATCH 053/105] Fix Python 3 compatibility. 
--- .../integration/test_service_setup_log_level_filtering.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/st2common/tests/integration/test_service_setup_log_level_filtering.py b/st2common/tests/integration/test_service_setup_log_level_filtering.py index 1e177dec45..432834d486 100644 --- a/st2common/tests/integration/test_service_setup_log_level_filtering.py +++ b/st2common/tests/integration/test_service_setup_log_level_filtering.py @@ -65,7 +65,7 @@ def test_audit_log_level_is_filtered_if_log_level_is_not_debug_or_audit(self): process.send_signal(signal.SIGKILL) # First 3 log lines are debug messages about the environment which are always logged - stdout = '\n'.join(process.stdout.read().split('\n')[3:]) + stdout = '\n'.join(process.stdout.read().decode('utf-8').split('\n')[3:]) self.assertTrue('INFO [-]' in stdout) self.assertTrue('DEBUG [-]' not in stdout) @@ -80,7 +80,7 @@ def test_audit_log_level_is_filtered_if_log_level_is_not_debug_or_audit(self): process.send_signal(signal.SIGKILL) # First 3 log lines are debug messages about the environment which are always logged - stdout = '\n'.join(process.stdout.read().split('\n')[3:]) + stdout = '\n'.join(process.stdout.read().decode('utf-8').split('\n')[3:]) self.assertTrue('INFO [-]' in stdout) self.assertTrue('DEBUG [-]' in stdout) @@ -95,7 +95,7 @@ def test_audit_log_level_is_filtered_if_log_level_is_not_debug_or_audit(self): process.send_signal(signal.SIGKILL) # First 3 log lines are debug messages about the environment which are always logged - stdout = '\n'.join(process.stdout.read().split('\n')[3:]) + stdout = '\n'.join(process.stdout.read().decode('utf-8').split('\n')[3:]) self.assertTrue('INFO [-]' not in stdout) self.assertTrue('DEBUG [-]' not in stdout) @@ -110,7 +110,7 @@ def test_audit_log_level_is_filtered_if_log_level_is_not_debug_or_audit(self): process.send_signal(signal.SIGKILL) # First 3 log lines are debug messages about the environment which are always logged - stdout 
= '\n'.join(process.stdout.read().split('\n')[3:]) + stdout = '\n'.join(process.stdout.read().decode('utf-8').split('\n')[3:]) self.assertTrue('INFO [-]' in stdout) self.assertTrue('DEBUG [-]' in stdout) From 1c2c9e8574022339143c79341d8a0116953303cd Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Fri, 8 Feb 2019 10:20:51 +0100 Subject: [PATCH 054/105] Fix accidental regression in pylint Makefile check. It looks like a couple of lines were accidentely removed in dc2617040d637fb4857b16ac1c40d9ef60c71830 and since then we didn't perform any pylint checks on StackStorm components and runners code. --- Makefile | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/Makefile b/Makefile index 9a769aca53..61c758f146 100644 --- a/Makefile +++ b/Makefile @@ -175,6 +175,19 @@ configgen: requirements .configgen @echo "================== pylint ====================" @echo # Lint st2 components + @for component in $(COMPONENTS); do\ + echo "==========================================================="; \ + echo "Running pylint on" $$component; \ + echo "==========================================================="; \ + $(VIRTUALENV_DIR)/bin/pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models --load-plugins=pylint_plugins.db_models $$component/$$component || exit 1; \ + done + # Lint runner modules and packages + @for component in $(COMPONENTS_RUNNERS); do\ + echo "==========================================================="; \ + echo "Running pylint on" $$component; \ + echo "==========================================================="; \ + $(VIRTUALENV_DIR)/bin/pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models --load-plugins=pylint_plugins.db_models $$component/*.py || exit 1; \ + done # Lint Python pack management actions . 
$(VIRTUALENV_DIR)/bin/activate; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models contrib/packs/actions/*.py || exit 1; . $(VIRTUALENV_DIR)/bin/activate; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models contrib/packs/actions/*/*.py || exit 1; From cc1c23b4ab621e2f87cc0cc5e8072ddda63386ff Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Sun, 10 Feb 2019 11:51:14 +0100 Subject: [PATCH 055/105] Fix syntax so PYTHONPATH is set correctly. --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 61c758f146..70085b4072 100644 --- a/Makefile +++ b/Makefile @@ -179,14 +179,14 @@ configgen: requirements .configgen echo "==========================================================="; \ echo "Running pylint on" $$component; \ echo "==========================================================="; \ - $(VIRTUALENV_DIR)/bin/pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models --load-plugins=pylint_plugins.db_models $$component/$$component || exit 1; \ + . $(VIRTUALENV_DIR)/bin/activate ; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models --load-plugins=pylint_plugins.db_models $$component/$$component || exit 1; \ done # Lint runner modules and packages @for component in $(COMPONENTS_RUNNERS); do\ echo "==========================================================="; \ echo "Running pylint on" $$component; \ echo "==========================================================="; \ - $(VIRTUALENV_DIR)/bin/pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models --load-plugins=pylint_plugins.db_models $$component/*.py || exit 1; \ + . 
$(VIRTUALENV_DIR)/bin/activate ; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models --load-plugins=pylint_plugins.db_models $$component/*.py || exit 1; \ done # Lint Python pack management actions . $(VIRTUALENV_DIR)/bin/activate; pylint -j $(PYLINT_CONCURRENCY) -E --rcfile=./lint-configs/python/.pylintrc --load-plugins=pylint_plugins.api_models contrib/packs/actions/*.py || exit 1; From ddcf5bdc925402f88f73392d946d6b05de517c04 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Thu, 7 Feb 2019 19:01:05 +0100 Subject: [PATCH 056/105] Fix invalid / broken comparison. --- st2common/st2common/models/db/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/st2common/st2common/models/db/__init__.py b/st2common/st2common/models/db/__init__.py index 2daffd4697..04e0b3127f 100644 --- a/st2common/st2common/models/db/__init__.py +++ b/st2common/st2common/models/db/__init__.py @@ -306,11 +306,11 @@ def _get_ssl_kwargs(ssl=False, ssl_keyfile=None, ssl_certfile=None, ssl_cert_req ssl_kwargs['ssl'] = True ssl_kwargs['ssl_certfile'] = ssl_certfile if ssl_cert_reqs: - if ssl_cert_reqs is 'none': + if ssl_cert_reqs == 'none': ssl_cert_reqs = ssl_lib.CERT_NONE - elif ssl_cert_reqs is 'optional': + elif ssl_cert_reqs == 'optional': ssl_cert_reqs = ssl_lib.CERT_OPTIONAL - elif ssl_cert_reqs is 'required': + elif ssl_cert_reqs == 'required': ssl_cert_reqs = ssl_lib.CERT_REQUIRED ssl_kwargs['ssl_cert_reqs'] = ssl_cert_reqs if ssl_ca_certs: From 0ecb2267fc987fe367501e8a9678cded6e77d15e Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Fri, 8 Feb 2019 09:42:16 +0100 Subject: [PATCH 057/105] Add support for client side certificate authentication and verifying server certificate using provided CA bundle for message bus (RabbitMQ) connections. Option names are consistent with the same option names for MongoDB. Update affected code so connection and URLs are only retrieved in a single place. 
--- st2actions/st2actions/notifier/notifier.py | 3 +- .../resultstracker/resultstracker.py | 4 +- st2actions/st2actions/scheduler/entrypoint.py | 3 +- st2actions/st2actions/worker.py | 4 +- .../integration/test_action_state_consumer.py | 4 +- st2common/st2common/config.py | 23 ++++- st2common/st2common/persistence/execution.py | 7 +- .../st2common/persistence/executionstate.py | 9 +- st2common/st2common/persistence/liveaction.py | 9 +- st2common/st2common/persistence/sensor.py | 9 +- st2common/st2common/persistence/trigger.py | 11 ++- st2common/st2common/persistence/workflow.py | 4 +- .../st2common/services/sensor_watcher.py | 3 +- .../st2common/services/triggerwatcher.py | 4 +- st2common/st2common/stream/listener.py | 5 +- .../transport/actionexecutionstate.py | 3 +- st2common/st2common/transport/announcement.py | 10 +- .../st2common/transport/bootstrap_utils.py | 4 +- st2common/st2common/transport/execution.py | 8 +- st2common/st2common/transport/liveaction.py | 7 +- st2common/st2common/transport/publishers.py | 31 +++++-- st2common/st2common/transport/reactor.py | 13 +-- st2common/st2common/transport/utils.py | 92 ++++++++++++++++++- st2common/tests/unit/test_state_publisher.py | 10 +- st2exporter/st2exporter/worker.py | 3 +- st2reactor/st2reactor/rules/worker.py | 4 +- tools/migrate_messaging_setup.py | 4 +- tools/queue_consumer.py | 6 +- tools/queue_producer.py | 7 +- 29 files changed, 220 insertions(+), 84 deletions(-) diff --git a/st2actions/st2actions/notifier/notifier.py b/st2actions/st2actions/notifier/notifier.py index 16128622ee..b626bb5d13 100644 --- a/st2actions/st2actions/notifier/notifier.py +++ b/st2actions/st2actions/notifier/notifier.py @@ -18,7 +18,6 @@ from datetime import datetime import json -from kombu import Connection from oslo_config import cfg from st2common import log as logging @@ -284,6 +283,6 @@ def _get_runner_ref(self, action_ref): def get_notifier(): - with Connection(transport_utils.get_messaging_urls()) as conn: + with 
transport_utils.get_connection() as conn: return Notifier(conn, [NOTIFIER_ACTIONUPDATE_WORK_QUEUE], trigger_dispatcher=TriggerDispatcher(LOG)) diff --git a/st2actions/st2actions/resultstracker/resultstracker.py b/st2actions/st2actions/resultstracker/resultstracker.py index 954b1da42e..62f6e746ff 100644 --- a/st2actions/st2actions/resultstracker/resultstracker.py +++ b/st2actions/st2actions/resultstracker/resultstracker.py @@ -14,11 +14,11 @@ # limitations under the License. from __future__ import absolute_import + import eventlet import six from collections import defaultdict -from kombu import Connection from st2common.query.base import QueryContext from st2common import log as logging @@ -111,5 +111,5 @@ def get_querier(self, query_module_name): def get_tracker(): - with Connection(transport_utils.get_messaging_urls()) as conn: + with transport_utils.get_connection() as conn: return ResultsTracker(conn, [RESULTSTRACKER_ACTIONSTATE_WORK_QUEUE]) diff --git a/st2actions/st2actions/scheduler/entrypoint.py b/st2actions/st2actions/scheduler/entrypoint.py index 752d6849ea..811a1f7d80 100644 --- a/st2actions/st2actions/scheduler/entrypoint.py +++ b/st2actions/st2actions/scheduler/entrypoint.py @@ -14,7 +14,6 @@ # limitations under the License. 
from __future__ import absolute_import -from kombu import Connection from st2common import log as logging from st2common.util import date @@ -105,5 +104,5 @@ def _create_execution_queue_item_db_from_liveaction(self, liveaction, delay=None def get_scheduler_entrypoint(): - with Connection(transport_utils.get_messaging_urls()) as conn: + with transport_utils.get_connection() as conn: return SchedulerEntrypoint(conn, [ACTIONSCHEDULER_REQUEST_QUEUE]) diff --git a/st2actions/st2actions/worker.py b/st2actions/st2actions/worker.py index fdb39f0ab5..e36eb0489a 100644 --- a/st2actions/st2actions/worker.py +++ b/st2actions/st2actions/worker.py @@ -17,8 +17,6 @@ import sys import traceback -from kombu import Connection - from st2actions.container.base import RunnerContainer from st2common import log as logging from st2common.constants import action as action_constants @@ -250,5 +248,5 @@ def _resume_action(self, liveaction_db): def get_worker(): - with Connection(transport_utils.get_messaging_urls()) as conn: + with transport_utils.get_connection() as conn: return ActionExecutionDispatcher(conn, ACTIONRUNNER_QUEUES) diff --git a/st2actions/tests/integration/test_action_state_consumer.py b/st2actions/tests/integration/test_action_state_consumer.py index 668ac5c339..3061677ef0 100644 --- a/st2actions/tests/integration/test_action_state_consumer.py +++ b/st2actions/tests/integration/test_action_state_consumer.py @@ -20,8 +20,6 @@ import mock -from kombu import Connection - from st2common.transport.queues import RESULTSTRACKER_ACTIONSTATE_WORK_QUEUE from st2actions.resultstracker.resultstracker import ResultsTracker from st2common.models.db.executionstate import ActionExecutionStateDB @@ -63,7 +61,7 @@ def setUpClass(cls): @mock.patch.object(TestQuerier, 'query', mock.MagicMock(return_value=(False, {}))) def test_process_message(self): - with Connection(transport_utils.get_messaging_urls()) as conn: + with transport_utils.get_connection() as conn: tracker = ResultsTracker(conn, 
[RESULTSTRACKER_ACTIONSTATE_WORK_QUEUE]) tracker._bootstrap() state = ActionStateConsumerTests.get_state( diff --git a/st2common/st2common/config.py b/st2common/st2common/config.py index 100f49faf4..6f69db6796 100644 --- a/st2common/st2common/config.py +++ b/st2common/st2common/config.py @@ -226,7 +226,28 @@ def register_opts(ignore_errors=False): help='How many times should we retry connection before failing.'), cfg.IntOpt( 'connection_retry_wait', default=10000, - help='How long should we wait between connection retries.') + help='How long should we wait between connection retries.'), + cfg.BoolOpt( + 'ssl', default=False, + help='Use SSL / TLS to connection to the messaging server. Same as ' + 'appending "?ssl=true" at the end of the connection URL string.'), + cfg.StrOpt( + 'ssl_keyfile', default=None, + help='Private keyfile used to identify the local connection against RabbitMQ.'), + cfg.StrOpt( + 'ssl_certfile', default=None, + help='Certificate file used to identify the local connection (client).'), + cfg.StrOpt( + 'ssl_cert_reqs', default=None, choices='none, optional, required', + help='Specifies whether a certificate is required from the other side of the ' + 'connection, and whether it will be validated if provided.'), + cfg.StrOpt( + 'ssl_ca_certs', default=None, + help='ca_certs file contains a set of concatenated CA certificates, which are ' + 'used to validate certificates passed from RabbitMQ.'), + cfg.StrOpt( + 'login_method', default=None, + help='Login method to use (AMQPLAIN, PLAIN, EXTERNAL, etc.).') ] do_register_opts(messaging_opts, 'messaging', ignore_errors) diff --git a/st2common/st2common/persistence/execution.py b/st2common/st2common/persistence/execution.py index 4df940566f..83d5a32288 100644 --- a/st2common/st2common/persistence/execution.py +++ b/st2common/st2common/persistence/execution.py @@ -19,7 +19,6 @@ from st2common.models.db.execution import ActionExecutionDB from st2common.models.db.execution import ActionExecutionOutputDB 
from st2common.persistence.base import Access -from st2common.transport import utils as transport_utils __all__ = [ 'ActionExecution', @@ -38,8 +37,7 @@ def _get_impl(cls): @classmethod def _get_publisher(cls): if not cls.publisher: - cls.publisher = transport.execution.ActionExecutionPublisher( - urls=transport_utils.get_messaging_urls()) + cls.publisher = transport.execution.ActionExecutionPublisher() return cls.publisher @classmethod @@ -57,8 +55,7 @@ def _get_impl(cls): @classmethod def _get_publisher(cls): if not cls.publisher: - cls.publisher = transport.execution.ActionExecutionOutputPublisher( - urls=transport_utils.get_messaging_urls()) + cls.publisher = transport.execution.ActionExecutionOutputPublisher() return cls.publisher @classmethod diff --git a/st2common/st2common/persistence/executionstate.py b/st2common/st2common/persistence/executionstate.py index 5f087e4647..f636a0008d 100644 --- a/st2common/st2common/persistence/executionstate.py +++ b/st2common/st2common/persistence/executionstate.py @@ -14,10 +14,14 @@ # limitations under the License. 
from __future__ import absolute_import + from st2common import transport from st2common.models.db.executionstate import actionexecstate_access from st2common.persistence import base as persistence -from st2common.transport import utils as transport_utils + +__all__ = [ + 'ActionExecutionState' +] class ActionExecutionState(persistence.Access): @@ -31,6 +35,5 @@ def _get_impl(cls): @classmethod def _get_publisher(cls): if not cls.publisher: - cls.publisher = transport.actionexecutionstate.ActionExecutionStatePublisher( - urls=transport_utils.get_messaging_urls()) + cls.publisher = transport.actionexecutionstate.ActionExecutionStatePublisher() return cls.publisher diff --git a/st2common/st2common/persistence/liveaction.py b/st2common/st2common/persistence/liveaction.py index 88ddcaad59..2eb6015e65 100644 --- a/st2common/st2common/persistence/liveaction.py +++ b/st2common/st2common/persistence/liveaction.py @@ -14,10 +14,14 @@ # limitations under the License. from __future__ import absolute_import + from st2common import transport from st2common.models.db.liveaction import liveaction_access from st2common.persistence import base as persistence -from st2common.transport import utils as transport_utils + +__all__ = [ + 'LiveAction' +] class LiveAction(persistence.StatusBasedResource): @@ -31,8 +35,7 @@ def _get_impl(cls): @classmethod def _get_publisher(cls): if not cls.publisher: - cls.publisher = transport.liveaction.LiveActionPublisher( - urls=transport_utils.get_messaging_urls()) + cls.publisher = transport.liveaction.LiveActionPublisher() return cls.publisher @classmethod diff --git a/st2common/st2common/persistence/sensor.py b/st2common/st2common/persistence/sensor.py index c7547bcf61..e941c2bca5 100644 --- a/st2common/st2common/persistence/sensor.py +++ b/st2common/st2common/persistence/sensor.py @@ -14,10 +14,14 @@ # limitations under the License. 
from __future__ import absolute_import + from st2common import transport from st2common.models.db.sensor import sensor_type_access from st2common.persistence.base import ContentPackResource -from st2common.transport import utils as transport_utils + +__all__ = [ + 'SensorType' +] class SensorType(ContentPackResource): @@ -31,6 +35,5 @@ def _get_impl(cls): @classmethod def _get_publisher(cls): if not cls.publisher: - cls.publisher = transport.reactor.SensorCUDPublisher( - urls=transport_utils.get_messaging_urls()) + cls.publisher = transport.reactor.SensorCUDPublisher() return cls.publisher diff --git a/st2common/st2common/persistence/trigger.py b/st2common/st2common/persistence/trigger.py index bbe207c263..01787ce38d 100644 --- a/st2common/st2common/persistence/trigger.py +++ b/st2common/st2common/persistence/trigger.py @@ -14,12 +14,18 @@ # limitations under the License. from __future__ import absolute_import + from st2common import log as logging from st2common import transport from st2common.exceptions.db import StackStormDBObjectNotFoundError from st2common.models.db.trigger import triggertype_access, trigger_access, triggerinstance_access from st2common.persistence.base import (Access, ContentPackResource) -from st2common.transport import utils as transport_utils + +__all__ = [ + 'TriggerType', + 'Trigger', + 'TriggerInstance' +] LOG = logging.getLogger(__name__) @@ -43,8 +49,7 @@ def _get_impl(cls): @classmethod def _get_publisher(cls): if not cls.publisher: - cls.publisher = transport.reactor.TriggerCUDPublisher( - urls=transport_utils.get_messaging_urls()) + cls.publisher = transport.reactor.TriggerCUDPublisher() return cls.publisher @classmethod diff --git a/st2common/st2common/persistence/workflow.py b/st2common/st2common/persistence/workflow.py index 3063dd4a9d..933460b9aa 100644 --- a/st2common/st2common/persistence/workflow.py +++ b/st2common/st2common/persistence/workflow.py @@ -19,7 +19,6 @@ from st2common.models import db from st2common.models.db 
import workflow as wf_db_models from st2common.persistence import base as persistence -from st2common.transport import utils as transport_utils __all__ = [ @@ -39,8 +38,7 @@ def _get_impl(cls): @classmethod def _get_publisher(cls): if not cls.publisher: - cls.publisher = transport.workflow.WorkflowExecutionPublisher( - urls=transport_utils.get_messaging_urls()) + cls.publisher = transport.workflow.WorkflowExecutionPublisher() return cls.publisher diff --git a/st2common/st2common/services/sensor_watcher.py b/st2common/st2common/services/sensor_watcher.py index 41bcecc022..27892f055e 100644 --- a/st2common/st2common/services/sensor_watcher.py +++ b/st2common/st2common/services/sensor_watcher.py @@ -20,7 +20,6 @@ from __future__ import absolute_import import eventlet from kombu.mixins import ConsumerMixin -from kombu import Connection from st2common import log as logging from st2common.transport import reactor, publishers @@ -89,7 +88,7 @@ def process_task(self, body, message): def start(self): try: - self.connection = Connection(transport_utils.get_messaging_urls()) + self.connection = transport_utils.get_connection() self._updates_thread = eventlet.spawn(self.run) except: LOG.exception('Failed to start sensor_watcher.') diff --git a/st2common/st2common/services/triggerwatcher.py b/st2common/st2common/services/triggerwatcher.py index 787f85c489..2cbe5839fc 100644 --- a/st2common/st2common/services/triggerwatcher.py +++ b/st2common/st2common/services/triggerwatcher.py @@ -15,9 +15,9 @@ # pylint: disable=assignment-from-none from __future__ import absolute_import + import eventlet from kombu.mixins import ConsumerMixin -from kombu import Connection from st2common import log as logging from st2common.persistence.trigger import Trigger @@ -108,7 +108,7 @@ def process_task(self, body, message): def start(self): try: - self.connection = Connection(transport_utils.get_messaging_urls()) + self.connection = transport_utils.get_connection() self._updates_thread = 
eventlet.spawn(self.run) self._load_thread = eventlet.spawn(self._load_triggers_from_db) except: diff --git a/st2common/st2common/stream/listener.py b/st2common/st2common/stream/listener.py index 73b4962524..e5b5529f24 100644 --- a/st2common/st2common/stream/listener.py +++ b/st2common/st2common/stream/listener.py @@ -18,7 +18,6 @@ import eventlet -from kombu import Connection from kombu.mixins import ConsumerMixin from oslo_config import cfg @@ -233,13 +232,13 @@ def get_listener(name): if name == 'stream': if not _stream_listener: - with Connection(transport_utils.get_messaging_urls()) as conn: + with transport_utils.get_connection() as conn: _stream_listener = StreamListener(conn) eventlet.spawn_n(listen, _stream_listener) return _stream_listener elif name == 'execution_output': if not _execution_output_listener: - with Connection(transport_utils.get_messaging_urls()) as conn: + with transport_utils.get_connection() as conn: _execution_output_listener = ExecutionOutputListener(conn) eventlet.spawn_n(listen, _execution_output_listener) return _execution_output_listener diff --git a/st2common/st2common/transport/actionexecutionstate.py b/st2common/st2common/transport/actionexecutionstate.py index a46d202f4f..39f2e653ae 100644 --- a/st2common/st2common/transport/actionexecutionstate.py +++ b/st2common/st2common/transport/actionexecutionstate.py @@ -26,7 +26,8 @@ class ActionExecutionStatePublisher(publishers.CUDPublisher): def __init__(self, urls): - super(ActionExecutionStatePublisher, self).__init__(urls, ACTIONEXECUTIONSTATE_XCHG) + super(ActionExecutionStatePublisher, self).__init__(exchange=ACTIONEXECUTIONSTATE_XCHG, + urls=urls) def get_queue(name, routing_key): diff --git a/st2common/st2common/transport/announcement.py b/st2common/st2common/transport/announcement.py index 72504806af..4559afdab6 100644 --- a/st2common/st2common/transport/announcement.py +++ b/st2common/st2common/transport/announcement.py @@ -20,16 +20,20 @@ from st2common.constants.trace 
import TRACE_CONTEXT from st2common.models.api.trace import TraceContext from st2common.transport import publishers -from st2common.transport import utils as transport_utils LOG = logging.getLogger(__name__) # Exchange for Announcements ANNOUNCEMENT_XCHG = Exchange('st2.announcement', type='topic') +__all__ = [ + 'AnnouncementPublisher', + 'AnnouncementDispatcher' +] + class AnnouncementPublisher(object): - def __init__(self, urls): + def __init__(self, urls=None): self._publisher = publishers.PoolPublisher(urls=urls) def publish(self, payload, routing_key): @@ -42,7 +46,7 @@ class AnnouncementDispatcher(object): """ def __init__(self, logger=LOG): - self._publisher = AnnouncementPublisher(urls=transport_utils.get_messaging_urls()) + self._publisher = AnnouncementPublisher() self._logger = logger def dispatch(self, routing_key, payload, trace_context=None): diff --git a/st2common/st2common/transport/bootstrap_utils.py b/st2common/st2common/transport/bootstrap_utils.py index 97acc622d8..eda88f2146 100644 --- a/st2common/st2common/transport/bootstrap_utils.py +++ b/st2common/st2common/transport/bootstrap_utils.py @@ -20,7 +20,6 @@ import six import retrying from oslo_config import cfg -from kombu import Connection from kombu.serialization import register from kombu.serialization import pickle from kombu.serialization import pickle_protocol @@ -141,7 +140,8 @@ def _do_predeclare_queue(channel, queue): def register_exchanges(): LOG.debug('Registering exchanges...') connection_urls = transport_utils.get_messaging_urls() - with Connection(connection_urls) as conn: + + with transport_utils.get_connection() as conn: # Use ConnectionRetryWrapper to deal with rmq clustering etc. 
retry_wrapper = ConnectionRetryWrapper(cluster_size=len(connection_urls), logger=LOG) diff --git a/st2common/st2common/transport/execution.py b/st2common/st2common/transport/execution.py index 0256573e93..660f747a8f 100644 --- a/st2common/st2common/transport/execution.py +++ b/st2common/st2common/transport/execution.py @@ -32,13 +32,15 @@ class ActionExecutionPublisher(publishers.CUDPublisher): - def __init__(self, urls): - super(ActionExecutionPublisher, self).__init__(urls, EXECUTION_XCHG) + def __init__(self, urls=None): + super(ActionExecutionPublisher, self).__init__(exchange=EXECUTION_XCHG, + urls=urls) class ActionExecutionOutputPublisher(publishers.CUDPublisher): def __init__(self, urls): - super(ActionExecutionOutputPublisher, self).__init__(urls, EXECUTION_OUTPUT_XCHG) + super(ActionExecutionOutputPublisher, self).__init__(exchange=EXECUTION_OUTPUT_XCHG, + urls=urls) def get_queue(name=None, routing_key=None, exclusive=False, auto_delete=False): diff --git a/st2common/st2common/transport/liveaction.py b/st2common/st2common/transport/liveaction.py index 996d57d5b4..7775034dad 100644 --- a/st2common/st2common/transport/liveaction.py +++ b/st2common/st2common/transport/liveaction.py @@ -26,9 +26,10 @@ class LiveActionPublisher(publishers.CUDPublisher, publishers.StatePublisherMixin): - def __init__(self, urls): - publishers.CUDPublisher.__init__(self, urls, LIVEACTION_XCHG) - publishers.StatePublisherMixin.__init__(self, urls, LIVEACTION_STATUS_MGMT_XCHG) + def __init__(self, urls=None): + publishers.CUDPublisher.__init__(self, exchange=LIVEACTION_XCHG, urls=urls) + publishers.StatePublisherMixin.__init__(self, exchange=LIVEACTION_STATUS_MGMT_XCHG, + urls=urls) def get_queue(name, routing_key): diff --git a/st2common/st2common/transport/publishers.py b/st2common/st2common/transport/publishers.py index 76f309bc55..2a2c785b51 100644 --- a/st2common/st2common/transport/publishers.py +++ b/st2common/st2common/transport/publishers.py @@ -14,15 +14,23 @@ # 
limitations under the License. from __future__ import absolute_import + import copy -from kombu import Connection from kombu.messaging import Producer from st2common import log as logging from st2common.metrics.base import Timer +from st2common.transport import utils as transport_utils from st2common.transport.connection_retry_wrapper import ConnectionRetryWrapper +__all__ = [ + 'PoolPublisher', + 'SharedPoolPublishers', + 'CUDPublisher', + 'StatePublisherMixin' +] + ANY_RK = '*' CREATE_RK = 'create' UPDATE_RK = 'update' @@ -32,8 +40,17 @@ class PoolPublisher(object): - def __init__(self, urls): - self.pool = Connection(urls, failover_strategy='round-robin').Pool(limit=10) + def __init__(self, urls=None): + """ + :param urls: Connection URLs to use. If not provided it uses a default value from the + config. + :type urls: ``list`` + """ + urls = urls or transport_utils.get_messaging_urls() + connection = transport_utils.get_connection(urls=urls, + connection_kwargs={'failover_strategy': + 'round-robin'}) + self.pool = connection.Pool(limit=10) self.cluster_size = len(urls) def errback(self, exc, interval): @@ -92,13 +109,14 @@ def get_publisher(self, urls): class CUDPublisher(object): - def __init__(self, urls, exchange): + def __init__(self, exchange, urls=None): + urls = urls or transport_utils.get_messaging_urls() self._publisher = SharedPoolPublishers().get_publisher(urls=urls) self._exchange = exchange def publish_create(self, payload): with Timer(key='amqp.publish.create'): - self._publisher.publish(payload, self._exchange, CREATE_RK) + self._publisher.publgish(payload, self._exchange, CREATE_RK) def publish_update(self, payload): with Timer(key='amqp.publish.update'): @@ -110,7 +128,8 @@ def publish_delete(self, payload): class StatePublisherMixin(object): - def __init__(self, urls, exchange): + def __init__(self, exchange, urls=None): + urls = urls or transport_utils.get_messaging_urls() self._state_publisher = SharedPoolPublishers().get_publisher(urls=urls) 
self._state_exchange = exchange diff --git a/st2common/st2common/transport/reactor.py b/st2common/st2common/transport/reactor.py index 65670c8ffe..dc3abfcc40 100644 --- a/st2common/st2common/transport/reactor.py +++ b/st2common/st2common/transport/reactor.py @@ -20,7 +20,6 @@ from st2common.constants.trace import TRACE_CONTEXT from st2common.models.api.trace import TraceContext from st2common.transport import publishers -from st2common.transport import utils as transport_utils __all__ = [ 'TriggerCUDPublisher', @@ -50,8 +49,9 @@ class SensorCUDPublisher(publishers.CUDPublisher): Publisher responsible for publishing Trigger model CUD events. """ - def __init__(self, urls): - super(SensorCUDPublisher, self).__init__(urls, SENSOR_CUD_XCHG) + def __init__(self, urls=None): + super(SensorCUDPublisher, self).__init__(exchange=SENSOR_CUD_XCHG, + urls=urls) class TriggerCUDPublisher(publishers.CUDPublisher): @@ -60,11 +60,12 @@ class TriggerCUDPublisher(publishers.CUDPublisher): """ def __init__(self, urls): - super(TriggerCUDPublisher, self).__init__(urls, TRIGGER_CUD_XCHG) + super(TriggerCUDPublisher, self).__init__(exchange=TRIGGER_CUD_XCHG, + urls=urls) class TriggerInstancePublisher(object): - def __init__(self, urls): + def __init__(self, urls=None): self._publisher = publishers.PoolPublisher(urls=urls) def publish_trigger(self, payload=None, routing_key=None): @@ -78,7 +79,7 @@ class TriggerDispatcher(object): """ def __init__(self, logger=LOG): - self._publisher = TriggerInstancePublisher(urls=transport_utils.get_messaging_urls()) + self._publisher = TriggerInstancePublisher() self._logger = logger def dispatch(self, trigger, payload=None, trace_context=None): diff --git a/st2common/st2common/transport/utils.py b/st2common/st2common/transport/utils.py index c416578376..f71e91cc66 100644 --- a/st2common/st2common/transport/utils.py +++ b/st2common/st2common/transport/utils.py @@ -14,13 +14,21 @@ # limitations under the License. 
from __future__ import absolute_import + +import ssl as ssl_lib + from oslo_config import cfg +from kombu import Connection + +from st2common import log as logging __all__ = [ + 'get_connection', + 'get_messaging_urls' ] -CONF = cfg.CONF +LOG = logging.getLogger(__name__) def get_messaging_urls(): @@ -30,6 +38,82 @@ :rtype: ``list`` ''' - if CONF.messaging.cluster_urls: - return CONF.messaging.cluster_urls - return [CONF.messaging.url] + if cfg.CONF.messaging.cluster_urls: + return cfg.CONF.messaging.cluster_urls + return [cfg.CONF.messaging.url] + + +def get_connection(urls=None, connection_kwargs=None): + """ + Retrieve kombu "Connection" class instance configured with all the correct + options using values from the config and provided values. + + :param connection_kwargs: Any additional connection keyword arguments passed directly to the + Connection class constructor. + :type connection_kwargs: ``dict`` + """ + urls = urls or get_messaging_urls() + connection_kwargs = connection_kwargs or {} + + kwargs = {} + + ssl_kwargs = _get_ssl_kwargs(ssl=cfg.CONF.messaging.ssl, + ssl_keyfile=cfg.CONF.messaging.ssl_keyfile, + ssl_certfile=cfg.CONF.messaging.ssl_certfile, + ssl_cert_reqs=cfg.CONF.messaging.ssl_cert_reqs, + ssl_ca_certs=cfg.CONF.messaging.ssl_ca_certs, + login_method=cfg.CONF.messaging.login_method) + + # NOTE: "connection_kwargs" argument passed to this function has precedence over config values + if len(ssl_kwargs) == 1 and ssl_kwargs['ssl'] is True: + kwargs.update({'ssl': True}) + elif len(ssl_kwargs) >= 2: + ssl_kwargs.pop('ssl') + kwargs.update({'ssl': ssl_kwargs}) + + kwargs['login_method'] = cfg.CONF.messaging.login_method + + kwargs.update(connection_kwargs) + + # NOTE: This line contains no secret values so it's OK to log it + LOG.debug('Using SSL context for RabbitMQ connection: %s' % (ssl_kwargs)) + + connection = Connection(urls, **kwargs) + return connection + + +def _get_ssl_kwargs(ssl=False, ssl_keyfile=None, 
ssl_certfile=None, ssl_cert_reqs=None, + ssl_ca_certs=None, login_method=None): + """ + Return SSL keyword arguments to be used with the kombu.Connection class. + """ + ssl_kwargs = {} + + # NOTE: If "ssl" is not set to True we don't pass "ssl=False" argument to the constructor + # because user could still specify to use SSL by including "?ssl=true" query param at the + # end of the connection URL string + if ssl is True: + ssl_kwargs['ssl'] = True + + if ssl_keyfile: + ssl_kwargs['ssl'] = True + ssl_kwargs['keyfile'] = ssl_keyfile + + if ssl_certfile: + ssl_kwargs['ssl'] = True + ssl_kwargs['certfile'] = ssl_certfile + + if ssl_cert_reqs: + if ssl_cert_reqs == 'none': + ssl_cert_reqs = ssl_lib.CERT_NONE + elif ssl_cert_reqs == 'optional': + ssl_cert_reqs = ssl_lib.CERT_OPTIONAL + elif ssl_cert_reqs == 'required': + ssl_cert_reqs = ssl_lib.CERT_REQUIRED + ssl_kwargs['cert_reqs'] = ssl_cert_reqs + + if ssl_ca_certs: + ssl_kwargs['ssl'] = True + ssl_kwargs['ca_certs'] = ssl_ca_certs + + return ssl_kwargs diff --git a/st2common/tests/unit/test_state_publisher.py b/st2common/tests/unit/test_state_publisher.py index 180805a54c..f1eef1d05d 100644 --- a/st2common/tests/unit/test_state_publisher.py +++ b/st2common/tests/unit/test_state_publisher.py @@ -14,6 +14,7 @@ # limitations under the License. 
from __future__ import absolute_import + import kombu import mock import mongoengine as me @@ -22,7 +23,7 @@ from st2common.models.db import stormbase from st2common.persistence import base as persistence from st2common.transport import publishers -from st2common.transport import utils as transport_utils + from st2tests import DbTestCase @@ -30,8 +31,9 @@ class FakeModelPublisher(publishers.StatePublisherMixin): - def __init__(self, url): - super(FakeModelPublisher, self).__init__(url, FAKE_STATE_MGMT_XCHG) + def __init__(self, urls=None): + super(FakeModelPublisher, self).__init__(exchange=FAKE_STATE_MGMT_XCHG, + urls=urls) class FakeModelDB(stormbase.StormBaseDB): @@ -49,7 +51,7 @@ def _get_impl(cls): @classmethod def _get_publisher(cls): if not cls.publisher: - cls.publisher = FakeModelPublisher(transport_utils.get_messaging_urls()) + cls.publisher = FakeModelPublisher() return cls.publisher @classmethod diff --git a/st2exporter/st2exporter/worker.py b/st2exporter/st2exporter/worker.py index 49ba9a8ad0..d5a4dcd55b 100644 --- a/st2exporter/st2exporter/worker.py +++ b/st2exporter/st2exporter/worker.py @@ -15,7 +15,6 @@ import eventlet from six.moves import queue -from kombu import Connection from oslo_config import cfg from st2common import log as logging @@ -124,5 +123,5 @@ def _get_all_executions_from_db(self): def get_worker(): - with Connection(transport_utils.get_messaging_urls()) as conn: + with transport_utils.get_connection() as conn: return ExecutionsExporter(conn, [EXPORTER_WORK_QUEUE]) diff --git a/st2reactor/st2reactor/rules/worker.py b/st2reactor/st2reactor/rules/worker.py index 5d8a049e16..32c30cb14f 100644 --- a/st2reactor/st2reactor/rules/worker.py +++ b/st2reactor/st2reactor/rules/worker.py @@ -15,8 +15,6 @@ from __future__ import absolute_import -from kombu import Connection - from st2common import log as logging from st2common.constants.trace import TRACE_CONTEXT, TRACE_ID from st2common.constants import triggers as trigger_constants @@ -119,5 
+117,5 @@ def _decompose_pre_ack_process_response(response): def get_worker(): - with Connection(transport_utils.get_messaging_urls()) as conn: + with transport_utils.get_connection() as conn: return TriggerInstanceDispatcher(conn, [RULESENGINE_WORK_QUEUE]) diff --git a/tools/migrate_messaging_setup.py b/tools/migrate_messaging_setup.py index ff2396f636..6232e70f7f 100755 --- a/tools/migrate_messaging_setup.py +++ b/tools/migrate_messaging_setup.py @@ -19,9 +19,9 @@ """ from __future__ import absolute_import + import traceback -from kombu import Connection from st2common import config from st2common.transport import reactor from st2common.transport import utils as transport_utils @@ -47,7 +47,7 @@ def migrate(self): self._cleanup_old_queues() def _cleanup_old_queues(self): - with Connection(transport_utils.get_messaging_urls()) as connection: + with transport_utils.get_connection() as connection: for q in self.OLD_QS: bound_q = q(connection.default_channel) try: diff --git a/tools/queue_consumer.py b/tools/queue_consumer.py index 5c7ef19989..91b1aad4cf 100755 --- a/tools/queue_consumer.py +++ b/tools/queue_consumer.py @@ -19,12 +19,13 @@ """ from __future__ import absolute_import + import random import argparse from pprint import pprint from kombu.mixins import ConsumerMixin -from kombu import Connection, Exchange, Queue +from kombu import Exchange, Queue from st2common import config from st2common.transport import utils as transport_utils @@ -59,7 +60,8 @@ def main(queue, exchange, routing_key='#'): queue = Queue(name=queue, exchange=exchange, routing_key=routing_key, auto_delete=True) - with Connection(transport_utils.get_messaging_urls()) as connection: + with transport_utils.get_connection() as connection: + connection.connect() watcher = QueueConsumer(connection=connection, queue=queue) watcher.run() diff --git a/tools/queue_producer.py b/tools/queue_producer.py index c9b01a47cd..9d98c9151e 100755 --- a/tools/queue_producer.py +++ b/tools/queue_producer.py @@ 
-19,20 +19,21 @@ """ from __future__ import absolute_import + import argparse +import eventlet from kombu import Exchange from st2common import config - -from st2common.transport import utils as transport_utils from st2common.transport.publishers import PoolPublisher def main(exchange, routing_key, payload): exchange = Exchange(exchange, type='topic') - publisher = PoolPublisher(urls=transport_utils.get_messaging_urls()) + publisher = PoolPublisher() publisher.publish(payload=payload, exchange=exchange, routing_key=routing_key) + eventlet.sleep(0.5) if __name__ == '__main__': From 13491432b4795f65528970eb513244bfff808026 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 11 Feb 2019 11:06:37 +0100 Subject: [PATCH 058/105] Fix typo. --- st2common/st2common/transport/publishers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/st2common/st2common/transport/publishers.py b/st2common/st2common/transport/publishers.py index 2a2c785b51..6ab50d98a1 100644 --- a/st2common/st2common/transport/publishers.py +++ b/st2common/st2common/transport/publishers.py @@ -116,7 +116,7 @@ def __init__(self, exchange, urls=None): def publish_create(self, payload): with Timer(key='amqp.publish.create'): - self._publisher.publgish(payload, self._exchange, CREATE_RK) + self._publisher.publish(payload, self._exchange, CREATE_RK) def publish_update(self, payload): with Timer(key='amqp.publish.update'): From da65b3dfe03bfa43b5f75da7f2680e1a55c9bd7a Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 11 Feb 2019 11:13:46 +0100 Subject: [PATCH 059/105] Add tests for "_get_ssl_kwargs" function which we were missing. 
--- st2common/tests/unit/test_db.py | 53 ++++++++++++++++++++++++++++++++- 1 file changed, 52 insertions(+), 1 deletion(-) diff --git a/st2common/tests/unit/test_db.py b/st2common/tests/unit/test_db.py index 856d1919d4..bd1cc93e5b 100644 --- a/st2common/tests/unit/test_db.py +++ b/st2common/tests/unit/test_db.py @@ -14,8 +14,10 @@ # limitations under the License. from __future__ import absolute_import -import jsonschema +import ssl + +import jsonschema import mock import mongoengine.connection from oslo_config import cfg @@ -79,6 +81,55 @@ def test_get_ssl_kwargs(self): 'authentication_mechanism': 'MONGODB-X509' }) + # 3. ssl_keyfile provided + ssl_kwargs = _get_ssl_kwargs(ssl_keyfile='/tmp/keyfile') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'ssl_keyfile': '/tmp/keyfile', + 'ssl_match_hostname': True + }) + + # 4. ssl_certfile provided + ssl_kwargs = _get_ssl_kwargs(ssl_certfile='/tmp/certfile') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'ssl_certfile': '/tmp/certfile', + 'ssl_match_hostname': True + }) + + # 5. ssl_ca_certs provided + ssl_kwargs = _get_ssl_kwargs(ssl_ca_certs='/tmp/ca_certs') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'ssl_ca_certs': '/tmp/ca_certs', + 'ssl_match_hostname': True + }) + + # 6. 
ssl_ca_certs and ssl_cert_reqs combinations + ssl_kwargs = _get_ssl_kwargs(ssl_ca_certs='/tmp/ca_certs', ssl_cert_reqs='none') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'ssl_ca_certs': '/tmp/ca_certs', + 'ssl_cert_reqs': ssl.CERT_NONE, + 'ssl_match_hostname': True + }) + + ssl_kwargs = _get_ssl_kwargs(ssl_ca_certs='/tmp/ca_certs', ssl_cert_reqs='optional') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'ssl_ca_certs': '/tmp/ca_certs', + 'ssl_cert_reqs': ssl.CERT_OPTIONAL, + 'ssl_match_hostname': True + }) + + ssl_kwargs = _get_ssl_kwargs(ssl_ca_certs='/tmp/ca_certs', ssl_cert_reqs='required') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'ssl_ca_certs': '/tmp/ca_certs', + 'ssl_cert_reqs': ssl.CERT_REQUIRED, + 'ssl_match_hostname': True + }) + @mock.patch('st2common.models.db.mongoengine') def test_db_setup(self, mock_mongoengine): db_setup(db_name='name', db_host='host', db_port=12345, username='username', From 76b63c7319fe4f0710695ba5d64da0757201149d Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 11 Feb 2019 11:23:16 +0100 Subject: [PATCH 060/105] Add tests for st2common.transport.utils._get_ssl_kwargs function. --- st2common/tests/unit/test_transport.py | 80 ++++++++++++++++++++++++++ 1 file changed, 80 insertions(+) create mode 100644 st2common/tests/unit/test_transport.py diff --git a/st2common/tests/unit/test_transport.py b/st2common/tests/unit/test_transport.py new file mode 100644 index 0000000000..6c217c3347 --- /dev/null +++ b/st2common/tests/unit/test_transport.py @@ -0,0 +1,80 @@ +# Licensed to the StackStorm, Inc ('StackStorm') under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import ssl + +import unittest2 + +from st2common.transport.utils import _get_ssl_kwargs + +__all__ = [ + 'TransportUtilsTestCase' +] + + +class TransportUtilsTestCase(unittest2.TestCase): + def test_get_ssl_kwargs(self): + # 1. No SSL kwargs provided + ssl_kwargs = _get_ssl_kwargs() + self.assertEqual(ssl_kwargs, {}) + + # 2. ssl kwarg provided + ssl_kwargs = _get_ssl_kwargs(ssl=True) + self.assertEqual(ssl_kwargs, { + 'ssl': True + }) + + # 3. ssl_keyfile provided + ssl_kwargs = _get_ssl_kwargs(ssl_keyfile='/tmp/keyfile') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'keyfile': '/tmp/keyfile' + }) + + # 4. ssl_certfile provided + ssl_kwargs = _get_ssl_kwargs(ssl_certfile='/tmp/certfile') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'certfile': '/tmp/certfile' + }) + + # 5. ssl_ca_certs provided + ssl_kwargs = _get_ssl_kwargs(ssl_ca_certs='/tmp/ca_certs') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'ca_certs': '/tmp/ca_certs' + }) + + # 6. 
ssl_ca_certs and ssl_cert_reqs combinations + ssl_kwargs = _get_ssl_kwargs(ssl_ca_certs='/tmp/ca_certs', ssl_cert_reqs='none') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'ca_certs': '/tmp/ca_certs', + 'cert_reqs': ssl.CERT_NONE + }) + + ssl_kwargs = _get_ssl_kwargs(ssl_ca_certs='/tmp/ca_certs', ssl_cert_reqs='optional') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'ca_certs': '/tmp/ca_certs', + 'cert_reqs': ssl.CERT_OPTIONAL + }) + + ssl_kwargs = _get_ssl_kwargs(ssl_ca_certs='/tmp/ca_certs', ssl_cert_reqs='required') + self.assertEqual(ssl_kwargs, { + 'ssl': True, + 'ca_certs': '/tmp/ca_certs', + 'cert_reqs': ssl.CERT_REQUIRED + }) From 9b7dba00f8663d3bdf11d69e861f8f33493b3f0d Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 11 Feb 2019 11:38:27 +0100 Subject: [PATCH 061/105] Update help string. --- st2common/st2common/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/st2common/st2common/config.py b/st2common/st2common/config.py index 6f69db6796..58da172a88 100644 --- a/st2common/st2common/config.py +++ b/st2common/st2common/config.py @@ -229,7 +229,7 @@ def register_opts(ignore_errors=False): help='How long should we wait between connection retries.'), cfg.BoolOpt( 'ssl', default=False, - help='Use SSL / TLS to connection to the messaging server. Same as ' + help='Use SSL / TLS to connect to the messaging server. Same as ' 'appending "?ssl=true" at the end of the connection URL string.'), cfg.StrOpt( 'ssl_keyfile', default=None, From 01cb47dc4071553959b6e850596b7f4a6f9fc7fd Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 11 Feb 2019 11:40:35 +0100 Subject: [PATCH 062/105] Re-generate sample config. 
--- conf/st2.conf.sample | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/conf/st2.conf.sample b/conf/st2.conf.sample index c913610639..8a16afad77 100644 --- a/conf/st2.conf.sample +++ b/conf/st2.conf.sample @@ -175,14 +175,26 @@ mask_secrets_blacklist = # comma separated list allowed here. mask_secrets = True [messaging] -# URL of the messaging server. -url = amqp://guest:guest@127.0.0.1:5672// -# How long should we wait between connection retries. -connection_retry_wait = 10000 +# Certificate file used to identify the local connection (client). +ssl_certfile = None # How many times should we retry connection before failing. connection_retries = 10 +# Use SSL / TLS to connect to the messaging server. Same as appending "?ssl=true" at the end of the connection URL string. +ssl = False +# URL of the messaging server. +url = amqp://guest:guest@127.0.0.1:5672// +# Specifies whether a certificate is required from the other side of the connection, and whether it will be validated if provided. +ssl_cert_reqs = None # URL of all the nodes in a messaging service cluster. cluster_urls = # comma separated list allowed here. +# How long should we wait between connection retries. +connection_retry_wait = 10000 +# Private keyfile used to identify the local connection against RabbitMQ. +ssl_keyfile = None +# ca_certs file contains a set of concatenated CA certificates, which are used to validate certificates passed from RabbitMQ. +ssl_ca_certs = None +# Login method to use (AMQPLAIN, PLAIN, EXTERNAL, etc.). +login_method = None [metrics] # Randomly sample and only send metrics for X% of metric operations to the backend. Default value of 1 means no sampling is done and all the metrics are sent to the backend. E.g. 0.1 would mean 10% of operations are sampled. From 8bf195f9f486d60da4455955ac5a4b1badd2e6af Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 11 Feb 2019 11:40:56 +0100 Subject: [PATCH 063/105] Fix typos. 
--- tools/config_gen.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/config_gen.py b/tools/config_gen.py index 4f593b2389..c3e2c17b2f 100755 --- a/tools/config_gen.py +++ b/tools/config_gen.py @@ -41,7 +41,7 @@ SKIP_GROUPS = ['api_pecan', 'rbac', 'results_tracker'] -# We group auth options together to nake it a bit more clear what applies where +# We group auth options together to make it a bit more clear what applies where AUTH_OPTIONS = { 'common': [ 'enable', @@ -63,7 +63,7 @@ ] } -# Some of the config values change depenending on the environment where this script is ran so we +# Some of the config values change depending on the environment where this script is ran so we # set them to static values to ensure consistent and stable output STATIC_OPTION_VALUES = { 'actionrunner': { From b76ae85466db019d0bceab21a7918e054982d582 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 11 Feb 2019 11:49:43 +0100 Subject: [PATCH 064/105] Fix a default value. --- st2common/st2common/transport/reactor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/st2common/st2common/transport/reactor.py b/st2common/st2common/transport/reactor.py index dc3abfcc40..9ddbe4d409 100644 --- a/st2common/st2common/transport/reactor.py +++ b/st2common/st2common/transport/reactor.py @@ -59,7 +59,7 @@ class TriggerCUDPublisher(publishers.CUDPublisher): Publisher responsible for publishing Trigger model CUD events. """ - def __init__(self, urls): + def __init__(self, urls=None): super(TriggerCUDPublisher, self).__init__(exchange=TRIGGER_CUD_XCHG, urls=urls) From b959df1a2bf046ccb3437cde115e744ac03168db Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 11 Feb 2019 12:17:55 +0100 Subject: [PATCH 065/105] Update more affected code. 
--- st2common/st2common/transport/actionexecutionstate.py | 2 +- st2common/st2common/transport/execution.py | 2 +- st2common/st2common/transport/workflow.py | 8 +++++--- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/st2common/st2common/transport/actionexecutionstate.py b/st2common/st2common/transport/actionexecutionstate.py index 39f2e653ae..b42a50b386 100644 --- a/st2common/st2common/transport/actionexecutionstate.py +++ b/st2common/st2common/transport/actionexecutionstate.py @@ -25,7 +25,7 @@ class ActionExecutionStatePublisher(publishers.CUDPublisher): - def __init__(self, urls): + def __init__(self, urls=None): super(ActionExecutionStatePublisher, self).__init__(exchange=ACTIONEXECUTIONSTATE_XCHG, urls=urls) diff --git a/st2common/st2common/transport/execution.py b/st2common/st2common/transport/execution.py index 660f747a8f..b38672f43b 100644 --- a/st2common/st2common/transport/execution.py +++ b/st2common/st2common/transport/execution.py @@ -38,7 +38,7 @@ def __init__(self, urls=None): class ActionExecutionOutputPublisher(publishers.CUDPublisher): - def __init__(self, urls): + def __init__(self, urls=None): super(ActionExecutionOutputPublisher, self).__init__(exchange=EXECUTION_OUTPUT_XCHG, urls=urls) diff --git a/st2common/st2common/transport/workflow.py b/st2common/st2common/transport/workflow.py index c9e3e58713..db949bc593 100644 --- a/st2common/st2common/transport/workflow.py +++ b/st2common/st2common/transport/workflow.py @@ -32,9 +32,11 @@ class WorkflowExecutionPublisher(publishers.CUDPublisher, publishers.StatePublisherMixin): - def __init__(self, urls): - publishers.CUDPublisher.__init__(self, urls, WORKFLOW_EXECUTION_XCHG) - publishers.StatePublisherMixin.__init__(self, urls, WORKFLOW_EXECUTION_STATUS_MGMT_XCHG) + def __init__(self, urls=None): + publishers.CUDPublisher.__init__(self, exchange=WORKFLOW_EXECUTION_XCHG, + urls=urls) + publishers.StatePublisherMixin.__init__(self, exchange=WORKFLOW_EXECUTION_STATUS_MGMT_XCHG, + 
urls=urls) def get_queue(name, routing_key): From 65945253cec7b304c45f30d7f7da0f1a2cea7dc2 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 11 Feb 2019 12:46:48 +0100 Subject: [PATCH 066/105] Simplify the code - URL and connection management is now centralized so there is no need for this argument anymore. --- .../st2common/transport/actionexecutionstate.py | 11 ++++++++--- st2common/st2common/transport/announcement.py | 17 ++++++++++------- st2common/st2common/transport/execution.py | 10 ++++------ st2common/st2common/transport/liveaction.py | 16 ++++++++++++---- st2common/st2common/transport/publishers.py | 10 +++++----- st2common/st2common/transport/reactor.py | 14 ++++++-------- st2common/st2common/transport/workflow.py | 12 ++++++------ st2common/tests/unit/test_state_publisher.py | 5 ++--- 8 files changed, 53 insertions(+), 42 deletions(-) diff --git a/st2common/st2common/transport/actionexecutionstate.py b/st2common/st2common/transport/actionexecutionstate.py index b42a50b386..87523930f0 100644 --- a/st2common/st2common/transport/actionexecutionstate.py +++ b/st2common/st2common/transport/actionexecutionstate.py @@ -16,18 +16,23 @@ # All Exchanges and Queues related to liveaction. 
from __future__ import absolute_import + from kombu import Exchange, Queue + from st2common.transport import publishers +__all__ = [ + 'ActionExecutionStatePublisher' +] + ACTIONEXECUTIONSTATE_XCHG = Exchange('st2.actionexecutionstate', type='topic') class ActionExecutionStatePublisher(publishers.CUDPublisher): - def __init__(self, urls=None): - super(ActionExecutionStatePublisher, self).__init__(exchange=ACTIONEXECUTIONSTATE_XCHG, - urls=urls) + def __init__(self): + super(ActionExecutionStatePublisher, self).__init__(exchange=ACTIONEXECUTIONSTATE_XCHG) def get_queue(name, routing_key): diff --git a/st2common/st2common/transport/announcement.py b/st2common/st2common/transport/announcement.py index 4559afdab6..4f9d69390a 100644 --- a/st2common/st2common/transport/announcement.py +++ b/st2common/st2common/transport/announcement.py @@ -14,6 +14,7 @@ # limitations under the License. from __future__ import absolute_import + from kombu import Exchange, Queue from st2common import log as logging @@ -21,20 +22,22 @@ from st2common.models.api.trace import TraceContext from st2common.transport import publishers +__all__ = [ + 'AnnouncementPublisher', + 'AnnouncementDispatcher', + + 'get_queue' +] + LOG = logging.getLogger(__name__) # Exchange for Announcements ANNOUNCEMENT_XCHG = Exchange('st2.announcement', type='topic') -__all__ = [ - 'AnnouncementPublisher', - 'AnnouncementDispatcher' -] - class AnnouncementPublisher(object): - def __init__(self, urls=None): - self._publisher = publishers.PoolPublisher(urls=urls) + def __init__(self): + self._publisher = publishers.PoolPublisher() def publish(self, payload, routing_key): self._publisher.publish(payload, ANNOUNCEMENT_XCHG, routing_key) diff --git a/st2common/st2common/transport/execution.py b/st2common/st2common/transport/execution.py index b38672f43b..1885a72368 100644 --- a/st2common/st2common/transport/execution.py +++ b/st2common/st2common/transport/execution.py @@ -32,15 +32,13 @@ class 
ActionExecutionPublisher(publishers.CUDPublisher): - def __init__(self, urls=None): - super(ActionExecutionPublisher, self).__init__(exchange=EXECUTION_XCHG, - urls=urls) + def __init__(self): + super(ActionExecutionPublisher, self).__init__(exchange=EXECUTION_XCHG) class ActionExecutionOutputPublisher(publishers.CUDPublisher): - def __init__(self, urls=None): - super(ActionExecutionOutputPublisher, self).__init__(exchange=EXECUTION_OUTPUT_XCHG, - urls=urls) + def __init__(self): + super(ActionExecutionOutputPublisher, self).__init__(exchange=EXECUTION_OUTPUT_XCHG) def get_queue(name=None, routing_key=None, exclusive=False, auto_delete=False): diff --git a/st2common/st2common/transport/liveaction.py b/st2common/st2common/transport/liveaction.py index 7775034dad..b2b7efe238 100644 --- a/st2common/st2common/transport/liveaction.py +++ b/st2common/st2common/transport/liveaction.py @@ -16,9 +16,18 @@ # All Exchanges and Queues related to liveaction. from __future__ import absolute_import + from kombu import Exchange, Queue + from st2common.transport import publishers +__all__ = [ + 'LiveActionPublisher', + + 'get_queue', + 'get_status_management_queue' +] + LIVEACTION_XCHG = Exchange('st2.liveaction', type='topic') LIVEACTION_STATUS_MGMT_XCHG = Exchange('st2.liveaction.status', type='topic') @@ -26,10 +35,9 @@ class LiveActionPublisher(publishers.CUDPublisher, publishers.StatePublisherMixin): - def __init__(self, urls=None): - publishers.CUDPublisher.__init__(self, exchange=LIVEACTION_XCHG, urls=urls) - publishers.StatePublisherMixin.__init__(self, exchange=LIVEACTION_STATUS_MGMT_XCHG, - urls=urls) + def __init__(self): + publishers.CUDPublisher.__init__(self, exchange=LIVEACTION_XCHG) + publishers.StatePublisherMixin.__init__(self, exchange=LIVEACTION_STATUS_MGMT_XCHG) def get_queue(name, routing_key): diff --git a/st2common/st2common/transport/publishers.py b/st2common/st2common/transport/publishers.py index 6ab50d98a1..010c24ed39 100644 --- 
a/st2common/st2common/transport/publishers.py +++ b/st2common/st2common/transport/publishers.py @@ -42,7 +42,7 @@ class PoolPublisher(object): def __init__(self, urls=None): """ - :param urls: Connection URLs to use. If not provided it uses a default value from the + :param urls: Connection URLs to use. If not provided it uses a default value from th config. :type urls: ``list`` """ @@ -109,8 +109,8 @@ def get_publisher(self, urls): class CUDPublisher(object): - def __init__(self, exchange, urls=None): - urls = urls or transport_utils.get_messaging_urls() + def __init__(self, exchange): + urls = transport_utils.get_messaging_urls() self._publisher = SharedPoolPublishers().get_publisher(urls=urls) self._exchange = exchange @@ -128,8 +128,8 @@ def publish_delete(self, payload): class StatePublisherMixin(object): - def __init__(self, exchange, urls=None): - urls = urls or transport_utils.get_messaging_urls() + def __init__(self, exchange): + urls = transport_utils.get_messaging_urls() self._state_publisher = SharedPoolPublishers().get_publisher(urls=urls) self._state_exchange = exchange diff --git a/st2common/st2common/transport/reactor.py b/st2common/st2common/transport/reactor.py index 9ddbe4d409..944407b413 100644 --- a/st2common/st2common/transport/reactor.py +++ b/st2common/st2common/transport/reactor.py @@ -49,9 +49,8 @@ class SensorCUDPublisher(publishers.CUDPublisher): Publisher responsible for publishing Trigger model CUD events. """ - def __init__(self, urls=None): - super(SensorCUDPublisher, self).__init__(exchange=SENSOR_CUD_XCHG, - urls=urls) + def __init__(self): + super(SensorCUDPublisher, self).__init__(exchange=SENSOR_CUD_XCHG) class TriggerCUDPublisher(publishers.CUDPublisher): @@ -59,14 +58,13 @@ class TriggerCUDPublisher(publishers.CUDPublisher): Publisher responsible for publishing Trigger model CUD events. 
""" - def __init__(self, urls=None): - super(TriggerCUDPublisher, self).__init__(exchange=TRIGGER_CUD_XCHG, - urls=urls) + def __init__(self): + super(TriggerCUDPublisher, self).__init__(exchange=TRIGGER_CUD_XCHG) class TriggerInstancePublisher(object): - def __init__(self, urls=None): - self._publisher = publishers.PoolPublisher(urls=urls) + def __init__(self): + self._publisher = publishers.PoolPublisher() def publish_trigger(self, payload=None, routing_key=None): # TODO: We should use trigger reference as a routing key diff --git a/st2common/st2common/transport/workflow.py b/st2common/st2common/transport/workflow.py index db949bc593..a199f1cc01 100644 --- a/st2common/st2common/transport/workflow.py +++ b/st2common/st2common/transport/workflow.py @@ -23,7 +23,9 @@ __all__ = [ 'WorkflowExecutionPublisher', - 'get_queue' + + 'get_queue', + 'get_status_management_queue' ] WORKFLOW_EXECUTION_XCHG = kombu.Exchange('st2.workflow', type='topic') @@ -32,11 +34,9 @@ class WorkflowExecutionPublisher(publishers.CUDPublisher, publishers.StatePublisherMixin): - def __init__(self, urls=None): - publishers.CUDPublisher.__init__(self, exchange=WORKFLOW_EXECUTION_XCHG, - urls=urls) - publishers.StatePublisherMixin.__init__(self, exchange=WORKFLOW_EXECUTION_STATUS_MGMT_XCHG, - urls=urls) + def __init__(self): + publishers.CUDPublisher.__init__(self, exchange=WORKFLOW_EXECUTION_XCHG) + publishers.StatePublisherMixin.__init__(self, exchange=WORKFLOW_EXECUTION_STATUS_MGMT_XCHG) def get_queue(name, routing_key): diff --git a/st2common/tests/unit/test_state_publisher.py b/st2common/tests/unit/test_state_publisher.py index f1eef1d05d..bc66df337f 100644 --- a/st2common/tests/unit/test_state_publisher.py +++ b/st2common/tests/unit/test_state_publisher.py @@ -31,9 +31,8 @@ class FakeModelPublisher(publishers.StatePublisherMixin): - def __init__(self, urls=None): - super(FakeModelPublisher, self).__init__(exchange=FAKE_STATE_MGMT_XCHG, - urls=urls) + def __init__(self): + 
super(FakeModelPublisher, self).__init__(exchange=FAKE_STATE_MGMT_XCHG) class FakeModelDB(stormbase.StormBaseDB): From 5b5bd6d407180085f941958c4ec55c08d3f98352 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 17:04:27 +0100 Subject: [PATCH 067/105] Add CA, server cert and client cert which can be used for testing. --- .../st2tests/fixtures/ssl_certs/README.md | 10 ++++ .../ssl_certs/ca/ca_certificate_bundle.cer | Bin 0 -> 714 bytes .../ssl_certs/ca/ca_certificate_bundle.pem | 17 ++++++ .../fixtures/ssl_certs/ca/certs/01.pem | 18 ++++++ .../fixtures/ssl_certs/ca/certs/02.pem | 18 ++++++ .../st2tests/fixtures/ssl_certs/ca/index.txt | 2 + .../fixtures/ssl_certs/ca/index.txt.attr | 1 + .../fixtures/ssl_certs/ca/index.txt.attr.old | 1 + .../fixtures/ssl_certs/ca/index.txt.old | 1 + .../fixtures/ssl_certs/ca/openssl.cnf | 54 ++++++++++++++++++ .../ssl_certs/ca/private/ca_private_key.pem | 28 +++++++++ .../st2tests/fixtures/ssl_certs/ca/serial | 1 + .../st2tests/fixtures/ssl_certs/ca/serial.old | 1 + .../ssl_certs/client/client_certificate.p12 | Bin 0 -> 2341 bytes .../ssl_certs/client/client_certificate.pem | 18 ++++++ .../fixtures/ssl_certs/client/private_key.pem | 27 +++++++++ .../fixtures/ssl_certs/client/req.pem | 15 +++++ .../fixtures/ssl_certs/server/private_key.pem | 27 +++++++++ .../fixtures/ssl_certs/server/req.pem | 15 +++++ .../ssl_certs/server/server_certificate.p12 | Bin 0 -> 2341 bytes .../ssl_certs/server/server_certificate.pem | 18 ++++++ 21 files changed, 272 insertions(+) create mode 100644 st2tests/st2tests/fixtures/ssl_certs/README.md create mode 100644 st2tests/st2tests/fixtures/ssl_certs/ca/ca_certificate_bundle.cer create mode 100644 st2tests/st2tests/fixtures/ssl_certs/ca/ca_certificate_bundle.pem create mode 100644 st2tests/st2tests/fixtures/ssl_certs/ca/certs/01.pem create mode 100644 st2tests/st2tests/fixtures/ssl_certs/ca/certs/02.pem create mode 100644 st2tests/st2tests/fixtures/ssl_certs/ca/index.txt create mode 100644 
st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.attr create mode 100644 st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.attr.old create mode 100644 st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.old create mode 100644 st2tests/st2tests/fixtures/ssl_certs/ca/openssl.cnf create mode 100644 st2tests/st2tests/fixtures/ssl_certs/ca/private/ca_private_key.pem create mode 100644 st2tests/st2tests/fixtures/ssl_certs/ca/serial create mode 100644 st2tests/st2tests/fixtures/ssl_certs/ca/serial.old create mode 100644 st2tests/st2tests/fixtures/ssl_certs/client/client_certificate.p12 create mode 100644 st2tests/st2tests/fixtures/ssl_certs/client/client_certificate.pem create mode 100644 st2tests/st2tests/fixtures/ssl_certs/client/private_key.pem create mode 100644 st2tests/st2tests/fixtures/ssl_certs/client/req.pem create mode 100644 st2tests/st2tests/fixtures/ssl_certs/server/private_key.pem create mode 100644 st2tests/st2tests/fixtures/ssl_certs/server/req.pem create mode 100644 st2tests/st2tests/fixtures/ssl_certs/server/server_certificate.p12 create mode 100644 st2tests/st2tests/fixtures/ssl_certs/server/server_certificate.pem diff --git a/st2tests/st2tests/fixtures/ssl_certs/README.md b/st2tests/st2tests/fixtures/ssl_certs/README.md new file mode 100644 index 0000000000..d54f4f1e6b --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/README.md @@ -0,0 +1,10 @@ +# SSL certificates Used for Testing + +This directory contains self signed server and client certificates which are +used by the tests. + +Those certificates are issued and signed by a custom CA which is contained in the ca/ directory. + +Certificate passphrase is ``MySecretPassword``. + +NOTE: Those certificates will expire on ``notAfter=Feb 11 15:58:38 2024 GMT``. 
diff --git a/st2tests/st2tests/fixtures/ssl_certs/ca/ca_certificate_bundle.cer b/st2tests/st2tests/fixtures/ssl_certs/ca/ca_certificate_bundle.cer new file mode 100644 index 0000000000000000000000000000000000000000..94557aa645bd2f06863ac2ebf1bfa1e54bd3c856 GIT binary patch literal 714 zcmXqLVmfBf#JFw&GZP~d6DPxtD@;?xbYFHE@Un4gwRyCC=VfH%W@RuCHWW1AXJZa! zVdmlRtqe&mE^&4=kQ3)Mv@|d>G%_?bH8(Jh66ZBC0dftYTXJlnyZerwT z06Kz;sfm%1;bW-WbC00u0SARIro0Wg@utDno%i;M)B~Sv5?u^#zr5S;dAKA?PTLvb5lBJUZ2OC7ps3il0KtL*W~-8^O05j$>! zm4-VEixq8FGuIpLXZ!mqmlITfEZ2N>j{EM91o&h6T%-lmwZS+Ujs#@!iHKCl&T zo)Ri_g>C&L`*Y3PmwuV?LVwAgd+Dk?-|q0Qx1X4*+EMuWQ*1#E^Zy0U&duJV85=R> zb?4DJ#b5qr2OH1TTUWYRwBsrhGb01zVp#)e10G;_%L=ow8Za|5{x{$T@%UL-fDy%p z9Du;s0R|u=!-th8{C2#zZ2b~_d(xQ-`QS^6ng`h<3Kle9VOj7vW#zw|ryCDHv#{~d ztA2JzNLOIlt5;daZkdfyA%#@Rbfdn_M{=08-P)b`ZrLgY`@Rjdrr>XS4L zs~4|8;MtKkvrjz~YR;@#_2as-!~W>UMmownHq<7z z@$Xz_XwX^sJw0T}^PANxSD0P%s=P0e-?Q)9Z;Lf?zh`p$8`LP@xhquVCldYPSfu8| YOv~?ALke=6{yS8iJ7KNrc75sr00s3b{Qv*} literal 0 HcmV?d00001 diff --git a/st2tests/st2tests/fixtures/ssl_certs/ca/ca_certificate_bundle.pem b/st2tests/st2tests/fixtures/ssl_certs/ca/ca_certificate_bundle.pem new file mode 100644 index 0000000000..a194ec97df --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/ca/ca_certificate_bundle.pem @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICxjCCAa6gAwIBAgIJALjUApUWLemKMA0GCSqGSIb3DQEBCwUAMBMxETAPBgNV +BAMMCE15VGVzdENBMB4XDTE5MDIxMjE1NTcwM1oXDTI0MDIxMTE1NTcwM1owEzER +MA8GA1UEAwwITXlUZXN0Q0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQDxVR7nSFKXUMET0WTtVNjsgD1HDdvIZcDyPGFEMNhtftPv4RmkxeFnKNumHbIu +s2eox6MCT7wK9CKG+38szyMMDkCObYkGCKzZG2yejkjs6Kv74hvML8p+NIz3Cxch +WEuD6ubnSoKl35cVt4/LUTM/IFG36H6f7Q47NYYsWIBMaXUvY5Wbg5SqxD4LMKkx +uDFzITyrA38xvwb96mTkXT/OJEyswAAeWjjoKHWdirknhiFvKXi1T9jdmJTwBnGz +lFUS1Aavkj/Og7el9JjoL6S83mclDPbcD68/kWUliHHr8l1wfAP/oObOm7wpXViU +64nFnHP0/WtTM50urnWjFYjVAgMBAAGjHTAbMAwGA1UdEwQFMAMBAf8wCwYDVR0P +BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4IBAQDwqchOuO85hfRb25LMeB9T0iEpwQdY 
+cKCD1ASg42Sp/mzlscPmODxILnvm3BItEKbq6mrG2s9i42FRmXu+6D2Bm7k1jDnh +FW/hI5KG5ULQWfkFqgUAWyeSKTF7oK9FRAfROY3K9E/MXxsO10e+ibgZPZjY8RTC +eUihRw3LvIFj3mY3OQ+sBQ4OTh/nPd66trzAJee15ATC0nK0YJTVhLv576DmxOyb +yuESg2l8qvjXI0C/W+MyLCO4sH1hhg+5pjEwiXH3Z1Sk59l7qag21kp53xhvjL7W ++zisXvuZC08wfCPc3RJ6ThRb8MZZKeFpOffVVHBtgv9Aes7IOyVG15XA +-----END CERTIFICATE----- diff --git a/st2tests/st2tests/fixtures/ssl_certs/ca/certs/01.pem b/st2tests/st2tests/fixtures/ssl_certs/ca/certs/01.pem new file mode 100644 index 0000000000..17c4490f8b --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/ca/certs/01.pem @@ -0,0 +1,18 @@ +-----BEGIN CERTIFICATE----- +MIIC4jCCAcqgAwIBAgIBATANBgkqhkiG9w0BAQsFADATMREwDwYDVQQDDAhNeVRl +c3RDQTAeFw0xOTAyMTIxNTU4MDdaFw0yNDAyMTExNTU4MDdaMCUxEjAQBgNVBAMM +CWxvY2FsaG9zdDEPMA0GA1UECgwGc2VydmVyMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEAuLLUdbHqOsUiRnkv2S0fiadrqwfdgaZgVImvMyorVYzoJ5W7 +anJSyWPnV/ly/rjL7toiPhBcVgDuCGkf7CjPN4E5tdxI9ylYk/UHEtMG1ll6kDiF +8hWfHDdktdqnQvuLkUMAA5xgIFfX+UMBuTZk7VowrjnOuljN5eVN89y2fYXXtqC1 +91HilG9VwLewYKQd/Ishb4p2WfxiBIVO+cQpnYB6quvrEYC1XPcRbJuXdrc7KcYn +dWdoj6M7aT1zOnHJrdLtv7F7dkYgV9vqwN7w3ud7uNaEbsHvWz0i+6qjX/uE755N +ZoJ8O8Dx5ug/1lxplnXlfmadIibYPBJatRsSiwIDAQABoy8wLTAJBgNVHRMEAjAA +MAsGA1UdDwQEAwIFoDATBgNVHSUEDDAKBggrBgEFBQcDATANBgkqhkiG9w0BAQsF +AAOCAQEAnhmIUhZwweCqdzGNeoNXXkuXyBf2fFvajHlG2a2pZ8r6/fyQbbJgzo04 +ajjWoUoSW+XB+AfJvT6CTZuMWsGkxYvFAxOoXtLpW0OKqEh55q8diMSb/gOxxwND +vHVb1+VjZBhzxxt0TbXeFngMnBSgVhipKQe49pe0H+rDDYptultl81n2zFLzBKUe +h927CnTJ7cpZe4Di2tMJfVsDJB6piuwPu6GnWhT38Q12I+ryL2xbihIw1B4qDtq6 +nq4lYGnpJCNNXg5JR5S1HeYiQtP0sHgU6SvpgMtzDdbCJ0Nu7EpR5J3ChdQWooGf +uTOThX41qx1p47ho4TA9Ac4K/GRcLg== +-----END CERTIFICATE----- diff --git a/st2tests/st2tests/fixtures/ssl_certs/ca/certs/02.pem b/st2tests/st2tests/fixtures/ssl_certs/ca/certs/02.pem new file mode 100644 index 0000000000..a10ae91143 --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/ca/certs/02.pem @@ -0,0 +1,18 @@ +-----BEGIN CERTIFICATE----- 
+MIIC4jCCAcqgAwIBAgIBAjANBgkqhkiG9w0BAQsFADATMREwDwYDVQQDDAhNeVRl +c3RDQTAeFw0xOTAyMTIxNTU4MzhaFw0yNDAyMTExNTU4MzhaMCUxEjAQBgNVBAMM +CWxvY2FsaG9zdDEPMA0GA1UECgwGY2xpZW50MIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA4HxkZw50MGiWYmlrwJBHAjwsD7lfft9gHrRAeP8iEI0oLIJm +/MmUUIyA2DSDGJCIsP+grkmZawLmu7D0vJIVIUo+OBNUQ/3mACWH9z15AW5s/Ig/ +FZErhBg3RFZS+hXVT639U94uKne+mjh/G4Ej7OYHhBywn+EKakIJuUTs10sF0kW/ +4h1Gx9+Ph3tfYSagNdMDXXft0Knn/X8vMwLF5Eg8ZHKnty30wJRr4r2bqTeSCPS5 +k3bfpcxOAnaSpTDuIoxIp7w9pjwLVAVWvbjqDlU5DrPxpsn29i8STNpJ7My7+12/ +C/QJDrlCJCav1ma04G2QZbyAri3ax/MCeonFsQIDAQABoy8wLTAJBgNVHRMEAjAA +MAsGA1UdDwQEAwIFoDATBgNVHSUEDDAKBggrBgEFBQcDAjANBgkqhkiG9w0BAQsF +AAOCAQEAI+PgF1gsQckqTh71CxqKimM0h5pIGh6H09bSa+9LFLFa60E1zR8rmygw +AD+u6sI5foFbSdUiIDJBmHizvwMmIptGSRw0Znzi/jjbjBmZSNLnk+Vird5grjF4 +Pf7Vkgi/NKzXTS3Y2TUUhk5OZZ6OmszHZ0eGJlUcz6Qa13hcalVHc3FmikeAu5/h +XQuthOQDXJBabgexQ+1K6ft6DDImdQCFcZhYXSb30cRHS9lqIVZbI7Rtk6UqwkvE +hYU0g8BVeVBpL7xYBqfrpdy+vBb28rrLT6Dvgf0giQ3F07S+RAivDWjM53Wyhb7T +6o3h8l49IkcEW1mns9Mj2bPNFSOhSA== +-----END CERTIFICATE----- diff --git a/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt b/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt new file mode 100644 index 0000000000..ad058db53d --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt @@ -0,0 +1,2 @@ +V 240211155807Z 01 unknown /CN=localhost/O=server +V 240211155838Z 02 unknown /CN=localhost/O=client diff --git a/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.attr b/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.attr new file mode 100644 index 0000000000..8f7e63a347 --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.attr @@ -0,0 +1 @@ +unique_subject = yes diff --git a/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.attr.old b/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.attr.old new file mode 100644 index 0000000000..8f7e63a347 --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.attr.old @@ -0,0 +1 @@ +unique_subject = yes diff --git 
a/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.old b/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.old new file mode 100644 index 0000000000..970c83b368 --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/ca/index.txt.old @@ -0,0 +1 @@ +V 240211155807Z 01 unknown /CN=localhost/O=server diff --git a/st2tests/st2tests/fixtures/ssl_certs/ca/openssl.cnf b/st2tests/st2tests/fixtures/ssl_certs/ca/openssl.cnf new file mode 100644 index 0000000000..a8348fbf15 --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/ca/openssl.cnf @@ -0,0 +1,54 @@ +[ ca ] +default_ca = testca + +[ testca ] +dir = . +certificate = $dir/ca_certificate_bundle.pem +database = $dir/index.txt +new_certs_dir = $dir/certs +private_key = $dir/private/ca_private_key.pem +serial = $dir/serial + +default_crl_days = 7 +default_days = 1825 +default_md = sha256 + +policy = testca_policy +x509_extensions = certificate_extensions + +[ testca_policy ] +commonName = supplied +stateOrProvinceName = optional +countryName = optional +emailAddress = optional +organizationName = optional +organizationalUnitName = optional +domainComponent = optional + +[ certificate_extensions ] +basicConstraints = CA:false + +[ req ] +default_bits = 2048 +default_keyfile = ./private/ca_private_key.pem +default_md = sha256 +prompt = yes +distinguished_name = root_ca_distinguished_name +x509_extensions = root_ca_extensions + +[ root_ca_distinguished_name ] +commonName = hostname + +[ root_ca_extensions ] +basicConstraints = CA:true +keyUsage = keyCertSign, cRLSign + +[ client_ca_extensions ] +basicConstraints = CA:false +keyUsage = digitalSignature,keyEncipherment +extendedKeyUsage = 1.3.6.1.5.5.7.3.2 + +[ server_ca_extensions ] +basicConstraints = CA:false +keyUsage = digitalSignature,keyEncipherment +extendedKeyUsage = 1.3.6.1.5.5.7.3.1 diff --git a/st2tests/st2tests/fixtures/ssl_certs/ca/private/ca_private_key.pem b/st2tests/st2tests/fixtures/ssl_certs/ca/private/ca_private_key.pem new file mode 100644 index 
0000000000..e54d4958cd --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/ca/private/ca_private_key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDxVR7nSFKXUMET +0WTtVNjsgD1HDdvIZcDyPGFEMNhtftPv4RmkxeFnKNumHbIus2eox6MCT7wK9CKG ++38szyMMDkCObYkGCKzZG2yejkjs6Kv74hvML8p+NIz3CxchWEuD6ubnSoKl35cV +t4/LUTM/IFG36H6f7Q47NYYsWIBMaXUvY5Wbg5SqxD4LMKkxuDFzITyrA38xvwb9 +6mTkXT/OJEyswAAeWjjoKHWdirknhiFvKXi1T9jdmJTwBnGzlFUS1Aavkj/Og7el +9JjoL6S83mclDPbcD68/kWUliHHr8l1wfAP/oObOm7wpXViU64nFnHP0/WtTM50u +rnWjFYjVAgMBAAECggEBAN14Pz8CyQCiFD5KqHOArP4FBbciSbMTZkknDiAVL1j0 +zixSiEUFb8BK55//mphu/c8PPlINuETZHKKBRIlrof8bSTUr4laOOYmYOEsdymDX +eZVTQC1XIl5FfaPtIpHwRITQWoyhfVoZ4b4FUcnFP+FLmJLMov/C/Y9qpDIoGb2E +NbcMEnIz0i573+Ci1k+OLAdthbCigUvwvJ1iLv5m3s1XrRvIu6TDsERXdB/02pFu +XXNgyidR6XVr/MVov898PB5B0eJbX6Iir7avzpS1V/q5kq2pgFFZk8Vfhvw2k07C +l89peWIo+1h8djem/1n1FLD7aRKzFTb6HULS4uoxCDUCgYEA/o23BbC1/LRTq0IW +7I8BqTCe70cnuvWCUtWiTNWzX3INK4Hsxdeztfiu/W5dTiDndZ4CzMAoXt/rxXkw +Dc449FB1wVKCKShZRyeyyboOCpfzW1040JhjmGU4ZBn6T4U2cpaJyLGtcfkFZSeq +2nOiUntVJcPq6vWF2sdJysGSWucCgYEA8rQsf5RaHM6yRFpKrbb+SC8PAEqAZF9N +XZvl64GLHi9PSK/n68iZw1c7j4VjnCC89PH0flpQfkngrffLiy2pi+JdYo7qBKeT +3IFOiQAvylpxCiQMvFqsxz9mhoj3jJdyNGvKXJeQ5PuxRatZOHwpMP+tpQ7uF2zm +DzReoxqZ4uMCgYB1XNFthjPh9yI8a5Q2LRkO8KPWnm/q+xbDKkxSMJUrBGKeFKEd +9n2dALNtlVzfkLwmtluEG3SBiawit+U3+ES6H/6qy2fHohrHe74q0+V1bOl+zlRL +mHcS5FhDjtaho0GfQ1jzdzgIvE+Ie+mCHp5QeRyg9NtyyRCV9hxHp0fbMQKBgQDr +Cqn9c8JBG7twjrC7wvhHF6vDcGMe0VyvRwdHJ9F+jfqOPiywHzkqABTiTR/GV74m +yRsqMnS5mPpKACvSwYnsunANvrHLiC6d4WwZKWEe6q+GTps23eltnGzB5Ws3cINd +WPZE7VOZLlbjTam+FiAeH74el3LkpMW3+9OayWw2WQKBgQD0S0L5OoRjVY6SRPe1 +oKqTwSlay2uzqoAhGQqGeb4SaBaImEfLMQzYQpJ5JWAnAzwHhA7x7iDm3QzB93Fg +id1rdsbfzdlZC40T0IslTYLT/mawiOcAHupDuszgnn1ycFV35915zP9Ijzqaojsn +DRI3H6XpQSJyHUNZo1pCZBXyhg== +-----END PRIVATE KEY----- diff --git a/st2tests/st2tests/fixtures/ssl_certs/ca/serial b/st2tests/st2tests/fixtures/ssl_certs/ca/serial new file mode 100644 index 
0000000000..75016ea362 --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/ca/serial @@ -0,0 +1 @@ +03 diff --git a/st2tests/st2tests/fixtures/ssl_certs/ca/serial.old b/st2tests/st2tests/fixtures/ssl_certs/ca/serial.old new file mode 100644 index 0000000000..9e22bcb8e3 --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/ca/serial.old @@ -0,0 +1 @@ +02 diff --git a/st2tests/st2tests/fixtures/ssl_certs/client/client_certificate.p12 b/st2tests/st2tests/fixtures/ssl_certs/client/client_certificate.p12 new file mode 100644 index 0000000000000000000000000000000000000000..7feead70f4a5842bd8a04621da3add569e35feb3 GIT binary patch literal 2341 zcmY+EdpHw}9>?3oY~(T#uRA@I^M(JN0Fa6hk$*QF1VE7DV5pQ(PpyT? z9W9RJ=ga^Y|hF|=I_YHq^L;KKlnr4_l{+{?WsCds&Jd_aQwEw+a!MPg2KjXAibAf1W;b$V2 zsdxspjO>R6#bbI|pldU}2 zYMN}KqnnWuD96KC_2u*3iyrbar%sRH@|)k{#^#rZdXA@k{el0~yE63NcubgmHO~Iy z6=QvEhu6B76ffRAgx3?u9{=ve@CiR)6=p-@eiy#sU?FDors!=4oiD-d{DEHOuOq9g z+pCOzgb}qXOF#8Js%Uj?yPgr>N40nxk@}96`k}nH9(O&KaAy+S5w-boh*wntJ?o|C z`%F2dRfei4T-lcW^yclY$|(P?ccn%V`U1BP!NGxawUp1+U?smDF4v)$nt8YevpQBo zYo&-glM?~a=kHPoIj1Sx*Z9xsC+%$xbZM-H7?_HVuWj%UpUlK9_U~IK$Zuqd&qlyC zT4+_w8>yyr72*=7!<}iW)PUH~oy@J+K2l@VW8YBxdyq?$HfrspQi~C{f~Yk03eM7<{(Mnwg0mc^vg;Yg*GE9 zQMd4ym0%KhV50)Mg&mCMjIo10O4xySq#@}ZSJ&6_)}fSSj=jpItB=}Jv*s!;_=oH5 zV$YWy)6}HN3cJ2eOM8oqiW0;Yoez*8;yKo8@e2D>wp8i%TkNT-pzgjP7q2Hw+g(h6 zn_Qv%^J&_BofA#senu0W884UPHsgrqFBo*Cl=T%I<)SP}mBlCz@o3R+=-eurSW~DW z5Hl((YuoUJ@Q6_NLeSb0I?H-JJ%y5d?rH_eg3S+W{XX4XJI(kDC+N8l{}3Dagv-o6 z>*LPPNGqc=Ia}oLkIXyt#)^<&u|QWjnE%)I&+5B{5!=O2o7VI!!v?N*mhC-SveHVr z5^LM!)7KRJAV>+5aFZ2QyS#>^^4NjCHMgc6y?0FLcf= z_gL)o={F5!N2L&wx_MC}g3Dxz&c_j!-Ne#>H)N8rQ5m-bO`kT1>K-W_Bue#&rSh8} zC$EicAUpiJ@@*SUj#N!P*SXc86&1NDtTvnW0clT2_mvbiv(MOT_u~0Arz+JI%!d}^ zR7ABM8m0gupH97;m*`G*9)Ng$k?v1#M;mxySNhK_SnK7wNr~4{R$_Qmf^G2jV%*<7*m`C0@kte>AKcs0Nffucoe%4OdtRtVnN+YQ1;Vu+ zz_YZbcA#x{ybtksC#a?m6az;I){+culG)DqTt9QwMoyKd?$t6{8?-wicn{gkf!>O# zBc8|I6j!maOfec9uSoQ^m 
z=`ZEm$-g2z=9FI>-I6a*I*PZf3$0~Cc_O!Yd2LSL(7s>;5fpch+ux5A^Wd;B>82xj zQohCBeo0O6F3E-Vjm56CFxbgT)l##GgkvC{iT zLP3#sJS>S#H}6Dd9zGBZXfS`jff@!P)~y3tp647zh}NIdxnrX2?$0-sFP?vl@)#>W zqo3<1VKyO6vtJJ+XhQ3;4csR9m(^j^s{+lc7(-P>RXGx|LMz3SHOXtZm-uEqXVDz8 zU))7}XnAF$S9C`x!F%l-&X}Grqr0Hr+plL~v}c-TmNH`tK3ZE)9{&ypb(3U2OwV>( z2aVa>aECj$I(MWMwE34`MZ4esaJH|^fi!2{=1Ga8U4dxX>Vi9r#XKGPWR&d zgq|oFuLyU;UZg%!4GDt?9Tx`y<%JG_rAL^07;=~tXAVjhTDD4v-J3?2fuSk3bQa0x J&cd&i{1;wSVF>^L literal 0 HcmV?d00001 diff --git a/st2tests/st2tests/fixtures/ssl_certs/client/client_certificate.pem b/st2tests/st2tests/fixtures/ssl_certs/client/client_certificate.pem new file mode 100644 index 0000000000..a10ae91143 --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/client/client_certificate.pem @@ -0,0 +1,18 @@ +-----BEGIN CERTIFICATE----- +MIIC4jCCAcqgAwIBAgIBAjANBgkqhkiG9w0BAQsFADATMREwDwYDVQQDDAhNeVRl +c3RDQTAeFw0xOTAyMTIxNTU4MzhaFw0yNDAyMTExNTU4MzhaMCUxEjAQBgNVBAMM +CWxvY2FsaG9zdDEPMA0GA1UECgwGY2xpZW50MIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA4HxkZw50MGiWYmlrwJBHAjwsD7lfft9gHrRAeP8iEI0oLIJm +/MmUUIyA2DSDGJCIsP+grkmZawLmu7D0vJIVIUo+OBNUQ/3mACWH9z15AW5s/Ig/ +FZErhBg3RFZS+hXVT639U94uKne+mjh/G4Ej7OYHhBywn+EKakIJuUTs10sF0kW/ +4h1Gx9+Ph3tfYSagNdMDXXft0Knn/X8vMwLF5Eg8ZHKnty30wJRr4r2bqTeSCPS5 +k3bfpcxOAnaSpTDuIoxIp7w9pjwLVAVWvbjqDlU5DrPxpsn29i8STNpJ7My7+12/ +C/QJDrlCJCav1ma04G2QZbyAri3ax/MCeonFsQIDAQABoy8wLTAJBgNVHRMEAjAA +MAsGA1UdDwQEAwIFoDATBgNVHSUEDDAKBggrBgEFBQcDAjANBgkqhkiG9w0BAQsF +AAOCAQEAI+PgF1gsQckqTh71CxqKimM0h5pIGh6H09bSa+9LFLFa60E1zR8rmygw +AD+u6sI5foFbSdUiIDJBmHizvwMmIptGSRw0Znzi/jjbjBmZSNLnk+Vird5grjF4 +Pf7Vkgi/NKzXTS3Y2TUUhk5OZZ6OmszHZ0eGJlUcz6Qa13hcalVHc3FmikeAu5/h +XQuthOQDXJBabgexQ+1K6ft6DDImdQCFcZhYXSb30cRHS9lqIVZbI7Rtk6UqwkvE +hYU0g8BVeVBpL7xYBqfrpdy+vBb28rrLT6Dvgf0giQ3F07S+RAivDWjM53Wyhb7T +6o3h8l49IkcEW1mns9Mj2bPNFSOhSA== +-----END CERTIFICATE----- diff --git a/st2tests/st2tests/fixtures/ssl_certs/client/private_key.pem b/st2tests/st2tests/fixtures/ssl_certs/client/private_key.pem new 
file mode 100644 index 0000000000..7ddd509e15 --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/client/private_key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEA4HxkZw50MGiWYmlrwJBHAjwsD7lfft9gHrRAeP8iEI0oLIJm +/MmUUIyA2DSDGJCIsP+grkmZawLmu7D0vJIVIUo+OBNUQ/3mACWH9z15AW5s/Ig/ +FZErhBg3RFZS+hXVT639U94uKne+mjh/G4Ej7OYHhBywn+EKakIJuUTs10sF0kW/ +4h1Gx9+Ph3tfYSagNdMDXXft0Knn/X8vMwLF5Eg8ZHKnty30wJRr4r2bqTeSCPS5 +k3bfpcxOAnaSpTDuIoxIp7w9pjwLVAVWvbjqDlU5DrPxpsn29i8STNpJ7My7+12/ +C/QJDrlCJCav1ma04G2QZbyAri3ax/MCeonFsQIDAQABAoIBAFjujqwRGtCOrn0A +PJLF1Yu6IM595qoRfjfLuvr0QB+EfFTduEUO6rXaY7TDYOgbYjuUmahSOfgd5yCW +Iu6NhNdyXSHD7o8dB8ApHitBbC23/G8y3qMBptam7UYiWK8AdUgiqohOLcXfOGBK +X3ia+YuBOZsJ7qL3+TNNRCLkfltvfA4pkCMgfdZUecJcc0jFNMoCBiyk61CnNhLL +uy1oMS7JzqPRM1ySWCdBJFkV1omDHgrgBx7VmympFUJHb6kVUSh/mnPTejTcM1ds +BkNecBbS/w2X9Gb9PSZzLCAEwmJ8J0hRkgDiahN7Q/kNsQ3ca3r03iocJALecBsW +3sujeH0CgYEA+5ewcq9M/sxdZnuZy69v7T2j8Q/FGGF7IQHlT67r80cEtXeAjlrN +0D9I3+cOrvz57Eay0n2hGLWzhyex6TTX9pZozTjcMuqRkB2ztPp3HkjRucpVhGz4 +pbADvO+ZgO87AGW13E8BBDN8BsWHPFpWpwpHvEcp05sFeUdeGqJfcHsCgYEA5Gsj +dndnmxX63it2Fa3I05MynAiqnt9MNm6zcNqPMKauK6xaawZv5FvQSd5MUQa9sj3s +VgYKr9e61u7WMaHqNwn6BUOwMKv26lwjkXW/wV3QMNzn5bzS2CyjWJEjdPq0WqoH +RRvR455mAlhTVFSyOJ279WXUWoPxqDbd/Y+1yMMCgYAlDqmxqrpniUh0kN4NT1Do +G70rA4yfU7RkHzhcbUJZuesqo2hvD1bjRn8AY7MY+TACqkMql9CDqDfCP4mH9P2e +V3cmSyq74SsBlC5lCMNE1ar2d6Py9m4FUZCrYos0n4gMPe70fTqEGOU6xhtuO0wq +HGyGgeDaRyoeO/HTcHkoQwKBgQCFqaQw2KKKAAyzIV+SRAV2uXYuFGwzV5uzZoge +i+aqo37cE5k9c6DaUlfKQgkKiRVMTiwUEqkCSQ0OZOh2VrdFydLCbd+WO6rbbVtq +7SpursT7MumIaDxBP62+UAAdne8X9tMWP7dMqQ4sZR8uA/neY37vlMz0wq0QsDqq +/AN2HQKBgQDZQIZuZwS12f2Mt/E/27I8lyDiVEj59zwxeayxFq8SzUtbWnWeepes +vtsdF19dWXzwI8MjTDhGo45YyKwtNXMp+uiMA0QFo4R07D68VrxAUDYGgnhhAxlZ +Wmq8OapkJUp69GeDgnG0F72eMhrQu6fJN1dpvNAkfZiuyT2BGBc6cA== +-----END RSA PRIVATE KEY----- diff --git a/st2tests/st2tests/fixtures/ssl_certs/client/req.pem b/st2tests/st2tests/fixtures/ssl_certs/client/req.pem new file mode 100644 index 
0000000000..58e270e22a --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/client/req.pem @@ -0,0 +1,15 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIICajCCAVICAQAwJTESMBAGA1UEAwwJbG9jYWxob3N0MQ8wDQYDVQQKDAZjbGll +bnQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDgfGRnDnQwaJZiaWvA +kEcCPCwPuV9+32AetEB4/yIQjSgsgmb8yZRQjIDYNIMYkIiw/6CuSZlrAua7sPS8 +khUhSj44E1RD/eYAJYf3PXkBbmz8iD8VkSuEGDdEVlL6FdVPrf1T3i4qd76aOH8b +gSPs5geEHLCf4QpqQgm5ROzXSwXSRb/iHUbH34+He19hJqA10wNdd+3Qqef9fy8z +AsXkSDxkcqe3LfTAlGvivZupN5II9LmTdt+lzE4CdpKlMO4ijEinvD2mPAtUBVa9 +uOoOVTkOs/Gmyfb2LxJM2knszLv7Xb8L9AkOuUIkJq/WZrTgbZBlvICuLdrH8wJ6 +icWxAgMBAAGgADANBgkqhkiG9w0BAQsFAAOCAQEApuP6zTVRGLa69IXIyGIqDzb6 +NjQxyTbB5SzbtgqvdcBs5EuntsFTmS11umKwzoqT0+Kf3JtwO8pu8rQbX3C/EWOP +/eWqFPnGTCRk0AE+m08XxiAgQrgOxiMj483ka6Qr3OdT7zjW6xUyE0ObD+auD+fx +9siygGy8P9X0x0PqpWQoZm17x3bUfERiIl+oI/BltuUmAfPgELtEIBjcz+Xrslgl +5iV8Rn/+srFwMT80QLt9iypt0Me8IkbKTWpDUVQYEaXA3svCvGuthzeukImmmAPZ +rpcXR6WvYVdb2HekgqZtgvDg4FDeLidK164uTeOlCC/CRLPKyJu9VJpTQamC6g== +-----END CERTIFICATE REQUEST----- diff --git a/st2tests/st2tests/fixtures/ssl_certs/server/private_key.pem b/st2tests/st2tests/fixtures/ssl_certs/server/private_key.pem new file mode 100644 index 0000000000..05924ec179 --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/server/private_key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAuLLUdbHqOsUiRnkv2S0fiadrqwfdgaZgVImvMyorVYzoJ5W7 +anJSyWPnV/ly/rjL7toiPhBcVgDuCGkf7CjPN4E5tdxI9ylYk/UHEtMG1ll6kDiF +8hWfHDdktdqnQvuLkUMAA5xgIFfX+UMBuTZk7VowrjnOuljN5eVN89y2fYXXtqC1 +91HilG9VwLewYKQd/Ishb4p2WfxiBIVO+cQpnYB6quvrEYC1XPcRbJuXdrc7KcYn +dWdoj6M7aT1zOnHJrdLtv7F7dkYgV9vqwN7w3ud7uNaEbsHvWz0i+6qjX/uE755N +ZoJ8O8Dx5ug/1lxplnXlfmadIibYPBJatRsSiwIDAQABAoIBAQC0UxytYCvwfyFs +rsrxfWWqLsQm8oHoH/ky8E4WZRhz6SOL6ltVnRKIvzpSISCN4vxwUZZXBAAyk6vS +mFhraJiPd2JR1SWD8mEh63uhfFjTk/7eqeDUrxluIgL4rebZtd/YzhJIdDdBvKIH +Ic2f96RoO8MFhzj3pNY5mzwVWCrvtsEY4ygrblQrweqNbcaowJ/YQPPkgvXb6dC3 
+IXjBL5IzOwTlnIYhFkuZY736Z8GOw9rcyGxITHAKavWOJkE72drh0gv5rBnu2NLz +Lgta6o+p6/DU1tjq2LRllq1HDL7uy5yGxBtB+uXly22Ur/rQzYBKeRHkj2OqZKlV +kNiyKBipAoGBAOMkqqTu9dd8xPCgu8iQWHlKVwL6gp4Ij/0PCpXL5v5cktyoAvd9 +fb22UGeFLbbdUuctO711oMfMXl8nULafT54WbnSCG2f+oiRacupJQ/QLPQ8nV8Gy +K9+H/rYZ+ggLNkNqjvM5xQZ6/AxZxWEv+qNJfPF0fG1iCWmYh0OrmfDdAoGBANAp +vma47lG3dnQfga88//SJCeuluwumjXvN8gQJvwU1ofaGjRdKxtexWBuZG6BPXnCv +yRm5tWYJnxj+zUF+ImMsd7sd/Iy1PW7gdZtMtjIW4Qmys0IKK3zkwGygayFrnyhg +WU0t63OEiKEJ7mQzvOAmnTG+H7fZ6WWm3gxi+WaHAoGAYDda9YynpMUcY1Wi1d2X +LKG54/AbvjegTrC9aiC6U4sBRukAgLeuuNruijtW1vw/rt9xS9r05U2DuEjeHs2z +GyMjXMT0OQQayM1rmiS43TqZfb7LpKgFf6WK1raAPEILlVkg/pS9Cfa0p8KrInUB +dYOeomUWg/sgQ5Ox0I9zIR0CgYAYxl8a6reykhtPBtDwgloUSJsdqMPyRwhfy8sa +H+7UN+Xm6WyxcPzpfvn1juty0P90efd9UFT+p/Z/ixPyz4hYNVqqso70UD3XjG9y +5FZq774o4VPkcEFsw+0DALS/bYerzovSW7zCKuv3/q6Yzm+UXgQnf3FW+GCG8K1M +3BrC0QKBgC6srVlHBF9FI1D/9yjjx3JIVmKKS7YleAl36t05zCfR46FDPPa7J4/+ +1UzBkEFkn0/Ven8bbkOKr9v7wBjxszCnvZPxDm9oGU8l8TjrZYiuwi0euF+4r61v +HYueOtTDjtOYSPXbQcypA0FjdeHPE5XY6O4I8ti9URyV+M80vijk +-----END RSA PRIVATE KEY----- diff --git a/st2tests/st2tests/fixtures/ssl_certs/server/req.pem b/st2tests/st2tests/fixtures/ssl_certs/server/req.pem new file mode 100644 index 0000000000..5135c2cc33 --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/server/req.pem @@ -0,0 +1,15 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIICajCCAVICAQAwJTESMBAGA1UEAwwJbG9jYWxob3N0MQ8wDQYDVQQKDAZzZXJ2 +ZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC4stR1seo6xSJGeS/Z +LR+Jp2urB92BpmBUia8zKitVjOgnlbtqclLJY+dX+XL+uMvu2iI+EFxWAO4IaR/s +KM83gTm13Ej3KViT9QcS0wbWWXqQOIXyFZ8cN2S12qdC+4uRQwADnGAgV9f5QwG5 +NmTtWjCuOc66WM3l5U3z3LZ9hde2oLX3UeKUb1XAt7BgpB38iyFvinZZ/GIEhU75 +xCmdgHqq6+sRgLVc9xFsm5d2tzspxid1Z2iPoztpPXM6ccmt0u2/sXt2RiBX2+rA +3vDe53u41oRuwe9bPSL7qqNf+4Tvnk1mgnw7wPHm6D/WXGmWdeV+Zp0iJtg8Elq1 +GxKLAgMBAAGgADANBgkqhkiG9w0BAQsFAAOCAQEAmgj0lyN0I+pik9xQnmt7RhC1 +r+5ivX9ndnMmpeN8jI0RqUOEU3CewSsxKihiVpVHqUGJhHKJmsnEh/aiD2dPorK+ 
+I0NGWXGexk3TfHq/Ey1lwyZc1O9+vOYo/6k3zDhJZg0BekNkYciTsMFpI4h8cDr2 +yV3gzRdFPug2wwBPuKumiJuI6ZQU3G3FjgbUIOox91ZZctH1X3PRFmHjZKiHauwE +3FEzyoJUXPhP/HFGooZ6M81nm5VotozqUbj+pslLGjPdX2stduFfhZOriwH/mKll +7seOwR7GpqOhMDSCfs1gBAZkkyGX+z1hk+hccFJHSO0PLg+32Wtzu1kepBw4kA== +-----END CERTIFICATE REQUEST----- diff --git a/st2tests/st2tests/fixtures/ssl_certs/server/server_certificate.p12 b/st2tests/st2tests/fixtures/ssl_certs/server/server_certificate.p12 new file mode 100644 index 0000000000000000000000000000000000000000..7a937f220bfe751cc923045561cb8dff001359d0 GIT binary patch literal 2341 zcmV+=3EK8Bf(ao40Ru3C2KfMvR99{59fgthRg;fU zo`e$R|G;Sy_q?|>y}1-*an=wBxE~nC zO$-;n#p*M+(8ImKP&aGVPJ6PV^1~P@1N}$sIN(Lj3jG6H`Zs{QYN^JZBwGHvhr?y?V8 zwEC3c%A%+HbH|;{*J~zR#rp;Qr>+&LO6y_QbJ>BJg@-0`8_z&hC+4vqTn?5Ikov2_{i@*vDpK3g{Rb3VN@N zI&f^!R(#j;&KcEDD~JwE(_2@>|4x3Ean@{HICI~bt_G^-fYlkxA2s-ua(6_Gzq#5J zLS=pLuE2#~Wnq}sac}qQXgCk6o5@t*xEV;Xb8vxNt|*4*d^qT?eHdJ*NOeK%;@lT@ z(g(UY2)Hhcod2`12G7&35Hf;2F4w+2C%R%fbP%^8OC{((QE&Z*8>@{y7Y$>HIe*H98tLQd0*xL{kp{ZJc)3k^&LR_*i~ke{CBj}s&`w# z%XY6MAJ0$dE~X~>FoFd^1_>&LNQU4nQv^{r{eFipPhQ0-y3EHIPzd*Ze%k?{A?#tyE{ zEuRJYoBH#MKY|kby%;2sqbzBY9Mh5-Wa^+^dVIzJDt`?b!5(PiEQvPOQMNJc(v;%x zz|WoIpnN$*VJBYzeUvn2WGzIQacMkXHx#{-rS`4l| zf)jb%I`I$u1m>~vbZL|sD-GnPY)DD)6A2Cp!L>2y*HbDh*woI)8`*r8(TJjY*RGfT zhNWUH=)E9Q{ErbOo|pjDR%sF@^mw8Fl)kzeFYl=7ydV0Iygjpa7y&MxYgkK>j6I}H zw6u^^Z#cK2zu%j=51G`G_|#TOd%u`JdLe6RtR<K=>oQaZ96HAxRL=YU;SkQ#7a&-c=skCCA-;q59L;NIL+NP|grmjJyuYqZe;$4<@ z;5WF8V2aGn9lLW(9E6#bLkH+g3v4+E9Llc6Yfpv|33d}LrRtanYBL+udK2mvhWm!X z%jW;MpckP@t&wz>o9ve1We@G~>5`-@;W#5|cU3b$!KHL~w4)s#Nojy%ln_*g@rq}G zv9P#xg|-CC-H9dEmgnJ{~MyHjTU!7vV>(V@woN`>Un_OthEJR*MS+2 z^cx$)bAE9M_%-q9F?<)CI70_E&Ay4>%L)#u>cDR7)-F{^#E>cHlWKiy)A%3@$XXCR z*N(J6F)mQ^a@%F=wQWukm0+AT$;u~O(?Eito}IRW-@CUJAQc(W7)~mL52;24UAv+> z->nO27n}V5>O;FEo#Vv0{^f%F7cBA{cY!H|Ku~~IRhoxY9U19Xj&1`PZ1Shsc`iWy zv%reGv*H0I)%#ie2=;LtJ6U^rd(pT~Yq-9oz5(8jT6&@&O(|TVx%9`8g}UM_>Zb}H z@wEQCAfIEHjWrsB?rghG>+2B#F5{Gskp}#Ia9~QInB~FbCtgj!e_gr}gOdrYiS4q9 
zP5)X{Ez4v(Tg9Gxr$RaClT?MX_B9LaOk{f@`W*SdxwYi6hFB>BQ8xw}GEBe>$RJa} z;quO0<7>`9IdWoq4N6L3`4^E{u?Hfj!&`}GYuwRX&LJC80USfQ0!^XAk83IP z3p}suUt5rA3$1k*R}T0LfSf3SO12wJ!)+)N!J=pQvc)N$6n+0a8$gBA$5~E`9L=7J z`S^^j#$Q?@p{HVTECtlj%2xuaB$us2b0xeI!96;2{|2)G#JK_&Gp9 zZvoK-_-T5lRSa literal 0 HcmV?d00001 diff --git a/st2tests/st2tests/fixtures/ssl_certs/server/server_certificate.pem b/st2tests/st2tests/fixtures/ssl_certs/server/server_certificate.pem new file mode 100644 index 0000000000..17c4490f8b --- /dev/null +++ b/st2tests/st2tests/fixtures/ssl_certs/server/server_certificate.pem @@ -0,0 +1,18 @@ +-----BEGIN CERTIFICATE----- +MIIC4jCCAcqgAwIBAgIBATANBgkqhkiG9w0BAQsFADATMREwDwYDVQQDDAhNeVRl +c3RDQTAeFw0xOTAyMTIxNTU4MDdaFw0yNDAyMTExNTU4MDdaMCUxEjAQBgNVBAMM +CWxvY2FsaG9zdDEPMA0GA1UECgwGc2VydmVyMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEAuLLUdbHqOsUiRnkv2S0fiadrqwfdgaZgVImvMyorVYzoJ5W7 +anJSyWPnV/ly/rjL7toiPhBcVgDuCGkf7CjPN4E5tdxI9ylYk/UHEtMG1ll6kDiF +8hWfHDdktdqnQvuLkUMAA5xgIFfX+UMBuTZk7VowrjnOuljN5eVN89y2fYXXtqC1 +91HilG9VwLewYKQd/Ishb4p2WfxiBIVO+cQpnYB6quvrEYC1XPcRbJuXdrc7KcYn +dWdoj6M7aT1zOnHJrdLtv7F7dkYgV9vqwN7w3ud7uNaEbsHvWz0i+6qjX/uE755N +ZoJ8O8Dx5ug/1lxplnXlfmadIibYPBJatRsSiwIDAQABoy8wLTAJBgNVHRMEAjAA +MAsGA1UdDwQEAwIFoDATBgNVHSUEDDAKBggrBgEFBQcDATANBgkqhkiG9w0BAQsF +AAOCAQEAnhmIUhZwweCqdzGNeoNXXkuXyBf2fFvajHlG2a2pZ8r6/fyQbbJgzo04 +ajjWoUoSW+XB+AfJvT6CTZuMWsGkxYvFAxOoXtLpW0OKqEh55q8diMSb/gOxxwND +vHVb1+VjZBhzxxt0TbXeFngMnBSgVhipKQe49pe0H+rDDYptultl81n2zFLzBKUe +h927CnTJ7cpZe4Di2tMJfVsDJB6piuwPu6GnWhT38Q12I+ryL2xbihIw1B4qDtq6 +nq4lYGnpJCNNXg5JR5S1HeYiQtP0sHgU6SvpgMtzDdbCJ0Nu7EpR5J3ChdQWooGf +uTOThX41qx1p47ho4TA9Ac4K/GRcLg== +-----END CERTIFICATE----- From fe237c5ccf9247194ccceb5d0ad9bcea4a811546 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 17:10:50 +0100 Subject: [PATCH 068/105] Update Travis config so we configure RabbitMQ so it exposes SSL / TLS listener on port 5671. Default non SSL listener is exposed on port 5672. 
--- .travis.yml | 16 ++++++++++------ scripts/travis/rabbitmq.config | 10 ++++++++++ 2 files changed, 20 insertions(+), 6 deletions(-) create mode 100644 scripts/travis/rabbitmq.config diff --git a/.travis.yml b/.travis.yml index 1b5615860c..9ef55b3797 100644 --- a/.travis.yml +++ b/.travis.yml @@ -93,22 +93,26 @@ install: # Let's enable rabbitmqadmin # See https://github.com/messagebus/lapine/wiki/Testing-on-Travis. before_script: - # key_url no longer works for APT addon # Use a custom mongod.conf which uses various speed optimizations - sudo cp scripts/travis/mongod.conf /etc/mongod.conf # Clean up any old MongoDB 3.4 data files laying around and make sure mongodb user can write to it - sudo rm -rf /var/lib/mongodb ; sudo mkdir /var/lib/mongodb ; sudo chown -R mongodb:mongodb /var/lib/mongodb - sudo service mongod restart ; sleep 5 - sudo service mongod status - - tail -30 /var/log/mongodb/mongod.log - - mongod --version - - git --version - - pip --version - - virtualenv --version + - sudo tail -n 30 /var/log/mongodb/mongod.log + # Use custom RabbitMQ config which enables SSL / TLS listener on port 5671 with test certs + - sudo cp scripts/travis/rabbitmq.config /etc/rabbitmq/rabbitmq.config + # Install rabbitmq_management RabbitMQ plugin - sudo rabbitmq-plugins enable rabbitmq_management - sudo wget http://guest:guest@localhost:15672/cli/rabbitmqadmin -O /usr/local/bin/rabbitmqadmin - sudo chmod +x /usr/local/bin/rabbitmqadmin - sudo service rabbitmq-server restart + - sudo tail -n 30 /var/log/rabbitmq/* + # Print various binary versions + - mongod --version + - git --version + - pip --version + - virtualenv --version # Print out various environment variables info - make play diff --git a/scripts/travis/rabbitmq.config b/scripts/travis/rabbitmq.config new file mode 100644 index 0000000000..d23be26ec5 --- /dev/null +++ b/scripts/travis/rabbitmq.config @@ -0,0 +1,10 @@ +[ + {rabbit, [ + {ssl_listeners, [5671]}, + {ssl_options, [{cacertfile, 
"/home/travis/build/StackStorm/st2/st2tests/st2tests/fixtures/ssl_certs/ca/ca_certificate_bundle.pem"}, + {certfile, "/home/travis/build/StackStorm/st2/st2tests/st2tests/fixtures/ssl_certs/server/server_certificate.pem"}, + {keyfile, "/home/travis/build/StackStorm/st2/st2tests/st2tests/fixtures/ssl_certs/server/private_key.pem"}, + {verify, verify_peer}, + {fail_if_no_peer_cert, false}]} + ]} +]. From fd0c7b132303c10c0a6e3cc9160b4768db49d622 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 18:05:53 +0100 Subject: [PATCH 069/105] Add a workaround - for now we are stuck on Ubuntu Precise and ancient RabbitMQ on Travis tests. --- scripts/travis/rabbitmq.config | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/travis/rabbitmq.config b/scripts/travis/rabbitmq.config index d23be26ec5..0cf25a732a 100644 --- a/scripts/travis/rabbitmq.config +++ b/scripts/travis/rabbitmq.config @@ -1,6 +1,7 @@ [ {rabbit, [ {ssl_listeners, [5671]}, + {ssl_allow_poodle_attack, true}, {ssl_options, [{cacertfile, "/home/travis/build/StackStorm/st2/st2tests/st2tests/fixtures/ssl_certs/ca/ca_certificate_bundle.pem"}, {certfile, "/home/travis/build/StackStorm/st2/st2tests/st2tests/fixtures/ssl_certs/server/server_certificate.pem"}, {keyfile, "/home/travis/build/StackStorm/st2/st2tests/st2tests/fixtures/ssl_certs/server/private_key.pem"}, From cd88c9f29794c96082de4f030815dacbf23b6ae0 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 19:19:43 +0100 Subject: [PATCH 070/105] Update tests config. 
--- st2tests/st2tests/config.py | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/st2tests/st2tests/config.py b/st2tests/st2tests/config.py index 470f7adb9d..1b50f2aa68 100644 --- a/st2tests/st2tests/config.py +++ b/st2tests/st2tests/config.py @@ -150,7 +150,34 @@ def _register_api_opts(): help='URL of the messaging server.'), cfg.ListOpt( 'cluster_urls', default=[], - help='URL of all the nodes in a messaging service cluster.') + help='URL of all the nodes in a messaging service cluster.'), + cfg.IntOpt( + 'connection_retries', default=10, + help='How many times should we retry connection before failing.'), + cfg.IntOpt( + 'connection_retry_wait', default=10000, + help='How long should we wait between connection retries.'), + cfg.BoolOpt( + 'ssl', default=False, + help='Use SSL / TLS to connect to the messaging server. Same as ' + 'appending "?ssl=true" at the end of the connection URL string.'), + cfg.StrOpt( + 'ssl_keyfile', default=None, + help='Private keyfile used to identify the local connection against RabbitMQ.'), + cfg.StrOpt( + 'ssl_certfile', default=None, + help='Certificate file used to identify the local connection (client).'), + cfg.StrOpt( + 'ssl_cert_reqs', default=None, choices='none, optional, required', + help='Specifies whether a certificate is required from the other side of the ' + 'connection, and whether it will be validated if provided.'), + cfg.StrOpt( + 'ssl_ca_certs', default=None, + help='ca_certs file contains a set of concatenated CA certificates, which are ' + 'used to validate certificates passed from RabbitMQ.'), + cfg.StrOpt( + 'login_method', default=None, + help='Login method to use (AMQPLAIN, PLAIN, EXTERNAL, etc.).') ] _register_opts(messaging_opts, group='messaging') From ed4fe114ef36cc62e2d7f8e12741f001e8023e22 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 19:22:47 +0100 Subject: [PATCH 071/105] Add tests for RabbitMQ connection SSL related parameters and 
RabbitMQ SSL listener. --- .../integration/test_rabbitmq_ssl_listener.py | 185 ++++++++++++++++++ 1 file changed, 185 insertions(+) create mode 100644 st2common/tests/integration/test_rabbitmq_ssl_listener.py diff --git a/st2common/tests/integration/test_rabbitmq_ssl_listener.py b/st2common/tests/integration/test_rabbitmq_ssl_listener.py new file mode 100644 index 0000000000..812e26473e --- /dev/null +++ b/st2common/tests/integration/test_rabbitmq_ssl_listener.py @@ -0,0 +1,185 @@ +# Licensed to the StackStorm, Inc ('StackStorm') under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +import os +import ssl +import socket + +import unittest2 +from oslo_config import cfg + +from st2common.transport import utils as transport_utils + +from st2tests.fixturesloader import get_fixtures_base_path + +__all__ = [ + 'RabbitMQTLSListenerTestCase' +] + +CERTS_FIXTURES_PATH = os.path.join(get_fixtures_base_path(), 'ssl_certs/') +ON_TRAVIS = (os.environ.get('TRAVIS', 'false').lower() == 'true') + +NON_SSL_LISTENER_PORT = 5672 +SSL_LISTENER_PORT = 5671 + + +# NOTE: We only run those tests on Travis because at the moment, local vagrant dev VM doesn't +# expose RabbitMQ SSL listener by default +@unittest2.skipIf(not ON_TRAVIS, 'Skipping tests because not running on Travis') +class RabbitMQTLSListenerTestCase(unittest2.TestCase): + + def setUp(self): + # Set default values + cfg.CONF.set_override(name='ssl', override=False, group='messaging') + cfg.CONF.set_override(name='ssl_keyfile', override=None, group='messaging') + cfg.CONF.set_override(name='ssl_certfile', override=None, group='messaging') + cfg.CONF.set_override(name='ssl_ca_certs', override=None, group='messaging') + cfg.CONF.set_override(name='ssl_cert_reqs', override=None, group='messaging') + + def test_non_ssl_connection_on_ssl_listener_port_failure(self): + connection = transport_utils.get_connection(urls='amqp://guest:guest@127.0.0.1:5671/') + + expected_msg_1 = '[Errno 104] Connection reset by peer' + expected_msg_2 = 'Socket closed' + + try: + connection.connect() + except Exception as e: + self.assertFalse(connection.connected) + self.assertTrue(isinstance(e, (IOError, socket.error))) + self.assertTrue(expected_msg_1 in str(e) or expected_msg_2 in str(e)) + else: + self.fail('Exception was not thrown') + + if connection: + connection.release() + + def test_ssl_connection_on_ssl_listener_success(self): + # Using query param notation + urls = 'amqp://guest:guest@127.0.0.1:5671/?ssl=true' + connection = transport_utils.get_connection(urls=urls) + + try: + 
self.assertTrue(connection.connect()) + self.assertTrue(connection.connected) + finally: + if connection: + connection.release() + + # Using messaging.ssl config option + cfg.CONF.set_override(name='ssl', override=True, group='messaging') + + connection = transport_utils.get_connection(urls='amqp://guest:guest@127.0.0.1:5671/') + + try: + self.assertTrue(connection.connect()) + self.assertTrue(connection.connected) + finally: + if connection: + connection.release() + + def test_ssl_connection_ca_certs_provided(self): + ca_cert_path = os.path.join(CERTS_FIXTURES_PATH, 'ca/ca_certificate_bundle.pem') + + cfg.CONF.set_override(name='ssl', override=True, group='messaging') + cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging') + + # 1. Validate server cert against a valid CA bundle (success) - cert required + cfg.CONF.set_override(name='ssl_cert_reqs', override='required', group='messaging') + + connection = transport_utils.get_connection(urls='amqp://guest:guest@127.0.0.1:5671/') + + try: + self.assertTrue(connection.connect()) + self.assertTrue(connection.connected) + finally: + if connection: + connection.release() + + # 2. Validate server cert against other CA bundle (failure) + # CA bundle which was not used to sign the server cert + ca_cert_path = os.path.join('/etc/ssl/certs/thawte_Primary_Root_CA.pem') + + cfg.CONF.set_override(name='ssl_cert_reqs', override='required', group='messaging') + cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging') + + connection = transport_utils.get_connection(urls='amqp://guest:guest@127.0.0.1:5671/') + + expected_msg = r'\[SSL: CERTIFICATE_VERIFY_FAILED\] certificate verify failed' + self.assertRaisesRegexp(ssl.SSLError, expected_msg, connection.connect) + + # 3. 
Validate server cert against other CA bundle (failure) + ca_cert_path = os.path.join('/etc/ssl/certs/thawte_Primary_Root_CA.pem') + + cfg.CONF.set_override(name='ssl_cert_reqs', override='optional', group='messaging') + cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging') + + connection = transport_utils.get_connection(urls='amqp://guest:guest@127.0.0.1:5671/') + + expected_msg = r'\[SSL: CERTIFICATE_VERIFY_FAILED\] certificate verify failed' + self.assertRaisesRegexp(ssl.SSLError, expected_msg, connection.connect) + + # 4. Validate server cert against other CA bundle (failure) + # We use invalid bundle but cert_reqs is none + ca_cert_path = os.path.join('/etc/ssl/certs/thawte_Primary_Root_CA.pem') + + cfg.CONF.set_override(name='ssl_cert_reqs', override='none', group='messaging') + cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging') + + connection = transport_utils.get_connection(urls='amqp://guest:guest@127.0.0.1:5671/') + + try: + self.assertTrue(connection.connect()) + self.assertTrue(connection.connected) + finally: + if connection: + connection.release() + + def test_ssl_connect_client_side_cert_authentication(self): + # 1. 
Success, valid client side cert provided + ssl_keyfile = os.path.join(CERTS_FIXTURES_PATH, 'client/private_key.pem') + ssl_certfile = os.path.join(CERTS_FIXTURES_PATH, 'client/client_certificate.pem') + ca_cert_path = os.path.join(CERTS_FIXTURES_PATH, 'ca/ca_certificate_bundle.pem') + + cfg.CONF.set_override(name='ssl_keyfile', override=ssl_keyfile, group='messaging') + cfg.CONF.set_override(name='ssl_certfile', override=ssl_certfile, group='messaging') + cfg.CONF.set_override(name='ssl_cert_reqs', override='required', group='messaging') + cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging') + + connection = transport_utils.get_connection(urls='amqp://guest:guest@127.0.0.1:5671/') + + try: + self.assertTrue(connection.connect()) + self.assertTrue(connection.connected) + finally: + if connection: + connection.release() + + # 2. Invalid client side cert provided - failure + ssl_keyfile = os.path.join(CERTS_FIXTURES_PATH, 'client/private_key.pem') + ssl_certfile = os.path.join(CERTS_FIXTURES_PATH, 'server/server_certificate.pem') + ca_cert_path = os.path.join(CERTS_FIXTURES_PATH, 'ca/ca_certificate_bundle.pem') + + cfg.CONF.set_override(name='ssl_keyfile', override=ssl_keyfile, group='messaging') + cfg.CONF.set_override(name='ssl_certfile', override=ssl_certfile, group='messaging') + cfg.CONF.set_override(name='ssl_cert_reqs', override='required', group='messaging') + cfg.CONF.set_override(name='ssl_ca_certs', override=ca_cert_path, group='messaging') + + connection = transport_utils.get_connection(urls='amqp://guest:guest@127.0.0.1:5671/') + + expected_msg = r'\[X509: KEY_VALUES_MISMATCH\] key values mismatch' + self.assertRaisesRegexp(ssl.SSLError, expected_msg, connection.connect) From 7e471f5e87a7357cd623c2bac978308364228d11 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 19:25:23 +0100 Subject: [PATCH 072/105] Add commented out RabbitMQ ssl related config options which users can uncomment to test this 
functionality. --- conf/st2.dev.conf | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/conf/st2.dev.conf b/conf/st2.dev.conf index c266534710..52f9ac61dc 100644 --- a/conf/st2.dev.conf +++ b/conf/st2.dev.conf @@ -91,7 +91,14 @@ ssh_key_file = /home/vagrant/.ssh/stanley_rsa [messaging] url = amqp://guest:guest@127.0.0.1:5672/ -#url = redis://localhost:6379/0 +# Uncomment to test SSL options +#url = amqp://guest:guest@127.0.0.1:5671/ +#ssl = True +#ssl_keyfile = /data/stanley/st2tests/st2tests/fixtures/ssl_certs/client/private_key.pem +#ssl_certfile = /data/stanley/st2tests/st2tests/fixtures/ssl_certs/client/client_certificate.pem +#ssl_ca_certs = /data/stanley/st2tests/st2tests/fixtures/ssl_certs/ca/ca_certificate_bundle.pem +#ssl_cert_reqs = required +#ssl_cert_reqs = required [ssh_runner] remote_dir = /tmp From 42e77e03954141a93dfa400a71c664b5c934c4e8 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 19:51:06 +0100 Subject: [PATCH 073/105] Try installing latest version of erlang and rabbitmq-server package. From d8271f27748ec0b8b2839dbfd596412daec28db5 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 20:02:01 +0100 Subject: [PATCH 074/105] Revert the change, those versions won't work on Precise. From 1450d7135492f616ad871ad4e607c625a158a553 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 20:14:44 +0100 Subject: [PATCH 075/105] Temporary disable tests since they don't work on Travis due to us using ancient Precise version. 
--- st2common/tests/integration/test_rabbitmq_ssl_listener.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/st2common/tests/integration/test_rabbitmq_ssl_listener.py b/st2common/tests/integration/test_rabbitmq_ssl_listener.py index 812e26473e..eaa000449b 100644 --- a/st2common/tests/integration/test_rabbitmq_ssl_listener.py +++ b/st2common/tests/integration/test_rabbitmq_ssl_listener.py @@ -39,7 +39,10 @@ # NOTE: We only run those tests on Travis because at the moment, local vagrant dev VM doesn't # expose RabbitMQ SSL listener by default -@unittest2.skipIf(not ON_TRAVIS, 'Skipping tests because not running on Travis') +# TODO: Re-enable once we upgrade Travis from Precise to Xenial where latest version of RabbitMQ +# and OpenSSL is available +@unittest2.skip('Skipping until we upgrade to Xenial on Travis') +# @unittest2.skipIf(not ON_TRAVIS, 'Skipping tests because not running on Travis') class RabbitMQTLSListenerTestCase(unittest2.TestCase): def setUp(self): From ec3a8b444a3fe57c4f4002d7c8ef819a79a64e94 Mon Sep 17 00:00:00 2001 From: armab Date: Fri, 8 Feb 2019 20:38:04 +0100 Subject: [PATCH 076/105] Fix API 'POST /api/v1/apikeys' wasn't creating new record with the provided ID At a high level fixes `st2api key load` that wasn't idempotent and failing to import same file twice with: ``` HTTPError: 409 Client Error: Conflict MESSAGE: Tried to save duplicate unique keys (E11000 duplicate key error collection: st2.api_key_d_b index: key_hash_1 dup key: { : "903b7f0761094265969761443f4f25fc87c780b6248c38ce8dbaadfa52a2b53fd72381ac141072cb53ae78ff02e16fdfc0e40001472b9f25585a7b2864c501db" }) for url: http://127.0.0.1:9101/v1/apikeys ``` because it couldn't import records with the requested ID. 
--- st2common/st2common/models/api/auth.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/st2common/st2common/models/api/auth.py b/st2common/st2common/models/api/auth.py index e1f9e39106..f51841e63d 100644 --- a/st2common/st2common/models/api/auth.py +++ b/st2common/st2common/models/api/auth.py @@ -143,11 +143,13 @@ def from_model(cls, model, mask_secrets=False): @classmethod def to_model(cls, instance): + # If PrimaryKey ID is provided, - we want to work with existing ST2 API key + id = getattr(instance, 'id', None) user = str(instance.user) if instance.user else None key_hash = getattr(instance, 'key_hash', None) metadata = getattr(instance, 'metadata', {}) enabled = bool(getattr(instance, 'enabled', True)) - model = cls.model(user=user, key_hash=key_hash, metadata=metadata, enabled=enabled) + model = cls.model(id=id, user=user, key_hash=key_hash, metadata=metadata, enabled=enabled) return model From e56d0ac1076310befbc6f620f20c9db065ee2d35 Mon Sep 17 00:00:00 2001 From: armab Date: Tue, 12 Feb 2019 13:29:21 +0100 Subject: [PATCH 077/105] Add ID's to apikey fixtures --- st2tests/st2tests/fixtures/generic/apikeys/apikey1.yaml | 1 + st2tests/st2tests/fixtures/generic/apikeys/apikey2.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/st2tests/st2tests/fixtures/generic/apikeys/apikey1.yaml b/st2tests/st2tests/fixtures/generic/apikeys/apikey1.yaml index 49cc94e8a4..ffdaccad18 100644 --- a/st2tests/st2tests/fixtures/generic/apikeys/apikey1.yaml +++ b/st2tests/st2tests/fixtures/generic/apikeys/apikey1.yaml @@ -1,4 +1,5 @@ --- +id: 58e3f3330c0517062a3fda43 user: bill key_hash: "ec81d4a56f5987b0ae1cff6e152459986e873d6604637fc70d85c0a0daf131b0a830ccd5b6454cc0c95c0ba6e6655933c993325eb3a28bc43af6c1d801a7c1e8" # 1234 metadata: diff --git a/st2tests/st2tests/fixtures/generic/apikeys/apikey2.yaml b/st2tests/st2tests/fixtures/generic/apikeys/apikey2.yaml index fa2755052a..1fed0f928c 100644 --- a/st2tests/st2tests/fixtures/generic/apikeys/apikey2.yaml 
+++ b/st2tests/st2tests/fixtures/generic/apikeys/apikey2.yaml @@ -1,4 +1,5 @@ --- +id: 5c5ddd776cb8de530e0a1391 user: dilbert key_hash: "17f858ea0bb108feaa91b8eee524c7382e0218ff541783d45996a1149d50dfde4bc19f2e6a591028a2ea08de4211893b246d4eda61dd3c9cf294a2405184ac4b" # 5678 metadata: From 4d8f8c92bf3d7f9c16c580607c8326ce9ddc387f Mon Sep 17 00:00:00 2001 From: armab Date: Tue, 12 Feb 2019 13:37:18 +0100 Subject: [PATCH 078/105] Add 'ID' for POST API creation test case --- st2api/tests/unit/controllers/v1/test_auth_api_keys.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/st2api/tests/unit/controllers/v1/test_auth_api_keys.py b/st2api/tests/unit/controllers/v1/test_auth_api_keys.py index 3a32dc03c1..069051e4cd 100644 --- a/st2api/tests/unit/controllers/v1/test_auth_api_keys.py +++ b/st2api/tests/unit/controllers/v1/test_auth_api_keys.py @@ -197,6 +197,7 @@ def test_post_delete_key(self): def test_post_delete_same_key_hash(self): api_key = { + 'id': '5c5dbb576cb8de06a2d79a4d', 'user': 'herge', 'key_hash': 'ABCDE' } @@ -207,6 +208,7 @@ def test_post_delete_same_key_hash(self): # drop into the DB since API will be masking this value. 
api_key_db = ApiKey.get_by_id(resp1.json['id']) + self.assertEqual(resp1.json['id'], api_key['id'], 'PK ID of created API should match.') self.assertEqual(api_key_db.key_hash, api_key['key_hash'], 'Key_hash should match.') self.assertEqual(api_key_db.user, api_key['user'], 'Key_hash should match.') From 26c4e922462ffbd4a41fd331c514547d16e09777 Mon Sep 17 00:00:00 2001 From: armab Date: Tue, 12 Feb 2019 13:38:15 +0100 Subject: [PATCH 079/105] Fix a typo in older tests --- st2api/tests/unit/controllers/v1/test_auth_api_keys.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/st2api/tests/unit/controllers/v1/test_auth_api_keys.py b/st2api/tests/unit/controllers/v1/test_auth_api_keys.py index 069051e4cd..3a6f8f6e02 100644 --- a/st2api/tests/unit/controllers/v1/test_auth_api_keys.py +++ b/st2api/tests/unit/controllers/v1/test_auth_api_keys.py @@ -210,7 +210,7 @@ def test_post_delete_same_key_hash(self): self.assertEqual(resp1.json['id'], api_key['id'], 'PK ID of created API should match.') self.assertEqual(api_key_db.key_hash, api_key['key_hash'], 'Key_hash should match.') - self.assertEqual(api_key_db.user, api_key['user'], 'Key_hash should match.') + self.assertEqual(api_key_db.user, api_key['user'], 'User should match.') resp = self.app.delete('/v1/apikeys/%s' % resp1.json['id']) self.assertEqual(resp.status_int, 204) From 905f8788f0d4340ae6aee6f708cf1daadd045b81 Mon Sep 17 00:00:00 2001 From: W Chan Date: Mon, 11 Feb 2019 22:50:26 +0000 Subject: [PATCH 080/105] Refactor scheduler process to exit properly Add retries in the scheduler handler to temporarily handle DB connection failures. Refactor how threads exit for the process to return proper code. 
--- st2actions/st2actions/cmd/scheduler.py | 11 +++++- st2actions/st2actions/scheduler/config.py | 7 +++- st2actions/st2actions/scheduler/handler.py | 45 +++++++++++++++++----- st2common/st2common/util/service.py | 36 +++++++++++++++++ 4 files changed, 88 insertions(+), 11 deletions(-) create mode 100644 st2common/st2common/util/service.py diff --git a/st2actions/st2actions/cmd/scheduler.py b/st2actions/st2actions/cmd/scheduler.py index 1ae0096084..8cc48f8cee 100644 --- a/st2actions/st2actions/cmd/scheduler.py +++ b/st2actions/st2actions/cmd/scheduler.py @@ -51,7 +51,9 @@ def _run_queuer(): try: handler.start() entrypoint.start() - entrypoint.wait() + + # Wait on handler first since entrypoint is more durable. + handler.wait() or entrypoint.wait() except (KeyboardInterrupt, SystemExit): LOG.info('(PID=%s) Scheduler stopped.', os.getpid()) @@ -68,6 +70,13 @@ def _run_queuer(): return 1 except: LOG.exception('(PID=%s) Scheduler unexpectedly stopped.', os.getpid()) + + try: + handler.shutdown() + entrypoint.shutdown() + except: + pass + return 1 return 0 diff --git a/st2actions/st2actions/scheduler/config.py b/st2actions/st2actions/scheduler/config.py index ed7d7477be..552042bc0a 100644 --- a/st2actions/st2actions/scheduler/config.py +++ b/st2actions/st2actions/scheduler/config.py @@ -56,7 +56,12 @@ def _register_service_opts(): 'gc_interval', default=10, help='How often (in seconds) to look for zombie execution requests before rescheduling ' 'them.'), - + cfg.IntOpt( + 'retry_max_attempt', default=10, + help='The maximum number of attempts that the scheduler retries on error.'), + cfg.IntOpt( + 'retry_wait_msec', default=3000, + help='The number of milliseconds to wait in between retries.') ] cfg.CONF.register_opts(scheduler_opts, group='scheduler') diff --git a/st2actions/st2actions/scheduler/handler.py b/st2actions/st2actions/scheduler/handler.py index e93adf6ec9..74027b88ed 100644 --- a/st2actions/st2actions/scheduler/handler.py +++ 
b/st2actions/st2actions/scheduler/handler.py @@ -16,10 +16,12 @@ from __future__ import absolute_import import eventlet +import retrying from oslo_config import cfg from st2common import log as logging from st2common.util import date +from st2common.util import service as service_utils from st2common.constants import action as action_constants from st2common.constants import policy as policy_constants from st2common.exceptions.db import StackStormDBObjectNotFoundError @@ -60,25 +62,37 @@ def __init__(self): self._shutdown = False self._pool = eventlet.GreenPool(size=cfg.CONF.scheduler.pool_size) self._coordinator = coordination_service.get_coordinator() + self._main_thread = None + self._cleanup_thread = None def run(self): - LOG.debug('Entering scheduler loop') + LOG.debug('Starting scheduler handler...') while not self._shutdown: eventlet.greenthread.sleep(cfg.CONF.scheduler.sleep_interval) + self.process() - execution_queue_item_db = self._get_next_execution() + @retrying.retry( + retry_on_exception=service_utils.retry_on_exceptions, + stop_max_attempt_number=cfg.CONF.scheduler.retry_max_attempt, + wait_fixed=cfg.CONF.scheduler.retry_wait_msec) + def process(self): + execution_queue_item_db = self._get_next_execution() - if execution_queue_item_db: - self._pool.spawn(self._handle_execution, execution_queue_item_db) + if execution_queue_item_db: + self._pool.spawn(self._handle_execution, execution_queue_item_db) def cleanup(self): - LOG.debug('Starting scheduler garbage collection') + LOG.debug('Starting scheduler garbage collection...') while not self._shutdown: eventlet.greenthread.sleep(cfg.CONF.scheduler.gc_interval) self._handle_garbage_collection() + @retrying.retry( + retry_on_exception=service_utils.retry_on_exceptions, + stop_max_attempt_number=cfg.CONF.scheduler.retry_max_attempt, + wait_fixed=cfg.CONF.scheduler.retry_wait_msec) def _handle_garbage_collection(self): """ Periodically look for executions which have "handling" set to "True" and haven't 
been @@ -328,11 +342,24 @@ def _update_to_scheduled(liveaction_db, execution_queue_item_db): def start(self): self._shutdown = False - eventlet.spawn(self.run) - eventlet.spawn(self.cleanup) + # Spawn the worker threads. + self._main_thread = eventlet.spawn(self.run) + self._cleanup_thread = eventlet.spawn(self.cleanup) - def shutdown(self): - self._shutdown = True + # Link the threads to the shutdown function. If either of the threads exited with error, + # then initiate shutdown which will allow the waits below to throw exception to the + # main process. + self._main_thread.link(self.shutdown) + self._cleanup_thread.link(self.shutdown) + + def shutdown(self, *args, **kwargs): + if not self._shutdown: + self._shutdown = True + + def wait(self): + # Wait for the worker threads to complete. If there is an exception thrown in the thread, + # then the exception will be propagated to the main process for a proper return code. + self._main_thread.wait() or self._cleanup_thread.wait() def get_handler(): diff --git a/st2common/st2common/util/service.py b/st2common/st2common/util/service.py new file mode 100644 index 0000000000..9e9a7df92d --- /dev/null +++ b/st2common/st2common/util/service.py @@ -0,0 +1,36 @@ +# Licensed to the StackStorm, Inc ('StackStorm') under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import pymongo + +from st2common import log as logging + + +LOG = logging.getLogger(__name__) + + +def retry_on_exceptions(exc): + LOG.warning('Evaluating retry on exception %s. %s', type(exc), str(exc)) + + is_mongo_connection_error = isinstance(exc, pymongo.errors.ConnectionFailure) + + retrying = is_mongo_connection_error + + if retrying: + LOG.warning('Retrying on exception %s.', type(exc)) + + return retrying From 86ae0078a2522ea6ac5f671edd35f2ef4e2207bc Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 10:20:19 +0100 Subject: [PATCH 081/105] Add missing license header. --- st2actions/st2actions/cmd/scheduler.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/st2actions/st2actions/cmd/scheduler.py b/st2actions/st2actions/cmd/scheduler.py index 8cc48f8cee..fad2aa22b4 100644 --- a/st2actions/st2actions/cmd/scheduler.py +++ b/st2actions/st2actions/cmd/scheduler.py @@ -1,6 +1,21 @@ +# Licensed to the StackStorm, Inc ('StackStorm') under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # Monkey patching should be done as early as possible. 
# See http://eventlet.net/doc/patching.html#monkeypatching-the-standard-library from __future__ import absolute_import + from st2common.util.monkey_patch import monkey_patch monkey_patch() From 059d3e1ecc7644f2022ba125fb9c996cbdc16355 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 10:26:16 +0100 Subject: [PATCH 082/105] Fix function name. --- st2actions/st2actions/cmd/scheduler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/st2actions/st2actions/cmd/scheduler.py b/st2actions/st2actions/cmd/scheduler.py index fad2aa22b4..9e548d6ebb 100644 --- a/st2actions/st2actions/cmd/scheduler.py +++ b/st2actions/st2actions/cmd/scheduler.py @@ -51,7 +51,7 @@ def _setup(): _setup_sigterm_handler() -def _run_queuer(): +def _run_scheduler(): LOG.info('(PID=%s) Scheduler started.', os.getpid()) # Lazy load these so that decorator metrics are in place @@ -104,7 +104,7 @@ def _teardown(): def main(): try: _setup() - return _run_queuer() + return _run_scheduler() except SystemExit as exit_code: sys.exit(exit_code) except: From ef9f3fe64e2af1cd8caec7127b5b0639268d1e53 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 10:57:15 +0100 Subject: [PATCH 083/105] Add a test case for scheduler correctly exiting on handler and entrypoint exceptions. NOTE: Tests currently fail because issue hasn't been fully fixed yet. --- .../tests/unit/test_scheduler_entrypoint.py | 119 ++++++++++++++++++ 1 file changed, 119 insertions(+) create mode 100644 st2actions/tests/unit/test_scheduler_entrypoint.py diff --git a/st2actions/tests/unit/test_scheduler_entrypoint.py b/st2actions/tests/unit/test_scheduler_entrypoint.py new file mode 100644 index 0000000000..300e37c224 --- /dev/null +++ b/st2actions/tests/unit/test_scheduler_entrypoint.py @@ -0,0 +1,119 @@ +# Licensed to the StackStorm, Inc ('StackStorm') under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import eventlet +import mock + +from st2actions.cmd.scheduler import _run_scheduler +from st2actions.scheduler.handler import ActionExecutionSchedulingQueueHandler +from st2actions.scheduler.entrypoint import SchedulerEntrypoint + +from st2tests.base import CleanDbTestCase + +__all__ = [ + 'SchedulerServiceEntryPointTestCase' +] + +def mock_handler_run(self): + # NOTE: We use eventlet.sleep to emulate async nature of this process + eventlet.sleep(0.2) + raise Exception('handler run exception') + + +def mock_handler_start_wait(self): + """ + This method emulates exception being throw in async nature in cls.process() + method. 
+ """ + # NOTE: We use eventlet.sleep to emulate async nature of this process + eventlet.sleep(0.2) + + # Mock call to process() to emulate .wait() and not .start() throwing + eventlet.spawn(self.process, mock.Mock()) + + +def mock_handler_process(self, request): + # NOTE: We use eventlet.sleep to emulate async nature of this process + eventlet.sleep(0.2) + raise Exception('handler process exception') + + +def mock_entrypoint_start(self): + # NOTE: We use eventlet.sleep to emulate async nature of this process + eventlet.sleep(0.2) + raise Exception('entrypoint start exception') + + +def mock_entrypoint_start_wait(self): + # NOTE: We use eventlet.sleep to emulate async nature of this process + eventlet.sleep(0.2) + + # Mock call to process() to emulate .wait() and not .start() throwing + eventlet.spawn(self.process, mock.Mock()) + + +def mock_entrypoint_process(self, request): + # NOTE: We use eventlet.sleep to emulate async nature of this process + eventlet.sleep(0.2) + raise Exception('entrypoint process exception') + + +class SchedulerServiceEntryPointTestCase(CleanDbTestCase): + @mock.patch.object(ActionExecutionSchedulingQueueHandler, 'run', mock_handler_run) + @mock.patch('st2actions.cmd.scheduler.LOG') + def test_service_exits_correctly_on_fatal_exception_in_handler_run(self, mock_log): + run_thread = eventlet.spawn(_run_scheduler) + result = run_thread.wait() + + self.assertEqual(result, 1) + + mock_log_exception_call = mock_log.exception.call_args_list[0][0][0] + self.assertTrue('Scheduler unexpectedly stopped' in mock_log_exception_call) + + @mock.patch.object(ActionExecutionSchedulingQueueHandler, 'start', mock_handler_start_wait) + @mock.patch.object(ActionExecutionSchedulingQueueHandler, 'process', mock_handler_process) + @mock.patch('st2actions.cmd.scheduler.LOG') + def test_service_exits_correctly_on_fatal_exception_in_handler_process(self, mock_log): + run_thread = eventlet.spawn(_run_scheduler) + result = run_thread.wait() + + 
self.assertEqual(result, 1) + + mock_log_exception_call = mock_log.exception.call_args_list[0][0][0] + self.assertTrue('Scheduler unexpectedly stopped' in mock_log_exception_call) + + + @mock.patch.object(SchedulerEntrypoint, 'start', mock_entrypoint_start) + @mock.patch('st2actions.cmd.scheduler.LOG') + def test_service_exits_correctly_on_fatal_exception_in_entrypoint_start(self, mock_log): + run_thread = eventlet.spawn(_run_scheduler) + result = run_thread.wait() + + self.assertEqual(result, 1) + + mock_log_exception_call = mock_log.exception.call_args_list[0][0][0] + self.assertTrue('Scheduler unexpectedly stopped' in mock_log_exception_call) + + @mock.patch.object(SchedulerEntrypoint, 'start', mock_entrypoint_start_wait) + @mock.patch.object(SchedulerEntrypoint, 'process', mock_entrypoint_process) + @mock.patch('st2actions.cmd.scheduler.LOG') + def test_service_exits_correctly_on_fatal_exception_in_entrypoint_process(self, mock_log): + run_thread = eventlet.spawn(_run_scheduler) + result = run_thread.wait() + + self.assertEqual(result, 1) + + mock_log_exception_call = mock_log.exception.call_args_list[0][0][0] + self.assertTrue('Scheduler unexpectedly stopped' in mock_log_exception_call) From c635bea6065ea5d918fa5ad55a4ba83db7393e92 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 11:08:29 +0100 Subject: [PATCH 084/105] Make binaries executable. --- st2actions/bin/runners.sh | 0 st2actions/bin/st2scheduler | 0 2 files changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 st2actions/bin/runners.sh mode change 100644 => 100755 st2actions/bin/st2scheduler diff --git a/st2actions/bin/runners.sh b/st2actions/bin/runners.sh old mode 100644 new mode 100755 diff --git a/st2actions/bin/st2scheduler b/st2actions/bin/st2scheduler old mode 100644 new mode 100755 From 6fb90d8e4802d16993758a9b1e66a9e2ba8c3d7e Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Tue, 12 Feb 2019 12:10:43 +0100 Subject: [PATCH 085/105] Update tests. 
--- .../tests/unit/test_scheduler_entrypoint.py | 58 +------------------ 1 file changed, 1 insertion(+), 57 deletions(-) diff --git a/st2actions/tests/unit/test_scheduler_entrypoint.py b/st2actions/tests/unit/test_scheduler_entrypoint.py index 300e37c224..1528f8d284 100644 --- a/st2actions/tests/unit/test_scheduler_entrypoint.py +++ b/st2actions/tests/unit/test_scheduler_entrypoint.py @@ -26,50 +26,19 @@ 'SchedulerServiceEntryPointTestCase' ] + def mock_handler_run(self): # NOTE: We use eventlet.sleep to emulate async nature of this process eventlet.sleep(0.2) raise Exception('handler run exception') -def mock_handler_start_wait(self): - """ - This method emulates exception being throw in async nature in cls.process() - method. - """ - # NOTE: We use eventlet.sleep to emulate async nature of this process - eventlet.sleep(0.2) - - # Mock call to process() to emulate .wait() and not .start() throwing - eventlet.spawn(self.process, mock.Mock()) - - -def mock_handler_process(self, request): - # NOTE: We use eventlet.sleep to emulate async nature of this process - eventlet.sleep(0.2) - raise Exception('handler process exception') - - def mock_entrypoint_start(self): # NOTE: We use eventlet.sleep to emulate async nature of this process eventlet.sleep(0.2) raise Exception('entrypoint start exception') -def mock_entrypoint_start_wait(self): - # NOTE: We use eventlet.sleep to emulate async nature of this process - eventlet.sleep(0.2) - - # Mock call to process() to emulate .wait() and not .start() throwing - eventlet.spawn(self.process, mock.Mock()) - - -def mock_entrypoint_process(self, request): - # NOTE: We use eventlet.sleep to emulate async nature of this process - eventlet.sleep(0.2) - raise Exception('entrypoint process exception') - - class SchedulerServiceEntryPointTestCase(CleanDbTestCase): @mock.patch.object(ActionExecutionSchedulingQueueHandler, 'run', mock_handler_run) @mock.patch('st2actions.cmd.scheduler.LOG') @@ -82,19 +51,6 @@ def 
test_service_exits_correctly_on_fatal_exception_in_handler_run(self, mock_lo mock_log_exception_call = mock_log.exception.call_args_list[0][0][0] self.assertTrue('Scheduler unexpectedly stopped' in mock_log_exception_call) - @mock.patch.object(ActionExecutionSchedulingQueueHandler, 'start', mock_handler_start_wait) - @mock.patch.object(ActionExecutionSchedulingQueueHandler, 'process', mock_handler_process) - @mock.patch('st2actions.cmd.scheduler.LOG') - def test_service_exits_correctly_on_fatal_exception_in_handler_process(self, mock_log): - run_thread = eventlet.spawn(_run_scheduler) - result = run_thread.wait() - - self.assertEqual(result, 1) - - mock_log_exception_call = mock_log.exception.call_args_list[0][0][0] - self.assertTrue('Scheduler unexpectedly stopped' in mock_log_exception_call) - - @mock.patch.object(SchedulerEntrypoint, 'start', mock_entrypoint_start) @mock.patch('st2actions.cmd.scheduler.LOG') def test_service_exits_correctly_on_fatal_exception_in_entrypoint_start(self, mock_log): @@ -105,15 +61,3 @@ def test_service_exits_correctly_on_fatal_exception_in_entrypoint_start(self, mo mock_log_exception_call = mock_log.exception.call_args_list[0][0][0] self.assertTrue('Scheduler unexpectedly stopped' in mock_log_exception_call) - - @mock.patch.object(SchedulerEntrypoint, 'start', mock_entrypoint_start_wait) - @mock.patch.object(SchedulerEntrypoint, 'process', mock_entrypoint_process) - @mock.patch('st2actions.cmd.scheduler.LOG') - def test_service_exits_correctly_on_fatal_exception_in_entrypoint_process(self, mock_log): - run_thread = eventlet.spawn(_run_scheduler) - result = run_thread.wait() - - self.assertEqual(result, 1) - - mock_log_exception_call = mock_log.exception.call_args_list[0][0][0] - self.assertTrue('Scheduler unexpectedly stopped' in mock_log_exception_call) From 6bf07058e9ce7c193605891707f2ed3e2a91832f Mon Sep 17 00:00:00 2001 From: W Chan Date: Tue, 12 Feb 2019 20:01:20 +0000 Subject: [PATCH 086/105] Regenerated the sample st2 config 
Regenerated the sample st2 config with the scheduler retry configuration options. --- conf/st2.conf.sample | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/conf/st2.conf.sample b/conf/st2.conf.sample index 8a16afad77..939f95b25e 100644 --- a/conf/st2.conf.sample +++ b/conf/st2.conf.sample @@ -261,14 +261,18 @@ thread_pool_size = 10 logging = /etc/st2/logging.rulesengine.conf [scheduler] -# How long (in seconds) to sleep between each action scheduler main loop run interval. -sleep_interval = 0.1 -# How often (in seconds) to look for zombie execution requests before rescheduling them. -gc_interval = 10 +# The maximum number of attempts that the scheduler retries on error. +retry_max_attempt = 10 # Location of the logging configuration file. logging = /etc/st2/logging.scheduler.conf +# How long (in seconds) to sleep between each action scheduler main loop run interval. +sleep_interval = 0.1 # The size of the pool used by the scheduler for scheduling executions. pool_size = 10 +# The number of milliseconds to wait in between retries. +retry_wait_msec = 3000 +# How often (in seconds) to look for zombie execution requests before rescheduling them. +gc_interval = 10 [schema] # Version of JSON schema to use. From 62c56c1ec2ba9fc009f1682ed26ef0d31e2804e7 Mon Sep 17 00:00:00 2001 From: W Chan Date: Tue, 12 Feb 2019 20:16:35 +0000 Subject: [PATCH 087/105] Add unit test to cover the handler cleanup Add a unit test to cover failure in the handler cleanup. This should signal the run method to also pause and exit the scheduler handler process. 
--- .../tests/unit/test_scheduler_entrypoint.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/st2actions/tests/unit/test_scheduler_entrypoint.py b/st2actions/tests/unit/test_scheduler_entrypoint.py index 1528f8d284..9e0282a610 100644 --- a/st2actions/tests/unit/test_scheduler_entrypoint.py +++ b/st2actions/tests/unit/test_scheduler_entrypoint.py @@ -33,6 +33,12 @@ def mock_handler_run(self): raise Exception('handler run exception') +def mock_handler_cleanup(self): + # NOTE: We use eventlet.sleep to emulate async nature of this process + eventlet.sleep(0.2) + raise Exception('handler clean exception') + + def mock_entrypoint_start(self): # NOTE: We use eventlet.sleep to emulate async nature of this process eventlet.sleep(0.2) @@ -51,6 +57,17 @@ def test_service_exits_correctly_on_fatal_exception_in_handler_run(self, mock_lo mock_log_exception_call = mock_log.exception.call_args_list[0][0][0] self.assertTrue('Scheduler unexpectedly stopped' in mock_log_exception_call) + @mock.patch.object(ActionExecutionSchedulingQueueHandler, 'cleanup', mock_handler_cleanup) + @mock.patch('st2actions.cmd.scheduler.LOG') + def test_service_exits_correctly_on_fatal_exception_in_handler_cleanup(self, mock_log): + run_thread = eventlet.spawn(_run_scheduler) + result = run_thread.wait() + + self.assertEqual(result, 1) + + mock_log_exception_call = mock_log.exception.call_args_list[0][0][0] + self.assertTrue('Scheduler unexpectedly stopped' in mock_log_exception_call) + @mock.patch.object(SchedulerEntrypoint, 'start', mock_entrypoint_start) @mock.patch('st2actions.cmd.scheduler.LOG') def test_service_exits_correctly_on_fatal_exception_in_entrypoint_start(self, mock_log): From 83ad99405ad8fa94453341e1b54fe0c23a373b1c Mon Sep 17 00:00:00 2001 From: W Chan Date: Tue, 12 Feb 2019 22:12:19 +0000 Subject: [PATCH 088/105] Add unit tests to cover the retries in scheduler handler Add unit tests to cover the retries in the run and cleanup in the scheduler handler. 
--- st2actions/tests/unit/test_scheduler_retry.py | 120 ++++++++++++++++++ st2tests/st2tests/config.py | 6 + 2 files changed, 126 insertions(+) create mode 100644 st2actions/tests/unit/test_scheduler_retry.py diff --git a/st2actions/tests/unit/test_scheduler_retry.py b/st2actions/tests/unit/test_scheduler_retry.py new file mode 100644 index 0000000000..51756e2ce0 --- /dev/null +++ b/st2actions/tests/unit/test_scheduler_retry.py @@ -0,0 +1,120 @@ +# Licensed to the StackStorm, Inc ('StackStorm') under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import eventlet +import mock +import pymongo +import uuid + +from st2tests import config as test_config +test_config.parse_args() + +from st2actions.scheduler import handler +from st2common.models.db import execution_queue as ex_q_db +from st2common.persistence import execution_queue as ex_q_db_access +from st2tests.base import CleanDbTestCase + + +__all__ = [ + 'SchedulerHandlerRetryTestCase' +] + + +MOCK_QUEUE_ITEM = ex_q_db.ActionExecutionSchedulingQueueItemDB(liveaction_id=uuid.uuid4().hex) + + +class SchedulerHandlerRetryTestCase(CleanDbTestCase): + + @mock.patch.object( + handler.ActionExecutionSchedulingQueueHandler, '_get_next_execution', + mock.MagicMock(side_effect=[pymongo.errors.ConnectionFailure(), MOCK_QUEUE_ITEM])) + @mock.patch.object( + eventlet.GreenPool, 'spawn', + mock.MagicMock(return_value=None)) + def test_handler_retry_connection_error(self): + scheduling_queue_handler = handler.ActionExecutionSchedulingQueueHandler() + scheduling_queue_handler.process() + + # Make sure retry occurs and that _handle_execution in process is called. 
+ calls = [mock.call(scheduling_queue_handler._handle_execution, MOCK_QUEUE_ITEM)] + eventlet.GreenPool.spawn.assert_has_calls(calls) + + @mock.patch.object( + handler.ActionExecutionSchedulingQueueHandler, '_get_next_execution', + mock.MagicMock(side_effect=[pymongo.errors.ConnectionFailure()] * 3)) + @mock.patch.object( + eventlet.GreenPool, 'spawn', + mock.MagicMock(return_value=None)) + def test_handler_retries_exhausted(self): + scheduling_queue_handler = handler.ActionExecutionSchedulingQueueHandler() + self.assertRaises(pymongo.errors.ConnectionFailure, scheduling_queue_handler.process) + self.assertEqual(eventlet.GreenPool.spawn.call_count, 0) + + @mock.patch.object( + handler.ActionExecutionSchedulingQueueHandler, '_get_next_execution', + mock.MagicMock(side_effect=KeyError())) + @mock.patch.object( + eventlet.GreenPool, 'spawn', + mock.MagicMock(return_value=None)) + def test_handler_retry_unexpected_error(self): + scheduling_queue_handler = handler.ActionExecutionSchedulingQueueHandler() + self.assertRaises(KeyError, scheduling_queue_handler.process) + self.assertEqual(eventlet.GreenPool.spawn.call_count, 0) + + @mock.patch.object( + ex_q_db_access.ActionExecutionSchedulingQueue, 'query', + mock.MagicMock(side_effect=[pymongo.errors.ConnectionFailure(), [MOCK_QUEUE_ITEM]])) + @mock.patch.object( + ex_q_db_access.ActionExecutionSchedulingQueue, 'add_or_update', + mock.MagicMock(return_value=None)) + def test_handler_gc_retry_connection_error(self): + scheduling_queue_handler = handler.ActionExecutionSchedulingQueueHandler() + scheduling_queue_handler._handle_garbage_collection() + + # Make sure retry occurs and that _handle_execution in process is called. 
+ calls = [mock.call(MOCK_QUEUE_ITEM, publish=False)] + ex_q_db_access.ActionExecutionSchedulingQueue.add_or_update.assert_has_calls(calls) + + @mock.patch.object( + ex_q_db_access.ActionExecutionSchedulingQueue, 'query', + mock.MagicMock(side_effect=[pymongo.errors.ConnectionFailure()] * 3)) + @mock.patch.object( + ex_q_db_access.ActionExecutionSchedulingQueue, 'add_or_update', + mock.MagicMock(return_value=None)) + def test_handler_gc_retries_exhausted(self): + scheduling_queue_handler = handler.ActionExecutionSchedulingQueueHandler() + + self.assertRaises( + pymongo.errors.ConnectionFailure, + scheduling_queue_handler._handle_garbage_collection + ) + + self.assertEqual(ex_q_db_access.ActionExecutionSchedulingQueue.add_or_update.call_count, 0) + + @mock.patch.object( + ex_q_db_access.ActionExecutionSchedulingQueue, 'query', + mock.MagicMock(side_effect=KeyError())) + @mock.patch.object( + ex_q_db_access.ActionExecutionSchedulingQueue, 'add_or_update', + mock.MagicMock(return_value=None)) + def test_handler_gc_unexpected_error(self): + scheduling_queue_handler = handler.ActionExecutionSchedulingQueueHandler() + + self.assertRaises( + KeyError, + scheduling_queue_handler._handle_garbage_collection + ) + + self.assertEqual(ex_q_db_access.ActionExecutionSchedulingQueue.add_or_update.call_count, 0) diff --git a/st2tests/st2tests/config.py b/st2tests/st2tests/config.py index 1b50f2aa68..3cd3300110 100644 --- a/st2tests/st2tests/config.py +++ b/st2tests/st2tests/config.py @@ -289,6 +289,12 @@ def _register_scheduler_opts(): cfg.FloatOpt( 'gc_interval', default=5, help='How often to look for zombie executions before rescheduling them (in ms).'), + cfg.IntOpt( + 'retry_max_attempt', default=3, + help='The maximum number of attempts that the scheduler retries on error.'), + cfg.IntOpt( + 'retry_wait_msec', default=100, + help='The number of milliseconds to wait in between retries.') ] _register_opts(scheduler_opts, group='scheduler') From 
ee07d938bf8c53e45c1e9a038858c9654b9d5d85 Mon Sep 17 00:00:00 2001 From: W Chan Date: Wed, 13 Feb 2019 00:54:08 +0000 Subject: [PATCH 089/105] Fix scheduler test configs in unit tests Add or move the parsing of test configs to the top of affected test modules and make sure the scheduler default config options do not conflict with test configs. --- .../tests/unit/test_actionchain_cancel.py | 3 +++ .../tests/unit/test_actionchain_pause_resume.py | 3 +++ st2actions/st2actions/scheduler/config.py | 9 ++++++++- st2actions/tests/unit/test_scheduler.py | 6 +++--- st2actions/tests/unit/test_scheduler_entrypoint.py | 3 +++ 5 files changed, 20 insertions(+), 4 deletions(-) diff --git a/contrib/runners/action_chain_runner/tests/unit/test_actionchain_cancel.py b/contrib/runners/action_chain_runner/tests/unit/test_actionchain_cancel.py index 1e663f3dab..e53a1673ad 100644 --- a/contrib/runners/action_chain_runner/tests/unit/test_actionchain_cancel.py +++ b/contrib/runners/action_chain_runner/tests/unit/test_actionchain_cancel.py @@ -19,6 +19,9 @@ import os import tempfile +from st2tests import config as test_config +test_config.parse_args() + from st2common.bootstrap import actionsregistrar from st2common.bootstrap import runnersregistrar diff --git a/contrib/runners/action_chain_runner/tests/unit/test_actionchain_pause_resume.py b/contrib/runners/action_chain_runner/tests/unit/test_actionchain_pause_resume.py index 189d74ec34..0b25930a2f 100644 --- a/contrib/runners/action_chain_runner/tests/unit/test_actionchain_pause_resume.py +++ b/contrib/runners/action_chain_runner/tests/unit/test_actionchain_pause_resume.py @@ -19,6 +19,9 @@ import os import tempfile +from st2tests import config as test_config +test_config.parse_args() + from st2common.bootstrap import actionsregistrar from st2common.bootstrap import runnersregistrar diff --git a/st2actions/st2actions/scheduler/config.py b/st2actions/st2actions/scheduler/config.py index 552042bc0a..27edfd6634 100644 --- 
a/st2actions/st2actions/scheduler/config.py +++ b/st2actions/st2actions/scheduler/config.py @@ -19,6 +19,10 @@ from st2common import config as common_config from st2common.constants import system as sys_constants +from st2common import log as logging + + +LOG = logging.getLogger(__name__) def parse_args(args=None): @@ -67,4 +71,7 @@ def _register_service_opts(): cfg.CONF.register_opts(scheduler_opts, group='scheduler') -register_opts() +try: + register_opts() +except cfg.DuplicateOptError: + LOG.exception('The scheduler configuration options are already parsed and loaded.') diff --git a/st2actions/tests/unit/test_scheduler.py b/st2actions/tests/unit/test_scheduler.py index 05c9f13397..64c9cbae16 100644 --- a/st2actions/tests/unit/test_scheduler.py +++ b/st2actions/tests/unit/test_scheduler.py @@ -17,6 +17,9 @@ import mock +from st2tests import config as test_config +test_config.parse_args() + import st2common from st2tests import ExecutionDbTestCase from st2tests.fixturesloader import FixturesLoader @@ -37,9 +40,6 @@ from st2common.services import executions as execution_service from st2common.exceptions import db as db_exc -from st2tests import config as test_config -test_config.parse_args() - LIVE_ACTION = { 'parameters': { diff --git a/st2actions/tests/unit/test_scheduler_entrypoint.py b/st2actions/tests/unit/test_scheduler_entrypoint.py index 9e0282a610..65f6d2d8ed 100644 --- a/st2actions/tests/unit/test_scheduler_entrypoint.py +++ b/st2actions/tests/unit/test_scheduler_entrypoint.py @@ -16,6 +16,9 @@ import eventlet import mock +from st2tests import config as test_config +test_config.parse_args() + from st2actions.cmd.scheduler import _run_scheduler from st2actions.scheduler.handler import ActionExecutionSchedulingQueueHandler from st2actions.scheduler.entrypoint import SchedulerEntrypoint From 0ebe0c89f7335af1f69d965f581d27326495bc0d Mon Sep 17 00:00:00 2001 From: W Chan Date: Wed, 13 Feb 2019 17:20:07 +0000 Subject: [PATCH 090/105] Include the scheduler 
retry and exit code fix in changelog From 0f11aebe030987ca12bbc567fc7fd4bffd0d4827 Mon Sep 17 00:00:00 2001 From: W Chan Date: Fri, 15 Feb 2019 21:07:02 +0000 Subject: [PATCH 091/105] Update orquesta to version 0.4 Update orquesta to version 0.4 to include fixes on CPU usage, interpretation of quoted strings and boolean values in shorthand publish, with items variable with the word "in" that conflict with syntax, and other docs updates. --- contrib/runners/orquesta_runner/in-requirements.txt | 2 +- contrib/runners/orquesta_runner/requirements.txt | 2 +- requirements.txt | 2 +- st2common/in-requirements.txt | 2 +- st2common/requirements.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/contrib/runners/orquesta_runner/in-requirements.txt b/contrib/runners/orquesta_runner/in-requirements.txt index 8b7542e527..b5e1f74671 100644 --- a/contrib/runners/orquesta_runner/in-requirements.txt +++ b/contrib/runners/orquesta_runner/in-requirements.txt @@ -1 +1 @@ -git+https://github.com/StackStorm/orquesta.git@v0.3#egg=orquesta +git+https://github.com/StackStorm/orquesta.git@v0.4#egg=orquesta diff --git a/contrib/runners/orquesta_runner/requirements.txt b/contrib/runners/orquesta_runner/requirements.txt index 7ba0329159..c2971e635a 100644 --- a/contrib/runners/orquesta_runner/requirements.txt +++ b/contrib/runners/orquesta_runner/requirements.txt @@ -1,2 +1,2 @@ # Don't edit this file. It's generated automatically! 
-git+https://github.com/StackStorm/orquesta.git@v0.3#egg=orquesta +git+https://github.com/StackStorm/orquesta.git@v0.4#egg=orquesta diff --git a/requirements.txt b/requirements.txt index 1343a7b96f..d091f7a76f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,7 +7,7 @@ cryptography==2.4.1 eventlet==0.24.1 flex>=6.13.2,<6.15.0 git+https://github.com/Kami/logshipper.git@stackstorm_patched#egg=logshipper -git+https://github.com/StackStorm/orquesta.git@v0.3#egg=orquesta +git+https://github.com/StackStorm/orquesta.git@v0.4#egg=orquesta git+https://github.com/StackStorm/python-mistralclient.git@st2-2.10.1#egg=python-mistralclient git+https://github.com/StackStorm/st2-auth-backend-flat-file.git@master#egg=st2-auth-backend-flat-file gitpython==2.1.11 diff --git a/st2common/in-requirements.txt b/st2common/in-requirements.txt index 93fd72847e..d114191ce0 100644 --- a/st2common/in-requirements.txt +++ b/st2common/in-requirements.txt @@ -9,7 +9,7 @@ jsonschema kombu mongoengine networkx -git+https://github.com/StackStorm/orquesta.git@v0.3#egg=orquesta +git+https://github.com/StackStorm/orquesta.git@v0.4#egg=orquesta oslo.config paramiko pyyaml diff --git a/st2common/requirements.txt b/st2common/requirements.txt index 657dca13c4..cbb3217f1d 100644 --- a/st2common/requirements.txt +++ b/st2common/requirements.txt @@ -3,7 +3,7 @@ apscheduler==3.5.3 cryptography==2.4.1 eventlet==0.24.1 flex>=6.13.2,<6.15.0 -git+https://github.com/StackStorm/orquesta.git@v0.3#egg=orquesta +git+https://github.com/StackStorm/orquesta.git@v0.4#egg=orquesta greenlet==0.4.15 ipaddr jinja2 From e49134f5e093c3d076e2f8c6b13e46f9289c3ab0 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Fri, 14 Dec 2018 12:29:17 +0100 Subject: [PATCH 092/105] Fix fibonacci sensor intger overflow by resetting self.a and self.b if they get large enough. Also use larger poll interval. 
--- contrib/examples/sensors/fibonacci_sensor.py | 13 +++++++++---- contrib/examples/sensors/fibonacci_sensor.yaml | 2 +- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/contrib/examples/sensors/fibonacci_sensor.py b/contrib/examples/sensors/fibonacci_sensor.py index 6e41db339e..e7bd86b42d 100644 --- a/contrib/examples/sensors/fibonacci_sensor.py +++ b/contrib/examples/sensors/fibonacci_sensor.py @@ -5,8 +5,7 @@ class FibonacciSensor(PollingSensor): - def __init__(self, sensor_service, config, - poll_interval=5): + def __init__(self, sensor_service, config, poll_interval=20): super(FibonacciSensor, self).__init__( sensor_service=sensor_service, config=config, @@ -24,8 +23,14 @@ def setup(self): self.logger = self.sensor_service.get_logger(name=self.__class__.__name__) def poll(self): - fib = self.a + self.b - self.logger.debug('Count: %d, a: %d, b: %d', self.count, self.a, self.b) + # Reset a and b if there are large enough to avoid integer overflow problems + if self.a > 10000 or self.b > 10000: + self.a = 0 + self.b = 1 + self.count = 2 + + fib = (self.a + self.b) + self.logger.debug('Count: %d, a: %d, b: %d, fib: %s', self.count, self.a, self.b, fib) payload = { "count": self.count, diff --git a/contrib/examples/sensors/fibonacci_sensor.yaml b/contrib/examples/sensors/fibonacci_sensor.yaml index 6a2a449f56..5485cd8135 100644 --- a/contrib/examples/sensors/fibonacci_sensor.yaml +++ b/contrib/examples/sensors/fibonacci_sensor.yaml @@ -2,7 +2,7 @@ class_name: "FibonacciSensor" entry_point: "fibonacci_sensor.py" description: "Simple polling sensor that emits fibonacci number." - poll_interval: 5 + poll_interval: 20 trigger_types: - name: "fibonacci" From bf182475d093068240867513ce50fd872b570ff0 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Fri, 14 Dec 2018 12:34:08 +0100 Subject: [PATCH 093/105] Add log statement. 
--- contrib/examples/sensors/fibonacci_sensor.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/contrib/examples/sensors/fibonacci_sensor.py b/contrib/examples/sensors/fibonacci_sensor.py index e7bd86b42d..266e81aba3 100644 --- a/contrib/examples/sensors/fibonacci_sensor.py +++ b/contrib/examples/sensors/fibonacci_sensor.py @@ -20,11 +20,14 @@ def setup(self): self.a = 0 self.b = 1 self.count = 2 + self.logger = self.sensor_service.get_logger(name=self.__class__.__name__) def poll(self): # Reset a and b if there are large enough to avoid integer overflow problems if self.a > 10000 or self.b > 10000: + self.logger.debug('Reseting values to avoid integer overflow issues') + self.a = 0 self.b = 1 self.count = 2 From 2d91cba5e798863f6079951d896549cc1134e339 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 18 Feb 2019 10:28:53 +0100 Subject: [PATCH 094/105] Fix / update changelog file. --- CHANGELOG.rst | 58 ++++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 48 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 77314bae7e..b999f09962 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,19 +1,41 @@ Changelog ========= -2.10.1 - December 19, 2018 --------------------------- +in development +-------------- -Fixed +Added ~~~~~ -* Fix an issue with ``GET /v1/keys`` API endpoint not correctly handling ``?scope=all`` and - ``?user=`` query filter parameter inside the open-source edition. This would allow - user A to retrieve datastore values from user B and similar. +* Add support for various new SSL / TLS related config options (``ssl_keyfile``, ``ssl_certfile``, + ``ssl_ca_certs``, ``ssl_certfile``, ``authentication_mechanism``) to the ``messaging`` section in + ``st2.conf`` config file. 
- NOTE: Enterprise edition with RBAC was not affected, because in RBAC version, correct check is - in place which only allows users with an admin role to use ``?scope=all`` and retrieve / view - datastore values for arbitrary system users. (security issue bug fix) + With those config options, user can configure things such as client based certificate + authentication, client side verification of a server certificate against a specific CA bundle, etc. + + NOTE: Those options are only supported when using a default and officially supported AMQP backend + with RabbitMQ server. (new feature) #4541 +* Add metrics instrumentation to the ``st2notifier`` service. For the available / exposed metrics, + please refer to https://docs.stackstorm.com/reference/metrics.html. (improvement) #4536 + +Changed +~~~~~~~ + +* Update logging code so we exclude log messages with log level ``AUDIT`` from a default service + log file (e.g. ``st2api.log``). Log messages with level ``AUDIT`` are already logged in a + dedicated service audit log file (e.g. ``st2api.audit.log``) so there is no need for them to also + be duplicated and included in regular service log file. + + NOTE: To aid with debugging, audit log messages are also included in a regular log file when log + level is set to ``DEBUG`` or ``system.debug`` config option is set to ``True``. + + Reported by Nick Maludy. (improvement) #4538 #4502 +* Moved the lock from concurrency policies into the scheduler to fix a race condition when there + are multiple scheduler instances scheduling execution for action with concurrency policies. + #4481 (bug fix) +* Add retries to scheduler to handle temporary hiccup in DB connection. Refactor scheduler + service to return proper exit code when there is a failure. #4539 (bug fix) Fixed ~~~~~ @@ -36,8 +58,24 @@ Fixed Reported by @johandahlberg (bug fix) #4533 * Fix ``core.sendmail`` action so it specifies ``charset=UTF-8`` in the ``Content-Type`` email header. 
This way it works correctly when an email subject and / or body contains unicode data. - + Reported by @johandahlberg (bug fix) #4533 4534 +* Fix CLI ``st2 apikey load`` not being idempotent and API endpoint ``/api/v1/apikeys`` not + honoring desired ``ID`` for the new record creation. #4542 + +2.10.1 - December 19, 2018 +-------------------------- + +Fixed +~~~~~ + +* Fix an issue with ``GET /v1/keys`` API endpoint not correctly handling ``?scope=all`` and + ``?user=`` query filter parameter inside the open-source edition. This would allow + user A to retrieve datastore values from user B and similar. + + NOTE: Enterprise edition with RBAC was not affected, because in RBAC version, correct check is + in place which only allows users with an admin role to use ``?scope=all`` and retrieve / view + datastore values for arbitrary system users. (security issue bug fix) 2.10.0 - December 13, 2018 -------------------------- From 5701b2207ab893de0f79d9cfe1e9eeda6af27471 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 18 Feb 2019 10:39:52 +0100 Subject: [PATCH 095/105] Revert changes which are targeted for v3.0.0 and not v2.10.2 release. 
--- fixed-requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/fixed-requirements.txt b/fixed-requirements.txt index 6535a680fb..a7560aae3f 100644 --- a/fixed-requirements.txt +++ b/fixed-requirements.txt @@ -12,7 +12,7 @@ amqp==2.3.2 # See https://github.com/StackStorm/st2/issues/4160#issuecomment-394386433 for details oslo.config>=1.12.1,<1.13 oslo.utils>=3.36.2,<=3.37.0 -six==1.12.0 +six==1.11.0 pyyaml>=4.2b4,<5.2 requests[security]<2.15,>=2.14.1 apscheduler==3.5.3 @@ -42,7 +42,7 @@ tooz==1.63.1 zake==0.2.2 routes==2.4.1 flex==6.13.2 -webob==1.8.4 +webob==1.8.2 prance==0.9.0 pywinrm==0.3.0 # test requirements below From 015a328e1c2208dbbdadda75b8c68abfaff8b7fc Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 18 Feb 2019 10:47:31 +0100 Subject: [PATCH 096/105] Add missing entry for flex to fixed-requirements.txt and regenerate requirements. --- fixed-requirements.txt | 2 +- requirements.txt | 4 ++-- st2client/requirements.txt | 2 +- st2common/requirements.txt | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/fixed-requirements.txt b/fixed-requirements.txt index a7560aae3f..f9b83b1ec2 100644 --- a/fixed-requirements.txt +++ b/fixed-requirements.txt @@ -41,8 +41,8 @@ prompt-toolkit==1.0.15 tooz==1.63.1 zake==0.2.2 routes==2.4.1 -flex==6.13.2 webob==1.8.2 +flex==6.14.0 prance==0.9.0 pywinrm==0.3.0 # test requirements below diff --git a/requirements.txt b/requirements.txt index d091f7a76f..9d32bc948c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ argcomplete bcrypt cryptography==2.4.1 eventlet==0.24.1 -flex>=6.13.2,<6.15.0 +flex==6.14.0 git+https://github.com/Kami/logshipper.git@stackstorm_patched#egg=logshipper git+https://github.com/StackStorm/orquesta.git@v0.4#egg=orquesta git+https://github.com/StackStorm/python-mistralclient.git@st2-2.10.1#egg=python-mistralclient @@ -43,7 +43,7 @@ python-json-logger python-statsd==2.1.0 pytz==2018.7 pywinrm==0.3.0 -pyyaml>=4.2b4,<5.2 
+pyyaml<5.2,>=4.2b4 rednose requests[security]<2.15,>=2.14.1 retrying==1.3.3 diff --git a/st2client/requirements.txt b/st2client/requirements.txt index 0d9a1c6443..d08e5fef1c 100644 --- a/st2client/requirements.txt +++ b/st2client/requirements.txt @@ -8,7 +8,7 @@ prompt-toolkit==1.0.15 python-dateutil==2.7.5 python-editor==1.0.3 pytz==2018.7 -pyyaml>=4.2b4,<5.2 +pyyaml<5.2,>=4.2b4 requests[security]<2.15,>=2.14.1 six==1.11.0 sseclient==0.0.19 diff --git a/st2common/requirements.txt b/st2common/requirements.txt index cbb3217f1d..c35965acce 100644 --- a/st2common/requirements.txt +++ b/st2common/requirements.txt @@ -2,7 +2,7 @@ apscheduler==3.5.3 cryptography==2.4.1 eventlet==0.24.1 -flex>=6.13.2,<6.15.0 +flex==6.14.0 git+https://github.com/StackStorm/orquesta.git@v0.4#egg=orquesta greenlet==0.4.15 ipaddr @@ -18,7 +18,7 @@ prometheus_client==0.1.1 pymongo==3.7.2 python-dateutil==2.7.5 python-statsd==2.1.0 -pyyaml>=4.2b4,<5.2 +pyyaml<5.2,>=4.2b4 requests[security]<2.15,>=2.14.1 retrying==1.3.3 routes==2.4.1 From f88d9b86c136f309bc288c15fbaa25bc9da369cd Mon Sep 17 00:00:00 2001 From: blag Date: Fri, 15 Feb 2019 15:43:48 -0800 Subject: [PATCH 097/105] Add a checkrequirements make target and integrate it into Travis tests --- Makefile | 10 +++++++++- scripts/travis/build.sh | 2 +- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 70085b4072..03c7b691e2 100644 --- a/Makefile +++ b/Makefile @@ -132,7 +132,7 @@ play: @echo .PHONY: check -check: requirements flake8 checklogs +check: requirements flake8 checkrequirements checklogs .PHONY: install-runners install-runners: @@ -146,6 +146,14 @@ install-runners: (. 
$(VIRTUALENV_DIR)/bin/activate; cd $$component; python setup.py develop); \ done +.PHONY: checkrequirements +checkrequirements: requirements + @echo + @echo "============== CHECKING REQUIREMENTS ==============" + @echo + # Update requirements and then make sure no files were changed + git status -- *requirements.txt */*requirements.txt | grep -q "nothing to commit, working tree clean" + .PHONY: checklogs checklogs: @echo diff --git a/scripts/travis/build.sh b/scripts/travis/build.sh index 68647002de..81a3e8fdf9 100755 --- a/scripts/travis/build.sh +++ b/scripts/travis/build.sh @@ -15,7 +15,7 @@ fi if [ ${TASK} == 'checks' ]; then # compile .py files, useful as compatibility syntax check make compile - make pylint flake8 bandit .st2client-dependencies-check .st2common-circular-dependencies-check + make pylint flake8 bandit checkrequirements .st2client-dependencies-check .st2common-circular-dependencies-check elif [ ${TASK} == 'unit' ]; then # compile .py files, useful as compatibility syntax check make compile From a684e41d699dee0c897b9f0903cc0e28a9c275bf Mon Sep 17 00:00:00 2001 From: blag Date: Fri, 15 Feb 2019 16:40:30 -0800 Subject: [PATCH 098/105] Simplify some make logic --- Makefile | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/Makefile b/Makefile index 03c7b691e2..0d324e03b7 100644 --- a/Makefile +++ b/Makefile @@ -132,8 +132,11 @@ play: @echo .PHONY: check -check: requirements flake8 checkrequirements checklogs +check: check-requirements flake8 checklogs +# NOTE: We pass --no-deps to the script so we don't install all the +# package dependencies which are already installed as part of "requirements" +# make targets. This speeds up the build .PHONY: install-runners install-runners: @echo "" @@ -143,16 +146,17 @@ install-runners: echo "==========================================================="; \ echo "Installing runner:" $$component; \ echo "==========================================================="; \ - (. 
$(VIRTUALENV_DIR)/bin/activate; cd $$component; python setup.py develop); \ + (. $(VIRTUALENV_DIR)/bin/activate; cd $$component; python setup.py develop --no-deps); \ done -.PHONY: checkrequirements -checkrequirements: requirements +.PHONY: check-requirements +check-requirements: requirements @echo @echo "============== CHECKING REQUIREMENTS ==============" @echo # Update requirements and then make sure no files were changed - git status -- *requirements.txt */*requirements.txt | grep -q "nothing to commit, working tree clean" + git status -- *requirements.txt */*requirements.txt | grep -q "nothing to commit" + @echo "All requirements files up-to-date!" .PHONY: checklogs checklogs: @@ -848,7 +852,7 @@ debs: ci: ci-checks ci-unit ci-integration ci-mistral ci-packs-tests .PHONY: ci-checks -ci-checks: compile .generated-files-check .pylint .flake8 .bandit .st2client-dependencies-check .st2common-circular-dependencies-check circle-lint-api-spec .rst-check .st2client-install-check +ci-checks: compile .generated-files-check .pylint .flake8 check-requirements .st2client-dependencies-check .st2common-circular-dependencies-check circle-lint-api-spec .rst-check .st2client-install-check .PHONY: ci-py3-unit ci-py3-unit: From 2e73b8128d87d0acb60619c78f6027c74a5fd66f Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 18 Feb 2019 15:31:13 +0100 Subject: [PATCH 099/105] Use latest travis config from master. 
--- .travis.yml | 32 +++++++++++++++++++++++--------- 1 file changed, 23 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index 9ef55b3797..5d28bf5f6a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,7 @@ # Used old infrastructure, needed for integration tests: # http://docs.travis-ci.com/user/workers/standard-infrastructure/ sudo: required +# NOTE: We use precise because tests finish faster than on Xenial dist: precise language: python @@ -60,8 +61,25 @@ matrix: python: 3.6 name: "Integration Tests (Python 3.6)" -services: - - rabbitmq +addons: + apt: + sources: + - mongodb-upstart + - sourceline: 'deb [arch=amd64] http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.4 multiverse' + key_url: 'https://www.mongodb.org/static/pgp/server-3.4.asc' + # NOTE: Precise repo doesn't contain Erlang 20.x, latest version is 19.x so we need to use RabbitMQ 3.7.6 + #- sourceline: 'deb [arch=amd64] http://packages.erlang-solutions.com/ubuntu precise contrib' + # key_url: 'https://packages.erlang-solutions.com/ubuntu/erlang_solutions.asc' + #- sourceline: 'deb [arch=amd64] https://dl.bintray.com/rabbitmq/debian precise rabbitmq-server-v3.6.x' + # key_url: 'https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc' + - sourceline: 'ppa:git-core/ppa' + packages: + - mongodb-org-server + - mongodb-org-shell + - erlang + - rabbitmq-server + - git + - libffi-dev cache: pip: true @@ -76,13 +94,8 @@ cache: #- .tox/ before_install: - # 1. 
Install MongoDB 3.4 and latest version of git - - sudo add-apt-repository -y ppa:git-core/ppa - - curl https://www.mongodb.org/static/pgp/server-3.4.asc | sudo apt-key add - - - echo "deb [arch=amd64] http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.4 multiverse" | sudo tee -a /etc/apt/sources.list - # Work around for Travis timeout issues, see https://github.com/travis-ci/travis-ci/issues/9112 - - sudo apt-get update --option Acquire::Retries=100 --option Acquire::http::Timeout="60" - - sudo apt-get install mongodb-org-server mongodb-org-shell git libffi-dev -y + # Work around for apt Travis timeout issues, see https://github.com/travis-ci/travis-ci/issues/9112 + #- sudo apt-get update --option Acquire::Retries=100 --option Acquire::http::Timeout="60" - pip install --upgrade "pip>=9.0,<9.1" - sudo pip install --upgrade "virtualenv==15.1.0" @@ -103,6 +116,7 @@ before_script: # Use custom RabbitMQ config which enables SSL / TLS listener on port 5671 with test certs - sudo cp scripts/travis/rabbitmq.config /etc/rabbitmq/rabbitmq.config # Install rabbitmq_management RabbitMQ plugin + - sudo service rabbitmq-server restart ; sleep 5 - sudo rabbitmq-plugins enable rabbitmq_management - sudo wget http://guest:guest@localhost:15672/cli/rabbitmqadmin -O /usr/local/bin/rabbitmqadmin - sudo chmod +x /usr/local/bin/rabbitmqadmin From 582cd69cf8349a3a59d9cbdafeced9c10be705cb Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 21 Jan 2019 12:37:00 +0100 Subject: [PATCH 100/105] Log a message when we are sleeping due to the rabbitmq connection error. 
--- .../st2common/transport/connection_retry_wrapper.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/st2common/st2common/transport/connection_retry_wrapper.py b/st2common/st2common/transport/connection_retry_wrapper.py index 79ff7f60fa..e29902ab1f 100644 --- a/st2common/st2common/transport/connection_retry_wrapper.py +++ b/st2common/st2common/transport/connection_retry_wrapper.py @@ -14,6 +14,7 @@ # limitations under the License. from __future__ import absolute_import + import eventlet __all__ = ['ConnectionRetryWrapper', 'ClusterRetryContext'] @@ -35,7 +36,14 @@ def __init__(self, cluster_size): # No of nodes attempted. Starts at 1 since the self._nodes_attempted = 1 - def test_should_stop(self): + def test_should_stop(self, e=None): + # Special workaround for "(504) CHANNEL_ERROR - second 'channel.open' seen" which happens + # during tests on Travis and block and slown down the tests + # NOTE: This error is not fatal during tests and we can simply switch to a next connection + # without sleeping. + if "CHANNEL_ERROR - second 'channel.open' seen" in e: + return False, -1 + should_stop = True if self._nodes_attempted > self.cluster_size * self.cluster_retry: return should_stop, -1 @@ -129,6 +137,7 @@ def run(self, connection, wrapped_callback): # be notified so raise. if should_stop: raise + # -1, 0 and 1+ are handled properly by eventlet.sleep self._logger.debug('Received RabbitMQ server error, sleeping for %s seconds ' 'before retrying: %s' % (wait, str(e))) From af7b0f28afa855f4e02f29c93d9db1a46d476097 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 21 Jan 2019 12:55:02 +0100 Subject: [PATCH 101/105] Fix the if check. 
--- st2common/st2common/transport/connection_retry_wrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/st2common/st2common/transport/connection_retry_wrapper.py b/st2common/st2common/transport/connection_retry_wrapper.py index e29902ab1f..a8bab1be60 100644 --- a/st2common/st2common/transport/connection_retry_wrapper.py +++ b/st2common/st2common/transport/connection_retry_wrapper.py @@ -41,7 +41,7 @@ def test_should_stop(self, e=None): # during tests on Travis and block and slown down the tests # NOTE: This error is not fatal during tests and we can simply switch to a next connection # without sleeping. - if "CHANNEL_ERROR - second 'channel.open' seen" in e: + if "second 'channel.open' seen" in str(e): return False, -1 should_stop = True From a1bdb5d9f9791374ebebd2a8f925475cafade27e Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 21 Jan 2019 13:28:32 +0100 Subject: [PATCH 102/105] Add a test case for it. --- .../tests/unit/test_connection_retry_wrapper.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/st2common/tests/unit/test_connection_retry_wrapper.py b/st2common/tests/unit/test_connection_retry_wrapper.py index 80c998eab1..97ad1fc035 100644 --- a/st2common/tests/unit/test_connection_retry_wrapper.py +++ b/st2common/tests/unit/test_connection_retry_wrapper.py @@ -36,6 +36,19 @@ def test_single_node_cluster_retry(self): self.assertTrue(should_stop, 'Done trying.') self.assertEqual(wait, -1) + def test_should_stop_second_channel_open_error_should_be_non_fatal(self): + retry_context = ClusterRetryContext(cluster_size=1) + + e = Exception("(504) CHANNEL_ERROR - second 'channel.open' seen") + should_stop, wait = retry_context.test_should_stop(e=e) + self.assertFalse(should_stop) + self.assertEqual(wait, -1) + + e = Exception("CHANNEL_ERROR - second 'channel.open' seen") + should_stop, wait = retry_context.test_should_stop(e=e) + self.assertFalse(should_stop) + self.assertEqual(wait, -1) + def 
test_multiple_node_cluster_retry(self): cluster_size = 3 last_index = cluster_size * 2 From 027bf215f1a5ba92ef3838546c5e3134bbc0baea Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 21 Jan 2019 13:29:18 +0100 Subject: [PATCH 103/105] Pass missing argument to the method. --- st2common/st2common/transport/connection_retry_wrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/st2common/st2common/transport/connection_retry_wrapper.py b/st2common/st2common/transport/connection_retry_wrapper.py index a8bab1be60..30c780e63c 100644 --- a/st2common/st2common/transport/connection_retry_wrapper.py +++ b/st2common/st2common/transport/connection_retry_wrapper.py @@ -128,7 +128,7 @@ def run(self, connection, wrapped_callback): wrapped_callback(connection=connection, channel=channel) should_stop = True except connection.connection_errors + connection.channel_errors as e: - should_stop, wait = self._retry_context.test_should_stop() + should_stop, wait = self._retry_context.test_should_stop(e) # reset channel to None to avoid any channel closing errors. At this point # in case of an exception there should be no channel but that is better to # guarantee. From e5d4817a422084beed372fca97a816163878ba69 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 18 Feb 2019 15:39:17 +0100 Subject: [PATCH 104/105] Add missing changelog entries, reorganize some existing entries and put them under a correct section. --- CHANGELOG.rst | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index b999f09962..cb0f832f54 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -31,11 +31,16 @@ Changed level is set to ``DEBUG`` or ``system.debug`` config option is set to ``True``. Reported by Nick Maludy. 
(improvement) #4538 #4502 -* Moved the lock from concurrency policies into the scheduler to fix a race condition when there - are multiple scheduler instances scheduling execution for action with concurrency policies. - #4481 (bug fix) -* Add retries to scheduler to handle temporary hiccup in DB connection. Refactor scheduler - service to return proper exit code when there is a failure. #4539 (bug fix) +* Update ``pyyaml`` dependency to the latest version. This latest version fixes an issue which + could result in a code execution vulnerability if code uses ``yaml.load`` in an unsafe manner + on untrusted input. + + NOTE: StackStorm platform itself is not affected, because we already used ``yaml.safe_load`` + everywhere. + + Only custom packs which use ``yaml.load`` with non trusted user input could potentially be + affected. (improvement) #4510 #4552 #4554 +* Update Orquesta to ``v0.4``. #4551 Fixed ~~~~~ @@ -62,6 +67,11 @@ Fixed Reported by @johandahlberg (bug fix) #4533 4534 * Fix CLI ``st2 apikey load`` not being idempotent and API endpoint ``/api/v1/apikeys`` not honoring desired ``ID`` for the new record creation. #4542 +* Moved the lock from concurrency policies into the scheduler to fix a race condition when there + are multiple scheduler instances scheduling execution for action with concurrency policies. + #4481 (bug fix) +* Add retries to scheduler to handle temporary hiccup in DB connection. Refactor scheduler + service to return proper exit code when there is a failure. #4539 (bug fix) 2.10.1 - December 19, 2018 -------------------------- From 60fcf80e5aeb4823acc07b48e7c6701cc8926f68 Mon Sep 17 00:00:00 2001 From: Tomaz Muraus Date: Mon, 18 Feb 2019 15:43:38 +0100 Subject: [PATCH 105/105] Fix failing tests. 
--- st2api/tests/unit/controllers/v1/test_packs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/st2api/tests/unit/controllers/v1/test_packs.py b/st2api/tests/unit/controllers/v1/test_packs.py index 8c5d9f338c..1ccd4ea439 100644 --- a/st2api/tests/unit/controllers/v1/test_packs.py +++ b/st2api/tests/unit/controllers/v1/test_packs.py @@ -530,14 +530,14 @@ def test_packs_register_endpoint(self, mock_get_packs): {'packs': ['dummy_pack_1'], 'types': ['action']}) self.assertEqual(resp.status_int, 200) - self.assertEqual(resp.json, {'actions': 1, 'runners': 15}) + self.assertEqual(resp.json, {'actions': 1, 'runners': 18}) # Verify that plural name form also works resp = self.app.post_json('/v1/packs/register', {'packs': ['dummy_pack_1'], 'types': ['actions']}) self.assertEqual(resp.status_int, 200) - self.assertEqual(resp.json, {'actions': 1, 'runners': 15}) + self.assertEqual(resp.json, {'actions': 1, 'runners': 18}) # Register single resource from a single pack specified multiple times - verify that # resources from the same pack are only registered once