diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..852476aeb --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,32 @@ +--- +name: Bug report +about: Create a report to help us identify and resolve bugs +title: '' +labels: bug +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Error logs** +If applicable, provide a log from https://gist.github.com/ + +**Environment (please provide the following information about your setup):** + - OS: [e.g. Ubuntu] + - Version [e.g. 22.04] + - Additional hardware (network adapters) + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..9fd0ca896 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,17 @@ +--- +name: Feature request +about: Suggest a new feature or change request +title: '' +labels: request +assignees: '' + +--- + +**What is the problem your feature is trying to solve?** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you think would solve the problem** +A clear and concise description of what you want to happen. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.gitignore b/.gitignore index 93fe84e64..f79a6efcb 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,135 @@ -venv/ -net_orc/ -.vscode/ \ No newline at end of file +# Runtime folder +runtime/ +venv/ +net_orc/ +.vscode/ + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 000000000..4e89b0c10 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,429 @@ +# This Pylint rcfile contains a best-effort configuration to uphold the +# best-practices and style described in the Google Python style guide: +# https://google.github.io/styleguide/pyguide.html +# +# Its canonical open-source location is: +# https://google.github.io/styleguide/pylintrc + +[MASTER] + +# Files or directories to be skipped. They should be base names, not paths. +ignore=third_party + +# Files or directories matching the regex patterns are skipped. The regex +# matches against base names, not paths. +ignore-patterns= + +# Pickle collected data for later comparisons. +persistent=no + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Use multiple processes to speed up Pylint. +jobs=4 + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +#enable= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". 
If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +disable=abstract-method, + apply-builtin, + arguments-differ, + attribute-defined-outside-init, + backtick, + bad-option-value, + basestring-builtin, + buffer-builtin, + c-extension-no-member, + consider-using-enumerate, + cmp-builtin, + cmp-method, + coerce-builtin, + coerce-method, + delslice-method, + div-method, + duplicate-code, + eq-without-hash, + execfile-builtin, + file-builtin, + filter-builtin-not-iterating, + fixme, + getslice-method, + global-statement, + hex-method, + idiv-method, + implicit-str-concat, + import-error, + import-self, + import-star-module-level, + inconsistent-return-statements, + input-builtin, + intern-builtin, + invalid-str-codec, + locally-disabled, + long-builtin, + long-suffix, + map-builtin-not-iterating, + misplaced-comparison-constant, + missing-function-docstring, + metaclass-assignment, + next-method-called, + next-method-defined, + no-absolute-import, + no-else-break, + no-else-continue, + no-else-raise, + no-else-return, + no-init, # added + no-member, + no-name-in-module, + no-self-use, + nonzero-method, + oct-method, + old-division, + old-ne-operator, + old-octal-literal, + old-raise-syntax, + parameter-unpacking, + print-statement, + raising-string, + range-builtin-not-iterating, + raw_input-builtin, + rdiv-method, + reduce-builtin, + relative-import, + reload-builtin, + round-builtin, + setslice-method, + signature-differs, + standarderror-builtin, + suppressed-message, + sys-max-int, + too-few-public-methods, + too-many-ancestors, + too-many-arguments, + too-many-boolean-expressions, + too-many-branches, + too-many-instance-attributes, + too-many-locals, + too-many-nested-blocks, + too-many-public-methods, + too-many-return-statements, + too-many-statements, + trailing-newlines, + unichr-builtin, + unicode-builtin, + unnecessary-pass, + unpacking-in-except, + useless-else-on-loop, + useless-object-inheritance, + useless-suppression, + using-cmp-argument, + wrong-import-order, + xrange-builtin, + zip-builtin-not-iterating, + + +[REPORTS] + +# Set the output format. Available formats are text, parseable, colorized, msvs +# (visual studio) and html. You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages +reports=no + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + + +[BASIC] + +# Good variable names which should always be accepted, separated by a comma +good-names=main,_ + +# Bad variable names which should always be refused, separated by a comma +bad-names= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# List of decorators that produce properties, such as abc.abstractproperty. 
Add +# to this list to register other decorators that produce valid properties. +property-classes=abc.abstractproperty,cached_property.cached_property,cached_property.threaded_cached_property,cached_property.cached_property_with_ttl,cached_property.threaded_cached_property_with_ttl + +# Regular expression matching correct function names +function-rgx=^(?:(?P<exempt>setUp|tearDown|setUpModule|tearDownModule)|(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$ + +# Regular expression matching correct variable names +variable-rgx=^[a-z][a-z0-9_]*$ + +# Regular expression matching correct constant names +const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ + +# Regular expression matching correct attribute names +attr-rgx=^_{0,2}[a-z][a-z0-9_]*$ + +# Regular expression matching correct argument names +argument-rgx=^[a-z][a-z0-9_]*$ + +# Regular expression matching correct class attribute names +class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ + +# Regular expression matching correct inline iteration names +inlinevar-rgx=^[a-z][a-z0-9_]*$ + +# Regular expression matching correct class names +class-rgx=^_?[A-Z][a-zA-Z0-9]*$ + +# Regular expression matching correct module names +module-rgx=^(_?[a-z][a-z0-9_]*|__init__)$ + +# Regular expression matching correct method names +method-rgx=(?x)^(?:(?P<exempt>_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$ + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=(__.*__|main|test.*|.*test|.*Test)$ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=10 + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager,contextlib2.contextmanager + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + + +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=80 + +# TODO(https://github.com/PyCQA/pylint/issues/3352): Direct pylint to exempt +# lines made too long by directives to pytype. + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=(?x)( + ^\s*(\#\ )?<?https?://\S+>?$| + ^\s*(from\s+\S+\s+)?import\s+.+$) + +# Allow the body of an if to be on the same line as the test if there is no +# else. 
+single-line-if-stmt=yes + +# Maximum number of lines in a module +max-module-lines=99999 + +# String used as indentation unit. The internal Google style guide mandates 2 +# spaces. Google's externaly-published style guide says 4, consistent with +# PEP 8. Here, we use 2 spaces, for conformity with many open-sourced Google +# projects (like TensorFlow). +indent-string=' ' + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=TODO + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=yes + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_) + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_,_cb + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six,six.moves,past.builtins,future.builtins,functools + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging,absl.logging,tensorflow.io.logging + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=4 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + + +[SPELLING] + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub, + TERMIOS, + Bastion, + rexec, + sets + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant, absl + +# Analyse import fallback blocks. 
This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls, + class_ + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception" +overgeneral-exceptions=StandardError, + Exception, + BaseException \ No newline at end of file diff --git a/README.md b/README.md index b374bdbf5..41c559499 100644 --- a/README.md +++ b/README.md @@ -37,6 +37,9 @@ Test Run cannot automate everything, and so additional manual testing may be req ## Roadmap :chart_with_upwards_trend: Test Run will constantly evolve to further support end-users by automating device network behaviour against industry standards. +## Issue reporting :triangular_flag_on_post: +If the application has come across a problem at any point during setup or use, please raise an issue under the [issues tab](https://github.com/auto-iot/test-run/issues). Issue templates exist for both bug reports and feature requests. If neither of these are appropriate for your issue, raise a blank issue instead. + ## Contributing :keyboard: The contributing requirements can be found in [CONTRIBUTING.md](CONTRIBUTING.md). In short, checkout the [Google CLA](https://cla.developers.google.com/) site to get started. diff --git a/cmd/install b/cmd/install index 351eb4129..6dee1c635 100755 --- a/cmd/install +++ b/cmd/install @@ -2,6 +2,7 @@ GIT_URL=https://github.com/auto-iot NET_ORC_DIR=net_orc +NET_ORC_VERSION="main" python3 -m venv venv @@ -10,7 +11,7 @@ source venv/bin/activate pip3 install -r etc/requirements.txt rm -rf $NET_ORC_DIR -git clone $GIT_URL/network-orchestrator $NET_ORC_DIR +git clone -b $NET_ORC_VERSION $GIT_URL/network-orchestrator $NET_ORC_DIR chown -R $USER $NET_ORC_DIR pip3 install -r $NET_ORC_DIR/python/requirements.txt diff --git a/cmd/start b/cmd/start index 43a295338..113f14b3e 100755 --- a/cmd/start +++ b/cmd/start @@ -5,6 +5,12 @@ if [[ "$EUID" -ne 0 ]]; then exit 1 fi +# Ensure that /var/run/netns folder exists +mkdir -p /var/run/netns + +# Clear up existing runtime files +rm -rf runtime + # Check if python modules exist. Install if not [ ! 
-d "venv" ] && cmd/install @@ -12,6 +18,6 @@ fi source venv/bin/activate # TODO: Execute python code -python -u framework/run.py +python -u framework/test_runner.py $@ deactivate \ No newline at end of file diff --git a/conf/system.json.example b/conf/system.json.example index 379545ad6..2d4b737d0 100644 --- a/conf/system.json.example +++ b/conf/system.json.example @@ -1,7 +1,7 @@ { - "network": { - "device_intf": "enx123456789123", - "internet_intf": "enx123456789124" - }, - "log_level": "INFO" + "network": { + "device_intf": "enx123456789123", + "internet_intf": "enx123456789124" + }, + "log_level": "INFO" } \ No newline at end of file diff --git a/etc/requirements.txt b/etc/requirements.txt index 56b8f0f66..979b408bd 100644 --- a/etc/requirements.txt +++ b/etc/requirements.txt @@ -1 +1,2 @@ -netifaces \ No newline at end of file +netifaces +scapy \ No newline at end of file diff --git a/framework/device.py b/framework/device.py new file mode 100644 index 000000000..08014c127 --- /dev/null +++ b/framework/device.py @@ -0,0 +1,10 @@ +"""Track device object information.""" +from dataclasses import dataclass + +@dataclass +class Device: + """Represents a physical device and it's configuration.""" + + make: str + model: str + mac_addr: str diff --git a/framework/logger.py b/framework/logger.py index 25970bd21..64d8fdb97 100644 --- a/framework/logger.py +++ b/framework/logger.py @@ -1,4 +1,4 @@ -"""Manages all things logging.""" +"""Manages stream and file loggers.""" import json import logging import os @@ -6,18 +6,43 @@ LOGGERS = {} _LOG_FORMAT = "%(asctime)s %(name)-8s %(levelname)-7s %(message)s" _DATE_FORMAT = '%b %02d %H:%M:%S' -_CONF_DIR="conf" -_CONF_FILE_NAME="system.json" +_DEFAULT_LOG_LEVEL = logging.INFO +_LOG_LEVEL = logging.INFO +_CONF_DIR = "conf" +_CONF_FILE_NAME = "system.json" +_LOG_DIR = "runtime/testing/" -with open(os.path.join(_CONF_DIR, _CONF_FILE_NAME), encoding='utf-8') as config_file: - system_conf_json = json.load(config_file) - log_level_str = system_conf_json['log_level'] - log_level = logging.getLevelName(log_level_str) +# Set log level +with open(os.path.join(_CONF_DIR, _CONF_FILE_NAME), encoding='utf-8') as system_conf_file: + system_conf_json = json.load(system_conf_file) +log_level_str = system_conf_json['log_level'] -logging.basicConfig(format=_LOG_FORMAT, datefmt=_DATE_FORMAT, level=log_level) +temp_log = logging.getLogger('temp') +try: + temp_log.setLevel(logging.getLevelName(log_level_str)) + _LOG_LEVEL = logging.getLevelName(log_level_str) +except ValueError: + print('Invalid log level set in ' + _CONF_DIR + '/' + _CONF_FILE_NAME + + '. 
Using INFO as log level') + _LOG_LEVEL = _DEFAULT_LOG_LEVEL -def get_logger(name): - """Returns the logger belonging to the class calling the method.""" +log_format = logging.Formatter(fmt=_LOG_FORMAT, datefmt=_DATE_FORMAT) + +def add_file_handler(log, log_file): + handler = logging.FileHandler(_LOG_DIR + log_file + ".log") + handler.setFormatter(log_format) + log.addHandler(handler) + +def add_stream_handler(log): + handler = logging.StreamHandler() + handler.setFormatter(log_format) + log.addHandler(handler) + +def get_logger(name, log_file=None): if name not in LOGGERS: LOGGERS[name] = logging.getLogger(name) + LOGGERS[name].setLevel(_LOG_LEVEL) + add_stream_handler(LOGGERS[name]) + if log_file is not None: + add_file_handler(LOGGERS[name], log_file) return LOGGERS[name] diff --git a/framework/run.py b/framework/run.py deleted file mode 100644 index ad7c038ee..000000000 --- a/framework/run.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Starts Test Run.""" - -from testrun import TestRun - -testrun = TestRun() diff --git a/framework/test_runner.py b/framework/test_runner.py new file mode 100644 index 000000000..91ff4cb1a --- /dev/null +++ b/framework/test_runner.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python3 + +"""Wrapper for the TestRun that simplifies +virtual testing procedure by allowing direct calling +from the command line. + +Run using the provided command scripts in the cmd folder. +E.g sudo cmd/start +""" + +import argparse +import sys +from testrun import TestRun +import logger +import signal + +LOGGER = logger.get_logger('runner') + + +class TestRunner: + + def __init__(self, local_net=True, config_file=None, validate=True, net_only=False): + self._register_exits() + self.test_run = TestRun(local_net=local_net, config_file=config_file, + validate=validate, net_only=net_only) + + def _register_exits(self): + signal.signal(signal.SIGINT, self._exit_handler) + signal.signal(signal.SIGTERM, self._exit_handler) + signal.signal(signal.SIGABRT, self._exit_handler) + signal.signal(signal.SIGQUIT, self._exit_handler) + + def _exit_handler(self, signum, arg): # pylint: disable=unused-argument + LOGGER.debug("Exit signal received: " + str(signum)) + if signum in (2, signal.SIGTERM): + LOGGER.info("Exit signal received.") + # Kill all container services quickly + # If we're here, we want everything to stop immediately + # and don't care about a gracefully shutdown + self._stop(True) + sys.exit(1) + + def stop(self, kill=False): + self.test_run.stop(kill) + + def start(self): + self.test_run.start() + LOGGER.info("Test Run has finished") + + +def parse_args(argv): + parser = argparse.ArgumentParser(description="Test Run", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument("-r", "--remote-net", action="store_false", + help='''Use the network orchestrator from the parent directory instead + of the one downloaded locally from the install script.''') + parser.add_argument("-f", "--config-file", default=None, + help="Define the configuration file for Test Run and Network Orchestrator") + parser.add_argument("--no-validate", action="store_true", + help="Turn off the validation of the network after network boot") + parser.add_argument("-net", "--net-only", action="store_true", + help="Run the network only, do not run tests") + args, unknown = parser.parse_known_args() + return args + + +if __name__ == "__main__": + args = parse_args(sys.argv) + runner = TestRunner(local_net=args.remote_net, + config_file=args.config_file, + validate=not args.no_validate, + net_only=args.net_only) + 
runner.start() diff --git a/framework/testrun.py b/framework/testrun.py index 225bed853..42534265a 100644 --- a/framework/testrun.py +++ b/framework/testrun.py @@ -1,98 +1,176 @@ -"""The overall control of the Test Run application. - -This file provides the integration between all of the -Test Run components, such as net_orc, test_orc and test_ui. - -Run using the provided command scripts in the cmd folder. -E.g sudo cmd/start -""" - -import os -import sys -import json -import signal -import time -import logger - -# Locate parent directory -current_dir = os.path.dirname(os.path.realpath(__file__)) -parent_dir = os.path.dirname(current_dir) - -# Add net_orc to Python path -net_orc_dir = os.path.join(parent_dir, 'net_orc', 'python', 'src') -sys.path.append(net_orc_dir) - -import network_orchestrator as net_orc # pylint: disable=wrong-import-position - -LOGGER = logger.get_logger('test_run') -CONFIG_FILE = "conf/system.json" -EXAMPLE_CONFIG_FILE = "conf/system.json.example" -RUNTIME = 300 - -class TestRun: # pylint: disable=too-few-public-methods - """Test Run controller. - - Creates an instance of the network orchestrator, test - orchestrator and user interface. - """ - - def __init__(self): - LOGGER.info("Starting Test Run") - - # Catch any exit signals - self._register_exits() - - self._start_network() - - # Keep application running - time.sleep(RUNTIME) - - self._stop_network() - - def _register_exits(self): - signal.signal(signal.SIGINT, self._exit_handler) - signal.signal(signal.SIGTERM, self._exit_handler) - signal.signal(signal.SIGABRT, self._exit_handler) - signal.signal(signal.SIGQUIT, self._exit_handler) - - def _exit_handler(self, signum, arg): # pylint: disable=unused-argument - LOGGER.debug("Exit signal received: " + str(signum)) - if signum in (2, signal.SIGTERM): - LOGGER.info("Exit signal received.") - self._stop_network() - - def _load_config(self): - """Loads all settings from the config file into memory.""" - if not os.path.isfile(CONFIG_FILE): - LOGGER.error("Configuration file is not present at " + CONFIG_FILE) - LOGGER.info("An example is present in " + EXAMPLE_CONFIG_FILE) - sys.exit(1) - - with open(CONFIG_FILE, 'r', encoding='UTF-8') as config_file_open: - config_json = json.load(config_file_open) - self._net_orc.import_config(config_json) - - def _start_network(self): - # Create an instance of the network orchestrator - self._net_orc = net_orc.NetworkOrchestrator() - - # Load config file and pass to other components - self._load_config() - - # Load and build any unbuilt network containers - self._net_orc.load_network_modules() - self._net_orc.build_network_modules() - - # Create baseline network - self._net_orc.create_net() - - # Launch network service containers - self._net_orc.start_network_services() - - LOGGER.info("Network is ready.") - - def _stop_network(self): - LOGGER.info("Stopping Test Run") - self._net_orc.stop_networking_services(kill=True) - self._net_orc.restore_net() - sys.exit(0) +"""The overall control of the Test Run application. + +This file provides the integration between all of the +Test Run components, such as net_orc, test_orc and test_ui. + +Run using the provided command scripts in the cmd folder. 
+E.g sudo cmd/start +""" + +import os +import sys +import json +import signal +import time +import logger +from device import Device + +# Locate parent directory +current_dir = os.path.dirname(os.path.realpath(__file__)) +parent_dir = os.path.dirname(current_dir) + +LOGGER = logger.get_logger('test_run') +CONFIG_FILE = "conf/system.json" +EXAMPLE_CONFIG_FILE = "conf/system.json.example" +RUNTIME = 300 + +DEVICES_DIR = 'local/devices' +DEVICE_CONFIG = 'device_config.json' +DEVICE_MAKE = 'make' +DEVICE_MODEL = 'model' +DEVICE_MAC_ADDR = 'mac_addr' + + +class TestRun: # pylint: disable=too-few-public-methods + """Test Run controller. + + Creates an instance of the network orchestrator, test + orchestrator and user interface. + """ + + def __init__(self, local_net=True, config_file=CONFIG_FILE,validate=True, net_only=False): + self._devices = [] + self._net_only = net_only + + # Catch any exit signals + self._register_exits() + + # Import the correct net orchestrator + self.import_dependencies(local_net) + + # Expand the config file to absolute pathing + config_file_abs=self._get_config_abs(config_file=config_file) + + self._net_orc = net_orc.NetworkOrchestrator(config_file=config_file_abs,validate=validate,async_monitor=not self._net_only) + self._test_orc = test_orc.TestOrchestrator() + + def start(self): + + self._load_devices() + + if self._net_only: + LOGGER.info("Network only option configured, no tests will be run") + self._start_network() + else: + self._start_network() + self._net_orc.listener.register_callback( + self._device_discovered, + [NetworkEvent.DEVICE_DISCOVERED]) + + LOGGER.info("Waiting for devices on the network...") + + # Check timeout and whether testing is currently in progress before stopping + time.sleep(RUNTIME) + + self.stop() + + def stop(self,kill=False): + self._stop_tests() + self._stop_network(kill=kill) + + def import_dependencies(self, local_net=True): + if local_net: + # Add local net_orc to Python path + net_orc_dir = os.path.join(parent_dir, 'net_orc', 'python', 'src') + else: + # Resolve the path to the test-run parent folder + root_dir = os.path.abspath(os.path.join(parent_dir, os.pardir)) + # Add manually cloned network orchestrator from parent folder + net_orc_dir = os.path.join( + root_dir, 'network-orchestrator', 'python', 'src') + # Add net_orc to Python path + sys.path.append(net_orc_dir) + # Import the network orchestrator + global net_orc + import network_orchestrator as net_orc # pylint: disable=wrong-import-position,import-outside-toplevel + + # Add test_orc to Python path + test_orc_dir = os.path.join(parent_dir, 'test_orc', 'python', 'src') + sys.path.append(test_orc_dir) + global test_orc + import test_orchestrator as test_orc # pylint: disable=wrong-import-position,import-outside-toplevel + + global NetworkEvent + from listener import NetworkEvent # pylint: disable=wrong-import-position,import-outside-toplevel + + def _register_exits(self): + signal.signal(signal.SIGINT, self._exit_handler) + signal.signal(signal.SIGTERM, self._exit_handler) + signal.signal(signal.SIGABRT, self._exit_handler) + signal.signal(signal.SIGQUIT, self._exit_handler) + + def _exit_handler(self, signum, arg): # pylint: disable=unused-argument + LOGGER.debug("Exit signal received: " + str(signum)) + if signum in (2, signal.SIGTERM): + LOGGER.info("Exit signal received.") + self.stop(kill=True) + sys.exit(1) + + def _get_config_abs(self,config_file=None): + if config_file is None: + # If not defined, use relative pathing to local file + config_file = 
os.path.join(parent_dir, CONFIG_FILE) + + # Expand the config file to absolute pathing + return os.path.abspath(config_file) + + def _start_network(self): + self._net_orc.start() + + def _run_tests(self): + """Iterate through and start all test modules.""" + self._test_orc.start() + + def _stop_network(self,kill=False): + self._net_orc.stop(kill=kill) + + def _stop_tests(self): + self._test_orc.stop() + + def _load_devices(self): + LOGGER.debug('Loading devices from ' + DEVICES_DIR) + + for device_folder in os.listdir(DEVICES_DIR): + with open(os.path.join(DEVICES_DIR, device_folder, DEVICE_CONFIG), + encoding='utf-8') as device_config_file: + device_config_json = json.load(device_config_file) + + device_make = device_config_json.get(DEVICE_MAKE) + device_model = device_config_json.get(DEVICE_MODEL) + mac_addr = device_config_json.get(DEVICE_MAC_ADDR) + + device = Device(device_make, device_model, + mac_addr=mac_addr) + self._devices.append(device) + + LOGGER.info('Loaded ' + str(len(self._devices)) + ' devices') + + def get_device(self, mac_addr): + """Returns a loaded device object from the device mac address.""" + for device in self._devices: + if device.mac_addr == mac_addr: + return device + return None + + def _device_discovered(self, mac_addr): + device = self.get_device(mac_addr) + if device is not None: + LOGGER.info( + f'Discovered {device.make} {device.model} on the network') + else: + device = Device(make=None, model=None, mac_addr=mac_addr) + LOGGER.info( + f'A new device has been discovered with mac address {mac_addr}') + + # TODO: Pass device information to test orchestrator/runner + self._run_tests() diff --git a/local/devices/Teltonika TRB140/device_config.json b/local/devices/Teltonika TRB140/device_config.json new file mode 100644 index 000000000..759c1e9b4 --- /dev/null +++ b/local/devices/Teltonika TRB140/device_config.json @@ -0,0 +1,5 @@ +{ + "make": "Teltonika", + "model": "TRB140", + "mac_addr": "00:1e:42:35:73:c4" +} \ No newline at end of file diff --git a/test_orc/modules/base/base.Dockerfile b/test_orc/modules/base/base.Dockerfile new file mode 100644 index 000000000..b5f35326a --- /dev/null +++ b/test_orc/modules/base/base.Dockerfile @@ -0,0 +1,23 @@ +# Image name: test-run/base-test +FROM ubuntu:jammy + +# Install common software +RUN apt-get update && apt-get install -y net-tools iputils-ping tcpdump iproute2 jq python3 python3-pip dos2unix + +# Setup the base python requirements +COPY modules/base/python /testrun/python + +# Install all python requirements for the module +RUN pip3 install -r /testrun/python/requirements.txt + +# Add the bin files +COPY modules/base/bin /testrun/bin + +# Remove incorrect line endings +RUN dos2unix /testrun/bin/* + +# Make sure all the bin files are executable +RUN chmod u+x /testrun/bin/* + +# Start the test module +ENTRYPOINT [ "/testrun/bin/start_module" ] \ No newline at end of file diff --git a/test_orc/modules/base/bin/capture b/test_orc/modules/base/bin/capture new file mode 100644 index 000000000..dccafb0c5 --- /dev/null +++ b/test_orc/modules/base/bin/capture @@ -0,0 +1,20 @@ +#!/bin/bash -e + +# Fetch module name +MODULE_NAME=$1 + +# Define the local file location for the capture to be saved +PCAP_DIR="/runtime/output/" +PCAP_FILE=$MODULE_NAME.pcap + +# Allow a user to define an interface by passing it into this script +INTERFACE=$2 + +# Create the output directory and start the capture +mkdir -p $PCAP_DIR +chown $HOST_USER:$HOST_USER $PCAP_DIR +echo "PCAP Dir: $PCAP_DIR/$PCAP_FILE" +tcpdump -i $INTERFACE -w 
$PCAP_DIR/$PCAP_FILE -Z $HOST_USER & + +# Small pause to let the capture start +sleep 1 \ No newline at end of file diff --git a/test_orc/modules/base/bin/setup_binaries b/test_orc/modules/base/bin/setup_binaries new file mode 100644 index 000000000..3535ead3c --- /dev/null +++ b/test_orc/modules/base/bin/setup_binaries @@ -0,0 +1,10 @@ +#!/bin/bash -e + +# Directory where all binaries will be loaded +BIN_DIR=$1 + +# Remove incorrect line endings +dos2unix $BIN_DIR/* + +# Make sure all the bin files are executable +chmod u+x $BIN_DIR/* \ No newline at end of file diff --git a/test_orc/modules/base/bin/start_grpc b/test_orc/modules/base/bin/start_grpc new file mode 100644 index 000000000..917381e89 --- /dev/null +++ b/test_orc/modules/base/bin/start_grpc @@ -0,0 +1,17 @@ +#!/bin/bash -e + +GRPC_DIR="/testrun/python/src/grpc" +GRPC_PROTO_DIR="proto" +GRPC_PROTO_FILE="grpc.proto" + +# Move into the grpc directory +pushd $GRPC_DIR >/dev/null 2>&1 + +# Build the grpc proto file every time before starting server +python3 -m grpc_tools.protoc --proto_path=. ./$GRPC_PROTO_DIR/$GRPC_PROTO_FILE --python_out=. --grpc_python_out=. + +popd >/dev/null 2>&1 + +# Start the grpc server +python3 -u $GRPC_DIR/start_server.py $@ + diff --git a/test_orc/modules/base/bin/start_module b/test_orc/modules/base/bin/start_module new file mode 100644 index 000000000..a9f5402f4 --- /dev/null +++ b/test_orc/modules/base/bin/start_module @@ -0,0 +1,76 @@ +#!/bin/bash + +# Directory where all binaries will be loaded +BIN_DIR="/testrun/bin" + +# Default interface should be veth0 for all containers +DEFAULT_IFACE=veth0 + +# Create a local user that matches the same as the host +# to be used for correct file ownership for various logs +# HOST_USER mapped in via docker container environment variables +useradd $HOST_USER + +# Enable IPv6 for all containers +sysctl net.ipv6.conf.all.disable_ipv6=0 +sysctl -p + +# Read in the config file +CONF_FILE="/testrun/conf/module_config.json" +CONF=`cat $CONF_FILE` + +if [[ -z $CONF ]] +then + echo "No config file present at $CONF_FILE. Exiting startup." + exit 1 +fi + +# Extract the necessary config parameters +MODULE_NAME=$(echo "$CONF" | jq -r '.config.meta.name') +DEFINED_IFACE=$(echo "$CONF" | jq -r '.config.network.interface') +GRPC=$(echo "$CONF" | jq -r '.config.grpc') + +# Validate the module name is present +if [[ -z "$MODULE_NAME" || "$MODULE_NAME" == "null" ]] +then + echo "No module name present in $CONF_FILE. Exiting startup." + exit 1 +fi + +# Select which interface to use +if [[ -z $DEFINED_IFACE || "$DEFINED_IFACE" == "null" ]] +then + echo "No Interface Defined, defaulting to veth0" + INTF=$DEFAULT_IFACE +else + INTF=$DEFINED_IFACE +fi + +echo "Starting module $MODULE_NAME..." + +$BIN_DIR/setup_binaries $BIN_DIR + +# Wait for interface to become ready +$BIN_DIR/wait_for_interface $INTF + +# Start network capture +$BIN_DIR/capture $MODULE_NAME $INTF + +# Start the grpc server +if [[ ! -z $GRPC && ! $GRPC == "null" ]] +then + GRPC_PORT=$(echo "$GRPC" | jq -r '.port') + if [[ ! -z $GRPC_PORT && ! 
$GRPC_PORT == "null" ]] + then + echo "gRPC port resolved from config: $GRPC_PORT" + $BIN_DIR/start_grpc "-p $GRPC_PORT" & + else + $BIN_DIR/start_grpc & + fi +fi + +# Small pause to let all core services stabalize +sleep 3 + +# Start the networking service +$BIN_DIR/start_test_module $MODULE_NAME $INTF \ No newline at end of file diff --git a/test_orc/modules/base/bin/wait_for_interface b/test_orc/modules/base/bin/wait_for_interface new file mode 100644 index 000000000..c9c1682f0 --- /dev/null +++ b/test_orc/modules/base/bin/wait_for_interface @@ -0,0 +1,10 @@ +#!/bin/bash + +# Allow a user to define an interface by passing it into this script +INTF=$1 + +# Wait for local interface to be ready +while ! ip link show $INTF; do + echo $INTF is not yet ready. Waiting 3 seconds + sleep 3 +done \ No newline at end of file diff --git a/test_orc/modules/base/conf/module_config.json b/test_orc/modules/base/conf/module_config.json new file mode 100644 index 000000000..1f3a47ba2 --- /dev/null +++ b/test_orc/modules/base/conf/module_config.json @@ -0,0 +1,12 @@ +{ + "config": { + "meta": { + "name": "base", + "display_name": "Base", + "description": "Base image" + }, + "docker": { + "enable_container": false + } + } +} \ No newline at end of file diff --git a/test_orc/modules/base/python/requirements.txt b/test_orc/modules/base/python/requirements.txt new file mode 100644 index 000000000..9c4e2b056 --- /dev/null +++ b/test_orc/modules/base/python/requirements.txt @@ -0,0 +1,2 @@ +grpcio +grpcio-tools \ No newline at end of file diff --git a/test_orc/modules/base/python/src/grpc/start_server.py b/test_orc/modules/base/python/src/grpc/start_server.py new file mode 100644 index 000000000..9ed31ffcf --- /dev/null +++ b/test_orc/modules/base/python/src/grpc/start_server.py @@ -0,0 +1,34 @@ +from concurrent import futures +import grpc +import proto.grpc_pb2_grpc as pb2_grpc +import proto.grpc_pb2 as pb2 +from network_service import NetworkService +import logging +import sys +import argparse + +DEFAULT_PORT = '5001' + +def serve(PORT): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + pb2_grpc.add_NetworkModuleServicer_to_server(NetworkService(), server) + server.add_insecure_port('[::]:' + PORT) + server.start() + server.wait_for_termination() + +def run(argv): + parser = argparse.ArgumentParser(description="GRPC Server for Network Module", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument("-p", "--port", default=DEFAULT_PORT, + help="Define the default port to run the server on.") + + args = parser.parse_args() + + PORT = args.port + + print("gRPC server starting on port " + PORT) + serve(PORT) + + +if __name__ == "__main__": + run(sys.argv) \ No newline at end of file diff --git a/test_orc/modules/base/python/src/logger.py b/test_orc/modules/base/python/src/logger.py new file mode 100644 index 000000000..0eb7b9ccf --- /dev/null +++ b/test_orc/modules/base/python/src/logger.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 + +import json +import logging +import os + +LOGGERS = {} +_LOG_FORMAT = "%(asctime)s %(name)-8s %(levelname)-7s %(message)s" +_DATE_FORMAT = '%b %02d %H:%M:%S' +_DEFAULT_LEVEL = logging.INFO +_CONF_DIR = "conf" +_CONF_FILE_NAME = "system.json" +_LOG_DIR = "/runtime/network/" + +# Set log level +try: + system_conf_json = json.load( + open(os.path.join(_CONF_DIR, _CONF_FILE_NAME), encoding='utf-8')) + log_level_str = system_conf_json['log_level'] + log_level = logging.getLevelName(log_level_str) +except: + # TODO: Print out warning that log 
level is incorrect or missing + log_level = _DEFAULT_LEVEL + +log_format = logging.Formatter(fmt=_LOG_FORMAT, datefmt=_DATE_FORMAT) + + +def add_file_handler(log, log_file): + handler = logging.FileHandler(_LOG_DIR+log_file+".log") + handler.setFormatter(log_format) + log.addHandler(handler) + +def add_stream_handler(log): + handler = logging.StreamHandler() + handler.setFormatter(log_format) + log.addHandler(handler) + +def get_logger(name, log_file=None): + if name not in LOGGERS: + LOGGERS[name] = logging.getLogger(name) + LOGGERS[name].setLevel(log_level) + add_stream_handler(LOGGERS[name]) + if log_file is not None: + add_file_handler(LOGGERS[name], log_file) + return LOGGERS[name] diff --git a/test_orc/modules/baseline/baseline.Dockerfile b/test_orc/modules/baseline/baseline.Dockerfile new file mode 100644 index 000000000..5b634e6ee --- /dev/null +++ b/test_orc/modules/baseline/baseline.Dockerfile @@ -0,0 +1,11 @@ +# Image name: test-run/baseline-test +FROM test-run/base-test:latest + +# Copy over all configuration files +COPY modules/baseline/conf /testrun/conf + +# Load device binary files +COPY modules/baseline/bin /testrun/bin + +# Copy over all python files +COPY modules/baseline/python /testrun/python \ No newline at end of file diff --git a/test_orc/modules/baseline/bin/start_test_module b/test_orc/modules/baseline/bin/start_test_module new file mode 100644 index 000000000..2938eb0f8 --- /dev/null +++ b/test_orc/modules/baseline/bin/start_test_module @@ -0,0 +1,42 @@ +#!/bin/bash + +# An example startup script that does the bare minimum to start +# a test module via a python script. Each test module should include a +# start_test_module file that overwrites this one to boot all of its +# specific requirements to run. + +# Define where the python source files are located +PYTHON_SRC_DIR=/testrun/python/src + +# Fetch module name +MODULE_NAME=$1 + +# Default interface should be veth0 for all containers +DEFAULT_IFACE=veth0 + +# Allow a user to define an interface by passing it into this script +DEFINED_IFACE=$2 + +# Select which interface to use +if [[ -z $DEFINED_IFACE || "$DEFINED_IFACE" == "null" ]] +then + echo "No interface defined, defaulting to veth0" + INTF=$DEFAULT_IFACE +else + INTF=$DEFINED_IFACE +fi + +# Create and set permissions on the log files +LOG_FILE=/runtime/output/$MODULE_NAME.log +RESULT_FILE=/runtime/output/$MODULE_NAME-result.json +touch $LOG_FILE +touch $RESULT_FILE +chown $HOST_USER:$HOST_USER $LOG_FILE +chown $HOST_USER:$HOST_USER $RESULT_FILE + +# Run the python script that will execute the tests for this module +# -u flag allows python print statements +# to be logged by docker by running unbuffered +python3 -u $PYTHON_SRC_DIR/run.py "-m $MODULE_NAME" + +echo Module has finished \ No newline at end of file diff --git a/test_orc/modules/baseline/conf/module_config.json b/test_orc/modules/baseline/conf/module_config.json new file mode 100644 index 000000000..1b8b7b9ba --- /dev/null +++ b/test_orc/modules/baseline/conf/module_config.json @@ -0,0 +1,21 @@ +{ + "config": { + "meta": { + "name": "baseline", + "display_name": "Baseline", + "description": "Baseline test" + }, + "network": { + "interface": "eth0", + "enable_wan": false, + "ip_index": 9 + }, + "grpc": { + "port": 50001 + }, + "docker": { + "enable_container": true, + "timeout": 30 + } + } +} \ No newline at end of file diff --git a/test_orc/modules/baseline/python/src/logger.py b/test_orc/modules/baseline/python/src/logger.py new file mode 100644 index 000000000..641aa16b4 --- /dev/null +++ 
b/test_orc/modules/baseline/python/src/logger.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 + +import json +import logging +import os + +LOGGERS = {} +_LOG_FORMAT = "%(asctime)s %(name)-8s %(levelname)-7s %(message)s" +_DATE_FORMAT = '%b %02d %H:%M:%S' +_DEFAULT_LEVEL = logging.INFO +_CONF_DIR = "conf" +_CONF_FILE_NAME = "system.json" +_LOG_DIR = "/runtime/output/" + +# Set log level +try: + system_conf_json = json.load( + open(os.path.join(_CONF_DIR, _CONF_FILE_NAME))) + log_level_str = system_conf_json['log_level'] + log_level = logging.getLevelName(log_level_str) +except: + # TODO: Print out warning that log level is incorrect or missing + log_level = _DEFAULT_LEVEL + +log_format = logging.Formatter(fmt=_LOG_FORMAT, datefmt=_DATE_FORMAT) + +def add_file_handler(log, logFile): + handler = logging.FileHandler(_LOG_DIR+logFile+".log") + handler.setFormatter(log_format) + log.addHandler(handler) + + +def add_stream_handler(log): + handler = logging.StreamHandler() + handler.setFormatter(log_format) + log.addHandler(handler) + + +def get_logger(name, logFile=None): + if name not in LOGGERS: + LOGGERS[name] = logging.getLogger(name) + LOGGERS[name].setLevel(log_level) + add_stream_handler(LOGGERS[name]) + if logFile is not None: + add_file_handler(LOGGERS[name], logFile) + return LOGGERS[name] diff --git a/test_orc/modules/baseline/python/src/run.py b/test_orc/modules/baseline/python/src/run.py new file mode 100644 index 000000000..7ff11559f --- /dev/null +++ b/test_orc/modules/baseline/python/src/run.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 + +import argparse +import signal +import sys +import logger + +from test_module import TestModule + +LOGGER = logger.get_logger('test_module') +RUNTIME = 300 + +class TestModuleRunner: + + def __init__(self,module): + + signal.signal(signal.SIGINT, self._handler) + signal.signal(signal.SIGTERM, self._handler) + signal.signal(signal.SIGABRT, self._handler) + signal.signal(signal.SIGQUIT, self._handler) + + LOGGER.info("Starting Test Module Template") + + self._test_module = TestModule(module) + self._test_module.run_tests() + self._test_module.generate_results() + + def _handler(self, signum, *other): + LOGGER.debug("SigtermEnum: " + str(signal.SIGTERM)) + LOGGER.debug("Exit signal received: " + str(signum)) + if signum in (2, signal.SIGTERM): + LOGGER.info("Exit signal received. 
Stopping test module...") + LOGGER.info("Test module stopped") + sys.exit(1) + +def run(argv): + parser = argparse.ArgumentParser(description="Test Module Template", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + + parser.add_argument( + "-m", "--module", help="Define the module name to be used to create the log file") + + args = parser.parse_args() + + # For some reason passing in the args from bash adds an extra + # space before the argument so we'll just strip out extra space + TestModuleRunner(args.module.strip()) + +if __name__ == "__main__": + run(sys.argv) diff --git a/test_orc/modules/baseline/python/src/test_module.py b/test_orc/modules/baseline/python/src/test_module.py new file mode 100644 index 000000000..d4065cde3 --- /dev/null +++ b/test_orc/modules/baseline/python/src/test_module.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 + +import json +import time +import logger + +LOG_NAME = "test_baseline" +RESULTS_DIR = "/runtime/output/" +LOGGER = logger.get_logger(LOG_NAME) + +class TestModule: + + def __init__(self, module): + + self.module_test1 = None + self.module_test2 = None + self.module_test3 = None + self.module = module + self.add_logger(module) + + def add_logger(self, module): + global LOGGER + LOGGER = logger.get_logger(LOG_NAME, module) + + # Make up some fake test results + def run_tests(self): + LOGGER.info("Running test 1...") + self.module_test1 = True + LOGGER.info("Test 1 complete.") + + LOGGER.info("Running test 2...") + self.module_test2 = False + LOGGER.info("Test 2 complete.") + + def generate_results(self): + results = [] + results.append(self.generate_result("Test 1", self.module_test1)) + results.append(self.generate_result("Test 2", self.module_test2)) + results.append(self.generate_result("Test 3", self.module_test3)) + json_results = json.dumps({"results":results}, indent=2) + self.write_results(json_results) + + def write_results(self,results): + results_file=RESULTS_DIR+self.module+"-result.json" + LOGGER.info("Writing results to " + results_file) + f = open(results_file, "w", encoding="utf-8") + f.write(results) + f.close() + + def generate_result(self, test_name, test_result): + if test_result is not None: + result = "compliant" if test_result else "non-compliant" + else: + result = "skipped" + LOGGER.info(test_name + ": " + result) + res_dict = { + "name": test_name, + "result": result, + "description": "The device is " + result + } + return res_dict diff --git a/test_orc/python/requirements.txt b/test_orc/python/requirements.txt new file mode 100644 index 000000000..e69de29bb diff --git a/test_orc/python/src/module.py b/test_orc/python/src/module.py new file mode 100644 index 000000000..8121c34db --- /dev/null +++ b/test_orc/python/src/module.py @@ -0,0 +1,23 @@ +"""Represemts a test module.""" +from dataclasses import dataclass +from docker.models.containers import Container + +@dataclass +class TestModule: # pylint: disable=too-few-public-methods,too-many-instance-attributes + """Represents a test module.""" + + name: str = None + display_name: str = None + description: str = None + + build_file: str = None + container: Container = None + container_name: str = None + image_name :str = None + enable_container: bool = True + + timeout: int = 60 + + # Absolute path + dir: str = None + dir_name: str = None diff --git a/test_orc/python/src/runner.py b/test_orc/python/src/runner.py new file mode 100644 index 000000000..cc495bf8d --- /dev/null +++ b/test_orc/python/src/runner.py @@ -0,0 +1,40 @@ +"""Provides high level management of the 
test orchestrator.""" +import time +import logger + +LOGGER = logger.get_logger('runner') + +class Runner: + """Holds the state of the testing for one device.""" + + def __init__(self, test_orc, device): + self._test_orc = test_orc + self._device = device + + def run(self): + self._run_test_modules() + + def _run_test_modules(self): + """Iterates through each test module and starts the container.""" + LOGGER.info('Running test modules...') + for module in self._test_modules: + self.run_test_module(module) + LOGGER.info('All tests complete') + + def run_test_module(self, module): + """Start the test container and extract the results.""" + + if module is None or not module.enable_container: + return + + self._test_orc.start_test_module(module) + + # Determine the module timeout time + test_module_timeout = time.time() + module.timeout + status = self._test_orc.get_module_status(module) + + while time.time() < test_module_timeout and status == 'running': + time.sleep(1) + status = self._test_orc.get_module_status(module) + + LOGGER.info(f'Test module {module.display_name} has finished') diff --git a/test_orc/python/src/test_orchestrator.py b/test_orc/python/src/test_orchestrator.py new file mode 100644 index 000000000..f68a13579 --- /dev/null +++ b/test_orc/python/src/test_orchestrator.py @@ -0,0 +1,200 @@ +"""Provides high level management of the test orchestrator.""" +import os +import json +import time +import shutil +import docker +from docker.types import Mount +import logger +from module import TestModule + +LOG_NAME = "test_orc" +LOGGER = logger.get_logger("test_orc") +RUNTIME_DIR = "runtime" +TEST_MODULES_DIR = "modules" +MODULE_CONFIG = "conf/module_config.json" + +class TestOrchestrator: + """Manages and controls the test modules.""" + + def __init__(self): + self._test_modules = [] + self._module_config = None + + self._path = os.path.dirname(os.path.dirname( + os.path.dirname(os.path.realpath(__file__)))) + + # Resolve the path to the test-run folder + self._root_path = os.path.abspath(os.path.join(self._path, os.pardir)) + + shutil.rmtree(os.path.join(self._root_path, RUNTIME_DIR), ignore_errors=True) + os.makedirs(os.path.join(self._root_path, RUNTIME_DIR), exist_ok=True) + + def start(self): + LOGGER.info("Starting Test Orchestrator") + self._load_test_modules() + self._run_test_modules() + + def stop(self): + """Stop any running tests""" + self._stop_modules() + + def _run_test_modules(self): + """Iterates through each test module and starts the container.""" + LOGGER.info("Running test modules...") + for module in self._test_modules: + self._run_test_module(module) + LOGGER.info("All tests complete") + + def _run_test_module(self, module): + """Start the test container and extract the results.""" + + if module is None or not module.enable_container: + return + + LOGGER.info("Running test module " + module.name) + try: + + container_runtime_dir = os.path.join(self._root_path, "runtime/test/" + module.name) + os.makedirs(container_runtime_dir) + + client = docker.from_env() + + module.container = client.containers.run( + module.image_name, + auto_remove=True, + cap_add=["NET_ADMIN"], + name=module.container_name, + hostname=module.container_name, + privileged=True, + detach=True, + mounts=[Mount( + target="/runtime/output", + source=container_runtime_dir, + type='bind' + )], + environment={"HOST_USER": os.getlogin()} + ) + except (docker.errors.APIError, docker.errors.ContainerError) as container_error: + LOGGER.error("Test module " + module.name + " has failed to start") + 
LOGGER.debug(container_error) + return + + # Determine the module timeout time + test_module_timeout = time.time() + module.timeout + status = self._get_module_status(module) + + while time.time() < test_module_timeout and status == 'running': + time.sleep(1) + status = self._get_module_status(module) + + LOGGER.info("Test module " + module.name + " has finished") + + def _get_module_status(self,module): + container = self._get_module_container(module) + if container is not None: + return container.status + return None + + def _get_module_container(self, module): + container = None + try: + client = docker.from_env() + container = client.containers.get(module.container_name) + except docker.errors.NotFound: + LOGGER.debug("Container " + + module.container_name + " not found") + except docker.errors.APIError as error: + LOGGER.error("Failed to resolve container") + LOGGER.error(error) + return container + + def _load_test_modules(self): + """Import module configuration from module_config.json.""" + + modules_dir = os.path.join(self._path, TEST_MODULES_DIR) + + LOGGER.debug("Loading test modules from /" + modules_dir) + loaded_modules = "Loaded the following test modules: " + + for module_dir in os.listdir(modules_dir): + + LOGGER.debug("Loading module from: " + module_dir) + + # Load basic module information + module = TestModule() + with open(os.path.join( + self._path, + modules_dir, + module_dir, + MODULE_CONFIG), + encoding='UTF-8') as module_config_file: + module_json = json.load(module_config_file) + + module.name = module_json['config']['meta']['name'] + module.display_name = module_json['config']['meta']['display_name'] + module.description = module_json['config']['meta']['description'] + module.dir = os.path.join(self._path, modules_dir, module_dir) + module.dir_name = module_dir + module.build_file = module_dir + ".Dockerfile" + module.container_name = "tr-ct-" + module.dir_name + "-test" + module.image_name = "test-run/" + module.dir_name + "-test" + + if 'timeout' in module_json['config']['docker']: + module.timeout = module_json['config']['docker']['timeout'] + + # Determine if this is a container or just an image/template + if "enable_container" in module_json['config']['docker']: + module.enable_container = module_json['config']['docker']['enable_container'] + + self._test_modules.append(module) + + if module.enable_container: + loaded_modules += module.dir_name + " " + + LOGGER.info(loaded_modules) + + def build_test_modules(self): + """Build all test modules.""" + LOGGER.info("Building test modules...") + for module in self._test_modules: + self._build_test_module(module) + + def _build_test_module(self, module): + LOGGER.debug("Building docker image for module " + module.dir_name) + client = docker.from_env() + try: + client.images.build( + dockerfile=os.path.join(module.dir, module.build_file), + path=self._path, + forcerm=True, # Cleans up intermediate containers during build + tag=module.image_name + ) + except docker.errors.BuildError as error: + LOGGER.error(error) + + def _stop_modules(self, kill=False): + LOGGER.info("Stopping test modules") + for module in self._test_modules: + # Test modules may just be Docker images, so we do not want to stop them + if not module.enable_container: + continue + self._stop_module(module, kill) + LOGGER.info("All test modules have been stopped") + + def _stop_module(self, module, kill=False): + LOGGER.debug("Stopping test module " + module.container_name) + try: + container = module.container + if container is not None: + if kill: + 
LOGGER.debug("Killing container:" + + module.container_name) + container.kill() + else: + LOGGER.debug("Stopping container:" + + module.container_name) + container.stop() + LOGGER.debug("Container stopped:" + module.container_name) + except docker.errors.NotFound: + pass \ No newline at end of file