diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cc536d2a..5f6235fd 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -33,7 +33,7 @@ jobs: # stop the build if there are Python syntax errors or undefined names flake8 ./test/ ./src/ --count --select=E9,F63,F7,F82 --show-source --statistics # Stop build on errors - flake8 ./test/ ./src/ --count --max-complexity=10 --max-line-length=120 --statistics + flake8 ./test/ ./src/ --count --max-complexity=15 --max-line-length=120 --statistics - name: Test with pytest run: | pip install -e . diff --git a/.gitignore b/.gitignore index 7f7d89db..c3ad18e1 100644 --- a/.gitignore +++ b/.gitignore @@ -11,11 +11,17 @@ __pycache__ .coverage htmlcov .pytest_cache -.hermes docs/source/api docs/build/ +/dist/ .idea/ .venv/ dist/ + +# HERMES workflow specifics +.hermes +hermes-audit.md +hermes.log +quickfix.sh diff --git a/.mailmap b/.mailmap new file mode 100644 index 00000000..1a74d657 --- /dev/null +++ b/.mailmap @@ -0,0 +1,14 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: CC0-1.0 + +# Mapping of email addresses only. Format (one pair per line): +# + + + + +# Mapping of user names. Format (one pair per line): +# Real Name nickname +# Real Name Name, Real +Jeffrey Kelling jkelling diff --git a/README.md b/README.md index a20deb50..a0ba921f 100644 --- a/README.md +++ b/README.md @@ -37,12 +37,12 @@ This project uses ## Usage -The `haggis` application provides the entry point for the HERMES workflow. +The `hermes` application provides the entry point for the HERMES workflow. 
After installation, you can run it from your command line environment: ```shell -haggis --help -haggis harvest +hermes --help +hermes harvest ``` You can also call the `hermes` package as Python module: diff --git a/docs/source/dev/data_model.md b/docs/source/dev/data_model.md index 66a7b38a..a5f4c727 100644 --- a/docs/source/dev/data_model.md +++ b/docs/source/dev/data_model.md @@ -10,7 +10,7 @@ SPDX-FileContributor: Michael Meinel # HERMES Data Model -*haggis* uses an internal data model to store the output of the different stages. +*hermes* uses an internal data model to store the output of the different stages. All the data is collected in a directory called `.hermes` located in the root of the project directory. You should not need to interact with this data directly. diff --git a/poetry.lock b/poetry.lock index 86ca8ed3..633d4cbf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -8,7 +8,7 @@ python-versions = "*" [[package]] name = "astroid" -version = "2.12.13" +version = "2.13.2" description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false @@ -16,6 +16,7 @@ python-versions = ">=3.7.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" +typing-extensions = ">=4.0.0" wrapt = [ {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, @@ -23,20 +24,21 @@ wrapt = [ [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] [[package]] -name = "Babel" +name = "babel" version = "2.11.0" description = "Internationalization utilities" category = "dev" @@ -73,7 +75,7 @@ python-versions = "*" chardet = ">=3.0.2" [[package]] -name = "boolean.py" +name = "boolean-py" version = "4.0" description = 
"Define boolean algebras, create and parse boolean expressions and create custom boolean DSL." category = "dev" @@ -82,7 +84,7 @@ python-versions = "*" [[package]] name = "certifi" -version = "2022.9.24" +version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -104,17 +106,17 @@ requests = ">=2.20,<3" "ruamel.yaml" = ">=0.16.0" [package.extras] -dev = ["isort", "prospector[with_pyroma] (>=1.4)", "pytest (>=6)", "pytest-cov"] +dev = ["isort", "prospector[with-pyroma] (>=1.4)", "pytest (>=6)", "pytest-cov"] gcloud = ["flask"] publishing = ["twine", "wheel"] [[package]] name = "chardet" -version = "5.0.0" +version = "5.1.0" description = "Universal encoding detector for Python 3" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "charset-normalizer" @@ -125,7 +127,7 @@ optional = false python-versions = ">=3.6.0" [package.extras] -unicode_backport = ["unicodedata2"] +unicode-backport = ["unicodedata2"] [[package]] name = "click" @@ -148,7 +150,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7 [[package]] name = "coverage" -version = "6.5.0" +version = "7.0.5" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -161,7 +163,7 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 toml = ["tomli"] [[package]] -name = "Deprecated" +name = "deprecated" version = "1.2.13" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
category = "dev" @@ -192,7 +194,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false @@ -232,14 +234,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [[package]] -name = "Jinja2" +name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." category = "dev" @@ -268,11 +270,11 @@ six = ">=1.11.0" [package.extras] format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format_nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] [[package]] name = "lazy-object-proxy" -version = "1.8.0" +version = "1.9.0" description = "A fast and thorough lazy object proxy." 
category = "dev" optional = false @@ -318,7 +320,7 @@ mdurl = ">=0.1,<1.0" [package.extras] benchmarking = ["psutil", "pytest", "pytest-benchmark (>=3.2,<4.0)"] -code_style = ["pre-commit (==2.6)"] +code-style = ["pre-commit (==2.6)"] compare = ["commonmark (>=0.9.1,<0.10.0)", "markdown (>=3.3.6,<3.4.0)", "mistletoe (>=0.8.1,<0.9.0)", "mistune (>=2.0.2,<2.1.0)", "panflute (>=2.1.3,<2.2.0)"] linkify = ["linkify-it-py (>=1.0,<2.0)"] plugins = ["mdit-py-plugins"] @@ -327,7 +329,7 @@ rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx- testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] -name = "MarkupSafe" +name = "markupsafe" version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." category = "dev" @@ -344,7 +346,7 @@ python-versions = ">=3.6" [[package]] name = "mdit-py-plugins" -version = "0.3.1" +version = "0.3.3" description = "Collection of plugins for markdown-it-py" category = "dev" optional = false @@ -354,7 +356,7 @@ python-versions = ">=3.7" markdown-it-py = ">=1.0.0,<3.0.0" [package.extras] -code_style = ["pre-commit"] +code-style = ["pre-commit"] rtd = ["attrs", "myst-parser (>=0.16.1,<0.17.0)", "sphinx-book-theme (>=0.1.0,<0.2.0)"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] @@ -392,21 +394,18 @@ sphinx = ">=4,<6" typing-extensions = "*" [package.extras] -code_style = ["pre-commit (>=2.12,<3.0)"] +code-style = ["pre-commit (>=2.12,<3.0)"] linkify = ["linkify-it-py (>=1.0,<2.0)"] rtd = ["ipython", "sphinx-book-theme", "sphinx-design", "sphinxcontrib.mermaid (>=0.7.1,<0.8.0)", "sphinxext-opengraph (>=0.6.3,<0.7.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=6,<7)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx (<5.2)", "sphinx-pytest"] [[package]] name = "packaging" -version = "21.3" +version = "23.0" description = "Core utilities for Python packages" 
category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" [[package]] name = "pluggy" @@ -468,8 +467,8 @@ optional = false python-versions = ">=3.6" [[package]] -name = "Pygments" -version = "2.13.0" +name = "pygments" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false @@ -495,7 +494,7 @@ python-dateutil = ">=2.8.0" name = "pyparsing" version = "3.0.9" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "dev" +category = "main" optional = false python-versions = ">=3.6.8" @@ -504,7 +503,7 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyrsistent" -version = "0.19.2" +version = "0.19.3" description = "Persistent/Functional/Immutable data structures" category = "main" optional = false @@ -569,14 +568,14 @@ chardet = "*" [[package]] name = "pytz" -version = "2022.6" +version = "2022.7" description = "World timezone definitions, modern and historical" category = "dev" optional = false python-versions = "*" [[package]] -name = "PyYAML" +name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" category = "dev" @@ -599,27 +598,26 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "reuse" -version = "1.0.0" +version = "1.1.0" description = "reuse is a tool for compliance with the REUSE recommendations." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.2,<4.0.0" [package.dependencies] -binaryornot = "*" -"boolean.py" = "*" -Jinja2 = "*" -license-expression = "*" -python-debian = "*" -requests = "*" +binaryornot = ">=0.4.4,<0.5.0" +"boolean.py" = ">=3.8" +Jinja2 = ">=3.0.0,<4.0.0" +license-expression = ">=1.0" +python-debian = ">=0.1.38,<0.1.45 || >0.1.45,<0.1.46 || >0.1.46,<0.1.47 || >0.1.47,<0.2.0" setuptools = "*" [[package]] -name = "ruamel.yaml" +name = "ruamel-yaml" version = "0.17.21" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" category = "main" @@ -634,7 +632,7 @@ docs = ["ryd"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] -name = "ruamel.yaml.clib" +name = "ruamel-yaml-clib" version = "0.2.7" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" category = "main" @@ -643,7 +641,7 @@ python-versions = ">=3.5" [[package]] name = "setuptools" -version = "65.6.0" +version = "65.6.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false @@ -679,7 +677,7 @@ optional = false python-versions = ">=3.6" [[package]] -name = "Sphinx" +name = "sphinx" version = "4.5.0" description = "Python documentation generator" category = "dev" @@ -759,7 +757,7 @@ pyyaml = "*" sphinx = ">=3,<5" [package.extras] -code_style = ["pre-commit (>=2.7.0,<2.8.0)"] +code-style = ["pre-commit (>=2.7.0,<2.8.0)"] doc = ["ablog (>=0.10.13,<0.11.0)", "folium", "ipywidgets", "matplotlib", "myst-nb (>=0.13.2,<0.14.0)", "nbclient", "numpy", "numpydoc", "pandas", "plotly", "sphinx (>=4.0,<5.0)", "sphinx-copybutton", "sphinx-design", "sphinx-examples", "sphinx-tabs", "sphinx-thebe (>=0.1.1)", "sphinx-togglebutton (>=0.2.1)", "sphinxcontrib-bibtex (>=2.2,<3.0)", "sphinxcontrib-youtube", "sphinxext-opengraph"] test = ["beautifulsoup4 (>=4.6.1,<5)", 
"coverage", "myst-nb (>=0.13.2,<0.14.0)", "pytest (>=6.0.1,<6.1.0)", "pytest-cov", "pytest-regressions (>=2.0.1,<2.1.0)", "sphinx_thebe"] @@ -795,11 +793,11 @@ test = ["coverage", "pytest"] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +version = "1.0.3" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -923,6 +921,14 @@ mslex = {version = ">=0.3.0,<0.4.0", markers = "sys_platform == \"win32\""} psutil = ">=5.7.2,<6.0.0" tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version >= \"3.7\" and python_version < \"4.0\""} +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + [[package]] name = "tomli" version = "2.0.1" @@ -948,7 +954,7 @@ optional = false python-versions = ">=3.7" [[package]] -name = "Unidecode" +name = "unidecode" version = "1.3.6" description = "ASCII transliterations of Unicode text" category = "dev" @@ -957,11 +963,11 @@ python-versions = ">=3.5" [[package]] name = "urllib3" -version = "1.26.12" +version = "1.26.13" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] @@ -979,7 +985,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "a563183a806a39f7ecb6f4bedff46347ab7bb6ce69989bee6f506f06b75536cf" +content-hash = "2cdb639f26b5ef399e9a69be17fc502b13a5a1aacdb3fbe237bfcce00d722078" [metadata.files] alabaster = [ @@ -987,14 +993,14 @@ alabaster = [ {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, ] astroid = [ - {file = "astroid-2.12.13-py3-none-any.whl", hash = "sha256:10e0ad5f7b79c435179d0d0f0df69998c4eef4597534aae44910db060baeb907"}, - {file = "astroid-2.12.13.tar.gz", hash = "sha256:1493fe8bd3dfd73dc35bd53c9d5b6e49ead98497c47b2307662556a5692d29d7"}, + {file = "astroid-2.13.2-py3-none-any.whl", hash = "sha256:8f6a8d40c4ad161d6fc419545ae4b2f275ed86d1c989c97825772120842ee0d2"}, + {file = "astroid-2.13.2.tar.gz", hash = "sha256:3bc7834720e1a24ca797fd785d77efb14f7a28ee8e635ef040b6e2d80ccb3303"}, ] attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, ] -Babel = [ +babel = [ {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"}, {file = "Babel-2.11.0.tar.gz", hash = 
"sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"}, ] @@ -1006,21 +1012,21 @@ binaryornot = [ {file = "binaryornot-0.4.4-py2.py3-none-any.whl", hash = "sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4"}, {file = "binaryornot-0.4.4.tar.gz", hash = "sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061"}, ] -"boolean.py" = [ +boolean-py = [ {file = "boolean.py-4.0-py3-none-any.whl", hash = "sha256:2876f2051d7d6394a531d82dc6eb407faa0b01a0a0b3083817ccd7323b8d96bd"}, {file = "boolean.py-4.0.tar.gz", hash = "sha256:17b9a181630e43dde1851d42bef546d616d5d9b4480357514597e78b203d06e4"}, ] certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, ] cffconvert = [ {file = "cffconvert-2.0.0-py3-none-any.whl", hash = "sha256:573c825e4e16173d99396dc956bd22ff5d4f84215cc16b6ab05299124f5373bb"}, {file = "cffconvert-2.0.0.tar.gz", hash = "sha256:b4379ee415c6637dc9e3e7ba196605cb3cedcea24613e4ea242c607d9e98eb50"}, ] chardet = [ - {file = "chardet-5.0.0-py3-none-any.whl", hash = "sha256:d3e64f022d254183001eccc5db4040520c0f23b1a3f33d6413e099eb7f126557"}, - {file = "chardet-5.0.0.tar.gz", hash = "sha256:0368df2bfd78b5fc20572bb4e9bb7fb53e2c094f60ae9993339e8671d0afb8aa"}, + {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"}, + {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"}, ] charset-normalizer = [ {file = "charset-normalizer-2.1.1.tar.gz", 
hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, @@ -1035,58 +1041,59 @@ colorama = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = 
"coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = 
"coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = 
"coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] -Deprecated = [ + {file = "coverage-7.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a7f23bbaeb2a87f90f607730b45564076d870f1fb07b9318d0c21f36871932b"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c18d47f314b950dbf24a41787ced1474e01ca816011925976d90a88b27c22b89"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef14d75d86f104f03dea66c13188487151760ef25dd6b2dbd541885185f05f40"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66e50680e888840c0995f2ad766e726ce71ca682e3c5f4eee82272c7671d38a2"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9fed35ca8c6e946e877893bbac022e8563b94404a605af1d1e6accc7eb73289"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d8d04e755934195bdc1db45ba9e040b8d20d046d04d6d77e71b3b34a8cc002d0"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e109f1c9a3ece676597831874126555997c48f62bddbcace6ed17be3e372de8"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0a1890fca2962c4f1ad16551d660b46ea77291fba2cc21c024cd527b9d9c8809"}, + {file = "coverage-7.0.5-cp310-cp310-win32.whl", hash = "sha256:be9fcf32c010da0ba40bf4ee01889d6c737658f4ddff160bd7eb9cac8f094b21"}, + {file = "coverage-7.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:cbfcba14a3225b055a28b3199c3d81cd0ab37d2353ffd7f6fd64844cebab31ad"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30b5fec1d34cc932c1bc04017b538ce16bf84e239378b8f75220478645d11fca"}, + 
{file = "coverage-7.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1caed2367b32cc80a2b7f58a9f46658218a19c6cfe5bc234021966dc3daa01f0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d254666d29540a72d17cc0175746cfb03d5123db33e67d1020e42dae611dc196"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19245c249aa711d954623d94f23cc94c0fd65865661f20b7781210cb97c471c0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b05ed4b35bf6ee790832f68932baf1f00caa32283d66cc4d455c9e9d115aafc"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:29de916ba1099ba2aab76aca101580006adfac5646de9b7c010a0f13867cba45"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e057e74e53db78122a3979f908973e171909a58ac20df05c33998d52e6d35757"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:411d4ff9d041be08fdfc02adf62e89c735b9468f6d8f6427f8a14b6bb0a85095"}, + {file = "coverage-7.0.5-cp311-cp311-win32.whl", hash = "sha256:52ab14b9e09ce052237dfe12d6892dd39b0401690856bcfe75d5baba4bfe2831"}, + {file = "coverage-7.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:1f66862d3a41674ebd8d1a7b6f5387fe5ce353f8719040a986551a545d7d83ea"}, + {file = "coverage-7.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b69522b168a6b64edf0c33ba53eac491c0a8f5cc94fa4337f9c6f4c8f2f5296c"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436e103950d05b7d7f55e39beeb4d5be298ca3e119e0589c0227e6d0b01ee8c7"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c56bec53d6e3154eaff6ea941226e7bd7cc0d99f9b3756c2520fc7a94e6d96"}, + {file = 
"coverage-7.0.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a38362528a9115a4e276e65eeabf67dcfaf57698e17ae388599568a78dcb029"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f67472c09a0c7486e27f3275f617c964d25e35727af952869dd496b9b5b7f6a3"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:220e3fa77d14c8a507b2d951e463b57a1f7810a6443a26f9b7591ef39047b1b2"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ecb0f73954892f98611e183f50acdc9e21a4653f294dfbe079da73c6378a6f47"}, + {file = "coverage-7.0.5-cp37-cp37m-win32.whl", hash = "sha256:d8f3e2e0a1d6777e58e834fd5a04657f66affa615dae61dd67c35d1568c38882"}, + {file = "coverage-7.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9e662e6fc4f513b79da5d10a23edd2b87685815b337b1a30cd11307a6679148d"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:790e4433962c9f454e213b21b0fd4b42310ade9c077e8edcb5113db0818450cb"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49640bda9bda35b057b0e65b7c43ba706fa2335c9a9896652aebe0fa399e80e6"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d66187792bfe56f8c18ba986a0e4ae44856b1c645336bd2c776e3386da91e1dd"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:276f4cd0001cd83b00817c8db76730938b1ee40f4993b6a905f40a7278103b3a"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95304068686545aa368b35dfda1cdfbbdbe2f6fe43de4a2e9baa8ebd71be46e2"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:17e01dd8666c445025c29684d4aabf5a90dc6ef1ab25328aa52bedaa95b65ad7"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:ea76dbcad0b7b0deb265d8c36e0801abcddf6cc1395940a24e3595288b405ca0"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:50a6adc2be8edd7ee67d1abc3cd20678987c7b9d79cd265de55941e3d0d56499"}, + {file = "coverage-7.0.5-cp38-cp38-win32.whl", hash = "sha256:e4ce984133b888cc3a46867c8b4372c7dee9cee300335e2925e197bcd45b9e16"}, + {file = "coverage-7.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4a950f83fd3f9bca23b77442f3a2b2ea4ac900944d8af9993743774c4fdc57af"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c2155943896ac78b9b0fd910fb381186d0c345911f5333ee46ac44c8f0e43ab"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:54f7e9705e14b2c9f6abdeb127c390f679f6dbe64ba732788d3015f7f76ef637"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee30375b409d9a7ea0f30c50645d436b6f5dfee254edffd27e45a980ad2c7f4"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b78729038abea6a5df0d2708dce21e82073463b2d79d10884d7d591e0f385ded"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13250b1f0bd023e0c9f11838bdeb60214dd5b6aaf8e8d2f110c7e232a1bff83b"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c407b1950b2d2ffa091f4e225ca19a66a9bd81222f27c56bd12658fc5ca1209"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c76a3075e96b9c9ff00df8b5f7f560f5634dffd1658bafb79eb2682867e94f78"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f26648e1b3b03b6022b48a9b910d0ae209e2d51f50441db5dce5b530fad6d9b1"}, + {file = "coverage-7.0.5-cp39-cp39-win32.whl", hash = "sha256:ba3027deb7abf02859aca49c865ece538aee56dcb4871b4cced23ba4d5088904"}, + {file = "coverage-7.0.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:949844af60ee96a376aac1ded2a27e134b8c8d35cc006a52903fc06c24a3296f"}, + {file = "coverage-7.0.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:b9727ac4f5cf2cbf87880a63870b5b9730a8ae3a4a360241a0fdaa2f71240ff0"}, + {file = "coverage-7.0.5.tar.gz", hash = "sha256:051afcbd6d2ac39298d62d340f94dbb6a1f31de06dfaf6fcef7b759dd3860c45"}, +] +deprecated = [ {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, ] @@ -1098,8 +1105,8 @@ docutils = [ {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, ] flake8 = [ {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, @@ -1114,10 +1121,10 @@ imagesize = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = 
"iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -Jinja2 = [ +jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] @@ -1126,25 +1133,42 @@ jsonschema = [ {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, ] lazy-object-proxy = [ - {file = "lazy-object-proxy-1.8.0.tar.gz", hash = "sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0"}, - {file = "lazy_object_proxy-1.8.0-pp37-pypy37_pp73-any.whl", hash = "sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891"}, - {file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"}, - {file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"}, + {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, + {file = 
"lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, + {file = 
"lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, ] license-expression = [ {file = "license-expression-30.0.0.tar.gz", hash = "sha256:ad638292aa8493f84354909b517922cb823582c2ce2c4d880e42544a86bea8dd"}, @@ -1157,7 +1181,7 @@ markdown-it-py = [ {file = "markdown-it-py-2.1.0.tar.gz", hash = "sha256:cf7e59fed14b5ae17c0006eff14a2d9a00ed5f3a846148153899a0224e2c07da"}, {file = "markdown_it_py-2.1.0-py3-none-any.whl", hash = "sha256:93de681e5c021a432c63147656fe21790bc01231e0cd2da73626f1aa3ac0fe27"}, ] -MarkupSafe = [ +markupsafe = [ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, @@ -1204,8 +1228,8 @@ mccabe = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] mdit-py-plugins = [ - {file = "mdit-py-plugins-0.3.1.tar.gz", hash = "sha256:3fc13298497d6e04fe96efdd41281bfe7622152f9caa1815ea99b5c893de9441"}, - {file = "mdit_py_plugins-0.3.1-py3-none-any.whl", hash = "sha256:606a7f29cf56dbdfaf914acb21709b8f8ee29d857e8f29dcc33d8cb84c57bfa1"}, + {file = "mdit-py-plugins-0.3.3.tar.gz", hash = "sha256:5cfd7e7ac582a594e23ba6546a2f406e94e42eb33ae596d0734781261c251260"}, + {file = "mdit_py_plugins-0.3.3-py3-none-any.whl", hash = "sha256:36d08a29def19ec43acdcd8ba471d3ebab132e7879d442760d963f19913e04b9"}, ] mdurl = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, @@ -1220,8 +1244,8 @@ myst-parser = [ {file = "myst_parser-0.18.1-py3-none-any.whl", hash = "sha256:61b275b85d9f58aa327f370913ae1bec26ebad372cc99f3ab85c8ec3ee8d9fb8"}, ] packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, @@ -1255,9 +1279,9 @@ pyflakes = [ {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = 
"sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, ] -Pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, +pygments = [ + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, ] pykwalify = [ {file = "pykwalify-1.8.0-py2.py3-none-any.whl", hash = "sha256:731dfa87338cca9f559d1fca2bdea37299116e3139b73f78ca90a543722d6651"}, @@ -1268,28 +1292,33 @@ pyparsing = [ {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pyrsistent = [ - {file = "pyrsistent-0.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a"}, - {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a"}, - {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed"}, - {file = "pyrsistent-0.19.2-cp310-cp310-win32.whl", hash = "sha256:456cb30ca8bff00596519f2c53e42c245c09e1a4543945703acd4312949bfd41"}, - {file = "pyrsistent-0.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:b39725209e06759217d1ac5fcdb510e98670af9e37223985f330b611f62e7425"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aede922a488861de0ad00c7630a6e2d57e8023e4be72d9d7147a9fcd2d30712"}, - 
{file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879b4c2f4d41585c42df4d7654ddffff1239dc4065bc88b745f0341828b83e78"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c43bec251bbd10e3cb58ced80609c5c1eb238da9ca78b964aea410fb820d00d6"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-win32.whl", hash = "sha256:d690b18ac4b3e3cab73b0b7aa7dbe65978a172ff94970ff98d82f2031f8971c2"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:3ba4134a3ff0fc7ad225b6b457d1309f4698108fb6b35532d015dca8f5abed73"}, - {file = "pyrsistent-0.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a178209e2df710e3f142cbd05313ba0c5ebed0a55d78d9945ac7a4e09d923308"}, - {file = "pyrsistent-0.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e371b844cec09d8dc424d940e54bba8f67a03ebea20ff7b7b0d56f526c71d584"}, - {file = "pyrsistent-0.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb"}, - {file = "pyrsistent-0.19.2-cp38-cp38-win32.whl", hash = "sha256:e5d8f84d81e3729c3b506657dddfe46e8ba9c330bf1858ee33108f8bb2adb38a"}, - {file = "pyrsistent-0.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:9cd3e9978d12b5d99cbdc727a3022da0430ad007dacf33d0bf554b96427f33ab"}, - {file = "pyrsistent-0.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770"}, - {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21455e2b16000440e896ab99e8304617151981ed40c29e9507ef1c2e4314ee95"}, - {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd880614c6237243ff53a0539f1cb26987a6dc8ac6e66e0c5a40617296a045e"}, - {file = 
"pyrsistent-0.19.2-cp39-cp39-win32.whl", hash = "sha256:71d332b0320642b3261e9fee47ab9e65872c2bd90260e5d225dabeed93cbd42b"}, - {file = "pyrsistent-0.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:dec3eac7549869365fe263831f576c8457f6c833937c68542d08fde73457d291"}, - {file = "pyrsistent-0.19.2-py3-none-any.whl", hash = "sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0"}, - {file = "pyrsistent-0.19.2.tar.gz", hash = "sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2"}, + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = 
"pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, ] pytest = [ {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, @@ -1308,10 +1337,10 @@ python-debian = [ {file = "python_debian-0.1.49-py3-none-any.whl", hash = "sha256:880f3bc52e31599f2a9b432bd7691844286825087fccdcf2f6ffd5cd79a26f9f"}, ] pytz = [ - {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"}, - {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"}, + {file = "pytz-2022.7-py2.py3-none-any.whl", hash = "sha256:93007def75ae22f7cd991c84e02d434876818661f8df9ad5df9e950ff4e52cfd"}, + {file = "pytz-2022.7.tar.gz", hash = "sha256:7ccfae7b4b2c067464a6733c6261673fdb8fd1be905460396b97a073e9fa683a"}, ] -PyYAML = [ +pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, @@ -1358,14 +1387,14 @@ requests = [ {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] reuse = [ - {file = "reuse-1.0.0-py3-none-any.whl", hash = "sha256:e2605e796311c424465d741ea2a1e1ad03bbb90b921d74750119c331ca5af46e"}, - {file = "reuse-1.0.0.tar.gz", hash = "sha256:db3022be2d87f69c8f508b928023de3026f454ce17d01e22f770f7147ac1e8d4"}, + {file = "reuse-1.1.0-cp311-cp311-manylinux_2_36_x86_64.whl", hash = "sha256:b0f3fb9091ff513af04b555d14a4c529ab05f6a575ab192dd9b68244f1e0721d"}, + {file = "reuse-1.1.0.tar.gz", hash = "sha256:7a054f6e372ad02d0b1b07368030fc38746b50ed45f5422a81994e7a88b52f1f"}, ] -"ruamel.yaml" = [ +ruamel-yaml = [ {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, ] -"ruamel.yaml.clib" = [ +ruamel-yaml-clib = [ {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, @@ -1401,8 +1430,8 @@ reuse = [ {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, ] setuptools = [ - {file = "setuptools-65.6.0-py3-none-any.whl", hash = "sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840"}, - {file = "setuptools-65.6.0.tar.gz", hash = "sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d"}, + {file = 
"setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -1416,7 +1445,7 @@ soupsieve = [ {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, ] -Sphinx = [ +sphinx = [ {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, ] @@ -1440,8 +1469,8 @@ sphinx-icon = [ {file = "sphinx-icon-0.1.2.tar.gz", hash = "sha256:e4adc9922e2e2b19f97813a3994d5e6ccd01e9a21ae73b755f7114ac4247fdf5"}, ] sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, + {file = "sphinxcontrib.applehelp-1.0.3-py3-none-any.whl", hash = "sha256:ba0f2a22e6eeada8da6428d0d520215ee8864253f32facf958cca81e426f661d"}, + {file = "sphinxcontrib.applehelp-1.0.3.tar.gz", hash = "sha256:83749f09f6ac843b8cb685277dbc818a8bf2d76cc19602699094fe9a74db529e"}, ] sphinxcontrib-contentui = [ {file = "sphinxcontrib_contentui-0.2.5-py3-none-any.whl", hash = "sha256:a01c7a0cfe360c99692999d3286b6a4d93ebfc94d0eff2619622fd5e6086ab36"}, @@ -1481,6 +1510,10 @@ taskipy = [ {file = "taskipy-1.10.3-py3-none-any.whl", hash = 
"sha256:4c0070ca53868d97989f7ab5c6f237525d52ee184f9b967576e8fe427ed9d0b8"}, {file = "taskipy-1.10.3.tar.gz", hash = "sha256:112beaf21e3d5569950b99162a1de003fa885fabee9e450757a6b874be914877"}, ] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, @@ -1502,13 +1535,13 @@ typing-extensions = [ {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, ] -Unidecode = [ +unidecode = [ {file = "Unidecode-1.3.6-py3-none-any.whl", hash = "sha256:547d7c479e4f377b430dd91ac1275d593308dce0fc464fb2ab7d41f82ec653be"}, {file = "Unidecode-1.3.6.tar.gz", hash = "sha256:fed09cf0be8cf415b391642c2a5addfc72194407caee4f98719e40ec2a72b830"}, ] urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, + {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, + {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, ] wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, diff --git a/pyproject.toml b/pyproject.toml 
index b4c94746..c8f37635 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,9 +9,24 @@ name = "hermes" version = "0.1.0" description = "Workflow to publish research software with rich metadata" -authors = ["Stephan Druskat ", - "Michael Meinel ", - "Oliver Bertuch "] +homepage = "https://software-metadata.pub" +license = "Apache-2.0" +authors = [ + "Stephan Druskat ", + "Michael Meinel ", + "Oliver Bertuch ", + "Jeffrey Kelling ", + "Oliver Knodel " +] + +readme = "README.md" +repository = "https://github.com/hermes-hmc/workflow" +documentation = "https://docs.software-metadata.pub" +keywords = ["publishing", "metadata", "automation"] + +include = [ + "hermes/schema/*.json", +] [tool.poetry.dependencies] python = "^3.10" @@ -19,6 +34,8 @@ click = "^8.1" "ruamel.yaml" = "^0.17.21" jsonschema = "^3.0.0" cffconvert = "^2.0.0" +toml = "^0.10.2" +pyparsing = "^3.0.9" # Packages for developers [tool.poetry.group.dev.dependencies] @@ -33,24 +50,75 @@ optional = true [tool.poetry.group.docs.dependencies] Sphinx = "^4.5.0" +# Sphinx - Additional modules myst-parser = "^0.18.0" -sphinx-autobuild = "^2021.3.14" sphinx-book-theme = "^0.3.3" -# Sphinx - Additional modules -sphinx-autoapi = "^2.0.0" +sphinx-favicon = "^0.2" sphinxcontrib-contentui = "^0.2.5" sphinxcontrib-images = "^0.9.4" sphinx-icon = "^0.1.2" -sphinx-favicon = "^0.2" +sphinx-autobuild = "^2021.3.14" +sphinx-autoapi = "^2.0.0" sphinxemoji = "^0.2.0" sphinxext-opengraph = "^0.6.3" reuse = "^1.0.0" [tool.poetry.plugins.console_scripts] -haggis = "hermes.cli:haggis" +hermes = "hermes.cli:main" [tool.poetry.plugins."hermes.harvest"] -cff = "hermes.commands.harvest.cff:harvest_cff" +000_cff = "hermes.commands.harvest.cff:harvest_cff" +010_codemeta = "hermes.commands.harvest.codemeta:harvest_codemeta" +020_git = "hermes.commands.harvest.git:harvest_git" + +[tool.poetry.plugins."hermes.preprocess"] +000_cff = "hermes.commands.process.cff:add_name" +020_git = "hermes.commands.process.git:flag_authors" + + + 
+[tool.hermes.logging.formatters.plain] +format = "%(message)s" + +[tool.hermes.logging.formatters.logfile] +format = "%(created)16f:%(name)20s:%(levelname)10s | %(message)s" + +[tool.hermes.logging.formatters.auditlog] +format = "%(asctime)s %(name)-20s %(message)s" + +[tool.hermes.logging] +version = 1 + +[tool.hermes.logging.handlers.terminal] +class = "logging.StreamHandler" +formatter = "plain" +level = "INFO" +stream = "ext://sys.stdout" + +[tool.hermes.logging.handlers.logfile] +class = "logging.FileHandler" +formatter = "logfile" +level = "DEBUG" +filename = "hermes.log" + +[tool.hermes.logging.handlers.auditfile] +class = "logging.FileHandler" +formatter = "plain" +level = "DEBUG" +filename = "hermes-audit.md" +mode = "w" + +[tool.hermes.logging.loggers.cli] +level = "DEBUG" +handlers = ["terminal"] + +[tool.hermes.logging.loggers.hermes] +level = "DEBUG" +handlers = ["terminal", "logfile"] + +[tool.hermes.logging.loggers.audit] +level = "DEBUG" +handlers = ["terminal", "auditfile"] [tool.taskipy.tasks] docs-build = "poetry run sphinx-build -M html docs/source docs/build -W" diff --git a/src/hermes/__main__.py b/src/hermes/__main__.py index 4c599a23..b9400fcb 100644 --- a/src/hermes/__main__.py +++ b/src/hermes/__main__.py @@ -4,8 +4,8 @@ # SPDX-FileContributor: Michael Meinel -from hermes.cli import haggis +from hermes.cli import main if __name__ == '__main__': - haggis() + main() diff --git a/src/hermes/cli.py b/src/hermes/cli.py index febe8765..d7519fff 100644 --- a/src/hermes/cli.py +++ b/src/hermes/cli.py @@ -8,12 +8,40 @@ """ This module provides the main entry point for the HERMES command line application. 
""" +import logging import typing as t import pathlib +from importlib import metadata import click +from hermes import config from hermes.commands import workflow +from hermes.config import configure, init_logging + + +def log_header(header, summary=None): + _log = config.getLogger('cli') + + dist = metadata.distribution('hermes') + meta = dist.metadata + + if header is None: + title = f"{dist.name} workflow ({dist.version})" + + _log.info(title) + _log.info("=" * len(title)) + _log.info('') + + if 'Summary' in meta: + _log.info('%s', meta['Summary']) + _log.info('') + + else: + _log.info("%s", header) + if summary: + _log.info("%s", summary) + _log.info('') class WorkflowCommand(click.Group): @@ -56,6 +84,21 @@ def invoke(self, ctx: click.Context) -> t.Any: :param ctx: Context for the command. """ + configure() + init_logging() + log_header(None) + + audit_log = logging.getLogger('audit') + audit_log.info("# Running Hermes") + audit_log.info("Running Hermes command line in: %s", ctx.params.get('path', pathlib.Path.cwd()).absolute()) + audit_log.debug("") + audit_log.debug("Invoked `%s` with", ctx.invoked_subcommand or self.name) + audit_log.debug("") + for k, v in ctx.params.items(): + audit_log.debug("`--%s`", k) + audit_log.debug(": `%s`", v) + audit_log.debug("") + if ctx.protected_args: return super().invoke(ctx) @@ -97,10 +140,10 @@ def _process_result(value: t.Any) -> t.Any: @click.group(cls=WorkflowCommand, invoke_without_command=True) @click.option("--deposit", is_flag=True, default=False) -@click.option("--post", is_flag=True, default=False) +@click.option("--postprocess", is_flag=True, default=False) @click.option('--path', default=pathlib.Path('./'), help='Working path', type=pathlib.Path) @click.pass_context -def haggis(ctx: click.Context, *args, **kwargs) -> None: +def main(ctx: click.Context, *args, **kwargs) -> None: """ HERMES aggregated interface script @@ -110,7 +153,7 @@ def haggis(ctx: click.Context, *args, **kwargs) -> None: pass 
-haggis.add_command(workflow.harvest) -haggis.add_command(workflow.process) -haggis.add_command(workflow.deposit) -haggis.add_command(workflow.post) +main.add_command(workflow.harvest) +main.add_command(workflow.process) +main.add_command(workflow.deposit) +main.add_command(workflow.postprocess) diff --git a/src/hermes/commands/harvest/cff.py b/src/hermes/commands/harvest/cff.py index c9e1d643..d5aa692d 100644 --- a/src/hermes/commands/harvest/cff.py +++ b/src/hermes/commands/harvest/cff.py @@ -5,9 +5,8 @@ # SPDX-FileContributor: Stephan Druskat # SPDX-FileContributor: Michael Meinel -import collections -import glob import json +import logging import pathlib import urllib.request import typing as t @@ -18,13 +17,16 @@ import click from cffconvert import Citation -from hermes.model.context import HermesHarvestContext +from hermes.model.context import HermesHarvestContext, ContextPath from hermes.model.errors import HermesValidationError # TODO: should this be configurable via a CLI option? _CFF_VERSION = '1.2.0' +_log = logging.getLogger('cli.harvest.cff') + + def harvest_cff(click_ctx: click.Context, ctx: HermesHarvestContext): """ Implementation of a harvester that provides data from CFF in Codemeta format. @@ -33,6 +35,10 @@ def harvest_cff(click_ctx: click.Context, ctx: HermesHarvestContext): :param ctx: The harvesting context that should contain the provided metadata. 
""" # Get the parent context (every subcommand has its own context with the main click context as parent) + audit_log = logging.getLogger('audit.cff') + audit_log.info('') + audit_log.info("## Citation File Format") + parent_ctx = click_ctx.parent if parent_ctx is None: raise RuntimeError('No parent context!') @@ -70,6 +76,8 @@ def _convert_cff_to_codemeta(cff_data: str) -> t.Any: def _validate(cff_file: pathlib.Path, cff_dict: t.Dict) -> bool: + audit_log = logging.getLogger('audit.cff') + cff_schema_url = f'https://citation-file-format.github.io/{_CFF_VERSION}/schema.json' # TODO: we should ship the schema we reference to by default to avoid unnecessary network traffic. @@ -80,43 +88,38 @@ def _validate(cff_file: pathlib.Path, cff_dict: t.Dict) -> bool: validator = jsonschema.Draft7Validator(schema_data) errors = sorted(validator.iter_errors(cff_dict), key=lambda e: e.path) if len(errors) > 0: - click.echo(f'{cff_file} is not valid according to {cff_schema_url}!') + audit_log.warning('!!! warning "%s is not valid according to <%s>"', cff_file, cff_schema_url) + for error in errors: - path_str = _build_nodepath_str(error.absolute_path) - click.echo(f' - Invalid input for path {path_str}.\n' - f' Value: {error.instance} -> {error.message}') - click.echo(f' See the Citation File Format schema guide for further details: ' - f'https://github.com/citation-file-format/citation-file-format/blob/{_CFF_VERSION}/schema' - f'-guide.md.') + path = ContextPath.make(error.absolute_path or ['root']) + audit_log.info(' Invalid input for `%s`.', str(path)) + audit_log.info(' !!! message "%s"', error.message) + audit_log.debug(' !!! 
value "%s"', error.instance) + + audit_log.info('') + audit_log.info('See the Citation File Format schema guide for further details:') + audit_log.info( + f'.') return False + elif len(errors) == 0: - click.echo(f'Found valid Citation File Format file at: {cff_file}') + audit_log.info('- Found valid Citation File Format file at: %s', cff_file) return True def _get_single_cff(path: pathlib.Path) -> t.Optional[pathlib.Path]: # Find CFF files in directories and subdirectories + cff_file = path / 'CITATION.cff' + if cff_file.exists(): + return cff_file + # TODO: Do we really want to search recursive? CFF convention is the file should be at the topmost dir, # which is given via the --path arg. Maybe add another option to enable pointing to a single file? # (So this stays "convention over configuration") - files = glob.glob(str(path / '**' / 'CITATION.cff'), recursive=True) + files = list(path.rglob('**/CITATION.cff')) if len(files) == 1: return pathlib.Path(files[0]) # TODO: Shouldn't we log/echo the found CFF files so a user can debug/cleanup? # TODO: Do we want to hand down a logging instance via Hermes context or just encourage # peeps to use the Click context? return None - - -def _build_nodepath_str(absolute_path: collections.deque) -> str: - # Path deque starts with field name, then index, then field name, etc. 
- path_str = "'" - for index, value in enumerate(absolute_path): - if index == 0: # First value - path_str += f'{value}' - elif index % 2 == 0: # value is a field name - path_str += f' -> {value}' - else: # Value is an index - path_str += f' {int(value) + 1}' # Use index starting at 1 - path_str += "'" - return path_str diff --git a/src/hermes/commands/harvest/codemeta.py b/src/hermes/commands/harvest/codemeta.py new file mode 100644 index 00000000..a1aeb44c --- /dev/null +++ b/src/hermes/commands/harvest/codemeta.py @@ -0,0 +1,63 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Stephan Druskat +# SPDX-FileContributor: Michael Meinel + +import glob +import json +import pathlib +import typing as t + +import click + +from hermes.model.context import HermesHarvestContext +from hermes.model.errors import HermesValidationError + + +def harvest_codemeta(click_ctx: click.Context, ctx: HermesHarvestContext): + """ + Implementation of a harvester that provides data from a codemeta.json file format. + + :param click_ctx: Click context that this command was run inside (might be used to extract command line arguments). + :param ctx: The harvesting context that should contain the provided metadata. + """ + # Get the parent context (every subcommand has its own context with the main click context as parent) + parent_ctx = click_ctx.parent + if parent_ctx is None: + raise RuntimeError('No parent context!') + path = parent_ctx.params['path'] + + # Get source files + codemeta_file = _get_single_codemeta(path) + if not codemeta_file: + raise HermesValidationError(f'{path} contains either no or more than 1 codemeta.json file. 
Aborting harvesting ' + f'for this metadata source.') + + # Read the content + codemeta_str = codemeta_file.read_text() + + if not _validate(codemeta_file): + raise HermesValidationError(codemeta_file) + + codemeta = json.loads(codemeta_str) + ctx.update_from(codemeta, local_path=str(codemeta_file)) + + +def _validate(codemeta_file: pathlib.Path) -> bool: + # TODO: Implement + return codemeta_file.exists() + + +def _get_single_codemeta(path: pathlib.Path) -> t.Optional[pathlib.Path]: + # Find CodeMeta files in directories and subdirectories + # TODO: Do we really want to search recursive? Maybe add another option to enable pointing to a single file? + # (So this stays "convention over configuration") + files = glob.glob(str(path / '**' / 'codemeta.json'), recursive=True) + if len(files) == 1: + return pathlib.Path(files[0]) + # TODO: Shouldn't we log/echo the found CFF files so a user can debug/cleanup? + # TODO: Do we want to hand down a logging instance via Hermes context or just encourage + # peeps to use the Click context? + return None diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py new file mode 100644 index 00000000..2b9182e7 --- /dev/null +++ b/src/hermes/commands/harvest/git.py @@ -0,0 +1,314 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Jeffrey Kelling +# SPDX-FileContributor: Michael Meinel + +import logging +import os +import pathlib +import typing as t + +import click +import subprocess +import shutil + +from hermes.model.context import HermesHarvestContext + + +_log = logging.getLogger('harvest.git') + + +# TODO: can and should we get this somehow? +SHELL_ENCODING = 'utf-8' + +_GIT_SEP = '|' +_GIT_FORMAT = ['%aN', '%aE', '%aI'] +_GIT_ARGS = [] + + +# TODO The following code contains a lot of duplicate implementation that can be found in hermes.model +# (In fact, it was kind of the prototype for lots of stuff there.) 
+# Clean up and refactor to use hermes.model instead + +class ContributorData: + """ + Stores contributor data information from Git history. + """ + + def __init__(self, name: str | t.List[str], email: str | t.List[str], timestamp: str | t.List[str]): + """ + Initialize a new contributor dataset. + + :param name: Name as returned by the `git log` command (i.e., with `.mailmap` applied). + :param email: Email address as returned by the `git log` command (also with `.mailmap` applied). + :param timestamp: Timestamp when the respective commit was done. + """ + self.name = [] + self.email = [] + self.timestamp = [] + + self.update(name=name, email=email, timestamp=timestamp) + + def __str__(self): + parts = [] + if self.name: + parts.append(self.name[0]) + if self.email: + parts.append(f'<{self.email[0]}>') + return f'"{" ".join(parts)}"' + + def _update_attr(self, target, value, unique=True): + match value: + case list(): + target.extend([v for v in value if not unique or v not in target]) + case str() if not unique or value not in target: + target.append(value) + + def update(self, name=None, email=None, timestamp=None): + """ + Update the current contributor with the given data. + + :param name: New name to assign (addtionally). + :param email: New email to assign (additionally). + :param timestamp: New timestamp to adapt time range. + """ + self._update_attr(self.name, name) + self._update_attr(self.email, email) + self._update_attr(self.timestamp, timestamp, unique=False) + + def merge(self, other: 'ContributorData'): + """ + Merge another :py:class:`ContributorData` instance into this one. + + All attributes will be merged yet kept unique if required. + + :param other: The other instance that should contribute to this. 
+ """ + self.name += [n for n in other.name if n not in self.name] + self.email += [e for e in other.email if e not in self.email] + self.timestamp += other.timestamp + + def to_codemeta(self) -> dict: + """ + Return the current dataset as CodeMeta. + + :return: The CodeMeta representation of this dataset. + """ + res = { + '@type': 'Person', + } + + if self.name: + res['name'] = self.name.pop() + if self.name: + res['alternateName'] = list(self.name) + + if self.email: + res['email'] = self.email.pop(0) + if self.email: + res['contactPoint'] = [{'@type': 'ContactPoint', 'email': email} for email in self.email] + + if self.timestamp: + timestamp_start, *_, timestamp_end = sorted(self.timestamp + [self.timestamp[0]]) + res['startTime'] = timestamp_start + res['endTime'] = timestamp_end + + return res + + @classmethod + def from_codemeta(cls, data) -> 'ContributorData': + """ + Initialize a new instance from CodeMeta representation. + + :param data: The CodeMeta dataset to initialize from. + :return: The newly created instance. + """ + name = [data['name']] + data.get('alternateName', []) + email = [data['email']] + [contact['email'] for contact in data.get('contactPoint', [])] + timestamp = [data['startTime'], data['endTime']] + return cls(name, email, timestamp) + + +class NodeRegister: + """ + Helper class to unify Git commit authors / contributors. + + This class keeps track of all registered instances and merges two :py:class:`ContributorData` instances if some + attributes match. + """ + + def __init__(self, cls, *order, **mapping): + """ + Initalize a new register. + + :param cls: Type of objects to store. + :param order: The order of attributes to compare. + :param mapping: A mapping to convert attributes (will be applied for comparison). + """ + self.cls = cls + self.order = order + self.mapping = mapping + self._all = [] + self._node_by = {key: {} for key in self.order} + + def add(self, node: t.Any): + """ + Add (or merge) a new node to the register. 
+ :param node: The node that should be added. + """ + self._all.append(node) + + for key in self.order: + mapping = self.mapping.get(key, lambda x: x) + attr = getattr(node, key, None) + match attr: + case None: + continue + case list(): + for value in attr: + self._node_by[key][mapping(value)] = node + + def update(self, **kwargs): + """ + Add (or merge) a new item to the register with the given attribute values. + + :fixme: This is not a good implementation strategy at all. + + :param kwargs: The attribute values to be stored. + """ + missing = [] + tail = list(self.order) + while tail: + key, *tail = tail + if key not in kwargs: + continue + + arg = kwargs[key] + node = self._node_by[key].get(arg, None) + if node is None: + missing.append((key, arg)) + continue + + node.update(**kwargs) + break + else: + node = self.cls(**kwargs) + self._all.append(node) + + for key in tail: + if key not in kwargs: + continue + + arg = kwargs[key] + alt_node = self._node_by[key].get(arg, None) + if alt_node is None: + missing.append((key, arg)) + + elif alt_node != node: + node.merge(alt_node) + self._all.remove(alt_node) + self._node_by[key][arg] = node + + for key, arg in missing: + self._node_by[key][arg] = node + + +def _audit_authors(authors, audit_log: logging.Logger): + # Collect all authors that have ambiguous data + unmapped_authors = [a for a in authors._all if len(a.email) > 1 or len(a.name) > 1] + + if unmapped_authors: + # Report to the audit about our findings + audit_log.warning('!!! 
warning "You have unmapped authors in your Git history."') + for author in unmapped_authors: + if len(author.email) > 1: + audit_log.info(" - %s has alternate email: %s", str(author), ', '.join(author.email[1:])) + if len(author.name) > 1: + audit_log.info(" - %s has alternate names: %s", str(author), ', '.join(author.name[1:])) + audit_log.warning('') + + audit_log.info( + "Please consider adding a `.maillog` file to your repository to disambiguate these contributors.") + audit_log.info('') + audit_log.info('``` .mailmap') + + audit_log.info('```') + + +def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): + """ + Implementation of a harvester that provides autor data from Git. + + :param click_ctx: Click context that this command was run inside (might be used to extract command line arguments). + :param ctx: The harvesting context that should contain the provided metadata. + """ + _log = logging.getLogger('cli.harvest.git') + audit_log = logging.getLogger('audit.cff') + audit_log.info('') + audit_log.info("## Git History") + + # Get the parent context (every subcommand has its own context with the main click context as parent) + parent_ctx = click_ctx.parent + if parent_ctx is None: + raise RuntimeError('No parent context!') + + _log.debug(". 
Get history of currently checked-out branch") + + authors = NodeRegister(ContributorData, 'email', 'name', email=str.upper) + try: + for author_data in ctx.get_data().get('author', []): + authors.add(ContributorData.from_codemeta(author_data)) + except ValueError: + pass + + git_exe = shutil.which('git') + if not git_exe: + raise RuntimeError('Git not available!') + + path = parent_ctx.params['path'] + old_path = pathlib.Path.cwd() + if path != old_path: + os.chdir(path) + + p = subprocess.run([git_exe, "rev-parse", "--abbrev-ref", "HEAD"], capture_output=True) + if p.returncode: + raise RuntimeError(f"`git branch` command failed with code {p.returncode}: " + f"'{p.stderr.decode(SHELL_ENCODING)}'!") + git_branch = p.stdout.decode(SHELL_ENCODING).strip() + # TODO: should we warn or error if the HEAD is detached? + + p = subprocess.run([git_exe, "log", f"--pretty={_GIT_SEP.join(_GIT_FORMAT)}"] + _GIT_ARGS, capture_output=True) + if p.returncode: + raise RuntimeError(f"`git log` command failed with code {p.returncode}: " + f"'{p.stderr.decode(SHELL_ENCODING)}'!") + + log = p.stdout.decode(SHELL_ENCODING).split('\n') + for line in log: + try: + name, email, timestamp = line.split(_GIT_SEP) + except ValueError: + continue + + authors.update(email=email, name=name, timestamp=timestamp) + + _audit_authors(authors, logging.getLogger('audit.git')) + + ctx.update_from({ + '@context': [ + "https://doi.org/10.5063/schema/codemeta-2.0", + {'hermes': 'https://software-metadata.pub/ns/hermes/'} + ], + + '@type': "SoftwareSourceCode", + 'author': [author.to_codemeta() for author in authors._all], + }, branch=git_branch) + + try: + ctx.get_data() + except ValueError: + audit_log.error('!!! 
warning "Inconsistent data"') + audit_log.info(' The data collected from git is ambiguous.') + audit_log.info(' Consider deleting `%s` to avoid problems.', ctx.hermes_dir) + audit_log.error('') diff --git a/src/hermes/commands/process/__init__.py b/src/hermes/commands/process/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/hermes/commands/process/cff.py b/src/hermes/commands/process/cff.py new file mode 100644 index 00000000..88dc2b9b --- /dev/null +++ b/src/hermes/commands/process/cff.py @@ -0,0 +1,32 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + +import logging + +from hermes.model.context import HermesHarvestContext, ContextPath, CodeMetaContext + + +def add_name(ctx: CodeMetaContext, harvest_ctx: HermesHarvestContext): + """ + Augment each author with a `name` attribute (if not present). + + This will allow better matching against the git authors and can be removed in a post-processing step. + + :param ctx: The resulting context that should contain the harmonized data. + :param harvest_ctx: The harvest context containing all raw harvested data. 
+ """ + audit_log = logging.getLogger('audit.cff') + audit_log.info('') + audit_log.info('### Add author names') + + data = harvest_ctx.get_data() + author_path = ContextPath('author') + + for i, author in enumerate(data.get('author', [])): + if 'name' not in author: + harvest_ctx.update(str(author_path[i]["name"]), f"{author['givenName']} {author['familyName']}", + stage='preprocess') + audit_log.debug(f"- {author['givenName']} {author['familyName']}") diff --git a/src/hermes/commands/process/git.py b/src/hermes/commands/process/git.py new file mode 100644 index 00000000..832fea66 --- /dev/null +++ b/src/hermes/commands/process/git.py @@ -0,0 +1,49 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + +import logging + +from hermes.model.context import CodeMetaContext, HermesHarvestContext, ContextPath + + +_AUTHOR_KEYS = ('@id', 'email', 'name') + + +def flag_authors(ctx: CodeMetaContext, harverst_ctx: HermesHarvestContext): + """ + Identify all authors that are not yet in the target context and flag them with role `Contributor`. + + :param ctx: The target context containting harmonized data. + :param harverst_ctx: Data as it was harvested. 
+ """ + audit_log = logging.getLogger('audit.git') + audit_log.info('') + audit_log.info('### Flag new authors') + + author_path = ContextPath('author') + contributor_path = ContextPath('contributor') + + tags = {} + try: + data = harverst_ctx.get_data(tags=tags) + except ValueError: + audit_log.info("- Inconsistent data, skipping.") + return + + for i, contributor in enumerate(author_path.get_from(data)): + query = {k: contributor[k] for k in _AUTHOR_KEYS if k in contributor} + author_key, target, path = author_path['*'].resolve(ctx._data, query=query) + + if author_key._item == '*': + audit_log.debug('- %s', contributor['name']) + if contributor_path not in ctx.keys(): + ctx.update(contributor_path, []) + ctx.update(contributor_path['*'], contributor, tags=tags) + else: + ctx.update(author_key, contributor, tags=tags) + + ctx.tags.update(tags) + harverst_ctx.finish() diff --git a/src/hermes/commands/workflow.py b/src/hermes/commands/workflow.py index f02a3d06..78189c11 100644 --- a/src/hermes/commands/workflow.py +++ b/src/hermes/commands/workflow.py @@ -5,11 +5,14 @@ # SPDX-FileContributor: Stephan Druskat # SPDX-FileContributor: Michael Meinel +import json +import logging from importlib import metadata import click -from hermes.model.context import HermesContext, HermesHarvestContext +from hermes.model.context import HermesContext, HermesHarvestContext, CodeMetaContext +from hermes.model.errors import MergeError @click.group(invoke_without_command=True) @@ -18,7 +21,9 @@ def harvest(click_ctx: click.Context): """ Automatic harvest of metadata """ - click.echo("Metadata harvesting") + _log = logging.getLogger('cli.harvest') + audit_log = logging.getLogger('audit') + audit_log.info("# Metadata harvesting") # Create Hermes context (i.e., all collected metadata for all stages...) 
ctx = HermesContext() @@ -26,9 +31,18 @@ def harvest(click_ctx: click.Context): # Get all harvesters harvesters = metadata.entry_points(group='hermes.harvest') for harvester in harvesters: + _log.info("- Running harvester %s", harvester.name) + + _log.debug(". Loading harvester from %s", harvester.value) + harvest = harvester.load() + with HermesHarvestContext(ctx, harvester) as harvest_ctx: - harvest = harvester.load() harvest(click_ctx, harvest_ctx) + for _key, ((_value, _tag), *_trace) in harvest_ctx._data.items(): + if any(v != _value and t == _tag for v, t in _trace): + raise MergeError(_key, None, _value) + _log.info('') + audit_log.info('') @click.group(invoke_without_command=True) @@ -36,7 +50,46 @@ def process(): """ Process metadata and prepare it for deposition """ - click.echo("Metadata processing") + _log = logging.getLogger('cli.process') + + audit_log = logging.getLogger('audit') + audit_log.info("# Metadata processing") + + ctx = CodeMetaContext() + + harvesters = metadata.entry_points(group='hermes.harvest') + for harvester in harvesters: + audit_log.info("## Process data from %s", harvester.name) + + harvest_context = HermesHarvestContext(ctx, harvester) + harvest_context.load_cache() + + processors = metadata.entry_points(group='hermes.preprocess', name=harvester.name) + for processor in processors: + _log.debug(". Loading context processor %s", processor.value) + process = processor.load() + + _log.debug(". Apply processor %s", processor.value) + process(ctx, harvest_context) + + ctx.merge_from(harvest_context) + _log.info('') + audit_log.info('') + + if ctx._errors: + audit_log.error('!!! 
warning "Errors during merge"') + + for ep, error in ctx._errors: + audit_log.info(' - %s: %s', ep.name, error) + + tags_path = ctx.get_cache('process', 'tags', create=True) + with tags_path.open('w') as tags_file: + json.dump(ctx.tags, tags_file, indent=' ') + + with open('codemeta.json', 'w') as codemeta_file: + json.dump(ctx._data, codemeta_file, indent=' ') + + logging.shutdown() @click.group(invoke_without_command=True) @@ -48,8 +101,8 @@ def deposit(): @click.group(invoke_without_command=True) -def post(): +def postprocess(): """ - Post-process metadata after deposition + Postprocess metadata after deposition """ click.echo("Post-processing") diff --git a/src/hermes/config.py b/src/hermes/config.py new file mode 100644 index 00000000..3ddd63d7 --- /dev/null +++ b/src/hermes/config.py @@ -0,0 +1,56 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + +# TODO this file contains only dummy implementations which in most cases will lead to a crash... 
+import logging +import toml + + +_config = {} + + +def configure(): + if _config: + return + + # Load configuration if not present + with open('pyproject.toml', 'r') as config_file: + config_toml = toml.load(config_file) + hermes_config = config_toml['tool']['hermes'] + _config['hermes'] = hermes_config + _config['logging'] = hermes_config['logging'] + + +def get(name): + if name not in _config: + _config['hermes'][name] = {} + _config[name] = _config['hermes'][name] + + return _config.get(name) + + +_loggers = {} + + +def init_logging(): + if _loggers: + return + + # Inintialize logging system + import logging.config + + configure() + + logging.config.dictConfig(_config['logging']) + for log_name in _config['logging']['loggers']: + _loggers[log_name] = logging.getLogger(log_name) + + +def getLogger(log_name): + init_logging() + if log_name not in _loggers: + _loggers[log_name] = logging.getLogger(log_name) + return _loggers.get(log_name) diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py index df0ef384..2d4aa527 100644 --- a/src/hermes/model/context.py +++ b/src/hermes/model/context.py @@ -4,6 +4,8 @@ # SPDX-FileContributor: Michael Meinel +import datetime +import pathlib import traceback import json import logging @@ -12,12 +14,17 @@ from pathlib import Path from importlib.metadata import EntryPoint +from hermes.model import errors +from hermes.model.path import ContextPath from hermes.model.errors import HermesValidationError _log = logging.getLogger(__name__) +ContextPath.init_merge_strategies() + + class HermesContext: """ The HermesContext stores the metadata for a certain project. @@ -43,6 +50,12 @@ def __init__(self, project_dir: t.Optional[Path] = None): self._data = {} self._errors = [] + def keys(self) -> t.List[ContextPath]: + """ + Get all the keys for the data stored in this context. 
+ """ + return [ContextPath.parse(k) for k in self._data.keys()] + def get_cache(self, *path: str, create: bool = False) -> Path: """ Retrieve a cache file for a given *path*. @@ -63,7 +76,7 @@ def get_cache(self, *path: str, create: bool = False) -> Path: cache_dir = self.hermes_dir.joinpath(*subdir) if create: cache_dir.mkdir(parents=True, exist_ok=True) - data_file = cache_dir / name + data_file = cache_dir / (name + '.json') self._caches[path] = data_file return data_file @@ -81,6 +94,19 @@ def update(self, _key: str, _value: t.Any, **kwargs: t.Any): pass + def get_data(self, + data: t.Optional[dict] = None, + path: t.Optional['ContextPath'] = None, + tags: t.Optional[dict] = None) -> dict: + if data is None: + data = {} + if path is not None: + data.update(path.get_from(self._data)) + else: + for key in self.keys(): + data.update(key.get_from(self._data)) + return data + def error(self, ep: EntryPoint, error: Exception): """ Add an error that occurred during processing to the error log. @@ -134,7 +160,7 @@ def store_cache(self): data_file = self.get_cache('harvest', self._ep.name, create=True) self._log.debug("Writing cache to %s...", data_file) - json.dump(self._data, data_file.open('w')) + json.dump(self._data, data_file.open('w'), indent=2) def __enter__(self): self.load_cache() @@ -169,28 +195,43 @@ def update(self, _key: str, _value: t.Any, **kwargs: t.Any): See :py:meth:`HermesContext.update` for more information. 
""" + timestamp = kwargs.pop('timestamp', datetime.datetime.now().isoformat(timespec='seconds')) + harvester = kwargs.pop('harvester', self._ep.name) + if _key not in self._data: self._data[_key] = [] for entry in self._data[_key]: - if entry[1] == kwargs: - self._log.debug("Update %s: %s -> %s (%s)", _key, entry[0], _value, entry[1]) + value, tag = entry + tag_timestamp = tag.pop('timestamp') + tag_harvester = tag.pop('harvester') + + if tag == kwargs: + self._log.debug("Update %s: %s -> %s (%s)", _key, str(value), _value, str(tag)) entry[0] = _value + tag['timestamp'] = timestamp + tag['harvester'] = harvester break + + tag['timestamp'] = tag_timestamp + tag['harvester'] = tag_harvester + else: + kwargs['timestamp'] = timestamp + kwargs['harvester'] = harvester self._data[_key].append([_value, kwargs]) - def _update_key_from(self, _key: str, _value: t.Any, **kwargs): + def _update_key_from(self, _key: ContextPath, _value: t.Any, **kwargs): if isinstance(_value, dict): for key, value in _value.items(): - self._update_key_from(f'{_key}.{key}', value, **kwargs) + self._update_key_from(_key[key], value, **kwargs) elif isinstance(_value, (list, tuple)): for index, value in enumerate(_value): - self._update_key_from(f'{_key}[{index}]', value, **kwargs) + self._update_key_from(_key[index], value, **kwargs) else: - self.update(_key, _value, **kwargs) + self.update(str(_key), _value, **kwargs) def update_from(self, data: t.Dict[str, t.Any], **kwargs: t.Any): """ @@ -216,7 +257,7 @@ def update_from(self, data: t.Dict[str, t.Any], **kwargs: t.Any): """ for key, value in data.items(): - self._update_key_from(key, value, **kwargs) + self._update_key_from(ContextPath(key), value, **kwargs) def error(self, ep: EntryPoint, error: Exception): """ @@ -225,3 +266,89 @@ def error(self, ep: EntryPoint, error: Exception): ep = ep or self._ep self._base.error(ep, error) + + def _check_values(self, path, values): + (value, tag), *values = values + for alt_value, alt_tag in values: + if 
value != alt_value: + raise ValueError(f'{path}') + return value, tag + + def get_data(self, + data: t.Optional[dict] = None, + path: t.Optional['ContextPath'] = None, + tags: t.Optional[dict] = None) -> dict: + """ + Retrieve the data from a given path. + + This method can be used to extract data and whole sub-trees from the context. + If you want a complete copy of the data, you can also call this method without giving a path. + + :param data: Optional a target dictionary where the data is stored. If not given, a new one is created. + :param path: The path to extract data from. + :param tags: An optional dictionary to collect the tags that belong to the extracted data. + The full path will be used as key for this dictionary. + :return: The extracted data (i.e., the `data` parameter if it was given). + """ + if data is None: + data = {} + for key, values in self._data.items(): + key = ContextPath.parse(key) + if path is None or key in path: + value, tag = self._check_values(key, values) + try: + key.update(data, value, tags, **tag) + if tags is not None and tag: + tags[str(key)] = tag + except errors.MergeError as e: + self.error(self._ep, e) + return data + + def finish(self): + """ + Calling this method will lead to further processors not handling the context anymore. 
+ """ + self._data.clear() + + +class CodeMetaContext(HermesContext): + _PRIMARY_ATTR = { + 'author': ('@id', 'email', 'name'), + } + + def __init__(self, project_dir: pathlib.Path | None = None): + super().__init__(project_dir) + self.tags = {} + + def merge_from(self, other: HermesHarvestContext): + other.get_data(self._data, tags=self.tags) + + def update(self, _key: ContextPath, _value: t.Any, tags: t.Dict[str, t.Dict] | None = None): + if _key._item == '*': + _item_path, _item, _path = _key.resolve(self._data, query=_value, create=True) + if tags: + _tags = {k.lstrip(str(_key) + '.'): t for k, t in tags.items() if ContextPath.parse(k) in _key} + else: + _tags = {} + _path._set_item(_item, _path, _value, **_tags) + if tags is not None and _tags: + for k, v in _tags.items(): + if not v: + continue + + if _key: + tag_key = str(_key) + '.' + k + else: + tag_key = k + tags[tag_key] = v + else: + _key.update(self._data, _value, tags) + + def find_key(self, item, other): + data = item.get_from(self._data) + + for i, node in enumerate(data): + match = [(k, node[k]) for k in self._PRIMARY_ATTR.get(str(item), ('@id',)) if k in node] + if any(other.get(k, None) == v for k, v in match): + return item[i] + return None diff --git a/src/hermes/model/errors.py b/src/hermes/model/errors.py index 9893cd85..24c3ad64 100644 --- a/src/hermes/model/errors.py +++ b/src/hermes/model/errors.py @@ -4,6 +4,11 @@ # SPDX-FileContributor: Michael Meinel +import typing as t + +from hermes.model import path as path_model + + class HermesValidationError(Exception): """ This exception should be thrown when input validation (e.g., during harvest) occurs. @@ -19,3 +24,23 @@ class HermesValidationError(Exception): """ pass + + +class MergeError(Exception): + """ + This exception should be raised when there is an error during a merge / set operation. + """ + def __init__(self, path: path_model.ContextPath, old_Value: t.Any, new_value: t.Any, **kwargs): + """ + Create a new merge incident. 
+ + :param path: The path where the merge error occurred. + :param old_Value: Old value that was stored at `path`. + :param new_value: New value that was to be assigned. + :param kwargs: Tag data for the new value. + """ + self.path = path + self.old_value = old_Value + self.new_value = new_value + self.tag = kwargs + super().__init__(f'Error merging {self.path} (ambiguous values "{self.old_value}" and "{self.new_value}")') diff --git a/src/hermes/model/merge.py b/src/hermes/model/merge.py new file mode 100644 index 00000000..578dc477 --- /dev/null +++ b/src/hermes/model/merge.py @@ -0,0 +1,179 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + +from hermes.model.path import ContextPath + + +class MergeStrategies: + def __init__(self): + self._strategies = [] + + def select(self, **kwargs): + fitting_strategies = [ + strategy + for strategy in self._strategies + if strategy.can_handle(kwargs) + ] + if fitting_strategies: + return fitting_strategies[0] + else: + return None + + def register(self, strategy): + self._strategies.append(strategy) + + +class MergeStrategy: + @staticmethod + def _check_types(item, value): + match item: + case list(): return any(t in value for t in item) + case str(): return item in value + return False + + @staticmethod + def _check_path(item, value): + item = ContextPath.parse(item) + value = ContextPath.parse(value) + if item == value or item in value: + return True + return False + + checks = { + 'type': _check_types, + 'path': _check_path, + } + + def __init__(self, **filter): + self._filter = filter + + def _check(self, key, filter, value): + if key in filter: + check = self.checks.get(key, lambda item, value: item in value) + return check(filter[key], value) + return True + + def can_handle(self, filter: dict): + return all( + self._check(key, filter, value) + for key, value in self._filter.items() + ) + + def are_equal(self, left,
right): + return left == right + + +class CollectionMergeStrategy(MergeStrategy): + def __init__(self, **filter): + super().__init__(**filter) + + def are_equal(self, left, right): + return all( + any(a == b for b in right) + for a in left + ) + + def __call__(self, target, path, value, **kwargs): + match target, path._item: + case list(), int() as index if index < len(target): + match target[index]: + case dict() as item: item.update(value) + case list() as item: item[:] = value + case _: target[index] = value + + case list(), '*': + path._item = len(target) + target.append(value) + + case list(), int() as index if index == len(target): + target.append(value) + + case list(), int() as index: + raise IndexError(f'Index {index} out of bounds to set in {path.parent}.') + case list(), _ as index: + raise TypeError(f'Invalid index type {type(index)} to set in {path.parent}.') + + case dict(), str() as key if key in target: + match target[key]: + case dict() as item: item.update(value) + case list() as item: item[:] = value + case _: target[key] = value + + case dict(), str() as key: + target[key] = value + + case dict(), _ as key: + raise TypeError(f'Invalid key type {type(key)} to set in {path.parent}.') + + case _, _: + raise TypeError(f'Cannot handle target type {type(target)} to set {path}.') + + return value + + +class ObjectMergeStrategy(MergeStrategy): + def __init__(self, *id_keys, **filter): + super().__init__(**filter) + self.id_keys = id_keys or ('@id', ) + + def are_equal(self, left, right): + if not self.id_keys: + return super().are_equal(left, right) + else: + return any(left[key] == right[key] for key in self.id_keys if key in left and key in right) + + def __call__(self, target, path, value, **kwargs): + match target, path._item: + case dict(), str() as key if key in target: + match target[key]: + case dict() as item: item.update(value) + case list() as item: item[:] = value + case _: target[key] = value + + case dict(), str() as key: + target[key] = 
value + + case dict(), _ as key: + raise TypeError(f'Invalid key type {type(key)} to set in {path.parent}.') + + case list(), int() as index if index < len(target): + match target[index]: + case dict() as item: item.update(value) + case list() as item: item[:] = value + case _: target[index] = value + + case list(), '*': + path._item = len(target) + target.append(value) + + case list(), int() as index if index == len(target): + target.append(value) + + case list(), int() as index: + raise IndexError(f'Index {index} out of bounds to set in {path.parent}.') + case list(), _ as index: + raise TypeError(f'Invalid index type {type(index)} to set in {path.parent}.') + + case _, _: + raise TypeError(f'Cannot handle target type {type(target)} to set {path}.') + + return value + + +default_merge_strategies = [ + ObjectMergeStrategy( + '@id', 'email', 'name', + path='author[*]', + ), + + CollectionMergeStrategy( + type=['list'], + ), + + ObjectMergeStrategy( + type=['map'], + ) +] diff --git a/src/hermes/model/path.py b/src/hermes/model/path.py new file mode 100644 index 00000000..9474ad81 --- /dev/null +++ b/src/hermes/model/path.py @@ -0,0 +1,381 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + +import logging +import typing as t + +import pyparsing as pp + +from hermes.model import errors + +_log = logging.getLogger('hermes.model.path') + + +class ContextPathGrammar: + """ + The pyparsing grammar for ContextGrammar paths. + """ + + key = pp.Word('@' + pp.alphas) + index = pp.Word(pp.nums).set_parse_action(lambda tok: [int(tok[0])]) | pp.Char('*') + field = key + (pp.Suppress('[') + index + pp.Suppress(']'))[...] + path = field + (pp.Suppress('.') + field)[...] + + @classmethod + def parse(cls, text: str) -> pp.ParseResults: + """ + Parse a ContextPath string representation into its individual tokens. + + :param text: The path to parse. 
+ :return: The pyparsing.ParseResult. + """ + return cls.path.parse_string(text) + + +class ContextPath: + """ + This class is used to access the different contexts. + + On the one hand, the class allows you to define and manage paths. + You can simply build them up like follows: + + >>> path = ContextPath('spam')['eggs'][1]['ham'] + + will result in a `path` like `spam.eggs[1].ham`. + + hint :: + The paths are independent from any context. + You can create and even re-use them independently for different contexts. + + To construct wildcard paths, you can use the `'*'` as accessor. + + If you need a shortcut for building paths from a list of accessors, you can use :py:meth:`ContextPath.make`. + To parse the string representation, use :py:meth:`ContextPath.parse`. + """ + + merge_strategies = None + + def __init__(self, item: str | int | t.List[str | int], parent: t.Optional['ContextPath'] = None): + """ + Initialize a new path element. + + The path stores a reference to its parent. + This means that + + >>> path = ContextPath('foo', parent=ContextPath('bar')) + + will result in the path `bar.foo`. + + :param item: The accessor to the current path item. + :param parent: The path of the parent item. + """ + if isinstance(item, (list, tuple)) and item: + *head, self._item = item + if head: + self._parent = ContextPath(head, parent) + else: + self._parent = parent + else: + self._item = item + self._parent = parent + self._type = None + + @classmethod + def init_merge_strategies(cls): + # TODO refactor + if cls.merge_strategies is None: + from hermes.model.merge import MergeStrategies, default_merge_strategies + + cls.merge_strategies = MergeStrategies() + for strategy in default_merge_strategies: + cls.merge_strategies.register(strategy) + + @property + def parent(self) -> t.Optional['ContextPath']: + """ + Accessor to the parent node.
+ """ + return self._parent + + @property + def path(self) -> t.List['ContextPath']: + """ + Get the whole path from the root as list of items. + """ + if self._parent is None: + return [self] + else: + return self._parent.path + [self] + + def __getitem__(self, item: str | int) -> 'ContextPath': + """ + Create a sub-path for the given `item`. + """ + match item: + case str(): self._type = dict + case int(): self._type = list + return ContextPath(item, self) + + def __str__(self) -> str: + """ + Get the string representation of the path. + The result is parsable by :py:meth:`ContextPath.parse` + """ + item = str(self._item) + if self._parent is not None: + parent = str(self._parent) + match self._item: + case '*' | int(): item = parent + f'[{item}]' + case str(): item = parent + '.' + item + case _: raise ValueError(self.item) + return item + + def __repr__(self) -> str: + return f'ContextPath.parse("{str(self)}")' + + def __eq__(self, other: 'ContextPath') -> bool: + """ + This match includes semantics for wildcards. + Items that access `'*'` will automatically match everything (except for None). + """ + return ( + other is not None + and (self._item == other._item or self._item == '*' or other._item == '*') + and self._parent == other._parent + ) + + def __contains__(self, other: 'ContextPath') -> bool: + """ + Check whether `other` is a true child of this path. + """ + while other is not None: + if other == self: + return True + other = other.parent + return False + + def new(self) -> t.Any: + """ + Create a new instance of the container this node represents. + + For this to work, the node need to have at least on child node derive (e.g., by using ``self["child"]``). 
+ """ + if self._type is not None: + return self._type() + raise TypeError() + + @staticmethod + def _get_item(target: dict | list, path: 'ContextPath') -> t.Optional['ContextPath']: + match target, path._item: + case list(), '*': + raise IndexError(f'Cannot resolve any(*) from {path}.') + case list(), int() as index if index < len(target): + return target[index] + case list(), int() as index: + raise IndexError(f'Index {index} out of bounds for {path.parent}.') + case list(), _ as index: + raise TypeError(f'Invalid index type {type(index)} to access {path.parent}.') + + case dict(), str() as key if key in target: + return target[key] + case dict(), str() as key: + raise KeyError(f'Key {key} not in {path.parent}.') + case dict(), _ as key: + raise TypeError(f'Invalid key type {type(key)} to access {path.parent}.') + + case _, _: + raise TypeError(f'Cannot handle target type {type(target)} for {path}.') + + def _find_in_parent(self, target: dict, path: 'ContextPath') -> t.Any: + _item = path._item + _path = path.parent + while _path is not None: + try: + item = self._get_item(target, _path[_item]) + _log.debug("Using type %s from %s.", item, _path) + return item + + except (KeyError, IndexError, TypeError) as e: + _log.debug("%s: %s", _path, e) + _path = _path.parent + continue + + return None + + def _find_setter(self, target: dict | list, path: 'ContextPath', value: t.Any = None, **kwargs) -> t.Callable: + filter = { + 'name': path._item, + } + + if isinstance(path._item, str) or path._parent is not None: + filter['path'] = str(path) + + if type := self._find_in_parent(target, path['@type']): + filter['type'] = type + elif value is not None: + match value: + case list(): filter['type'] = 'list' + case dict(): filter['type'] = 'map' + elif path._type is list: + filter['type'] = 'list' + elif path._type is dict: + filter['type'] = 'map' + + if ep := kwargs.get('ep', None): + filter['ep'] = ep + + setter = self.merge_strategies.select(**filter) + if setter is None: + 
return self._set_item + else: + return setter + + def _set_item(self, target: dict | list, path: 'ContextPath', value: t.Any, **kwargs) -> t.Optional['ContextPath']: + match target, path._item: + case list(), int() as index if index < len(target): + match target[index]: + case dict() as item: item.update(value) + case list() as item: item[:] = value + case _: target[index] = value + + case dict(), str() as key if key in target: + match target[key]: + case dict() as item: item.update(value) + case list() as item: item[:] = value + case _: target[key] = value + + case dict(), str() as key: + target[key] = value + case list(), '*': + path._item = len(target) + target.append(value) + case list(), int() as index if index == len(target): + target.append(value) + + case dict(), _ as key: + raise TypeError(f'Invalid key type {type(key)} to set in {path.parent}.') + case list(), int() as index: + raise IndexError(f'Index {index} out of bounds to set in {path.parent}.') + case list(), _ as index: + raise TypeError(f'Invalid index type {type(index)} to set in {path.parent}.') + + case _, _: + raise TypeError(f'Cannot handle target type {type(target)} to set {path}.') + + return value + + def resolve(self, + target: list | dict, + create: bool = False, + query: t.Any = None) -> ('ContextPath', list | dict, 'ContextPath'): + """ + Resolve a given path relative to a given target. + + The method will incrementally try to resolve the entries in the `_target.path`. + It stops when the requested item was found or when the resolution could not be completed. + If you set `create` to true, the method tries to create the direct target that contains the selected node. + + :param target: Container to resolve node in. + :param create: Flags whether missing containers should be created. + :param query: + :return: The method returns a tuple with the following values: + - The path to the last item that could be resolved (e.g., the container of the requested element). 
+ - The container for the path from the first return value. + - The rest of the path that could not be resolved. + """ + head, *tail = self.path + next_target = target + while tail: + try: + new_target = self._get_item(next_target, head) + if not isinstance(new_target, (list, dict)) and head.parent: + next_target = self._get_item(next_target, head.parent) + tail = [head._item] + tail + break + else: + next_target = new_target + except (IndexError, KeyError, TypeError): + if create and self.parent is not None: + try: + new_target = head.new() + except TypeError: + pass + else: + setter = self._find_setter(target, head, new_target) + setter(next_target, head, new_target) + next_target = new_target + else: + break + head, *tail = tail + + if head._item == '*': + for i, item in enumerate(next_target): + _keys = [k for k in query.keys() if k in item] + if _keys and all(item[k] == query[k] for k in _keys): + head._item = i + break + else: + if create: + head._item = len(next_target) + + if not hasattr(head, 'set_item'): + head.set_item = self._find_setter(target, head) + tail = ContextPath.make([head._item] + tail) + return head, next_target, tail + + def get_from(self, target: dict | list) -> t.Any: + """ + Expand the path and return the referenced data from a concrete container. + + :param target: The list or dict that this path points into. + :return: The value stored at path. + """ + prefix, target, path = self.resolve(target) + return self._get_item(target, path) + + def update(self, target: t.Dict[str, t.Any] | t.List, value: t.Any, tags: t.Optional[dict] = None, **kwargs): + """ + Update the data stored at the path in a concrete container. + + How this method actually behaves heavily depends on the active MergeStrategy for the path. + + :param target: The dict inside which the value should be stored. + :param value: The value to store. + :param tags: Dictionary containing the tags for all stored values. + :param kwargs: The tag attibutes for the new value. 
+ """ + prefix, _target, tail = self.resolve(target, create=True) + try: + prefix.set_item(_target, tail, value, **kwargs) + if tags is not None and kwargs: + tags[str(self)] = kwargs + except (KeyError, IndexError, TypeError, ValueError): + raise errors.MergeError(self, _target, value, **kwargs) + + @classmethod + def make(cls, path: t.Iterable[str | int]) -> 'ContextPath': + """ + Convert a list of item accessors into a ContextPath. + + :param path: The items in the order of access. + :return: A ContextPath that reference the selected value. + """ + head, *tail = path + path = ContextPath(head) + for next in tail: + path = path[next] + return path + + @classmethod + def parse(cls, path: str) -> 'ContextPath': + """ + Parse a string representation of a ContextPath into a proper object. + + :param path: The path to parse. + :return: A new ContextPath that references the selected path. + """ + path = cls.make(ContextPathGrammar.parse(path)) + return path diff --git a/test/hermes_test/commands/harvest/test_cff.py b/test/hermes_test/commands/harvest/test_cff.py index 9ee7791d..3837fedd 100644 --- a/test/hermes_test/commands/harvest/test_cff.py +++ b/test/hermes_test/commands/harvest/test_cff.py @@ -6,7 +6,6 @@ # SPDX-FileContributor: Michael Meinel import pathlib -from collections import deque import json from ruamel.yaml import YAML @@ -51,23 +50,6 @@ def test_convert_cff_to_codemeta(valid_minimal_cff, codemeta): assert codemeta == actual_result -@pytest.mark.parametrize("path, path_str", [ - (deque(['str1', 0]), "'str1 1'"), - (deque(['str1', 0, 'str2', 1, 'str3', 2]), "'str1 1 -> str2 2 -> str3 3'"), -]) -def test_build_nodepath_str(path, path_str): - assert harvest._build_nodepath_str(path) == path_str - - -@pytest.mark.parametrize("path, path_str", [ - ('str1', "'str1 1'"), - (deque([0, 'str1', 1, 'str2', 2, 'str3']), "'str1 1 -> str2 2 -> str3 3'"), -]) -def test_build_nodepath_str_fail(path, path_str): - with pytest.raises(Exception): - assert 
harvest._build_nodepath_str(path) == path_str - - def test_get_single_cff(tmp_path): assert harvest._get_single_cff(tmp_path) is None single_cff = tmp_path / 'CITATION.cff' diff --git a/test/hermes_test/commands/harvest/test_codemeta.py b/test/hermes_test/commands/harvest/test_codemeta.py new file mode 100644 index 00000000..f1cedde8 --- /dev/null +++ b/test/hermes_test/commands/harvest/test_codemeta.py @@ -0,0 +1,151 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Stephan Druskat + +import pathlib +import json + +import pytest + +import hermes.commands.harvest.codemeta as harvest + + +CODEMETA_JSON = """\ +{ + "@context": [ + "https://raw.githubusercontent.com/codemeta/codemeta/2.0/codemeta.jsonld", + "https://raw.githubusercontent.com/schemaorg/schemaorg/main/data/releases/13.0/schemaorgcontext.jsonld", + "https://w3id.org/software-types", + "https://w3id.org/software-iodata" + ], + "@id": "https://github.com/hermes-hms/workflow.git", + "@type": "SoftwareSourceCode", + "applicationCategory": "Software Development", + "audience": { + "@id": "/audience/developers", + "@type": "Audience", + "audienceType": "Developers" + }, + "author": { + "@id": "/person/iam-person", + "@type": "Person", + "affiliation": { + "@id": "/org/iamorg", + "@type": "Organization", + "name": "iamorg" + }, + "email": "iam@mail.example", + "familyName": "Person", + "givenName": "Iam", + "position": 1, + "url": "https://iam.website" + }, + "codeRepository": "https://github.com/hermes-hms/workflow.git", + "contributor": { + "@id": "/person/iam-person", + "@type": "Person", + "affiliation": { + "@id": "/org/iamorg", + "@type": "Organization", + "name": "iamorg" + }, + "email": "iam@mail.example", + "familyName": "Person", + "givenName": "Iam", + "position": 1, + "url": "https://iam.website" + }, + "dateCreated": "2023-06-31T10:54:22Z+0200", + "dateModified": "2023-12-31T121:52:34Z+0200", + "description": "Test 
Codemeta harvesting", + "developmentStatus": "https://www.repostatus.org/#active", + "identifier": "workflow", + "issueTracker": "https://github.com/hermes-hmc/workflow/issues", + "keywords": [ + "metadata", + "scientific", + "codemeta", + "hermes", + "software metadata", + "software publication" + ], + "license": [ + "https://spdx.org/licenses/Apache-2.0" + ], + "maintainer": { + "@id": "/person/iam-person", + "@type": "Person", + "affiliation": { + "@id": "/org/iamorg", + "@type": "Organization", + "name": "iamorg" + }, + "email": "iam@mail.example", + "familyName": "Person", + "givenName": "Iam", + "position": 1, + "url": "https://iam.website" + }, + "name": "HERMES Workflow", + "operatingSystem": [ + "Linux", + "BSD", + "macOS" + ], + "readme": "https://github.com/hermes-hmc/workflow/blob/main/README.md", + "runtimePlatform": [ + "Python 3.10" + ], + "softwareRequirements": [ + { + "@id": "/dependency/click", + "@type": "SoftwareApplication", + "identifier": "click", + "name": "click", + "runtimePlatform": "Python 3" + } + ], + "targetProduct": { + "@id": "/commandlineapplication/hermes", + "@type": "CommandLineApplication", + "executableName": "hermes", + "name": "hermes", + "runtimePlatform": "Python 3" + }, + "url": [ + "https://software-metadata.pub", + "https://github.com/hermes-hmc/workflow.git" + ], + "version": "0" +} +""" + + +@pytest.fixture +def valid_codemeta(): + return json.loads(CODEMETA_JSON) + + +@pytest.fixture() +def valid_codemeta_path(tmp_path, valid_codemeta): + codemeta_path = tmp_path / 'codemeta.json' + with open(codemeta_path, 'w') as fo: + json.dump(valid_codemeta, fo) + return codemeta_path + + +def test_get_single_codemeta(tmp_path): + assert harvest._get_single_codemeta(tmp_path) is None + single_codemeta = tmp_path / 'codemeta.json' + single_codemeta.touch() + assert harvest._get_single_codemeta(tmp_path) == single_codemeta + + +def test_validate_fail(): + assert not harvest._validate(pathlib.Path("foobar")) + + +def 
test_validate_success(valid_codemeta_path): + assert harvest._validate(valid_codemeta_path) diff --git a/test/hermes_test/model/test_base_context.py b/test/hermes_test/model/test_base_context.py index 072fb116..bdf016b7 100644 --- a/test/hermes_test/model/test_base_context.py +++ b/test/hermes_test/model/test_base_context.py @@ -21,7 +21,7 @@ def test_context_hermes_dir_custom(): def test_context_get_cache_default(): ctx = HermesContext() - assert ctx.get_cache('spam', 'eggs') == Path('.') / '.hermes' / 'spam' / 'eggs' + assert ctx.get_cache('spam', 'eggs') == Path('.') / '.hermes' / 'spam' / 'eggs.json' def test_context_get_cache_cached(): @@ -34,5 +34,5 @@ def test_context_get_cache_create(tmpdir): ctx = HermesContext(tmpdir) subdir = Path(tmpdir) / '.hermes' / 'spam' - assert ctx.get_cache('spam', 'eggs', create=True) == subdir / 'eggs' + assert ctx.get_cache('spam', 'eggs', create=True) == subdir / 'eggs.json' assert subdir.exists() diff --git a/test/hermes_test/model/test_harvest_context.py b/test/hermes_test/model/test_harvest_context.py index 5151611f..92e351e2 100644 --- a/test/hermes_test/model/test_harvest_context.py +++ b/test/hermes_test/model/test_harvest_context.py @@ -4,6 +4,7 @@ # SPDX-FileContributor: Michael Meinel +from datetime import datetime from importlib.metadata import EntryPoint import pytest @@ -14,27 +15,45 @@ @pytest.fixture def harvest_ctx(request: pytest.FixtureRequest): ctx = HermesContext() - return HermesHarvestContext(ctx, EntryPoint(name=request.function, group='hermes.harvest', value='hermes_test:ctx')) + return HermesHarvestContext( + ctx, + EntryPoint(name=request.function.__name__, group='hermes.harvest', value='hermes_test:ctx') + ) def test_context_default(harvest_ctx): harvest_ctx.update('spam', 'eggs', test=True) - assert harvest_ctx._data['spam'] == [['eggs', {'test': True}]] + assert harvest_ctx._data['spam'] == [ + ['eggs', {'test': True, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 
'test_context_default'}] + ] def test_context_update_append(harvest_ctx): harvest_ctx.update('spam', 'noodles', index=0) harvest_ctx.update('spam', 'eggs', index=1) - assert harvest_ctx._data['spam'] == [['noodles', {'index': 0}], ['eggs', {'index': 1}]] + assert harvest_ctx._data['spam'] == [ + ['noodles', {'index': 0, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_update_append'}], + ['eggs', {'index': 1, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_update_append'}] + ] def test_context_update_replace(harvest_ctx): harvest_ctx.update('spam', 'noodles', test=True) harvest_ctx.update('spam', 'eggs', test=True) - assert harvest_ctx._data['spam'] == [['eggs', {'test': True}]] + assert harvest_ctx._data['spam'] == [ + ['eggs', {'test': True, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_update_replace'}] + ] def test_context_bulk_flat(harvest_ctx): @@ -43,8 +62,16 @@ def test_context_bulk_flat(harvest_ctx): 'spam': 'eggs' }, test=True) - assert harvest_ctx._data['ans'] == [[42, {'test': True}]] - assert harvest_ctx._data['spam'] == [['eggs', {'test': True}]] + assert harvest_ctx._data['ans'] == [ + [42, {'test': True, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_flat'}] + ] + assert harvest_ctx._data['spam'] == [ + ['eggs', {'test': True, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_flat'}] + ] def test_context_bulk_complex(harvest_ctx): @@ -56,23 +83,53 @@ def test_context_bulk_complex(harvest_ctx): ] }, test=True) - assert harvest_ctx._data['ans'] == [[42, {'test': True}]] - assert harvest_ctx._data['author[0].name'] == [['Monty Python', {'test': True}]] - assert harvest_ctx._data['author[0].email'] == [['eggs@spam.io', {'test': True}]] - assert harvest_ctx._data['author[1].name'] == [['Herr Mes', {'test': True}]] + assert 
harvest_ctx._data['ans'] == [ + [42, {'test': True, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_complex'}] + ] + assert harvest_ctx._data['author[0].name'] == [ + ['Monty Python', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_complex'}] + ] + assert harvest_ctx._data['author[0].email'] == [ + ['eggs@spam.io', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_complex'}] + ] + assert harvest_ctx._data['author[1].name'] == [ + ['Herr Mes', {'test': True, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_complex'}] + ] def test_context_bulk_replace(harvest_ctx): harvest_ctx.update('author[0].name', 'Monty Python', test=True) harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, test=True) - assert harvest_ctx._data['author[0].name'] == [['Herr Mes', {'test': True}]] - assert harvest_ctx._data['author[0].email'] == [['eggs@spam.io', {'test': True}]] + assert harvest_ctx._data['author[0].name'] == [ + ['Herr Mes', {'test': True, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_replace'}] + ] + assert harvest_ctx._data['author[0].email'] == [ + ['eggs@spam.io', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_replace'}] + ] def test_context_bulk_append(harvest_ctx): harvest_ctx.update('author[0].name', 'Monty Python', index=0) harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, index=1) - assert harvest_ctx._data['author[0].name'] == [['Monty Python', {'index': 0}], ['Herr Mes', {'index': 1}]] - assert harvest_ctx._data['author[0].email'] == [['eggs@spam.io', {'index': 1}]] + assert harvest_ctx._data['author[0].name'] == [ + ['Monty Python', {'index': 0, 'timestamp': 
datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_append'}], + ['Herr Mes', {'index': 1, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_append'}] + ] + assert harvest_ctx._data['author[0].email'] == [ + ['eggs@spam.io', {'index': 1, 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_append'}] + ] diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index a083f860..14daf260 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -73,71 +73,71 @@ def test_workflow_invoke_with_cb(): cb_mock.assert_called_with(["spam", "eggs"]) -def test_haggis_full(): +def test_hermes_full(): runner = CliRunner() - result = runner.invoke(cli.haggis) + result = runner.invoke(cli.main) assert not result.exception -def test_haggis_harvest(): +def test_hermes_harvest(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('harvest', )) + result = runner.invoke(cli.main, args=('harvest', )) assert not result.exception -def test_haggis_process(): +def test_hermes_process(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('process', )) + result = runner.invoke(cli.main, args=('process', )) assert not result.exception -def test_haggis_with_deposit(): +def test_hermes_with_deposit(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--deposit', )) + result = runner.invoke(cli.main, args=('--deposit', )) assert not result.exception -def test_haggis_with_post(): +def test_haggis_with_postprocess(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--post', )) + result = runner.invoke(cli.main, args=('--postprocess', )) assert not result.exception -def test_haggis_with_path(): +def test_hermes_with_path(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--path', './')) + result = runner.invoke(cli.main, args=('--path', './')) assert not result.exception -def 
test_haggis_with_deposit_and_post(): +def test_haggis_with_deposit_and_postprocess(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--deposit', '--post')) + result = runner.invoke(cli.main, args=('--deposit', '--postprocess')) assert not result.exception -def test_haggis_with_deposit_and_path(): +def test_hermes_with_deposit_and_path(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--deposit', '--path', './')) + result = runner.invoke(cli.main, args=('--deposit', '--path', './')) assert not result.exception -def test_haggis_with_path_and_post(): +def test_haggis_with_path_and_postprocess(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--path', './', '--post')) + result = runner.invoke(cli.main, args=('--path', './', '--postprocess')) assert not result.exception -def test_haggis_with_deposit_and_post_and_path(): +def test_haggis_with_deposit_and_postprocess_and_path(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--deposit', '--post', '--path', './')) + result = runner.invoke(cli.main, args=('--deposit', '--postprocess', '--path', './')) assert not result.exception