From 77055666dccd17831ea096cf69b5811126854ff9 Mon Sep 17 00:00:00 2001
From: Jeffrey Kelling
Date: Thu, 1 Sep 2022 10:57:42 +0200
Subject: [PATCH 01/52] Add simple git author harvester

---
 pyproject.toml                     |  1 +
 src/hermes/commands/harvest/git.py | 84 ++++++++++++++++++++++++++++++
 2 files changed, 85 insertions(+)
 create mode 100644 src/hermes/commands/harvest/git.py

diff --git a/pyproject.toml b/pyproject.toml
index 9c98e618..a940bec6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,6 +25,7 @@ haggis = "hermes.cli:haggis"
 
 [tool.poetry.plugins."hermes.harvest"]
 cff = "hermes.commands.harvest.cff:harvest_cff"
+git = "hermes.commands.harvest.git:harvest_git"
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]
diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py
new file mode 100644
index 00000000..feefa062
--- /dev/null
+++ b/src/hermes/commands/harvest/git.py
@@ -0,0 +1,84 @@
+import glob
+import os
+import json
+import pathlib
+import urllib.request
+import typing as t
+
+import jsonschema
+import click
+import subprocess
+import shutil
+
+from hermes.model.context import HermesHarvestContext
+from hermes.model.errors import HermesValidationError
+
+# TODO: can and should we get this somehow?
+SHELL_ENCODING = 'utf-8'
+
+class AuthorData:
+    def __init__(self, line):
+        self.name = line[0]
+        self.email = set((line[1],))
+        self.tFirst = line[2]
+        self.tLast = self.tFirst
+
+    def update(self, line: t.List):
+        assert(self.name == line[0])
+
+        self.email.add(line[1])
+        t = line[2]
+        if t < self.tFirst:
+            self.tFirst = t
+        elif t > self.tFirst:
+            self.tLast = t
+
+def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext):
+    """
+    Implementation of a harvester that provides author data from Git.
+
+    :param click_ctx: Click context that this command was run inside (might be used to extract command line arguments).
+    :param ctx: The harvesting context that should contain the provided metadata.
+    """
+    # Get the parent context (every subcommand has its own context with the main click context as parent)
+    parent_ctx = click_ctx.parent
+    if parent_ctx is None:
+        raise RuntimeError('No parent context!')
+    path = parent_ctx.params['path']
+
+    gitExe = shutil.which('git')
+    if not gitExe:
+        raise RuntimeError('Git not available!')
+
+    p = subprocess.run([gitExe, "rev-parse", "--abbrev-ref", "HEAD"], capture_output=True)
+    if p.returncode:
+        raise RuntimeError("`git branch` command failed with code {}: '{}'!".format(p.returncode, p.stderr.decode(SHELL_ENCODING)))
+    gitBranch = p.stdout.decode(SHELL_ENCODING).strip()
+    # TODO: should we warn or error if the HEAD is detached?
+
+    # Get history of currently checked-out branch
+    authors = {}
+    p = subprocess.run([gitExe, "log", "--pretty=%an_%ae_%ad", "--date=unix"], capture_output=True)
+    if p.returncode:
+        raise RuntimeError("`git log` command failed with code {}: '{}'!".format(p.returncode, p.stderr.decode(SHELL_ENCODING)))
+
+    log = p.stdout.decode(SHELL_ENCODING).split('\n')
+    for l in log:
+        d = l.split('_')
+        if len(d) != 3:
+            continue
+        try:
+            d[2] = int(d[2])
+        except ValueError:
+            continue
+
+        if d[0] in authors:
+            authors[d[0]].update(d)
+        else:
+            authors[d[0]] = AuthorData(d)
+
+    for a in authors.values():
+        ctx.update("author.since", a.tFirst, name=a.name, branch=gitBranch)
+        ctx.update("author.until", a.tLast, name=a.name, branch=gitBranch)
+        for e in a.email:
+            ctx.update("author.email", e, name=a.name, branch=gitBranch, email=e)

From acfdf826c27d6d550ee4bb8f92dcf8ffd489afd0 Mon Sep 17 00:00:00 2001
From: jkelling
Date: Thu, 1 Sep 2022 13:08:33 +0200
Subject: [PATCH 02/52] harvest/git.py: AuthorData: Add arg type constraint

Co-authored-by: Stephan Druskat
---
 src/hermes/commands/harvest/git.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py
index feefa062..e34a1e85 100644
--- a/src/hermes/commands/harvest/git.py
+++ b/src/hermes/commands/harvest/git.py
@@ -17,7 +17,7 @@ SHELL_ENCODING = 'utf-8'
 
 class AuthorData:
-    def __init__(self, line):
+    def __init__(self, line: t.List):
         self.name = line[0]
         self.email = set((line[1],))
         self.tFirst = line[2]

From 8947b81f3621945b05fbb6ab0eeedae60e52ca8c Mon Sep 17 00:00:00 2001
From: Jeffrey Kelling
Date: Thu, 1 Sep 2022 14:48:24 +0200
Subject: [PATCH 03/52] git harvester: Add committer

---
 src/hermes/commands/harvest/git.py | 47 ++++++++++++++++++------------
 1 file changed, 28 insertions(+), 19 deletions(-)

diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py
index e34a1e85..b65b90d3 100644
--- a/src/hermes/commands/harvest/git.py
+++ b/src/hermes/commands/harvest/git.py
@@ -16,18 +16,18 @@
 # TODO: can and should we get this somehow?
 SHELL_ENCODING = 'utf-8'
 
-class AuthorData:
-    def __init__(self, line: t.List):
-        self.name = line[0]
-        self.email = set((line[1],))
-        self.tFirst = line[2]
+class ConributorData:
+    def __init__(self, d: t.List):
+        self.name = d[0]
+        self.email = set((d[1],))
+        self.tFirst = d[2]
         self.tLast = self.tFirst
 
-    def update(self, line: t.List):
-        assert(self.name == line[0])
+    def update(self, d: t.List):
+        assert(self.name == d[0])
 
-        self.email.add(line[1])
-        t = line[2]
+        self.email.add(d[1])
+        t = d[2]
         if t < self.tFirst:
             self.tFirst = t
         elif t > self.tFirst:
@@ -58,27 +58,36 @@ def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext):
 
     # Get history of currently checked-out branch
     authors = {}
-    p = subprocess.run([gitExe, "log", "--pretty=%an_%ae_%ad", "--date=unix"], capture_output=True)
+    committers = {}
+    p = subprocess.run([gitExe, "log", "--pretty=%an_%ae_%at_%cn_%ce_%ct"], capture_output=True)
     if p.returncode:
         raise RuntimeError("`git log` command failed with code {}: '{}'!".format(p.returncode, p.stderr.decode(SHELL_ENCODING)))
 
     log = p.stdout.decode(SHELL_ENCODING).split('\n')
     for l in log:
         d = l.split('_')
-        if len(d) != 3:
+        if len(d) != 6:
             continue
         try:
             d[2] = int(d[2])
         except ValueError:
             continue
 
-        if d[0] in authors:
-            authors[d[0]].update(d)
-        else:
-            authors[d[0]] = AuthorData(d)
+        _updateContributor(authors, d[0:3])
+        _updateContributor(committers, d[3:7])
 
-    for a in authors.values():
-        ctx.update("author.since", a.tFirst, name=a.name, branch=gitBranch)
-        ctx.update("author.until", a.tLast, name=a.name, branch=gitBranch)
+    _ctxUpdateContributors(ctx, authors, "author", branch=gitBranch)
+    _ctxUpdateContributors(ctx, committers, "committer", branch=gitBranch)
+
+def _updateContributor(contributors: t.Dict, d: t.List):
+    if d[0] in contributors:
+        contributors[d[0]].update(d[0:3])
+    else:
+        contributors[d[0]] = ConributorData(d[0:3])
+
+def _ctxUpdateContributors(ctx: HermesHarvestContext, contributors: t.Dict, kind: str, **kwargs):
+    for a in contributors.values():
+        ctx.update(f"{kind}.since", a.tFirst, name=a.name, **kwargs)
+        ctx.update(f"{kind}.until", a.tLast, name=a.name, **kwargs)
         for e in a.email:
-            ctx.update("author.email", e, name=a.name, branch=gitBranch, email=e)
+            ctx.update(f"{kind}.email", e, name=a.name, email=e, **kwargs)

From b5a20c3d9b41075032bf8bde422ebd271053aa9e Mon Sep 17 00:00:00 2001
From: jkelling
Date: Thu, 1 Sep 2022 16:08:54 +0200
Subject: [PATCH 04/52] Apply suggestions from code review

PEP-ified

Co-authored-by: Michael Meinel
---
 src/hermes/commands/harvest/git.py | 37 ++++++++++++++++--------------
 1 file changed, 20 insertions(+), 17 deletions(-)

diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py
index b65b90d3..0a2b79b8 100644
--- a/src/hermes/commands/harvest/git.py
+++ b/src/hermes/commands/harvest/git.py
@@ -16,22 +16,24 @@
 # TODO: can and should we get this somehow?
SHELL_ENCODING = 'utf-8' + class ConributorData: def __init__(self, d: t.List): self.name = d[0] self.email = set((d[1],)) - self.tFirst = d[2] - self.tLast = self.tFirst + self.t_first= d[2] + self.t_last= self.t_first def update(self, d: t.List): assert(self.name == d[0]) self.email.add(d[1]) t = d[2] - if t < self.tFirst: - self.tFirst = t - elif t > self.tFirst: - self.tLast = t + if t < self.t_first: + self.t_first = t + elif t > self.t_first: + self.t_last = t + def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): """ @@ -46,20 +48,20 @@ def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): raise RuntimeError('No parent context!') path = parent_ctx.params['path'] - gitExe = shutil.which('git') - if not gitExe: + git_exe = shutil.which('git') + if not git_exe: raise RuntimeError('Git not available!') - p = subprocess.run([gitExe, "rev-parse", "--abbrev-ref", "HEAD"], capture_output=True) + p = subprocess.run([git_exe, "rev-parse", "--abbrev-ref", "HEAD"], capture_output=True) if p.returncode: raise RuntimeError("`git branch` command failed with code {}: '{}'!".format(p.returncode, p.stderr.decode(SHELL_ENCODING))) - gitBranch = p.stdout.decode(SHELL_ENCODING).strip() + git_branch = p.stdout.decode(SHELL_ENCODING).strip() # TODO: should we warn or error if the HEAD is detached? # Get history of currently checked-out branch authors = {} committers = {} - p = subprocess.run([gitExe, "log", "--pretty=%an_%ae_%at_%cn_%ce_%ct"], capture_output=True) + p = subprocess.run([git_exe, "log", "--pretty=%an_%ae_%at_%cn_%ce_%ct"], capture_output=True) if p.returncode: raise RuntimeError("`git log` command failed with code {}: '{}'!".format(p.returncode, p.stderr.decode(SHELL_ENCODING))) @@ -76,18 +78,19 @@ def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): _updateContributor(authors, d[0:3]) _updateContributor(committers, d[3:7]) - _ctxUpdateContributors(ctx, authors, "author", branch=gitBranch) - _ctxUpdateContributors(ctx, committers, "committer", branch=gitBranch) + _ctx_update_contributors(ctx, authors, "author", branch=git_branch) + _ctx_update_contributors(ctx, committers, "committer", branch=git_branch) -def _updateContributor(contributors: t.Dict, d: t.List): +def _update_contributor(contributors: t.Dict, d: t.List): if d[0] in contributors: contributors[d[0]].update(d[0:3]) else: contributors[d[0]] = ConributorData(d[0:3]) -def _ctxUpdateContributors(ctx: HermesHarvestContext, contributors: t.Dict, kind: str, **kwargs): + +def _ctx_update_contributors(ctx: HermesHarvestContext, contributors: t.Dict, kind: str, **kwargs): for a in contributors.values(): - ctx.update(f"{kind}.since", a.tFirst, name=a.name, **kwargs) - ctx.update(f"{kind}.until", a.tLast, name=a.name, **kwargs) + ctx.update(f"{kind}.since", a.t_first, name=a.name, **kwargs) + ctx.update(f"{kind}.until", a.t_last, name=a.name, **kwargs) for e in a.email: ctx.update(f"{kind}.email", e, name=a.name, email=e, **kwargs) From 4a61db3a43a52cfe8c7674535d39adb744a24526 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 2 Sep 2022 15:09:18 +0200 Subject: [PATCH 05/52] Add entry point for (pre-)processing --- pyproject.toml | 3 +++ src/hermes/commands/process/__init__.py | 0 2 files changed, 3 insertions(+) create mode 100644 src/hermes/commands/process/__init__.py diff --git a/pyproject.toml b/pyproject.toml index 9c98e618..7c3d7fb6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,9 @@ haggis = "hermes.cli:haggis" [tool.poetry.plugins."hermes.harvest"] cff = 
"hermes.commands.harvest.cff:harvest_cff" +[tool.poetry.plugins."hermes.preprocess"] +_copy = "hermes.commands.process:copy_from_harvest" + [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" diff --git a/src/hermes/commands/process/__init__.py b/src/hermes/commands/process/__init__.py new file mode 100644 index 00000000..e69de29b From 8d518a9833c1abc24bcd26f8847e7776f8ff0a1b Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 2 Sep 2022 18:28:16 +0200 Subject: [PATCH 06/52] Add entrypoint for codemeta harvesting --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 9c98e618..bbec4f1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ haggis = "hermes.cli:haggis" [tool.poetry.plugins."hermes.harvest"] cff = "hermes.commands.harvest.cff:harvest_cff" +codemeta = "hermes.commands.harvest.codemeta:harvest_codemeta" [build-system] requires = ["poetry-core>=1.0.0"] From fe86e181f8a046636633e71d1806f74ba185eb12 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 2 Sep 2022 18:28:43 +0200 Subject: [PATCH 07/52] Add the most basic CodeMeta harvester --- src/hermes/commands/harvest/codemeta.py | 57 +++++++++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 src/hermes/commands/harvest/codemeta.py diff --git a/src/hermes/commands/harvest/codemeta.py b/src/hermes/commands/harvest/codemeta.py new file mode 100644 index 00000000..fa268aa6 --- /dev/null +++ b/src/hermes/commands/harvest/codemeta.py @@ -0,0 +1,57 @@ +import glob +import json +import pathlib +import typing as t + +import click + +from hermes.model.context import HermesHarvestContext +from hermes.model.errors import HermesValidationError + + +def harvest_codemeta(click_ctx: click.Context, ctx: HermesHarvestContext): + """ + Implementation of a harvester that provides data from a codemeta.json file format. + + :param click_ctx: Click context that this command was run inside (might be used to extract command line arguments). + :param ctx: The harvesting context that should contain the provided metadata. + """ + # Get the parent context (every subcommand has its own context with the main click context as parent) + parent_ctx = click_ctx.parent + if parent_ctx is None: + raise RuntimeError('No parent context!') + path = parent_ctx.params['path'] + + # Get source files + codemeta_file = _get_single_codemeta(path) + if not codemeta_file: + raise HermesValidationError(f'{path} contains either no or more than 1 codemeta.json file. Aborting harvesting ' + f'for this metadata source.') + + # Read the content + codemeta_str = codemeta_file.read_text() + + if not _validate(codemeta_file): + raise HermesValidationError(codemeta_file) + + codemeta = json.loads(codemeta_str) + print(codemeta) + ctx.update_from(codemeta, local_path=str(codemeta_file)) + + +def _validate(codemeta_file: pathlib.Path) -> bool: + # TODO: Implement + return True + + +def _get_single_codemeta(path: pathlib.Path) -> t.Optional[pathlib.Path]: + # Find CodeMeta files in directories and subdirectories + # TODO: Do we really want to search recursive? Maybe add another option to enable pointing to a single file? + # (So this stays "convention over configuration") + files = glob.glob(str(path / '**' / 'codemeta.json'), recursive=True) + if len(files) == 1: + return pathlib.Path(files[0]) + # TODO: Shouldn't we log/echo the found CFF files so a user can debug/cleanup? 
+ # TODO: Do we want to hand down a logging instance via Hermes context or just encourage + # peeps to use the Click context? + return None From 62da3b2d77844396ca34f73c117ced87b13a1683 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 2 Sep 2022 18:32:07 +0200 Subject: [PATCH 08/52] Remove print statement --- src/hermes/commands/harvest/codemeta.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/hermes/commands/harvest/codemeta.py b/src/hermes/commands/harvest/codemeta.py index fa268aa6..705d75c5 100644 --- a/src/hermes/commands/harvest/codemeta.py +++ b/src/hermes/commands/harvest/codemeta.py @@ -35,7 +35,6 @@ def harvest_codemeta(click_ctx: click.Context, ctx: HermesHarvestContext): raise HermesValidationError(codemeta_file) codemeta = json.loads(codemeta_str) - print(codemeta) ctx.update_from(codemeta, local_path=str(codemeta_file)) From 8bdc3c8567722cfc72573e9bae5fcaf4bf61e117 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 2 Sep 2022 18:51:14 +0200 Subject: [PATCH 09/52] Add tests and failing test for validation --- src/hermes/commands/harvest/codemeta.py | 2 +- .../commands/harvest/test_codemeta.py | 143 ++++++++++++++++++ 2 files changed, 144 insertions(+), 1 deletion(-) create mode 100644 test/hermes_test/commands/harvest/test_codemeta.py diff --git a/src/hermes/commands/harvest/codemeta.py b/src/hermes/commands/harvest/codemeta.py index 705d75c5..ebe49aed 100644 --- a/src/hermes/commands/harvest/codemeta.py +++ b/src/hermes/commands/harvest/codemeta.py @@ -40,7 +40,7 @@ def harvest_codemeta(click_ctx: click.Context, ctx: HermesHarvestContext): def _validate(codemeta_file: pathlib.Path) -> bool: # TODO: Implement - return True + return codemeta_file.exists() def _get_single_codemeta(path: pathlib.Path) -> t.Optional[pathlib.Path]: diff --git a/test/hermes_test/commands/harvest/test_codemeta.py b/test/hermes_test/commands/harvest/test_codemeta.py new file mode 100644 index 00000000..a0f72d4f --- /dev/null +++ b/test/hermes_test/commands/harvest/test_codemeta.py @@ -0,0 +1,143 @@ +import pathlib +from collections import deque +import json +from ruamel.yaml import YAML + +import pytest + +import hermes.commands.harvest.codemeta as harvest + + +CODEMETA_JSON = """\ +{ + "@context": [ + "https://raw.githubusercontent.com/codemeta/codemeta/2.0/codemeta.jsonld", + "https://raw.githubusercontent.com/schemaorg/schemaorg/main/data/releases/13.0/schemaorgcontext.jsonld", + "https://w3id.org/software-types", + "https://w3id.org/software-iodata" + ], + "@id": "https://github.com/hermes-hms/workflow.git", + "@type": "SoftwareSourceCode", + "applicationCategory": "Software Development", + "audience": { + "@id": "/audience/developers", + "@type": "Audience", + "audienceType": "Developers" + }, + "author": { + "@id": "/person/iam-person", + "@type": "Person", + "affiliation": { + "@id": "/org/iamorg", + "@type": "Organization", + "name": "iamorg" + }, + "email": "iam@mail.example", + "familyName": "Person", + "givenName": "Iam", + "position": 1, + "url": "https://iam.website" + }, + "codeRepository": "https://github.com/hermes-hms/workflow.git", + "contributor": { + "@id": "/person/iam-person", + "@type": "Person", + "affiliation": { + "@id": "/org/iamorg", + "@type": "Organization", + "name": "iamorg" + }, + "email": "iam@mail.example", + "familyName": "Person", + "givenName": "Iam", + "position": 1, + "url": "https://iam.website" + }, + "dateCreated": "2023-06-31T10:54:22Z+0200", + "dateModified": "2023-12-31T121:52:34Z+0200", + "description": "Test Codemeta harvesting", + 
"developmentStatus": "https://www.repostatus.org/#active", + "identifier": "workflow", + "issueTracker": "https://github.com/hermes-hmc/workflow/issues", + "keywords": [ + "metadata", + "scientific", + "codemeta", + "hermes", + "software metadata", + "software publication" + ], + "license": [ + "https://spdx.org/licenses/Apache-2.0" + ], + "maintainer": { + "@id": "/person/iam-person", + "@type": "Person", + "affiliation": { + "@id": "/org/iamorg", + "@type": "Organization", + "name": "iamorg" + }, + "email": "iam@mail.example", + "familyName": "Person", + "givenName": "Iam", + "position": 1, + "url": "https://iam.website" + }, + "name": "HERMES Workflow", + "operatingSystem": [ + "Linux", + "BSD", + "macOS" + ], + "readme": "https://github.com/hermes-hmc/workflow/blob/main/README.md", + "runtimePlatform": [ + "Python 3.10" + ], + "softwareRequirements": [ + { + "@id": "/dependency/click", + "@type": "SoftwareApplication", + "identifier": "click", + "name": "click", + "runtimePlatform": "Python 3" + } + ], + "targetProduct": { + "@id": "/commandlineapplication/haggis", + "@type": "CommandLineApplication", + "executableName": "haggis", + "name": "haggis", + "runtimePlatform": "Python 3" + }, + "url": [ + "https://software-metadata.pub", + "https://github.com/hermes-hmc/workflow.git" + ], + "version": "0" +} +""" + + +@pytest.fixture +def codemeta(): + return json.loads(CODEMETA_JSON) + + +@pytest.fixture() +def valid_codemeta(tmp_path): + codemeta_json = json.loads(CODEMETA_JSON) + codemeta_file = tmp_path / 'codemeta.json' + json.dump(codemeta_json, codemeta_file) + return codemeta_file + + +def test_get_single_codemeta(tmp_path): + assert harvest._get_single_codemeta(tmp_path) is None + single_codemeta = tmp_path / 'codemeta.json' + single_codemeta.touch() + assert harvest._get_single_codemeta(tmp_path) == single_codemeta + + +def test_validate_success(codemeta): + assert harvest._validate(pathlib.Path("foobar")) From e475dfc80d1f2c667a3018505523ad393735cbba Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 2 Sep 2022 18:52:37 +0200 Subject: [PATCH 10/52] Clean up imports --- test/hermes_test/commands/harvest/test_codemeta.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/hermes_test/commands/harvest/test_codemeta.py b/test/hermes_test/commands/harvest/test_codemeta.py index a0f72d4f..b8559a80 100644 --- a/test/hermes_test/commands/harvest/test_codemeta.py +++ b/test/hermes_test/commands/harvest/test_codemeta.py @@ -1,7 +1,5 @@ import pathlib -from collections import deque import json -from ruamel.yaml import YAML import pytest From 05d53db97b19f3d4152e74ae1124d010fc7419ee Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Mon, 19 Sep 2022 15:09:29 +0200 Subject: [PATCH 11/52] After the weekend, save old stuff --- .mailmap | 10 + CITATION.cff | 25 +++ pyproject.toml | 66 +++++- src/hermes/cli.py | 31 +++ src/hermes/commands/harvest/cff.py | 49 +++-- src/hermes/commands/harvest/git.py | 250 +++++++++++++++++----- src/hermes/commands/process/cff.py | 10 + src/hermes/commands/process/git.py | 29 +++ src/hermes/commands/process/merge.py | 101 +++++++++ src/hermes/commands/workflow.py | 65 +++++- src/hermes/config.py | 49 +++++ src/hermes/model/context.py | 170 ++++++++++++++- src/hermes/model/errors.py | 9 + src/hermes/model/path.py | 300 +++++++++++++++++++++++++++ 14 files changed, 1081 insertions(+), 83 deletions(-) create mode 100644 .mailmap create mode 100644 CITATION.cff create mode 100644 src/hermes/commands/process/cff.py create mode 100644 
src/hermes/commands/process/git.py create mode 100644 src/hermes/commands/process/merge.py create mode 100644 src/hermes/config.py create mode 100644 src/hermes/model/path.py diff --git a/.mailmap b/.mailmap new file mode 100644 index 00000000..b242681c --- /dev/null +++ b/.mailmap @@ -0,0 +1,10 @@ +# Mapping of email addresses only. Format (one pair per line): +# + + + + +# Mapping of user names. Format (one pair per line): +# Real Name nickname +# Real Name Name, Real +Jeffrey Kelling jkelling diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 00000000..277723f6 --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,25 @@ +# This CITATION.cff file was generated with cffinit. +# Visit https://bit.ly/cffinit to generate yours today! + +cff-version: 1.2.0 +title: HERMES Aggregated Interface Script +message: >- + If you use this software, please cite it using the + metadata from this file. +type: software +authors: + - given-names: Michael + family-names: Meinel + email: michael.meinel@dlr.de + affiliation: German Aerospace Center (DLR) + orcid: 'https://orcid.org/0000-0001-6372-3853' + - given-names: Stephan + family-names: Druskat + email: stephan.druskat@dlr.de + affiliation: German Aerospace Center (DLR) + orcid: 'https://orcid.org/0000-0003-4925-7248' + - given-names: Jeffrey + family-names: Kelling + email: j.kelling@hzdr.de + affiliation: Helmholtz Zentrum Dresden-Rossendorf (HZDR) + orcid: 'https://orcid.org/0000-0003-1761-2591' diff --git a/pyproject.toml b/pyproject.toml index 2bfff1e0..0869317d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,9 +4,6 @@ version = "0.1.0" description = "Workflow to publish research software with rich metadata" authors = ["Stephan Druskat ", "Michael Meinel "] -[tool.poetry.extras] -cff = ['ruamel.yaml', 'cffconvert'] - [tool.poetry.dependencies] python = "^3.10" click = "^8.1" @@ -24,11 +21,68 @@ myst-parser = "^0.17.2" haggis = "hermes.cli:haggis" [tool.poetry.plugins."hermes.harvest"] -cff = "hermes.commands.harvest.cff:harvest_cff" -git = "hermes.commands.harvest.git:harvest_git" +000_cff = "hermes.commands.harvest.cff:harvest_cff" +010_git = "hermes.commands.harvest.git:harvest_git" [tool.poetry.plugins."hermes.preprocess"] -_copy = "hermes.commands.process:copy_from_harvest" +000_cff = "hermes.commands.process.cff:add_name" +010_git = "hermes.commands.process.git:flag_authors" + +[tool.hermes.harvest] +000_git.enabled = false + +[tool.hermes.logging.formatters.plain] +format = "%(message)s" + +[tool.hermes.logging.formatters.logfile] +format = "%(created)16f:%(name)20s:%(levelname)10s | %(message)s" + +[tool.hermes.logging.formatters.auditlog] +format = "%(asctime)s %(name)-20s %(message)s" + +[tool.hermes.logging] +version = 1 + +[tool.hermes.logging.handlers.terminal] +class = "logging.StreamHandler" +formatter = "plain" +level = "INFO" +stream = "ext://sys.stdout" + +[tool.hermes.logging.handlers.logfile] +class = "logging.FileHandler" +formatter = "logfile" +level = "DEBUG" +filename = "hermes.log" + +[tool.hermes.logging.handlers.auditfile] +class = "logging.FileHandler" +formatter = "plain" +level = "DEBUG" +filename = "hermes-audit.md" + +[tool.hermes.logging.handlers.hintfile] +class = "logging.FileHandler" +formatter = "plain" +level = "DEBUG" +filename = "quickfix.sh" + +[tool.hermes.logging.loggers.cli] +level = "DEBUG" +handlers = ["terminal"] + +[tool.hermes.logging.loggers.hermes] +level = "WARNING" +handlers = ["terminal", "logfile"] + +[tool.hermes.logging.loggers.audit] +level = "DEBUG" +handlers = ["terminal", 
"auditfile"] + +[tool.hermes.logging.loggers.audit.hint] +level = "DEBUG" +propagate = false +handlers = ["terminal", "hintfile"] [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/src/hermes/cli.py b/src/hermes/cli.py index 1c25a608..50a194f8 100644 --- a/src/hermes/cli.py +++ b/src/hermes/cli.py @@ -3,10 +3,37 @@ """ import typing as t import pathlib +from importlib import metadata import click +from hermes import config from hermes.commands import workflow +from hermes.config import configure, init_logging + + +def log_header(header, summary=None): + _log = config.getLogger('cli') + + dist = metadata.distribution('hermes') + meta = dist.metadata + + if header is None: + title = f"{dist.name} workflow ({dist.version})" + + _log.info(title) + _log.info("=" * len(title)) + _log.info('') + + if 'Summary' in meta: + _log.info('%s', meta['Summary']) + _log.info('') + + else: + _log.info("%s", header) + if summary: + _log.info("%s", summary) + _log.info('') class WorkflowCommand(click.Group): @@ -49,6 +76,10 @@ def invoke(self, ctx: click.Context) -> t.Any: :param ctx: Context for the command. """ + configure() + init_logging() + log_header(None) + if ctx.protected_args: return super().invoke(ctx) diff --git a/src/hermes/commands/harvest/cff.py b/src/hermes/commands/harvest/cff.py index 64ffaa1c..84604b94 100644 --- a/src/hermes/commands/harvest/cff.py +++ b/src/hermes/commands/harvest/cff.py @@ -1,5 +1,6 @@ import collections import glob +import logging import os import json import pathlib @@ -12,13 +13,16 @@ import click from cffconvert import Citation -from hermes.model.context import HermesHarvestContext +from hermes.model.context import HermesHarvestContext, ContextPath from hermes.model.errors import HermesValidationError # TODO: should this be configurable via a CLI option? _CFF_VERSION = '1.2.0' +_log = logging.getLogger('cli.harvest.cff') + + def harvest_cff(click_ctx: click.Context, ctx: HermesHarvestContext): """ Implementation of a harvester that provides data from CFF in Codemeta format. @@ -66,34 +70,51 @@ def _convert_cff_to_codemeta(cff_data: str) -> t.Any: def _validate(cff_file: pathlib.Path, cff_dict: t.Dict) -> bool: cff_schema_url = f'https://citation-file-format.github.io/{_CFF_VERSION}/schema.json' - # TODO: we should ship the schema we reference to by default to avoid unnecessary network traffic. - # If the requested version is not already downloaded, go ahead and download it. - with urllib.request.urlopen(cff_schema_url) as cff_schema_response: - schema_data = json.loads(cff_schema_response.read()) + with open('cff-schema@1.2.0.json', 'r') as cff_schema_file: + schema_data = json.load(cff_schema_file) + + if not schema_data: + # TODO: we should ship the schema we reference to by default to avoid unnecessary network traffic. + # If the requested version is not already downloaded, go ahead and download it. + with urllib.request.urlopen(cff_schema_url) as cff_schema_response: + schema_data = json.loads(cff_schema_response.read()) + + audit_log = logging.getLogger('audit.cff') validator = jsonschema.Draft7Validator(schema_data) errors = sorted(validator.iter_errors(cff_dict), key=lambda e: e.path) if len(errors) > 0: - click.echo(f'{cff_file} is not valid according to {cff_schema_url}!') + audit_log.warning('!! 
%s is not valid according to %s', cff_file, cff_schema_url) + for error in errors: - path_str = _build_nodepath_str(error.absolute_path) - click.echo(f' - Invalid input for path {path_str}.\n' - f' Value: {error.instance} -> {error.message}') - click.echo(f' See the Citation File Format schema guide for further details: ' - f'https://github.com/citation-file-format/citation-file-format/blob/{_CFF_VERSION}/schema' - f'-guide.md.') + path = ContextPath(error.absolute_path.popleft()) + for next in error.absolute_path: + path = path[next] + + audit_log.info('. Invalid input for %s.', str(path)) + audit_log.info(' %s', error.message) + audit_log.debug(' Value: %s', error.instance) + + audit_log.info('') + audit_log.info('# See the Citation File Format schema guide for further details:') + audit_log.info('# https://github.com/citation-file-format/citation-file-format/blob/{_CFF_VERSION}/schema-guide.md.') return False + elif len(errors) == 0: - click.echo(f'Found valid Citation File Format file at: {cff_file}') + audit_log.info('- Found valid Citation File Format file at: %s', cff_file) return True def _get_single_cff(path: pathlib.Path) -> t.Optional[pathlib.Path]: # Find CFF files in directories and subdirectories + cff_file = path / 'CITATION.cff' + if cff_file.exists(): + return cff_file + # TODO: Do we really want to search recursive? CFF convention is the file should be at the topmost dir, # which is given via the --path arg. Maybe add another option to enable pointing to a single file? # (So this stays "convention over configuration") - files = glob.glob(str(path / '**' / 'CITATION.cff'), recursive=True) + files = path.rglob('**/CITATION.cff') if len(files) == 1: return pathlib.Path(files[0]) # TODO: Shouldn't we log/echo the found CFF files so a user can debug/cleanup? diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py index 0a2b79b8..cb04ee1c 100644 --- a/src/hermes/commands/harvest/git.py +++ b/src/hermes/commands/harvest/git.py @@ -1,38 +1,188 @@ -import glob +import datetime +import logging import os -import json import pathlib -import urllib.request import typing as t -import jsonschema import click import subprocess import shutil -from hermes.model.context import HermesHarvestContext -from hermes.model.errors import HermesValidationError +from hermes.model.context import HermesHarvestContext, ContextPath -# TODO: can and should we get this somehow? -SHELL_ENCODING = 'utf-8' +_log = logging.getLogger('harvest.git') -class ConributorData: - def __init__(self, d: t.List): - self.name = d[0] - self.email = set((d[1],)) - self.t_first= d[2] - self.t_last= self.t_first - def update(self, d: t.List): - assert(self.name == d[0]) +# TODO: can and should we get this somehow? 
+SHELL_ENCODING = 'utf-8' - self.email.add(d[1]) - t = d[2] - if t < self.t_first: - self.t_first = t - elif t > self.t_first: - self.t_last = t +_GIT_SEP = '|' +#_GIT_FORMAT = ['%an', '%ae', '%aI'] +_GIT_FORMAT = ['%aN', '%aE', '%aI'] +#_GIT_ARGS = ['--reverse'] +_GIT_ARGS = [] + +class ContributorData: + def __init__(self, name: str | t.List[str], email: str | t.List[str], ts: str | t.List[str]): + self.name = [] + self.email = [] + self.ts = [] + + self.update(name=name, email=email, ts=ts) + + def __str__(self): + parts = [] + if self.name: parts.append(self.name[0]) + if self.email: parts.append(f'<{self.email[0]}>') + return f'"{" ".join(parts)}"' + + def _update_attr(self, target, value, unique=True): + match value: + case list(): + target.extend([v for v in value if not unique or v not in target]) + case str() if not unique or value not in target: + target.append(value) + + def update(self, name=None, email=None, ts=None): + self._update_attr(self.name, name) + self._update_attr(self.email, email) + self._update_attr(self.ts, ts, unique=False) + + def merge(self, other: 'ContributorData'): + self.name += [n for n in other.name if n not in self.name] + self.email += [e for e in other.email if e not in self.email] + self.ts += other.ts + + def to_codemeta(self): + res = { + '@type': ['Person', 'hermes:contributor'], + } + + if self.name: + res['name'] = self.name.pop() + if self.name: + res['alternateName'] = list(self.name) + + if self.email: + res['email'] = self.email.pop(0) + if self.email: + res['contactPoint'] = [{'@type': 'ContactPoint', 'email': email} for email in self.email] + + if self.ts: + ts_start, *_, ts_end = sorted(self.ts + [self.ts[0]]) + res['startTime'] = ts_start + res['endTime'] = ts_end + + return res + + @classmethod + def from_codemeta(cls, data): + name = [data['name']] + data.get('alternateName', []) + email = [data['email']] + [contact['email'] for contact in data.get('contactPoint', [])] + ts = [data['startTime'], data['endTime']] + return cls(name, email, ts) + + +class NodeRegister: + def __init__(self, cls, *order, **mapping): + self.cls = cls + self.order = order + self.mapping = mapping + self._all = [] + self._node_by = {key: {} for key in self.order} + + def add(self, node): + self._all.append(node) + + for key in self.order: + mapping = self.mapping.get(key, lambda x: x) + attr = getattr(node, key, None) + match attr: + case None: + continue + case list(): + for value in attr: + self._node_by[key][mapping(value)] = node + + def update(self, **kwargs): + missing = [] + tail = list(self.order) + while tail: + key, *tail = tail + if key not in kwargs: + continue + + arg = kwargs[key] + node = self._node_by[key].get(arg, None) + if node is None: + missing.append((key, arg)) + continue + + node.update(**kwargs) + break + else: + node = self.cls(**kwargs) + self._all.append(node) + + for key in tail: + if key not in kwargs: + continue + + arg = kwargs[key] + alt_node = self._node_by[key].get(arg, None) + if alt_node is None: + missing.append((key, arg)) + + elif alt_node != node: + node.merge(alt_node) + self._all.remove(alt_node) + self._node_by[key][arg] = node + + for key, arg in missing: + self._node_by[key][arg] = node + + +def _audit_authors(authors, audit_log: logging.Logger): + unmapped_authors = [] + for author in authors._all: + if len(author.email) > 1 or len(author.name) > 1: + unmapped_authors.append(author) + + if unmapped_authors: + audit_log.warning("You have unmapped authors in your Git history.") + for author in unmapped_authors: + if 
len(author.email) > 1: + audit_log.info(f"- %s has alternate email: %s", str(author), ', '.join(author.email[1:])) + if len(author.name) > 1: + audit_log.info(f"- %s has alternate names: %s", str(author), ', '.join(author.name[1:])) + + hint_log = audit_log.parent.getChild('hints') + hint_log.debug("# Write a '.maillog' to resolve Git ambiguities.") + hint_log.info("cat > .maillog << EOF") + + unmapped_email = [a for a in unmapped_authors if a.email[1:]] + if unmapped_email: + hint_log.debug('# Mapping of email addresses only. Format (one pair per line):') + hint_log.debug('# ') + + for author in unmapped_email: + for email in author.email[1:]: + hint_log.info("<%s> <%s>", str(author.email[0]), str(email)) + hint_log.debug('') + + unmapped_name = [a for a in unmapped_authors if a.name[1:]] + if unmapped_name: + hint_log.debug('# Mapping of user names. Format (one pair per line):') + hint_log.debug('# Real Name nickname') + hint_log.debug('# Real Name Name, Real') + + for author in [a for a in unmapped_authors if a.name[1:]]: + for name in author.name[1:]: + hint_log.info('%s <%s> %s', str(author.name[0]), str(author.email[0]), str(name)) + + hint_log.info("EOF") + hint_log.info('') def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): @@ -42,55 +192,55 @@ def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): :param click_ctx: Click context that this command was run inside (might be used to extract command line arguments). :param ctx: The harvesting context that should contain the provided metadata. """ + _log = logging.getLogger('cli.harvest.git') + # Get the parent context (every subcommand has its own context with the main click context as parent) parent_ctx = click_ctx.parent if parent_ctx is None: raise RuntimeError('No parent context!') - path = parent_ctx.params['path'] + + _log.debug(". Get history of currently checked-out branch") + + authors = NodeRegister(ContributorData, 'email', 'name', email=str.upper) +# for author_data in ctx.get_data().get('author', []): +# authors.add(ContributorData.from_codemeta(author_data)) git_exe = shutil.which('git') if not git_exe: raise RuntimeError('Git not available!') + path = parent_ctx.params['path'] + old_path = pathlib.Path.cwd() + if path != old_path: + os.chdir(path) + p = subprocess.run([git_exe, "rev-parse", "--abbrev-ref", "HEAD"], capture_output=True) if p.returncode: raise RuntimeError("`git branch` command failed with code {}: '{}'!".format(p.returncode, p.stderr.decode(SHELL_ENCODING))) git_branch = p.stdout.decode(SHELL_ENCODING).strip() # TODO: should we warn or error if the HEAD is detached? 
- # Get history of currently checked-out branch - authors = {} - committers = {} - p = subprocess.run([git_exe, "log", "--pretty=%an_%ae_%at_%cn_%ce_%ct"], capture_output=True) + p = subprocess.run([git_exe, "log", f"--pretty={_GIT_SEP.join(_GIT_FORMAT)}"] + _GIT_ARGS, capture_output=True) if p.returncode: raise RuntimeError("`git log` command failed with code {}: '{}'!".format(p.returncode, p.stderr.decode(SHELL_ENCODING))) log = p.stdout.decode(SHELL_ENCODING).split('\n') for l in log: - d = l.split('_') - if len(d) != 6: - continue try: - d[2] = int(d[2]) + name, email, ts = l.split(_GIT_SEP) except ValueError: continue - _updateContributor(authors, d[0:3]) - _updateContributor(committers, d[3:7]) - - _ctx_update_contributors(ctx, authors, "author", branch=git_branch) - _ctx_update_contributors(ctx, committers, "committer", branch=git_branch) - -def _update_contributor(contributors: t.Dict, d: t.List): - if d[0] in contributors: - contributors[d[0]].update(d[0:3]) - else: - contributors[d[0]] = ConributorData(d[0:3]) - - -def _ctx_update_contributors(ctx: HermesHarvestContext, contributors: t.Dict, kind: str, **kwargs): - for a in contributors.values(): - ctx.update(f"{kind}.since", a.t_first, name=a.name, **kwargs) - ctx.update(f"{kind}.until", a.t_last, name=a.name, **kwargs) - for e in a.email: - ctx.update(f"{kind}.email", e, name=a.name, email=e, **kwargs) + authors.update(email=email, name=name, ts=ts) + + _audit_authors(authors, logging.getLogger('audit.git')) + + ctx.update_from({ + '@context': [ + "https://doi.org/10.5063/schema/codemeta-2.0", + {'hermes': 'https://software-metadata.pub/ns/hermes/'} + ], + + '@type': "SoftwareSourceCode", + 'author': [author.to_codemeta() for author in authors._all], + }, branch=git_branch) diff --git a/src/hermes/commands/process/cff.py b/src/hermes/commands/process/cff.py new file mode 100644 index 00000000..d1cfa0f0 --- /dev/null +++ b/src/hermes/commands/process/cff.py @@ -0,0 +1,10 @@ +from hermes.model.context import HermesHarvestContext, ContextPath, CodeMetaContext + + +def add_name(ctx: CodeMetaContext, harvest_ctx: HermesHarvestContext): + data = harvest_ctx.get_data() + author_path = ContextPath('author') + + for i, author in enumerate(data.get('author', [])): + if 'name' not in author: + harvest_ctx.update(str(author_path[i]["name"]), f"{author['givenName']} {author['familyName']}", stage='preprocess') diff --git a/src/hermes/commands/process/git.py b/src/hermes/commands/process/git.py new file mode 100644 index 00000000..aa2f314b --- /dev/null +++ b/src/hermes/commands/process/git.py @@ -0,0 +1,29 @@ +from hermes.model.context import CodeMetaContext, HermesHarvestContext, ContextPath + + +def flag_authors(ctx: CodeMetaContext, harverst_ctx: HermesHarvestContext): + data = harverst_ctx.get_data(tags=(tags := {})) + + contributors = [] + author_path = ContextPath('author') + + for i, contributor in enumerate(data.get('author', [])): + author_key = ctx.find_key(author_path, contributor) + contributor_key = author_path[i] + + contributor_tags = {} + for k, t in tags.items(): + if ContextPath.parse(k) in contributor_key: + subkey = k.lstrip(str(contributor_key) + '.') + contributor_tags[subkey] = t + + if not author_key: + contributor['projectRole'] = 'Others' + contributors.append((contributor, contributor_tags)) + else: + ctx.update(author_key, contributor, tags=contributor_tags) + + harverst_ctx.finish() + + for author, author_tags in contributors: + ctx.update(author_path['*'], author, tags=author_tags) diff --git 
a/src/hermes/commands/process/merge.py b/src/hermes/commands/process/merge.py new file mode 100644 index 00000000..699f79bf --- /dev/null +++ b/src/hermes/commands/process/merge.py @@ -0,0 +1,101 @@ +import logging +import typing as t + +from hermes.model.errors import MergeError +from hermes.model.path import ContextPath + + +class MergeBase: + default_keys = [] + default_cast = {} + + def __init__(self, keys=None): + self._keys = keys or self.default_keys + self._log = logging.getLogger('hermes.merge') + + def __call__(self, path, target, value): + updates = self.collect_updates(path, target, value) + updates = self.check_updates(updates) + + self._log.debug(". %s changes to %s", len(updates), str(path)) + return updates + + def collect_updates(self, path, target, value): + updates = [] + return updates + + def check_updates(self, updates): + checked_updates = [] + + for key, new_value, old_value in updates: + if key is None: + checked_updates.append((None, new_value, old_value)) + continue + + key = ContextPath.parse(key) + cast = self.default_cast.get(('key', key.item), lambda x: x) + if new_value is None or cast(old_value) == cast(new_value): + continue + elif old_value is None: + checked_updates.append((key, new_value, old_value)) + + return checked_updates + + def is_equal(self, left, right): + if left.parent == right.parent and left.item == right.item: + return True + + +class ObjectMerge(MergeBase): + default_keys = ['@id'] + default_cast = { + ('key', 'email'): str.upper, + } + + def __call__(self, path, target, value): + updates = super().__call__(path, target, value) + merged_keys = [] + + for subkey, new_value, old_value in updates: + if subkey.item not in target: + subkey.insert(target, new_value) + else: + subkey.update(target[subkey.item], new_value) + + return merged_keys + + def _active_keys(self, path, target, value): + match value: + case dict(): + value = [(path[_key], _value) for _key, _value in value.items()] + case list(): + value = [(path[i], _value) for i, _value in enumerate(value)] + case _: + value = [] + + return [ + (_key, _value, target if _key is None else target[_key]) + for _key, _value in value + if _key is None or _key in target + ] + + def collect_updates(self, path, target, value): + _target, _prefix, _trace = path.resolve(target) + updates = super().collect_updates(path, target, value) + updates += self._active_keys(value, _target, value) + return updates + + +class CollectionMerge(MergeBase): + default_keys = ['@id'] + + def __call__(self, path, target, value): + updates = super().__call__(path, target, value) + + for _key, _old, _new in updates: + self.default_cast[_key.item]() + if _old is not None and _old != _new: + raise MergeError(path, _old, _new) + + elif _new is not None and _old != _new: + pass diff --git a/src/hermes/commands/workflow.py b/src/hermes/commands/workflow.py index 89dfc5aa..eb309a4a 100644 --- a/src/hermes/commands/workflow.py +++ b/src/hermes/commands/workflow.py @@ -1,8 +1,23 @@ +import json +import logging from importlib import metadata import click -from hermes.model.context import HermesContext, HermesHarvestContext +from hermes import cli + +from hermes.model.context import HermesContext, HermesHarvestContext, CodeMetaContext +from hermes.model.errors import MergeError +from hermes.model.path import MergeRunner + +from hermes.commands.process.merge import CollectionMerge, ObjectMerge + + +MergeRunner.register('person', ObjectMerge(['@id', 'email', 'name']), type='Person') +MergeRunner.register('list', CollectionMerge(), 
type='list') +MergeRunner.register('map', ObjectMerge(), type='map') + + @click.group(invoke_without_command=True) @@ -11,7 +26,8 @@ def harvest(click_ctx: click.Context): """ Automatic harvest of metadata """ - click.echo("Metadata harvesting") + _log = logging.getLogger('cli.harvest') + cli.log_header("=== Metadata harvesting", None) # Create Hermes context (i.e., all collected metadata for all stages...) ctx = HermesContext() @@ -19,17 +35,58 @@ def harvest(click_ctx: click.Context): # Get all harvesters harvesters = metadata.entry_points(group='hermes.harvest') for harvester in harvesters: + _log.info("- Running harvester %s", harvester.name) + + _log.debug(". Loading harvester from %s", harvester.value) + harvest = harvester.load() + with HermesHarvestContext(ctx, harvester) as harvest_ctx: - harvest = harvester.load() harvest(click_ctx, harvest_ctx) + for _key, ((_value, _tag), *_trace) in harvest_ctx._data.items(): + if any(v != _value and t == _tag for v, t in _trace): + raise MergeError(_key, None, _value) + + _log.info('') + @click.group(invoke_without_command=True) def process(): """ Process metadata and prepare it for deposition """ - click.echo("Metadata processing") + _log = logging.getLogger('cli.process') + + cli.log_header("=== Metadata processing", None) + + ctx = CodeMetaContext() + + harvesters = metadata.entry_points(group='hermes.harvest') + for harvester in harvesters: + _log.info('- Merge data harvested by "%s"', harvester.name) + + harvest_context = HermesHarvestContext(ctx, harvester) + harvest_context.load_cache() + + processors = metadata.entry_points(group='hermes.preprocess', name=harvester.name) + for processor in processors: + _log.debug(". Loading context processor %s", processor.value) + process = processor.load() + + _log.debug(". 
Apply processor %s", processor.value) + process(ctx, harvest_context) + + ctx.merge_from(harvest_context) + _log.info('') + + tags_path = ctx.get_cache('process', 'tags', create=True) + with tags_path.open('w') as tags_file: + json.dump(ctx.tags, tags_file, indent=' ') + + with open('codemeta.json', 'w') as codemeta_file: + json.dump(ctx._data, codemeta_file, indent=' ') + + ctx.annotate() @click.group(invoke_without_command=True) diff --git a/src/hermes/config.py b/src/hermes/config.py new file mode 100644 index 00000000..c862499c --- /dev/null +++ b/src/hermes/config.py @@ -0,0 +1,49 @@ +import logging +import toml + + +_config = {} + + +def configure(): + if _config: + return + + # Load configuration if not present + with open('pyproject.toml', 'r') as config_file: + config_toml = toml.load(config_file) + hermes_config = config_toml['tool']['hermes'] + _config['hermes'] = hermes_config + _config['logging'] = hermes_config['logging'] + + +def get(name): + if name not in _config: + _config['hermes'][name] = {} + _config[name] = _config['hermes'][name] + + return _config.get(name) + + +_loggers = {} + + +def init_logging(): + if _loggers: + return + + # Inintialize logging system + import logging.config + + configure() + + logging.config.dictConfig(_config['logging']) + for log_name in _config['logging']['loggers']: + _loggers[log_name] = logging.getLogger(log_name) + + +def getLogger(log_name): + init_logging() + if log_name not in _loggers: + _loggers[log_name] = logging.getLogger(log_name) + return _loggers.get(log_name) diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py index 1805288d..3375aa09 100644 --- a/src/hermes/model/context.py +++ b/src/hermes/model/context.py @@ -1,3 +1,6 @@ +import datetime +import pathlib +import re import traceback import json import logging @@ -6,6 +9,7 @@ from pathlib import Path from importlib.metadata import EntryPoint +from hermes.model.path import ContextPath from hermes.model.errors import HermesValidationError @@ -57,7 +61,7 @@ def get_cache(self, *path: str, create: bool = False) -> Path: cache_dir = self.hermes_dir.joinpath(*subdir) if create: cache_dir.mkdir(parents=True, exist_ok=True) - data_file = cache_dir / name + data_file = cache_dir / (name + '.json') self._caches[path] = data_file return data_file @@ -128,7 +132,7 @@ def store_cache(self): data_file = self.get_cache('harvest', self._ep.name, create=True) self._log.debug("Writing cache to %s...", data_file) - json.dump(self._data, data_file.open('w')) + json.dump(self._data, data_file.open('w'), indent=' ') def __enter__(self): self.load_cache() @@ -141,6 +145,17 @@ def __exit__(self, exc_type, exc_val, exc_tb): self._base.error(self._ep, exc) return True + def get_data(self, data=None, tags=None): + data = data or {} + for key, ((value, tag), *tail) in self._data.items(): + key = ContextPath.parse(key) + if tags is not None: + tags[str(key)] = tag + + key.update(data, value) + + return data + def update(self, _key: str, _value: t.Any, **kwargs: t.Any): """ The updates are added to a list of values. @@ -163,28 +178,44 @@ def update(self, _key: str, _value: t.Any, **kwargs: t.Any): See :py:meth:`HermesContext.update` for more information. 
""" + base_key = ContextPath.parse(_key) + + ts = kwargs.pop('ts', datetime.datetime.now().isoformat()) + ep = kwargs.pop('ep', self._ep.name) + if _key not in self._data: self._data[_key] = [] for entry in self._data[_key]: - if entry[1] == kwargs: - self._log.debug("Update %s: %s -> %s (%s)", _key, entry[0], _value, entry[1]) + value, tag = entry + tag_ts = tag.pop('ts') + + if tag == kwargs: + self._log.debug("Update %s: %s -> %s (%s)", _key, str(value), _value, str(tag)) entry[0] = _value + tag['ts'] = ts + tag['ep'] = ep break + + tag['ts'] = tag_ts + tag['ep'] = ep + else: + kwargs['ts'] = ts + kwargs['ep'] = ep self._data[_key].append([_value, kwargs]) - def _update_key_from(self, _key: str, _value: t.Any, **kwargs): + def _update_key_from(self, _key: ContextPath, _value: t.Any, **kwargs): if isinstance(_value, dict): for key, value in _value.items(): - self._update_key_from(f'{_key}.{key}', value, **kwargs) + self._update_key_from(_key[key], value, **kwargs) elif isinstance(_value, (list, tuple)): for index, value in enumerate(_value): - self._update_key_from(f'{_key}[{index}]', value, **kwargs) + self._update_key_from(_key[index], value, **kwargs) else: - self.update(_key, _value, **kwargs) + self.update(str(_key), _value, **kwargs) def update_from(self, data: t.Dict[str, t.Any], **kwargs: t.Any): """ @@ -210,7 +241,7 @@ def update_from(self, data: t.Dict[str, t.Any], **kwargs: t.Any): """ for key, value in data.items(): - self._update_key_from(key, value, **kwargs) + self._update_key_from(ContextPath(key), value, **kwargs) def error(self, ep: EntryPoint, error: Exception): """ @@ -219,3 +250,124 @@ def error(self, ep: EntryPoint, error: Exception): ep = ep or self._ep self._base.error(ep, error) + + def finish(self): + """ + Calling this method will lead to further processors not handling the context anymore. + """ + self._data.clear() + + +class CodeMetaContext(HermesContext): + _PRIMARY_ATTR = { + 'author': ('@id', 'email', 'name'), + } + + def __init__(self, project_dir: pathlib.Path | None = None): + super().__init__(project_dir) + self.tags = {} + + def merge_from(self, other: HermesHarvestContext): + other.get_data(self._data, tags=self.tags) + + def update(self, _key: ContextPath, _value: t.Any, tags: t.Dict[str, t.Dict] | None = None): + if _key.item == '*': + _item_path = self.find_key(_key.parent, _value) + if _item_path: + target, context = _item_path.get_from(self._data, None) + _item_path.merge_from(_value) + _key.item = _item_path.item + else: + target, context = _key.parent.get_from(self._data, None) + _key.item = len(target) + + if tags: + values = {} + + for subkey in tags.keys(): + tag_key = ContextPath.parse(str(_key) + '.' 
+ subkey) + try: + tag_value, context = tag_key.get_from(self._data, None) + values[subkey] = tag_value + except KeyError: + pass + + _key.update_in(self._data, _value) + + if tags: + for subkey, tag in tags.items(): + tag_key = ContextPath.parse(f'{str(_key)}.{subkey}') + tag_value, context = tag_key.get_from(self._data, context) + if values.get(subkey) != tag_value: + self.tags[str(tag_key)] = tag + + def annotate(self): + + def _annotate_list(path, data, indent): + tag = self.tags.get(str(path)) + if tag: + _tag = {k: v for k, v in tag.items() if k not in ('ep', 'ts')} + print(indent + f'# {str(path)} harvested by {tag["ep"]} at {tag["ts"]} from {_tag}') + + print(indent + '[') + for i, item in enumerate(data): + item_path = path[i] + + match item: + case list() as list_data: + _annotate_list(item_path, list_data, indent + ' ') + + case dict() as dict_data: + _annotate_dict(item_path, dict_data, indent + ' ') + + case _ as data: + tag = self.tags.get(str(item_path)) + if tag: + _tag = {k: v for k, v in tag.items() if k not in ('ep', 'ts')} + print(indent + f'# {str(item_path)} harvested by {tag["ep"]} at {tag["ts"]} from {_tag}') + print(indent + ' ' + f'{str(data)}') + + print(indent + ']') + + def _annotate_dict(path, data, indent): + tag = self.tags.get(str(path)) + if tag: + _tag = {k: v for k, v in tag.items() if k not in ('ep', 'ts')} + print(indent + f'# {str(path)} harvested by {tag["ep"]} at {tag["ts"]} from {_tag}') + + print(indent + '{') + for k, v in data.items(): + if path is None: + item_path = ContextPath(k) + else: + item_path = path[k] + + match v: + case list(): + print(indent + ' ' + str(k) + ':') + _annotate_list(item_path, v, indent + ' ') + + case dict(): + print(indent + ' ' + str(k) + ':') + _annotate_dict(item_path, v, indent + ' ') + + case _: + tag = self.tags.get(str(item_path)) + if tag: + _tag = {k: v for k, v in tag.items() if k not in ('ep', 'ts')} + print(indent + f'# {str(item_path)} havested by {tag["ep"]} at {tag["ts"]} from {_tag}') + + print(indent + ' ' + str(k) + ': ' + str(v)) + + print(indent + '}') + + _annotate_dict(None, self._data, '') + + def find_key(self, item, other): + data, context = item.get_from(self._data, None) + + for i, node in enumerate(data): + match = [(k, node[k]) for k in self._PRIMARY_ATTR.get(str(item), ('@id',)) if k in node] + if any(other.get(k, None) == v for k, v in match): + return item[i] + return None diff --git a/src/hermes/model/errors.py b/src/hermes/model/errors.py index 5c9b3f8b..51d54665 100644 --- a/src/hermes/model/errors.py +++ b/src/hermes/model/errors.py @@ -13,3 +13,12 @@ class HermesValidationError(Exception): """ pass + + +class MergeError(Exception): + def __init__(self, path, old_Value, new_value, **kwargs): + self.path = path + self.old_value = old_Value + self.new_value = new_value + self.tag = kwargs + super().__init__(f'Error merging {self.path} (ambiguous values "{self.old_value}" and "{self.new_value}")') diff --git a/src/hermes/model/path.py b/src/hermes/model/path.py new file mode 100644 index 00000000..0c14e52f --- /dev/null +++ b/src/hermes/model/path.py @@ -0,0 +1,300 @@ +import typing as t + +from hermes import config +from hermes.model.errors import MergeError + + +_log = config.getLogger('hermes.model') + + +class MergeRunner: + _registry = {} + + def __init__(self, strategies): + self._strategies = strategies + _log.debug(". 
Loaded %d strategies", len(self._strategies)) + + def __call__(self, path, target, value, **kwargs): + merged_keys = [] + + for merge in self._strategies: + try: + print(path, target, value) + _log.info(". Trying merge using %s", merge) + result = merge(path, target, value, **kwargs) + + except MergeError as e: + _log.warning("! %s failed:", merge) + _log.info("> %s", e) + continue + + else: + merged_keys.extend(result) + break + + else: + return False + + return merged_keys + + def compare(self, path, other): + if other is not None and path.item == other.item and path.parent == other.parent: + return True + return False + + @classmethod + def _filter_matches(cls, filter, kwargs): + for key, value in filter.items(): + print(key, value, kwargs) + if key not in kwargs or kwargs[key] in value: + return True + + return False + + @classmethod + def register(cls, name, merge, **kwargs): + cls._registry[name or str(merge)] = (kwargs, merge) + + @classmethod + def query(cls, **kwargs): + strategies = [] + for filter, strategy in cls._registry.values(): + if cls._filter_matches(filter, kwargs): + strategies.append(strategy) + + return cls(strategies) + + +class ContextPath: + def __init__(self, item: str | int, parent: t.Optional['ContextPath'] = None): + self._item = item + self._parent = parent + self._type = None + + @property + def parent(self) -> t.Optional['ContextPath']: + return self._parent + + @property + def item(self) -> t.Optional[str | int]: + return self._item + + @property + def is_container(self): + return self._type in (list, dict) + + def __getitem__(self, item: str | int) -> 'ContextPath': + match item: + case str(): self._type = dict + case int(): self._type = list + + return ContextPath(item, self) + + def __str__(self) -> str: + item = str(self._item) + + if self._parent is not None: + parent = str(self._parent) + + match self._item: + case '*' | int(): item = parent + f'[{item}]' + case str(): item = parent + '.' + item + case _: raise ValueError(self.item) + + return item + + def __repr__(self) -> str: + return f'ContextPath.parse("{str(self)}")' + + def __eq__(self, other: 'ContextPath') -> bool: + if (other is None) or (self.parent != other.parent) \ + or (self.item != '*' and other.item != '*' and self.item != other.item): + return False + + return True + + def __contains__(self, other: 'ContextPath') -> bool: + while other is not None: + if other == self: + return True + other = other.parent + return False + + def _get_trace(self): + if self.parent: + return self.parent._get_trace() + [self._item] + else: + return [self._item] + + def new(self): + return self._type() + + def _select_from(self, _target, _head, *_trace): + _prefix = self[_head] + + match _target, _head: + case list(), int() if len(_target) > _head: + if _trace: + _target, _prefix, _trace = _prefix._select_from(_target[_head], *_trace) + else: + _target = _target[_head] + + case dict(), str() if _head in _target: + if _trace: + _target, _prefix, _trace = _prefix._select_from(_target[_head], *_trace) + else: + _target = _target[_head] + + case (list(), '*' | int()) | (dict(), str()): + pass + + case _, _: + raise KeyError(_target, _head) + + return _target, _prefix, _trace + + def _set_in_target(self, _target, value): + match _target: + case list(): + if self.item == '*' or self.item == len(_target): + self._item = len(_target) + _target.append(value) + elif self.item > len(_target): + raise IndexError() + else: + # TODO use update instead of replace... 
+ _target[self._item] = value + + case dict(): + if self.item not in _target: + _target[self._item] = value + else: + # TODO use update instead of replace... + _target[self._item] = value + + case _: + raise TypeError() + + def resolve(self, target): + _head, *_trace = self._get_trace() + _prefix = ContextPath(_head) + _target = target + + if _head not in target: + tail = [_prefix] + for item in _trace: + tail.append(tail[-1][item]) + + return _prefix._select_from(target, _head, *_trace) + + def select(self, target: t.Dict | t.List) -> 'ContextPath': + head, *trace = self._get_trace() + if head in target: + _, _prefix, _ = ContextPath(head)._select_from(target[head], *trace) + else: + _prefix = None + return _prefix + + def update(self, target: t.Dict[str, t.Any] | t.List, value: t.Any, **kwargs: t.Any): + _head, *_trace = self._get_trace() + _target = target + _prefix = ContextPath(_head) + + if _head in target: + _target, _prefix, _trace = ContextPath(_head)._select_from(target[_head], *_trace) + + if _head not in _target: + _prefix.insert(_target, value, **kwargs) + + q = {'path': str(self)} + if _prefix._type is list: q['type'] = 'list' + if _prefix._type is dict: q['type'] = 'map' + print(_prefix, _prefix._type, q) + + merge_runner = MergeRunner.query(**q) + return merge_runner(self, _target, value, **kwargs) + + def insert(self, target, value, **kwargs): + keys_added = [] + _target, _prefix, _trace = self.resolve(target) + + while _prefix.is_container: + _prefix._set_in_target(_target, _prefix.new()) + _target = _target[_prefix.item] + + _prefix._set_in_target(_target, value) + + return keys_added + + @classmethod + def parse(cls, path: str) -> 'ContextPath': + full_path = None + for part in path.split('.'): + name, _, index = part.partition('[') + + if full_path is None: + full_path = ContextPath(name) + else: + full_path = full_path[name] + + if not index: continue + + for idx in index[:-1].split(']['): + try: + idx = int(idx) + except ValueError: + pass + finally: + full_path = full_path[idx] + + return full_path + + +class query_dict: + def __init__(self, data=None, **kwargs): + self.data = data or {} + self.data.update(**kwargs) + + def __contains__(self, item): + return all(self.data.get(k) == v for k, v in item.items()) + + def __repr__(self): + return repr(self.data) + + def __str__(self): + return str(self.data) + + +if __name__ == '__main__': + from hermes.commands.process.merge import ObjectMerge, CollectionMerge + + MergeRunner.register('default', ObjectMerge(['@id', 'email', 'name']), ) + + logging.basicConfig(level=logging.DEBUG, format="%(message)s") + class query_dict: + def __init__(self, data=None, **kwargs): + self.data = data or {} + self.data.update(**kwargs) + + def __contains__(self, item): + return all(self.data.get(k) == v for k, v in item.items()) + + def __repr__(self): + return repr(self.data) + + def __str__(self): + return str(self.data) + + data = { + 'author': [ + {'@type': ['Person', 'hermes:contributor'], 'name': 'Michael Meinel', 'email': 'michael.meinel@DLR.de'}, + {'@type': 'Person', 'name': 'Stephan Druskat'}, + ] + } + + + author = ContextPath('author') + author[0].update(data, {'givenName': 'Michael', 'familyName': 'Meinel', 'email': "Michael.Meinel@dlr.de"}, ep='git', stage='harvest') + author[1].update(data, {'email': 'spam@egg.com'}) + + print(data) From 6c63604d4acd8864a8c0468b9945ceb838a455a5 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 21 Sep 2022 09:43:01 +0200 Subject: [PATCH 12/52] Ignore more build output --- .gitignore | 
1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 3d2f844b..a75ba9fc 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ htmlcov docs/source/api docs/build/ +/dist/ .idea/ .venv/ From c9a98005fabc05d5a500c8dd790c6f24899096a3 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 21 Sep 2022 09:44:52 +0200 Subject: [PATCH 13/52] Extract merge strategies and switch to ContextPath style access --- src/hermes/commands/process/merge.py | 101 ------- src/hermes/model/context.py | 93 ++++--- src/hermes/model/merge.py | 173 ++++++++++++ src/hermes/model/path.py | 402 +++++++++++---------------- 4 files changed, 395 insertions(+), 374 deletions(-) delete mode 100644 src/hermes/commands/process/merge.py create mode 100644 src/hermes/model/merge.py diff --git a/src/hermes/commands/process/merge.py b/src/hermes/commands/process/merge.py deleted file mode 100644 index 699f79bf..00000000 --- a/src/hermes/commands/process/merge.py +++ /dev/null @@ -1,101 +0,0 @@ -import logging -import typing as t - -from hermes.model.errors import MergeError -from hermes.model.path import ContextPath - - -class MergeBase: - default_keys = [] - default_cast = {} - - def __init__(self, keys=None): - self._keys = keys or self.default_keys - self._log = logging.getLogger('hermes.merge') - - def __call__(self, path, target, value): - updates = self.collect_updates(path, target, value) - updates = self.check_updates(updates) - - self._log.debug(". %s changes to %s", len(updates), str(path)) - return updates - - def collect_updates(self, path, target, value): - updates = [] - return updates - - def check_updates(self, updates): - checked_updates = [] - - for key, new_value, old_value in updates: - if key is None: - checked_updates.append((None, new_value, old_value)) - continue - - key = ContextPath.parse(key) - cast = self.default_cast.get(('key', key.item), lambda x: x) - if new_value is None or cast(old_value) == cast(new_value): - continue - elif old_value is None: - checked_updates.append((key, new_value, old_value)) - - return checked_updates - - def is_equal(self, left, right): - if left.parent == right.parent and left.item == right.item: - return True - - -class ObjectMerge(MergeBase): - default_keys = ['@id'] - default_cast = { - ('key', 'email'): str.upper, - } - - def __call__(self, path, target, value): - updates = super().__call__(path, target, value) - merged_keys = [] - - for subkey, new_value, old_value in updates: - if subkey.item not in target: - subkey.insert(target, new_value) - else: - subkey.update(target[subkey.item], new_value) - - return merged_keys - - def _active_keys(self, path, target, value): - match value: - case dict(): - value = [(path[_key], _value) for _key, _value in value.items()] - case list(): - value = [(path[i], _value) for i, _value in enumerate(value)] - case _: - value = [] - - return [ - (_key, _value, target if _key is None else target[_key]) - for _key, _value in value - if _key is None or _key in target - ] - - def collect_updates(self, path, target, value): - _target, _prefix, _trace = path.resolve(target) - updates = super().collect_updates(path, target, value) - updates += self._active_keys(value, _target, value) - return updates - - -class CollectionMerge(MergeBase): - default_keys = ['@id'] - - def __call__(self, path, target, value): - updates = super().__call__(path, target, value) - - for _key, _old, _new in updates: - self.default_cast[_key.item]() - if _old is not None and _old != _new: - raise MergeError(path, _old, _new) - - elif 
_new is not None and _old != _new: - pass diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py index 3375aa09..490d8e17 100644 --- a/src/hermes/model/context.py +++ b/src/hermes/model/context.py @@ -16,6 +16,8 @@ _log = logging.getLogger(__name__) +ContextPath.init_merge_strategies() + class HermesContext: """ The HermesContext stores the metadata for a certain project. @@ -41,6 +43,9 @@ def __init__(self, project_dir: t.Optional[Path] = None): self._data = {} self._errors = [] + def keys(self): + return [ContextPath.parse(k) for k in self._data.keys()] + def get_cache(self, *path: str, create: bool = False) -> Path: """ Retrieve a cache file for a given *path*. @@ -79,6 +84,16 @@ def update(self, _key: str, _value: t.Any, **kwargs: t.Any): pass + def get_data(self, data: t.Optional[dict] = None, path: t.Optional['ContextPath'] = None, tags: t.Optional[dict] = None) -> dict: + if data is None: + data = {} + if path is not None: + data.update(path.get_from(self._data)) + else: + for key in self.keys(): + data.update(key.get_from(self._data)) + return data + def error(self, ep: EntryPoint, error: Exception): """ Add an error that occurred during processing to the error log. @@ -145,17 +160,6 @@ def __exit__(self, exc_type, exc_val, exc_tb): self._base.error(self._ep, exc) return True - def get_data(self, data=None, tags=None): - data = data or {} - for key, ((value, tag), *tail) in self._data.items(): - key = ContextPath.parse(key) - if tags is not None: - tags[str(key)] = tag - - key.update(data, value) - - return data - def update(self, _key: str, _value: t.Any, **kwargs: t.Any): """ The updates are added to a list of values. @@ -251,6 +255,25 @@ def error(self, ep: EntryPoint, error: Exception): ep = ep or self._ep self._base.error(ep, error) + def _check_values(self, path, values): + (value, tag), *values = values + for alt_value, alt_tag in values: + if value != alt_value: + raise ValueError(f'{path}') + return value, tag + + def get_data(self, data: t.Optional[dict] = None, path: t.Optional['ContextPath'] = None, tags: t.Optional[dict] = None) -> dict: + if data is None: + data = {} + for key, values in self._data.items(): + key = ContextPath.parse(key) + if path is None or key in path: + value, tag = self._check_values(key, values) + key.update(data, value, tags, **tag) + if tags is not None and tag: + tags[str(key)] = tag + return data + def finish(self): """ Calling this method will lead to further processors not handling the context anymore. @@ -271,35 +294,25 @@ def merge_from(self, other: HermesHarvestContext): other.get_data(self._data, tags=self.tags) def update(self, _key: ContextPath, _value: t.Any, tags: t.Dict[str, t.Dict] | None = None): - if _key.item == '*': - _item_path = self.find_key(_key.parent, _value) - if _item_path: - target, context = _item_path.get_from(self._data, None) - _item_path.merge_from(_value) - _key.item = _item_path.item + if _key._item == '*': + _item_path, _item, _path = _key.resolve(self._data, query=_value, create=True) + if tags: + _tags = {k.lstrip(str(_key) + '.'): t for k, t in tags.items() if ContextPath.parse(k) in _key} else: - target, context = _key.parent.get_from(self._data, None) - _key.item = len(target) - - if tags: - values = {} - - for subkey in tags.keys(): - tag_key = ContextPath.parse(str(_key) + '.' 
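A usage sketch for the tagged storage and the reworked `get_data()` above; the dummy entry point follows the pattern used in the test suite, and the printed values are what the implementation is expected to produce:

```python
from importlib.metadata import EntryPoint
from hermes.model.context import HermesContext, HermesHarvestContext

ctx = HermesContext()
harvest_ctx = HermesHarvestContext(
    ctx, EntryPoint(name='demo', group='hermes.harvest', value='hermes_test:ctx')
)
harvest_ctx.update('author[0].name', 'Jane Doe', local_path='CITATION.cff')

tags = {}
data = harvest_ctx.get_data(tags=tags)
print(data)  # {'author': [{'name': 'Jane Doe'}]}
print(tags)  # {'author[0].name': {'local_path': 'CITATION.cff', 'ts': ..., 'ep': 'demo'}}
```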
+ subkey) - try: - tag_value, context = tag_key.get_from(self._data, None) - values[subkey] = tag_value - except KeyError: - pass - - _key.update_in(self._data, _value) - - if tags: - for subkey, tag in tags.items(): - tag_key = ContextPath.parse(f'{str(_key)}.{subkey}') - tag_value, context = tag_key.get_from(self._data, context) - if values.get(subkey) != tag_value: - self.tags[str(tag_key)] = tag + _tags = {} + _path.update(_item, _value, _tags) + if tags is not None and _tags: + for k, v in _tags.items(): + if not v: + continue + + if _key: + tag_key = str(_key) + '.' + k + else: + tag_key = k + tags[tag_key] = v + else: + _key.update(self._data, _value, tags) def annotate(self): @@ -364,7 +377,7 @@ def _annotate_dict(path, data, indent): _annotate_dict(None, self._data, '') def find_key(self, item, other): - data, context = item.get_from(self._data, None) + data = item.get_from(self._data) for i, node in enumerate(data): match = [(k, node[k]) for k in self._PRIMARY_ATTR.get(str(item), ('@id',)) if k in node] diff --git a/src/hermes/model/merge.py b/src/hermes/model/merge.py new file mode 100644 index 00000000..4d8f9799 --- /dev/null +++ b/src/hermes/model/merge.py @@ -0,0 +1,173 @@ +from hermes.model.path import ContextPath + + +class MergeStrategies: + def __init__(self): + self._strategies = [] + + def select(self, **kwargs): + fitting_strategies = [ + strategy + for strategy in self._strategies + if strategy.can_handle(kwargs) + ] + if fitting_strategies: + return fitting_strategies[0] + else: + return None + + def register(self, strategy): + self._strategies.append(strategy) + + +class MergeStrategy: + @staticmethod + def _check_types(item, value): + match item: + case list(): return any(t in value for t in item) + case str(): return item in value + return False + + @staticmethod + def _check_path(item, value): + item = ContextPath.parse(item) + value = ContextPath.parse(value) + if item == value or item in value: + return True + return False + + checks = { + 'type': _check_types, + 'path': _check_path, + } + + def __init__(self, **filter): + self._filter = filter + + def _check(self, key, filter, value): + if key in filter: + check = self.checks.get(key, lambda item, vaue: item in value) + return check(filter[key], value) + return True + + def can_handle(self, filter: dict): + return all( + self._check(key, filter, value) + for key, value in self._filter.items() + ) + + def are_equal(self, left, right): + return left == right + + +class CollectionMergeStrategy(MergeStrategy): + def __init__(self, **filter): + super().__init__(**filter) + + def are_equal(self, left, right): + return all( + any(a == b for b in right) + for a in left + ) + + def __call__(self, target, path, value, **kwargs): + match target, path._item: + case list(), int() as index if index < len(target): + match target[index]: + case dict() as t: t.update(value) + case list() as l: l[:] = value + case _: target[index] = value + + case list(), '*': + path._item = len(target) + target.append(value) + + case list(), int() as index if index == len(target): + target.append(value) + + case list(), int() as index: + raise IndexError(f'Index {index} out of bounds to set in {path.parent}.') + case list(), _ as index: + raise TypeError(f'Invalid index type {type(index)} to set in {path.parent}.') + + case dict(), str() as key if key in target: + match target[key]: + case dict() as t: t.update(value) + case list() as l: l[:] = value + case _: target[key] = value + + case dict(), str() as key: + target[key] = value + + case 
dict(), _ as key: + raise TypeError(f'Invalid key type {type(key)} to set in {path.parent}.') + + case _, _: + raise TypeError(f'Cannot handle target type {type(target)} to set {path}.') + + return value + + +class ObjectMergeStrategy(MergeStrategy): + def __init__(self, *id_keys, **filter): + super().__init__(**filter) + self.id_keys = id_keys or ('@id', ) + + def are_equal(self, left, right): + if not self.id_keys: + return super().are_equal(left, right) + else: + return any(left[key] == right[key] for key in self.id_keys if key in left and key in right) + + def __call__(self, target, path, value, **kwargs): + match target, path._item: + case dict(), str() as key if key in target: + match target[key]: + case dict() as t: t.update(value) + case list() as l: l[:] = value + case _: target[key] = value + + case dict(), str() as key: + target[key] = value + + case dict(), _ as key: + raise TypeError(f'Invalid key type {type(key)} to set in {path.parent}.') + + case list(), int() as index if index < len(target): + match target[index]: + case dict() as t: t.update(value) + case list() as l: l[:] = value + case _: target[index] = value + + case list(), '*': + path._item = len(target) + target.append(value) + + case list(), int() as index if index == len(target): + target.append(value) + + case list(), int() as index: + raise IndexError(f'Index {index} out of bounds to set in {path.parent}.') + case list(), _ as index: + raise TypeError(f'Invalid index type {type(index)} to set in {path.parent}.') + + case _, _: + raise TypeError(f'Cannot handle target type {type(target)} to set {path}.') + + return value + + +default_merge_strategies = [ + ObjectMergeStrategy( + '@id', 'email', 'name', + path='author[*]', + ), + + CollectionMergeStrategy( + type=['list'], + ), + + ObjectMergeStrategy( + type=['map'], + ) +] diff --git a/src/hermes/model/path.py b/src/hermes/model/path.py index 0c14e52f..cd6f7947 100644 --- a/src/hermes/model/path.py +++ b/src/hermes/model/path.py @@ -1,117 +1,76 @@ import typing as t -from hermes import config -from hermes.model.errors import MergeError - - -_log = config.getLogger('hermes.model') - - -class MergeRunner: - _registry = {} +import pyparsing as pp - def __init__(self, strategies): - self._strategies = strategies - _log.debug(". Loaded %d strategies", len(self._strategies)) - - def __call__(self, path, target, value, **kwargs): - merged_keys = [] - - for merge in self._strategies: - try: - print(path, target, value) - _log.info(". Trying merge using %s", merge) - result = merge(path, target, value, **kwargs) - - except MergeError as e: - _log.warning("! %s failed:", merge) - _log.info("> %s", e) - continue - - else: - merged_keys.extend(result) - break - - else: - return False - - return merged_keys - - def compare(self, path, other): - if other is not None and path.item == other.item and path.parent == other.parent: - return True - return False +from hermes import config - @classmethod - def _filter_matches(cls, filter, kwargs): - for key, value in filter.items(): - print(key, value, kwargs) - if key not in kwargs or kwargs[key] in value: - return True +_log = config.getLogger('hermes.model.path') - return False - @classmethod - def register(cls, name, merge, **kwargs): - cls._registry[name or str(merge)] = (kwargs, merge) +class ContextPathGrammar: + key = pp.Word('@' + pp.alphas) + index = pp.Word(pp.nums).set_parse_action(lambda tok: [int(tok[0])]) | pp.Char('*') + field = key + (pp.Suppress('[') + index + pp.Suppress(']'))[...] 
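A short sketch of how the default strategies registered above get picked; the filter keys (`path`, `type`) mirror the ones `_find_setter()` assembles, while the concrete queries are made up:

```python
from hermes.model.merge import (
    CollectionMergeStrategy, MergeStrategies, ObjectMergeStrategy,
)

strategies = MergeStrategies()
strategies.register(ObjectMergeStrategy('@id', 'email', 'name', path='author[*]'))
strategies.register(CollectionMergeStrategy(type=['list']))
strategies.register(ObjectMergeStrategy(type=['map']))

# Author entries are matched by the path filter, plain lists by the type filter.
print(strategies.select(path='author[3]', type='map'))  # -> the author ObjectMergeStrategy
print(strategies.select(path='keywords', type='list'))  # -> the CollectionMergeStrategy
```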
+ path = field + (pp.Suppress('.') + field)[...] @classmethod - def query(cls, **kwargs): - strategies = [] - for filter, strategy in cls._registry.values(): - if cls._filter_matches(filter, kwargs): - strategies.append(strategy) - - return cls(strategies) + def parse(cls, text: str): + return cls.path.parse_string(text) class ContextPath: + merge_strategies = None + def __init__(self, item: str | int, parent: t.Optional['ContextPath'] = None): self._item = item self._parent = parent self._type = None + @classmethod + def init_merge_strategies(cls): + if cls.merge_strategies is None: + from hermes.model.merge import MergeStrategies, default_merge_strategies + + cls.merge_strategies = MergeStrategies() + for strategy in default_merge_strategies: + cls.merge_strategies.register(strategy) + @property def parent(self) -> t.Optional['ContextPath']: return self._parent @property - def item(self) -> t.Optional[str | int]: - return self._item - - @property - def is_container(self): - return self._type in (list, dict) + def path(self) -> t.List['ContextPath']: + if self._parent is None: + return [self] + else: + return self._parent.path + [self] def __getitem__(self, item: str | int) -> 'ContextPath': match item: case str(): self._type = dict case int(): self._type = list - return ContextPath(item, self) def __str__(self) -> str: item = str(self._item) - if self._parent is not None: parent = str(self._parent) - match self._item: case '*' | int(): item = parent + f'[{item}]' case str(): item = parent + '.' + item case _: raise ValueError(self.item) - return item def __repr__(self) -> str: return f'ContextPath.parse("{str(self)}")' def __eq__(self, other: 'ContextPath') -> bool: - if (other is None) or (self.parent != other.parent) \ - or (self.item != '*' and other.item != '*' and self.item != other.item): - return False - - return True + return ( + other is not None + and (self._item == other._item or self._item == '*' or other._item == '*') + and self._parent == other._parent + ) def __contains__(self, other: 'ContextPath') -> bool: while other is not None: @@ -120,181 +79,158 @@ def __contains__(self, other: 'ContextPath') -> bool: other = other.parent return False - def _get_trace(self): - if self.parent: - return self.parent._get_trace() + [self._item] - else: - return [self._item] - def new(self): - return self._type() - - def _select_from(self, _target, _head, *_trace): - _prefix = self[_head] - - match _target, _head: - case list(), int() if len(_target) > _head: - if _trace: - _target, _prefix, _trace = _prefix._select_from(_target[_head], *_trace) - else: - _target = _target[_head] - - case dict(), str() if _head in _target: - if _trace: - _target, _prefix, _trace = _prefix._select_from(_target[_head], *_trace) - else: - _target = _target[_head] - - case (list(), '*' | int()) | (dict(), str()): - pass + if self._type is not None: + return self._type() + raise TypeError() + + @staticmethod + def _get_item(target: dict | list, path: 'ContextPath') -> t.Optional['ContextPath']: + match target, path._item: + case list(), '*': + raise IndexError(f'Cannot resolve any(*) from {path}.') + case list(), int() as index if index < len(target): + return target[index] + case list(), int() as index: + raise IndexError(f'Index {index} out of bounds for {path.parent}.') + case list(), _ as index: + raise TypeError(f'Invalid index type {type(index)} to access {path.parent}.') + + case dict(), str() as key if key in target: + return target[key] + case dict(), str() as key: + raise KeyError(f'Key {key} not in 
{path.parent}.') + case dict(), _ as key: + raise TypeError(f'Invalid key type {type(key)} to access {path.parent}.') case _, _: - raise KeyError(_target, _head) - - return _target, _prefix, _trace - - def _set_in_target(self, _target, value): - match _target: - case list(): - if self.item == '*' or self.item == len(_target): - self._item = len(_target) - _target.append(value) - elif self.item > len(_target): - raise IndexError() - else: - # TODO use update instead of replace... - _target[self._item] = value + raise TypeError(f'Cannot handle target type {type(target)} for {path}.') - case dict(): - if self.item not in _target: - _target[self._item] = value - else: - # TODO use update instead of replace... - _target[self._item] = value - - case _: - raise TypeError() - - def resolve(self, target): - _head, *_trace = self._get_trace() - _prefix = ContextPath(_head) - _target = target - - if _head not in target: - tail = [_prefix] - for item in _trace: - tail.append(tail[-1][item]) + def _find_in_parent(self, target: dict, path: 'ContextPath') -> t.Any: + _item = path._item + _path = path.parent + while _path is not None: + try: + item = self._get_item(target, _path[_item]) + _log.debug("Using type %s from §%s.", item, _path) + return item - return _prefix._select_from(target, _head, *_trace) + except (KeyError, IndexError, TypeError) as e: + _log.debug("%s: %s", _path, e) + _path = _path.parent + continue - def select(self, target: t.Dict | t.List) -> 'ContextPath': - head, *trace = self._get_trace() - if head in target: - _, _prefix, _ = ContextPath(head)._select_from(target[head], *trace) + return None + + def _find_setter(self, target: dict | list, path: 'ContextPath', value: t.Any = None, **kwargs) -> t.Callable: + filter = { + 'name': path._item, + } + + if isinstance(path._item, str) or path._parent is not None: + filter['path'] = str(path) + + if type := self._find_in_parent(target, path['@type']): + filter['type'] = type + elif value is not None: + match value: + case list(): filter['type'] = 'list' + case dict(): filter['type'] = 'map' + elif path._type is list: + filter['type'] = 'list' + elif path._type is dict: + filter['type'] = 'map' + + if ep := kwargs.get('ep', None): + filter['ep'] = ep + + setter = self.merge_strategies.select(**filter) + if setter is None: + return self._set_item else: - _prefix = None - return _prefix - - def update(self, target: t.Dict[str, t.Any] | t.List, value: t.Any, **kwargs: t.Any): - _head, *_trace = self._get_trace() - _target = target - _prefix = ContextPath(_head) - - if _head in target: - _target, _prefix, _trace = ContextPath(_head)._select_from(target[_head], *_trace) + return setter + + def _set_item(self, target: dict | list, path: 'ContextPath', value: t.Any, **kwargs) -> t.Optional['ContextPath']: + match target, path._item: + case list(), int() as index if index < len(target): + match target[index]: + case dict() as t: t.update(value) + case list() as l: l[:] = value + case _: target[index] = value + + case dict(), str() as key if key in target: + match target[key]: + case dict() as t: t.update(value) + case list() as l: l[:] = value + case _: target[key] = value + + case dict(), str() as key: + target[key] = value + case list(), '*': + path._item = len(target) + target.append(value) + case list(), int() as index if index == len(target): + target.append(value) + + case dict(), _ as key: + raise TypeError(f'Invalid key type {type(key)} to set in {path.parent}.') + case list(), int() as index: + raise IndexError(f'Index {index} out of 
bounds to set in {path.parent}.') + case list(), _ as index: + raise TypeError(f'Invalid index type {type(index)} to set in {path.parent}.') - if _head not in _target: - _prefix.insert(_target, value, **kwargs) + case _, _: + raise TypeError(f'Cannot handle target type {type(target)} to set {path}.') - q = {'path': str(self)} - if _prefix._type is list: q['type'] = 'list' - if _prefix._type is dict: q['type'] = 'map' - print(_prefix, _prefix._type, q) + return value - merge_runner = MergeRunner.query(**q) - return merge_runner(self, _target, value, **kwargs) + def resolve(self, _target: list | dict, create: bool = False, query: t.Any = None) -> ('ContextPath', list | dict, 'ContextPath'): + head, *tail = self.path + target = _target + while head._type and tail: + try: + target = self._get_item(target, head) + except (IndexError, KeyError, TypeError): + if create and self.parent is not None: + new_head = head.new() + setter = self._find_setter(_target, head, new_head) + setter(target, head, new_head) + target = new_head + else: + break + head, *tail = tail + + if head._item == '*': + for i, item in enumerate(target): + if all(item[k] == v for k, v in query.items() if k in item): + head._item = i + break + else: + if create: + head._item = len(target) - def insert(self, target, value, **kwargs): - keys_added = [] - _target, _prefix, _trace = self.resolve(target) + if not hasattr(head, 'set_item'): + head.set_item = self._find_setter(_target, head) + tail_path = ContextPath(head._item) + for t in tail: + tail_path = tail_path[t._item] - while _prefix.is_container: - _prefix._set_in_target(_target, _prefix.new()) - _target = _target[_prefix.item] + return head, target, tail_path - _prefix._set_in_target(_target, value) + def get_from(self, target: dict | list) -> t.Any: + prefix, target, path = self.resolve(target) + return self._get_item(target, path) - return keys_added + def update(self, target: t.Dict[str, t.Any] | t.List, value: t.Any, tags: t.Optional[dict] = None, **kwargs): + prefix, target, tail = self.resolve(target, create=True) + prefix.set_item(target, tail, value, **kwargs) + if tags is not None and kwargs: + tags[str(self)] = kwargs @classmethod def parse(cls, path: str) -> 'ContextPath': - full_path = None - for part in path.split('.'): - name, _, index = part.partition('[') - - if full_path is None: - full_path = ContextPath(name) - else: - full_path = full_path[name] - - if not index: continue - - for idx in index[:-1].split(']['): - try: - idx = int(idx) - except ValueError: - pass - finally: - full_path = full_path[idx] - - return full_path - - -class query_dict: - def __init__(self, data=None, **kwargs): - self.data = data or {} - self.data.update(**kwargs) - - def __contains__(self, item): - return all(self.data.get(k) == v for k, v in item.items()) - - def __repr__(self): - return repr(self.data) - - def __str__(self): - return str(self.data) - - -if __name__ == '__main__': - from hermes.commands.process.merge import ObjectMerge, CollectionMerge - - MergeRunner.register('default', ObjectMerge(['@id', 'email', 'name']), ) - - logging.basicConfig(level=logging.DEBUG, format="%(message)s") - class query_dict: - def __init__(self, data=None, **kwargs): - self.data = data or {} - self.data.update(**kwargs) - - def __contains__(self, item): - return all(self.data.get(k) == v for k, v in item.items()) - - def __repr__(self): - return repr(self.data) - - def __str__(self): - return str(self.data) - - data = { - 'author': [ - {'@type': ['Person', 'hermes:contributor'], 'name': 
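Putting the rewritten class to use on a plain dictionary (a sketch with illustrative values; `init_merge_strategies()` is normally triggered when `hermes.model.context` is imported, so it is called explicitly here):

```python
from hermes.model.path import ContextPath

ContextPath.init_merge_strategies()

data = {'author': [{'name': 'Jane Doe', 'email': 'jane@example.org'}]}
tags = {}

ContextPath.parse('author[0].email').update(data, 'jane.doe@example.org', tags, ep='git')

print(data['author'][0]['email'])  # jane.doe@example.org
print(tags)                        # {'author[0].email': {'ep': 'git'}}
```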
'Michael Meinel', 'email': 'michael.meinel@DLR.de'}, - {'@type': 'Person', 'name': 'Stephan Druskat'}, - ] - } - - - author = ContextPath('author') - author[0].update(data, {'givenName': 'Michael', 'familyName': 'Meinel', 'email': "Michael.Meinel@dlr.de"}, ep='git', stage='harvest') - author[1].update(data, {'email': 'spam@egg.com'}) - - print(data) + head, *tail = ContextPathGrammar.parse(path) + path = cls(head) + for item in tail: + path = path[item] + return path From 4652e919937d050531bc9dd95140da28b7b0505d Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 21 Sep 2022 09:45:34 +0200 Subject: [PATCH 14/52] Adapt workflow to latest changes --- src/hermes/commands/process/git.py | 31 ++++++++++++------------------ src/hermes/commands/workflow.py | 10 ---------- 2 files changed, 12 insertions(+), 29 deletions(-) diff --git a/src/hermes/commands/process/git.py b/src/hermes/commands/process/git.py index aa2f314b..c04c8918 100644 --- a/src/hermes/commands/process/git.py +++ b/src/hermes/commands/process/git.py @@ -1,29 +1,22 @@ from hermes.model.context import CodeMetaContext, HermesHarvestContext, ContextPath -def flag_authors(ctx: CodeMetaContext, harverst_ctx: HermesHarvestContext): - data = harverst_ctx.get_data(tags=(tags := {})) +_AUTHOR_KEYS = ('@id', 'email', 'name') - contributors = [] - author_path = ContextPath('author') - for i, contributor in enumerate(data.get('author', [])): - author_key = ctx.find_key(author_path, contributor) - contributor_key = author_path[i] +def flag_authors(ctx: CodeMetaContext, harverst_ctx: HermesHarvestContext): + tags = {} + data = harverst_ctx.get_data(tags=tags) + author_path = ContextPath('author') - contributor_tags = {} - for k, t in tags.items(): - if ContextPath.parse(k) in contributor_key: - subkey = k.lstrip(str(contributor_key) + '.') - contributor_tags[subkey] = t + for i, contributor in enumerate(author_path.get_from(data)): + query = {k: contributor[k] for k in _AUTHOR_KEYS if k in contributor} + author_key, target, path = author_path['*'].resolve(ctx._data, query=query) - if not author_key: + if author_key._item == '*': contributor['projectRole'] = 'Others' - contributors.append((contributor, contributor_tags)) - else: - ctx.update(author_key, contributor, tags=contributor_tags) - harverst_ctx.finish() + ctx.update(author_key, contributor, tags=tags) - for author, author_tags in contributors: - ctx.update(author_path['*'], author, tags=author_tags) + ctx.tags.update(tags) + harverst_ctx.finish() diff --git a/src/hermes/commands/workflow.py b/src/hermes/commands/workflow.py index eb309a4a..1a2f87cb 100644 --- a/src/hermes/commands/workflow.py +++ b/src/hermes/commands/workflow.py @@ -8,16 +8,6 @@ from hermes.model.context import HermesContext, HermesHarvestContext, CodeMetaContext from hermes.model.errors import MergeError -from hermes.model.path import MergeRunner - -from hermes.commands.process.merge import CollectionMerge, ObjectMerge - - -MergeRunner.register('person', ObjectMerge(['@id', 'email', 'name']), type='Person') -MergeRunner.register('list', CollectionMerge(), type='list') -MergeRunner.register('map', ObjectMerge(), type='map') - - @click.group(invoke_without_command=True) From cac0dc824a73a7fa7dd6a822e73d96bf96fcc293 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 21 Sep 2022 15:50:11 +0200 Subject: [PATCH 15/52] Improve documentation and logging - The `audit` logger writes into a markdown file --- src/hermes/cli.py | 12 +++ src/hermes/commands/harvest/cff.py | 27 +++--- src/hermes/commands/harvest/git.py | 
112 +++++++++++++++++++--- src/hermes/commands/process/cff.py | 15 +++ src/hermes/commands/process/git.py | 21 ++++- src/hermes/commands/workflow.py | 41 ++++++-- src/hermes/model/context.py | 76 +++------------ src/hermes/model/path.py | 145 ++++++++++++++++++++++++----- 8 files changed, 326 insertions(+), 123 deletions(-) diff --git a/src/hermes/cli.py b/src/hermes/cli.py index 50a194f8..e60b0395 100644 --- a/src/hermes/cli.py +++ b/src/hermes/cli.py @@ -1,6 +1,7 @@ """ This module provides the main entry point for the HERMES command line application. """ +import logging import typing as t import pathlib from importlib import metadata @@ -80,6 +81,17 @@ def invoke(self, ctx: click.Context) -> t.Any: init_logging() log_header(None) + audit_log = logging.getLogger('audit') + audit_log.info("# Running Hermes") + audit_log.info("Running Hermes command line in: %s", ctx.params['path'].absolute()) + audit_log.debug("") + audit_log.debug("Invoked `%s` with", ctx.invoked_subcommand or self.name) + audit_log.debug("") + for k, v in ctx.params.items(): + audit_log.debug("`--%s`", k) + audit_log.debug(": `%s`", v) + audit_log.debug("") + if ctx.protected_args: return super().invoke(ctx) diff --git a/src/hermes/commands/harvest/cff.py b/src/hermes/commands/harvest/cff.py index 84604b94..c260ceda 100644 --- a/src/hermes/commands/harvest/cff.py +++ b/src/hermes/commands/harvest/cff.py @@ -31,6 +31,10 @@ def harvest_cff(click_ctx: click.Context, ctx: HermesHarvestContext): :param ctx: The harvesting context that should contain the provided metadata. """ # Get the parent context (every subcommand has its own context with the main click context as parent) + audit_log = logging.getLogger('audit.cff') + audit_log.info('') + audit_log.info("## Citation File Format") + parent_ctx = click_ctx.parent if parent_ctx is None: raise RuntimeError('No parent context!') @@ -68,9 +72,11 @@ def _convert_cff_to_codemeta(cff_data: str) -> t.Any: def _validate(cff_file: pathlib.Path, cff_dict: t.Dict) -> bool: + audit_log = logging.getLogger('audit.cff') + cff_schema_url = f'https://citation-file-format.github.io/{_CFF_VERSION}/schema.json' - with open('cff-schema@1.2.0.json', 'r') as cff_schema_file: + with (pathlib.Path(__file__).parent / f'cff-schema@{_CFF_VERSION}.json').open('r') as cff_schema_file: schema_data = json.load(cff_schema_file) if not schema_data: @@ -79,25 +85,20 @@ def _validate(cff_file: pathlib.Path, cff_dict: t.Dict) -> bool: with urllib.request.urlopen(cff_schema_url) as cff_schema_response: schema_data = json.loads(cff_schema_response.read()) - audit_log = logging.getLogger('audit.cff') - validator = jsonschema.Draft7Validator(schema_data) errors = sorted(validator.iter_errors(cff_dict), key=lambda e: e.path) if len(errors) > 0: - audit_log.warning('!! %s is not valid according to %s', cff_file, cff_schema_url) + audit_log.warning('!!! warning "%s is not valid according to <%s>"', cff_file, cff_schema_url) for error in errors: - path = ContextPath(error.absolute_path.popleft()) - for next in error.absolute_path: - path = path[next] - - audit_log.info('. Invalid input for %s.', str(path)) - audit_log.info(' %s', error.message) - audit_log.debug(' Value: %s', error.instance) + path = ContextPath.make(error.absolute_path) + audit_log.info(' Invalid input for `%s`.', str(path)) + audit_log.info(' !!! message "%s"', error.message) + audit_log.debug(' !!! 
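How the `audit` logger ends up in a Markdown file is configured elsewhere in hermes; the wiring below is therefore only an assumption that reproduces the effect, while the logger names and the Markdown-style messages are taken from the code above:

```python
import logging

audit = logging.getLogger('audit')
audit.setLevel(logging.DEBUG)
# A bare FileHandler uses the default formatter, i.e. message-only lines.
audit.addHandler(logging.FileHandler('hermes-audit.md'))

audit.info('# Running Hermes')
audit.getChild('cff').warning(
    '!!! warning "CITATION.cff is not valid according to <%s>"',
    'https://citation-file-format.github.io/1.2.0/schema.json',
)
```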
value "%s"', error.instance) audit_log.info('') - audit_log.info('# See the Citation File Format schema guide for further details:') - audit_log.info('# https://github.com/citation-file-format/citation-file-format/blob/{_CFF_VERSION}/schema-guide.md.') + audit_log.info('See the Citation File Format schema guide for further details:') + audit_log.info(f'.') return False elif len(errors) == 0: diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py index cb04ee1c..2c750084 100644 --- a/src/hermes/commands/harvest/git.py +++ b/src/hermes/commands/harvest/git.py @@ -18,13 +18,27 @@ SHELL_ENCODING = 'utf-8' _GIT_SEP = '|' -#_GIT_FORMAT = ['%an', '%ae', '%aI'] _GIT_FORMAT = ['%aN', '%aE', '%aI'] -#_GIT_ARGS = ['--reverse'] _GIT_ARGS = [] + +# TODO The following code contains a lot of duplicate implementation that can be found in hermes.model +# (In fact, it was kind of the prototype for lots of stuff there.) +# Clean up and refactor to use hermes.model instead + class ContributorData: + """ + Stores contributor data information from Git history. + """ + def __init__(self, name: str | t.List[str], email: str | t.List[str], ts: str | t.List[str]): + """ + Initialize a new contributor dataset. + + :param name: Name as returned by the `git log` command (i.e., with `.mailmap` applied). + :param email: Email address as returned by the `git log` command (also with `.mailmap` applied). + :param ts: Timestamp when the respective commit was done. + """ self.name = [] self.email = [] self.ts = [] @@ -45,16 +59,35 @@ def _update_attr(self, target, value, unique=True): target.append(value) def update(self, name=None, email=None, ts=None): + """ + Update the current contributor with the given data. + + :param name: New name to assign (addtionally). + :param email: New email to assign (additionally). + :param ts: New timestamp to adapt time range. + """ self._update_attr(self.name, name) self._update_attr(self.email, email) self._update_attr(self.ts, ts, unique=False) def merge(self, other: 'ContributorData'): + """ + Merge another :ref:`ContributorData` instance into this one. + + All attributes will be merged yet kept unique if required. + + :param other: The other instance that should contribute to this. + """ self.name += [n for n in other.name if n not in self.name] self.email += [e for e in other.email if e not in self.email] self.ts += other.ts - def to_codemeta(self): + def to_codemeta(self) -> dict: + """ + Return the current dataset as CodeMeta. + + :return: The CodeMeta representation of this dataset. + """ res = { '@type': ['Person', 'hermes:contributor'], } @@ -77,7 +110,13 @@ def to_codemeta(self): return res @classmethod - def from_codemeta(cls, data): + def from_codemeta(cls, data) -> 'ContributorData': + """ + Initialize a new instance from CodeMeta representation. + + :param data: The CodeMeta dataset to initialize from. + :return: The newly created instance. + """ name = [data['name']] + data.get('alternateName', []) email = [data['email']] + [contact['email'] for contact in data.get('contactPoint', [])] ts = [data['startTime'], data['endTime']] @@ -85,14 +124,32 @@ def from_codemeta(cls, data): class NodeRegister: + """ + Helper class to unify Git commit authors / contributors. + + This class keeps track of all registered instances and merges two :ref:`ContributorData` instances if some + attributes match. + """ + def __init__(self, cls, *order, **mapping): + """ + Initalize a new register. + + :param cls: Type of objects to store. 
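For orientation, these are the CodeMeta keys that `from_codemeta()` above reads back; the values are illustrative only:

```python
from hermes.commands.harvest.git import ContributorData

codemeta_author = {
    'name': 'Jane Doe',
    'alternateName': ['J. Doe'],                           # further names seen in the history
    'email': 'jane@example.org',
    'contactPoint': [{'email': 'jane@old-host.example'}],  # further e-mail addresses
    'startTime': '2022-01-03T10:00:00+01:00',              # time range of the contributions
    'endTime': '2022-09-01T10:57:42+02:00',
}

contributor = ContributorData.from_codemeta(codemeta_author)
```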
+ :param order: The order of attributes to compare. + :param mapping: A mapping to convert attributes (will be applied for comparison). + """ self.cls = cls self.order = order self.mapping = mapping self._all = [] self._node_by = {key: {} for key in self.order} - def add(self, node): + def add(self, node: t.Any): + """ + Add (or merge) a new node to the register. + :param node: The node that should be added. + """ self._all.append(node) for key in self.order: @@ -106,6 +163,13 @@ def add(self, node): self._node_by[key][mapping(value)] = node def update(self, **kwargs): + """ + Add (or merge) a new item to the register with the given attribute values. + + :fixme: This is not a good implementation strategy at all. + + :param kwargs: The attribute values to be stored. + """ missing = [] tail = list(self.order) while tail: @@ -144,22 +208,29 @@ def update(self, **kwargs): def _audit_authors(authors, audit_log: logging.Logger): + # Collect all authors that have ambiguous data unmapped_authors = [] for author in authors._all: if len(author.email) > 1 or len(author.name) > 1: unmapped_authors.append(author) if unmapped_authors: - audit_log.warning("You have unmapped authors in your Git history.") + # Report to the audit about our findings + audit_log.warning('!!! warning "You have unmapped authors in your Git history."') for author in unmapped_authors: if len(author.email) > 1: - audit_log.info(f"- %s has alternate email: %s", str(author), ', '.join(author.email[1:])) + audit_log.info(f" - %s has alternate email: %s", str(author), ', '.join(author.email[1:])) if len(author.name) > 1: - audit_log.info(f"- %s has alternate names: %s", str(author), ', '.join(author.name[1:])) + audit_log.info(f" - %s has alternate names: %s", str(author), ', '.join(author.name[1:])) + audit_log.warning('') + + audit_log.info("Please consider adding a `.maillog` file to your repository to disambiguate these contributors.") + audit_log.info('') + audit_log.info('``` .mailmap') + # Provide some example configuration for the hint log hint_log = audit_log.parent.getChild('hints') - hint_log.debug("# Write a '.maillog' to resolve Git ambiguities.") - hint_log.info("cat > .maillog << EOF") + hint_log.debug("# '.maillog' to resolve git ambiguities.") unmapped_email = [a for a in unmapped_authors if a.email[1:]] if unmapped_email: @@ -181,9 +252,10 @@ def _audit_authors(authors, audit_log: logging.Logger): for name in author.name[1:]: hint_log.info('%s <%s> %s', str(author.name[0]), str(author.email[0]), str(name)) - hint_log.info("EOF") hint_log.info('') + audit_log.info('```') + def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): """ @@ -193,6 +265,9 @@ def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): :param ctx: The harvesting context that should contain the provided metadata. """ _log = logging.getLogger('cli.harvest.git') + audit_log = logging.getLogger('audit.cff') + audit_log.info('') + audit_log.info("## Git History") # Get the parent context (every subcommand has its own context with the main click context as parent) parent_ctx = click_ctx.parent @@ -202,8 +277,11 @@ def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): _log.debug(". 
Get history of currently checked-out branch") authors = NodeRegister(ContributorData, 'email', 'name', email=str.upper) -# for author_data in ctx.get_data().get('author', []): -# authors.add(ContributorData.from_codemeta(author_data)) + try: + for author_data in ctx.get_data().get('author', []): + authors.add(ContributorData.from_codemeta(author_data)) + except ValueError: + pass git_exe = shutil.which('git') if not git_exe: @@ -244,3 +322,11 @@ def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): '@type': "SoftwareSourceCode", 'author': [author.to_codemeta() for author in authors._all], }, branch=git_branch) + + try: + ctx.get_data() + except ValueError as e: + audit_log.error('!!! warning "Inconsistent data"') + audit_log.info(' The data collected from git is ambiguous.') + audit_log.info(' Consider deleting `%s` to avoid problems.', ctx.hermes_dir) + audit_log.error('') diff --git a/src/hermes/commands/process/cff.py b/src/hermes/commands/process/cff.py index d1cfa0f0..1b178c73 100644 --- a/src/hermes/commands/process/cff.py +++ b/src/hermes/commands/process/cff.py @@ -1,10 +1,25 @@ +import logging + from hermes.model.context import HermesHarvestContext, ContextPath, CodeMetaContext def add_name(ctx: CodeMetaContext, harvest_ctx: HermesHarvestContext): + """ + Augment each author with a `name` attribute (if not present). + + This will allow better matching against the git authors and can be removed in a post-process step. + + :param ctx: The resulting context that should contain the harmonized data. + :param harvest_ctx: The harvest context containing all raw harvested data. + """ + audit_log = logging.getLogger('audit.cff') + audit_log.info('') + audit_log.info('### Add author names') + data = harvest_ctx.get_data() author_path = ContextPath('author') for i, author in enumerate(data.get('author', [])): if 'name' not in author: harvest_ctx.update(str(author_path[i]["name"]), f"{author['givenName']} {author['familyName']}", stage='preprocess') + audit_log.debug(f"- {author['givenName']} {author['familyName']}") diff --git a/src/hermes/commands/process/git.py b/src/hermes/commands/process/git.py index c04c8918..b66ed899 100644 --- a/src/hermes/commands/process/git.py +++ b/src/hermes/commands/process/git.py @@ -1,3 +1,5 @@ +import logging + from hermes.model.context import CodeMetaContext, HermesHarvestContext, ContextPath @@ -5,9 +7,23 @@ def flag_authors(ctx: CodeMetaContext, harverst_ctx: HermesHarvestContext): - tags = {} - data = harverst_ctx.get_data(tags=tags) + """ + Identify all authors that are not yet in the target context and flag them with role `Other`. + + :param ctx: The target context containting harmonized data. + :param harverst_ctx: Data as it was harvested. 
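Stripped of the hermes plumbing, the `git log` call behind this harvester looks roughly like the sketch below; it is built from the same placeholders as `_GIT_FORMAT`/`_GIT_SEP` (`%aN`/`%aE` honour `.mailmap`, `%aI` is the ISO author date), with error handling omitted:

```python
import shutil
import subprocess

git_exe = shutil.which('git')
log_format = '--pretty=' + '|'.join(['%aN', '%aE', '%aI'])

p = subprocess.run([git_exe, 'log', log_format], capture_output=True)
for line in p.stdout.decode('utf-8').splitlines():
    name, email, timestamp = line.split('|', maxsplit=2)
```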
+ """ + audit_log = logging.getLogger('audit.git') + audit_log.info('') + audit_log.info('### Flag new authors') + author_path = ContextPath('author') + tags = {} + try: + data = harverst_ctx.get_data(tags=tags) + except ValueError: + audit_log.info("- Inconsistent data, skipping.") + return for i, contributor in enumerate(author_path.get_from(data)): query = {k: contributor[k] for k in _AUTHOR_KEYS if k in contributor} @@ -15,6 +31,7 @@ def flag_authors(ctx: CodeMetaContext, harverst_ctx: HermesHarvestContext): if author_key._item == '*': contributor['projectRole'] = 'Others' + audit_log.debug('- %s', contributor['name']) ctx.update(author_key, contributor, tags=tags) diff --git a/src/hermes/commands/workflow.py b/src/hermes/commands/workflow.py index 1a2f87cb..169aeddd 100644 --- a/src/hermes/commands/workflow.py +++ b/src/hermes/commands/workflow.py @@ -3,8 +3,7 @@ from importlib import metadata import click - -from hermes import cli +import markdown as markdown from hermes.model.context import HermesContext, HermesHarvestContext, CodeMetaContext from hermes.model.errors import MergeError @@ -17,7 +16,8 @@ def harvest(click_ctx: click.Context): Automatic harvest of metadata """ _log = logging.getLogger('cli.harvest') - cli.log_header("=== Metadata harvesting", None) + audit_log = logging.getLogger('audit') + audit_log.info("# Metadata harvesting") # Create Hermes context (i.e., all collected metadata for all stages...) ctx = HermesContext() @@ -32,12 +32,27 @@ def harvest(click_ctx: click.Context): with HermesHarvestContext(ctx, harvester) as harvest_ctx: harvest(click_ctx, harvest_ctx) - for _key, ((_value, _tag), *_trace) in harvest_ctx._data.items(): if any(v != _value and t == _tag for v, t in _trace): raise MergeError(_key, None, _value) - _log.info('') + audit_log.info('') + + +_HTML_PREFIX = """ + + + Hermes Report + + +""" @click.group(invoke_without_command=True) @@ -47,13 +62,14 @@ def process(): """ _log = logging.getLogger('cli.process') - cli.log_header("=== Metadata processing", None) + audit_log = logging.getLogger('audit') + audit_log.info("# Metadata processing") ctx = CodeMetaContext() harvesters = metadata.entry_points(group='hermes.harvest') for harvester in harvesters: - _log.info('- Merge data harvested by "%s"', harvester.name) + audit_log.info("## Process data from %s", harvester.name) harvest_context = HermesHarvestContext(ctx, harvester) harvest_context.load_cache() @@ -68,6 +84,7 @@ def process(): ctx.merge_from(harvest_context) _log.info('') + audit_log.info('') tags_path = ctx.get_cache('process', 'tags', create=True) with tags_path.open('w') as tags_file: @@ -76,7 +93,15 @@ def process(): with open('codemeta.json', 'w') as codemeta_file: json.dump(ctx._data, codemeta_file, indent=' ') - ctx.annotate() + logging.shutdown() + + with open('hermes-audit.md', 'r') as auditlog_file: + html_data = markdown.markdown(auditlog_file.read(), extensions=['admonition', 'def_list', 'fenced_code']) + + with open('hermes-audit.html', 'w') as html_file: + html_file.write(_HTML_PREFIX) + html_file.write(html_data) + html_file.write('') @click.group(invoke_without_command=True) diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py index 490d8e17..5857133a 100644 --- a/src/hermes/model/context.py +++ b/src/hermes/model/context.py @@ -1,6 +1,5 @@ import datetime import pathlib -import re import traceback import json import logging @@ -18,6 +17,7 @@ ContextPath.init_merge_strategies() + class HermesContext: """ The HermesContext stores the metadata for a certain 
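A quick check of the Markdown dialect the audit log relies on: the `admonition` extension renders the `!!! warning "..."` blocks written by the harvesters into styled admonition `<div>` boxes (toy input):

```python
import markdown

text = '\n'.join([
    '## Citation File Format',
    '',
    '!!! warning "CITATION.cff is not valid"',
    '    Invalid input for `authors[0].email`.',
])
print(markdown.markdown(text, extensions=['admonition', 'def_list', 'fenced_code']))
```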
project. @@ -263,6 +263,18 @@ def _check_values(self, path, values): return value, tag def get_data(self, data: t.Optional[dict] = None, path: t.Optional['ContextPath'] = None, tags: t.Optional[dict] = None) -> dict: + """ + Retrieve the data from a given path. + + This method can be used to extract data and whole sub-trees from the context. + If you want a complete copy of the data, you can also call this method without giving a path. + + :param data: Optional a target dictionary where the data is stored. If not given, a new one is created. + :param path: The path to extract data from. + :param tags: An optional dictionary to collect the tags that belog to the extracted data. + The full path will be used as key for this dictionary. + :return: The extracted data (i.e., the `data` parameter if it was given). + """ if data is None: data = {} for key, values in self._data.items(): @@ -314,68 +326,6 @@ def update(self, _key: ContextPath, _value: t.Any, tags: t.Dict[str, t.Dict] | N else: _key.update(self._data, _value, tags) - def annotate(self): - - def _annotate_list(path, data, indent): - tag = self.tags.get(str(path)) - if tag: - _tag = {k: v for k, v in tag.items() if k not in ('ep', 'ts')} - print(indent + f'# {str(path)} harvested by {tag["ep"]} at {tag["ts"]} from {_tag}') - - print(indent + '[') - for i, item in enumerate(data): - item_path = path[i] - - match item: - case list() as list_data: - _annotate_list(item_path, list_data, indent + ' ') - - case dict() as dict_data: - _annotate_dict(item_path, dict_data, indent + ' ') - - case _ as data: - tag = self.tags.get(str(item_path)) - if tag: - _tag = {k: v for k, v in tag.items() if k not in ('ep', 'ts')} - print(indent + f'# {str(item_path)} harvested by {tag["ep"]} at {tag["ts"]} from {_tag}') - print(indent + ' ' + f'{str(data)}') - - print(indent + ']') - - def _annotate_dict(path, data, indent): - tag = self.tags.get(str(path)) - if tag: - _tag = {k: v for k, v in tag.items() if k not in ('ep', 'ts')} - print(indent + f'# {str(path)} harvested by {tag["ep"]} at {tag["ts"]} from {_tag}') - - print(indent + '{') - for k, v in data.items(): - if path is None: - item_path = ContextPath(k) - else: - item_path = path[k] - - match v: - case list(): - print(indent + ' ' + str(k) + ':') - _annotate_list(item_path, v, indent + ' ') - - case dict(): - print(indent + ' ' + str(k) + ':') - _annotate_dict(item_path, v, indent + ' ') - - case _: - tag = self.tags.get(str(item_path)) - if tag: - _tag = {k: v for k, v in tag.items() if k not in ('ep', 'ts')} - print(indent + f'# {str(item_path)} havested by {tag["ep"]} at {tag["ts"]} from {_tag}') - - print(indent + ' ' + str(k) + ': ' + str(v)) - - print(indent + '}') - - _annotate_dict(None, self._data, '') - def find_key(self, item, other): data = item.get_from(self._data) diff --git a/src/hermes/model/path.py b/src/hermes/model/path.py index cd6f7947..036daa71 100644 --- a/src/hermes/model/path.py +++ b/src/hermes/model/path.py @@ -8,26 +8,77 @@ class ContextPathGrammar: + """ + The pyparsing grammar for ContextGrammar paths. + """ + key = pp.Word('@' + pp.alphas) index = pp.Word(pp.nums).set_parse_action(lambda tok: [int(tok[0])]) | pp.Char('*') field = key + (pp.Suppress('[') + index + pp.Suppress(']'))[...] path = field + (pp.Suppress('.') + field)[...] @classmethod - def parse(cls, text: str): + def parse(cls, text: str) -> pp.ParseResults: + """ + Parse a ContextPath string representation into its individual tokens. + + :param text: The path to parse. 
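What the grammar above yields for a typical path (assuming pyparsing 3, which provides `parse_string()` and `as_list()`):

```python
from hermes.model.path import ContextPathGrammar

print(ContextPathGrammar.parse('author[0].email').as_list())  # ['author', 0, 'email']
print(ContextPathGrammar.parse('author[*].@id').as_list())    # ['author', '*', '@id']
```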
+ :return: The pyparsing.ParseResult. + """ return cls.path.parse_string(text) class ContextPath: + """ + This class is used to access the different contexts. + + On the one hand, the class allows you to define and manage pathes. + You can simply build them up like follows: + + >>> path = ContextPath('spam')['eggs'][1]['ham'] + + will result in a `path` like `spam.eggs[1].ham`. + + hint :: + The paths are idenpendent from any context. + You can create and even re-use them independently for different contexts. + + To construct wildcard paths, you can use the `'*'` as accessor. + + If you need a shortcut for building paths from a list of accessors, you can use :py:meth:`ContextPath.make`. + To parse the string representation, use :py:meth:`ContextPath.parse`. + """ + merge_strategies = None - def __init__(self, item: str | int, parent: t.Optional['ContextPath'] = None): - self._item = item - self._parent = parent + def __init__(self, item: str | int | t.List[str | int], parent: t.Optional['ContextPath'] = None): + """ + Initialize a new path element. + + The path stores a reference to it's parent. + This means that + + >>> path ContextPath('foo', parent=ContextPath('bar')) + + will result in the path `bar.foo`. + + :param item: The accessor to the current path item. + :param parent: The path of the parent item. + """ + if isinstance(item, (list, tuple)) and item: + *head, self._item = item + if head: + self._parent = ContextPath(head, parent) + else: + self._parent = parent + else: + self._item = item + self._parent = parent self._type = None @classmethod def init_merge_strategies(cls): + # TODO refactor if cls.merge_strategies is None: from hermes.model.merge import MergeStrategies, default_merge_strategies @@ -37,22 +88,35 @@ def init_merge_strategies(cls): @property def parent(self) -> t.Optional['ContextPath']: + """ + Accessor to the parent node. + """ return self._parent @property def path(self) -> t.List['ContextPath']: + """ + Get the whole path from the root as list of items. + """ if self._parent is None: return [self] else: return self._parent.path + [self] def __getitem__(self, item: str | int) -> 'ContextPath': + """ + Create a sub-path for the given `item`. + """ match item: case str(): self._type = dict case int(): self._type = list return ContextPath(item, self) def __str__(self) -> str: + """ + Get the string representation of the path. + The result is parsable by :py:meth:`ContextPath.parse` + """ item = str(self._item) if self._parent is not None: parent = str(self._parent) @@ -66,6 +130,10 @@ def __repr__(self) -> str: return f'ContextPath.parse("{str(self)}")' def __eq__(self, other: 'ContextPath') -> bool: + """ + This match includes semantics for wildcards. + Items that access `'*'` will automatically match everything (except for None). + """ return ( other is not None and (self._item == other._item or self._item == '*' or other._item == '*') @@ -73,13 +141,21 @@ def __eq__(self, other: 'ContextPath') -> bool: ) def __contains__(self, other: 'ContextPath') -> bool: + """ + Check whether `other` is a true child of this path. + """ while other is not None: if other == self: return True other = other.parent return False - def new(self): + def new(self) -> t.Any: + """ + Create a new instance of the container this node represents. + + For this to work, the node need to have at least on child node derive (e.g., by using `self["child"]'). 
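The wildcard semantics documented above in a nutshell (a sketch, assuming the package is importable):

```python
from hermes.model.path import ContextPath

path = ContextPath('spam')['eggs'][1]['ham']
print(str(path))                                          # spam.eggs[1].ham

wildcard = ContextPath.parse('spam.eggs[*]')
print(wildcard == ContextPath.parse('spam.eggs[1]'))      # True: '*' matches any index
print(ContextPath.parse('spam.eggs[1].ham') in wildcard)  # True: a child of a matching path
```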
+ """ if self._type is not None: return self._type() raise TypeError() @@ -112,7 +188,7 @@ def _find_in_parent(self, target: dict, path: 'ContextPath') -> t.Any: while _path is not None: try: item = self._get_item(target, _path[_item]) - _log.debug("Using type %s from §%s.", item, _path) + _log.debug("Using type %s from %s.", item, _path) return item except (KeyError, IndexError, TypeError) as e: @@ -184,18 +260,37 @@ def _set_item(self, target: dict | list, path: 'ContextPath', value: t.Any, **kw return value - def resolve(self, _target: list | dict, create: bool = False, query: t.Any = None) -> ('ContextPath', list | dict, 'ContextPath'): + def resolve(self, target: list | dict, create: bool = False, query: t.Any = None) -> ('ContextPath', list | dict, 'ContextPath'): + """ + Resolve a given path releative to a given target. + + The method will incrementally try to resolve the entries in the `_target.path`. + It stops when the requested item was found or when the resolution could not be completed. + If you set `create` to true, the method tries to create the direct target that contains the selected node. + + :param target: Container to resolve node in. + :param create: Flags whether missing containers should be created. + :param query: + :return: The method returns a tuple with the following values: + - The path to the last item that could be resolved (e.g., the container of the requested element). + - The container for the path from the first return value. + - The rest of the path that could not be resolved. + """ head, *tail = self.path - target = _target - while head._type and tail: + next_target = target + while tail: try: - target = self._get_item(target, head) + next_target = self._get_item(next_target, head) except (IndexError, KeyError, TypeError): if create and self.parent is not None: - new_head = head.new() - setter = self._find_setter(_target, head, new_head) - setter(target, head, new_head) - target = new_head + try: + new_target = head.new() + except TypeError: + pass + else: + setter = self._find_setter(target, head, new_target) + setter(next_target, head, new_target) + next_target = new_target else: break head, *tail = tail @@ -210,12 +305,9 @@ def resolve(self, _target: list | dict, create: bool = False, query: t.Any = Non head._item = len(target) if not hasattr(head, 'set_item'): - head.set_item = self._find_setter(_target, head) - tail_path = ContextPath(head._item) - for t in tail: - tail_path = tail_path[t._item] - - return head, target, tail_path + head.set_item = self._find_setter(target, head) + tail = ContextPath.make([head._item] + tail) + return head, next_target, tail def get_from(self, target: dict | list) -> t.Any: prefix, target, path = self.resolve(target) @@ -227,10 +319,15 @@ def update(self, target: t.Dict[str, t.Any] | t.List, value: t.Any, tags: t.Opti if tags is not None and kwargs: tags[str(self)] = kwargs + @classmethod + def make(cls, path: t.Iterable[str | int]) -> 'ContextPath': + head, *tail = path + path = ContextPath(head) + for next in tail: + path = path[next] + return path + @classmethod def parse(cls, path: str) -> 'ContextPath': - head, *tail = ContextPathGrammar.parse(path) - path = cls(head) - for item in tail: - path = path[item] + path = cls.make(ContextPathGrammar.parse(path)) return path From 448f6f8bb5d9f5ee565d8f65087848ad6088594d Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 21 Sep 2022 17:03:00 +0200 Subject: [PATCH 16/52] Fix broken tests --- src/hermes/cli.py | 2 +- src/hermes/commands/harvest/cff.py | 4 +- 
src/hermes/model/context.py | 3 +- src/hermes/model/path.py | 4 +- test/hermes_test/model/test_base_context.py | 4 +- .../hermes_test/model/test_harvest_context.py | 65 +++++++++++++++---- 6 files changed, 60 insertions(+), 22 deletions(-) diff --git a/src/hermes/cli.py b/src/hermes/cli.py index e60b0395..d0a70538 100644 --- a/src/hermes/cli.py +++ b/src/hermes/cli.py @@ -83,7 +83,7 @@ def invoke(self, ctx: click.Context) -> t.Any: audit_log = logging.getLogger('audit') audit_log.info("# Running Hermes") - audit_log.info("Running Hermes command line in: %s", ctx.params['path'].absolute()) + audit_log.info("Running Hermes command line in: %s", ctx.params.get('path', pathlib.Path.cwd()).absolute()) audit_log.debug("") audit_log.debug("Invoked `%s` with", ctx.invoked_subcommand or self.name) audit_log.debug("") diff --git a/src/hermes/commands/harvest/cff.py b/src/hermes/commands/harvest/cff.py index c260ceda..0f1ddf5c 100644 --- a/src/hermes/commands/harvest/cff.py +++ b/src/hermes/commands/harvest/cff.py @@ -91,7 +91,7 @@ def _validate(cff_file: pathlib.Path, cff_dict: t.Dict) -> bool: audit_log.warning('!!! warning "%s is not valid according to <%s>"', cff_file, cff_schema_url) for error in errors: - path = ContextPath.make(error.absolute_path) + path = ContextPath.make(error.absolute_path or ['root']) audit_log.info(' Invalid input for `%s`.', str(path)) audit_log.info(' !!! message "%s"', error.message) audit_log.debug(' !!! value "%s"', error.instance) @@ -115,7 +115,7 @@ def _get_single_cff(path: pathlib.Path) -> t.Optional[pathlib.Path]: # TODO: Do we really want to search recursive? CFF convention is the file should be at the topmost dir, # which is given via the --path arg. Maybe add another option to enable pointing to a single file? # (So this stays "convention over configuration") - files = path.rglob('**/CITATION.cff') + files = list(path.rglob('**/CITATION.cff')) if len(files) == 1: return pathlib.Path(files[0]) # TODO: Shouldn't we log/echo the found CFF files so a user can debug/cleanup? 
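Why the `list()` wrapping in `_get_single_cff` above matters: `pathlib.Path.rglob()` returns a lazy generator, and calling `len()` on a generator raises `TypeError`, so the length check only works once the result has been materialised. A minimal sketch of the resulting behaviour (the helper name `find_single_cff` and the use of the current directory are illustrative only, not part of the patch):

import pathlib

def find_single_cff(root: pathlib.Path) -> pathlib.Path | None:
    # Materialise the generator so len() and indexing work.
    files = list(root.rglob('**/CITATION.cff'))
    if len(files) == 1:
        return files[0]
    return None

print(find_single_cff(pathlib.Path('.')))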
diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py index 5857133a..93fb7559 100644 --- a/src/hermes/model/context.py +++ b/src/hermes/model/context.py @@ -193,6 +193,7 @@ def update(self, _key: str, _value: t.Any, **kwargs: t.Any): for entry in self._data[_key]: value, tag = entry tag_ts = tag.pop('ts') + tag_ep = tag.pop('ep') if tag == kwargs: self._log.debug("Update %s: %s -> %s (%s)", _key, str(value), _value, str(tag)) @@ -202,7 +203,7 @@ def update(self, _key: str, _value: t.Any, **kwargs: t.Any): break tag['ts'] = tag_ts - tag['ep'] = ep + tag['ep'] = tag_ep else: kwargs['ts'] = ts diff --git a/src/hermes/model/path.py b/src/hermes/model/path.py index 036daa71..4f5edd42 100644 --- a/src/hermes/model/path.py +++ b/src/hermes/model/path.py @@ -1,10 +1,10 @@ +import logging import typing as t import pyparsing as pp -from hermes import config -_log = config.getLogger('hermes.model.path') +_log = logging.getLogger('hermes.model.path') class ContextPathGrammar: diff --git a/test/hermes_test/model/test_base_context.py b/test/hermes_test/model/test_base_context.py index 4f864e32..38fda7bb 100644 --- a/test/hermes_test/model/test_base_context.py +++ b/test/hermes_test/model/test_base_context.py @@ -15,7 +15,7 @@ def test_context_hermes_dir_custom(): def test_context_get_cache_default(): ctx = HermesContext() - assert ctx.get_cache('spam', 'eggs') == Path('.') / '.hermes' / 'spam' / 'eggs' + assert ctx.get_cache('spam', 'eggs') == Path('.') / '.hermes' / 'spam' / 'eggs.json' def test_context_get_cache_cached(): @@ -28,5 +28,5 @@ def test_context_get_cache_create(tmpdir): ctx = HermesContext(tmpdir) subdir = Path(tmpdir) / '.hermes' / 'spam' - assert ctx.get_cache('spam', 'eggs', create=True) == subdir / 'eggs' + assert ctx.get_cache('spam', 'eggs', create=True) == subdir / 'eggs.json' assert subdir.exists() diff --git a/test/hermes_test/model/test_harvest_context.py b/test/hermes_test/model/test_harvest_context.py index bd363eb2..e956736e 100644 --- a/test/hermes_test/model/test_harvest_context.py +++ b/test/hermes_test/model/test_harvest_context.py @@ -1,3 +1,4 @@ +from datetime import datetime from importlib.metadata import EntryPoint import pytest @@ -8,27 +9,37 @@ @pytest.fixture def harvest_ctx(request: pytest.FixtureRequest): ctx = HermesContext() - return HermesHarvestContext(ctx, EntryPoint(name=request.function, group='hermes.harvest', value='hermes_test:hctx')) + return HermesHarvestContext( + ctx, + EntryPoint(name=request.function.__name__, group='hermes.harvest', value='hermes_test:ctx') + ) def test_context_default(harvest_ctx): harvest_ctx.update('spam', 'eggs', test=True) - assert harvest_ctx._data['spam'] == [['eggs', {'test': True}]] + assert harvest_ctx._data['spam'] == [ + ['eggs', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_default'}] + ] def test_context_update_append(harvest_ctx): harvest_ctx.update('spam', 'noodles', index=0) harvest_ctx.update('spam', 'eggs', index=1) - assert harvest_ctx._data['spam'] == [['noodles', {'index': 0}], ['eggs', {'index': 1}]] + assert harvest_ctx._data['spam'] == [ + ['noodles', {'index': 0, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_update_append'}], + ['eggs', {'index': 1, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_update_append'}] + ] def test_context_update_replace(harvest_ctx): harvest_ctx.update('spam', 'noodles', test=True) harvest_ctx.update('spam', 'eggs', test=True) - assert harvest_ctx._data['spam'] == [['eggs', 
{'test': True}]] + assert harvest_ctx._data['spam'] == [ + ['eggs', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_update_replace'}] + ] def test_context_bulk_flat(harvest_ctx): @@ -37,8 +48,12 @@ def test_context_bulk_flat(harvest_ctx): 'spam': 'eggs' }, test=True) - assert harvest_ctx._data['ans'] == [[42, {'test': True}]] - assert harvest_ctx._data['spam'] == [['eggs', {'test': True}]] + assert harvest_ctx._data['ans'] == [ + [42, {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_flat'}] + ] + assert harvest_ctx._data['spam'] == [ + ['eggs', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_flat'}] + ] def test_context_bulk_complex(harvest_ctx): @@ -50,23 +65,45 @@ def test_context_bulk_complex(harvest_ctx): ] }, test=True) - assert harvest_ctx._data['ans'] == [[42, {'test': True}]] - assert harvest_ctx._data['author[0].name'] == [['Monty Python', {'test': True}]] - assert harvest_ctx._data['author[0].email'] == [['eggs@spam.io', {'test': True}]] - assert harvest_ctx._data['author[1].name'] == [['Herr Mes', {'test': True}]] + assert harvest_ctx._data['ans'] == [ + [42, {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_complex'}] + ] + assert harvest_ctx._data['author[0].name'] == [ + ['Monty Python', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), + 'ep': 'test_context_bulk_complex'}] + ] + assert harvest_ctx._data['author[0].email'] == [ + ['eggs@spam.io', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), + 'ep': 'test_context_bulk_complex'}] + ] + assert harvest_ctx._data['author[1].name'] == [ + ['Herr Mes', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_complex'}] + ] def test_context_bulk_replace(harvest_ctx): harvest_ctx.update('author[0].name', 'Monty Python', test=True) harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, test=True) - assert harvest_ctx._data['author[0].name'] == [['Herr Mes', {'test': True}]] - assert harvest_ctx._data['author[0].email'] == [['eggs@spam.io', {'test': True}]] + assert harvest_ctx._data['author[0].name'] == [ + ['Herr Mes', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_replace'}] + ] + assert harvest_ctx._data['author[0].email'] == [ + ['eggs@spam.io', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), + 'ep': 'test_context_bulk_replace'}] + ] def test_context_bulk_append(harvest_ctx): harvest_ctx.update('author[0].name', 'Monty Python', index=0) harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, index=1) - assert harvest_ctx._data['author[0].name'] == [['Monty Python', {'index': 0}], ['Herr Mes', {'index': 1}]] - assert harvest_ctx._data['author[0].email'] == [['eggs@spam.io', {'index': 1}]] + assert harvest_ctx._data['author[0].name'] == [ + ['Monty Python', {'index': 0, 'ts': pytest.approx(datetime.now().isoformat()), + 'ep': 'test_context_bulk_append'}], + ['Herr Mes', {'index': 1, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_append'}] + ] + assert harvest_ctx._data['author[0].email'] == [ + ['eggs@spam.io', {'index': 1, 'ts': pytest.approx(datetime.now().isoformat()), + 'ep': 'test_context_bulk_append'}] + ] From 4cd6008a8998ccd691a16d5074bbb3a6073f4a71 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 22 Sep 2022 14:15:02 +0200 Subject: [PATCH 17/52] Add tests to 
 use fixtures

---
 .../commands/harvest/test_codemeta.py | 20 +++++++++++--------
 1 file changed, 12 insertions(+), 8 deletions(-)

diff --git a/test/hermes_test/commands/harvest/test_codemeta.py b/test/hermes_test/commands/harvest/test_codemeta.py
index b8559a80..5ae066cd 100644
--- a/test/hermes_test/commands/harvest/test_codemeta.py
+++ b/test/hermes_test/commands/harvest/test_codemeta.py
@@ -118,16 +118,16 @@
 
 @pytest.fixture
-def codemeta():
+def valid_codemeta():
     return json.loads(CODEMETA_JSON)
 
 
 @pytest.fixture()
-def valid_codemeta(tmp_path):
-    codemeta_json = json.loads(CODEMETA_JSON)
-    codemeta_file = tmp_path / 'codemeta.json'
-    json.dump(codemeta_json, codemeta_file)
-    return codemeta_file
+def valid_codemeta_path(tmp_path, valid_codemeta):
+    codemeta_path = tmp_path / 'codemeta.json'
+    with open(codemeta_path, 'w') as fo:
+        json.dump(valid_codemeta, fo)
+    return codemeta_path
 
 
 def test_get_single_codemeta(tmp_path):
@@ -137,5 +137,9 @@ def test_get_single_codemeta(tmp_path):
     assert harvest._get_single_codemeta(tmp_path) == single_codemeta
 
 
-def test_validate_success(codemeta):
-    assert harvest._validate(pathlib.Path("foobar"))
+def test_validate_fail():
+    assert not harvest._validate(pathlib.Path("foobar"))
+
+
+def test_validate_success(valid_codemeta_path):
+    assert harvest._validate(valid_codemeta_path)

From 47901e8de0ac4cb79be338f2a9e8dbae1521f9e0 Mon Sep 17 00:00:00 2001
From: Michael Meinel
Date: Thu, 22 Sep 2022 16:01:08 +0200
Subject: [PATCH 18/52] Fix errors that occurred when merging codemeta

- Collect and handle errors during merge
- Better handling of incompatible types
---
 src/hermes/commands/workflow.py | 6 ++++++
 src/hermes/model/context.py | 12 ++++++++----
 src/hermes/model/path.py | 27 +++++++++++++++++++--------
 3 files changed, 33 insertions(+), 12 deletions(-)

diff --git a/src/hermes/commands/workflow.py b/src/hermes/commands/workflow.py
index 169aeddd..5d7e5e3a 100644
--- a/src/hermes/commands/workflow.py
+++ b/src/hermes/commands/workflow.py
@@ -86,6 +86,12 @@ def process():
     _log.info('')
     audit_log.info('')
 
+    if ctx._errors:
+        audit_log.error('!!! 
warning "Errors during merge"') + + for ep, error in ctx._errors: + audit_log.info(' - %s: %s', ep.name, error) + tags_path = ctx.get_cache('process', 'tags', create=True) with tags_path.open('w') as tags_file: json.dump(ctx.tags, tags_file, indent=' ') diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py index 93fb7559..4652c321 100644 --- a/src/hermes/model/context.py +++ b/src/hermes/model/context.py @@ -8,6 +8,7 @@ from pathlib import Path from importlib.metadata import EntryPoint +from hermes.model import errors from hermes.model.path import ContextPath from hermes.model.errors import HermesValidationError @@ -282,9 +283,12 @@ def get_data(self, data: t.Optional[dict] = None, path: t.Optional['ContextPath' key = ContextPath.parse(key) if path is None or key in path: value, tag = self._check_values(key, values) - key.update(data, value, tags, **tag) - if tags is not None and tag: - tags[str(key)] = tag + try: + key.update(data, value, tags, **tag) + if tags is not None and tag: + tags[str(key)] = tag + except errors.MergeError as e: + self.error(self._ep, e) return data def finish(self): @@ -313,7 +317,7 @@ def update(self, _key: ContextPath, _value: t.Any, tags: t.Dict[str, t.Dict] | N _tags = {k.lstrip(str(_key) + '.'): t for k, t in tags.items() if ContextPath.parse(k) in _key} else: _tags = {} - _path.update(_item, _value, _tags) + _path._set_item(_item, _path, _value, **_tags) if tags is not None and _tags: for k, v in _tags.items(): if not v: diff --git a/src/hermes/model/path.py b/src/hermes/model/path.py index 4f5edd42..bb56a6fa 100644 --- a/src/hermes/model/path.py +++ b/src/hermes/model/path.py @@ -3,6 +3,7 @@ import pyparsing as pp +from hermes.model import errors _log = logging.getLogger('hermes.model.path') @@ -280,7 +281,13 @@ def resolve(self, target: list | dict, create: bool = False, query: t.Any = None next_target = target while tail: try: - next_target = self._get_item(next_target, head) + new_target = self._get_item(next_target, head) + if not isinstance(new_target, (list, dict)) and head.parent: + next_target = self._get_item(next_target, head.parent) + tail = [head._item] + tail + break + else: + next_target = new_target except (IndexError, KeyError, TypeError): if create and self.parent is not None: try: @@ -296,13 +303,14 @@ def resolve(self, target: list | dict, create: bool = False, query: t.Any = None head, *tail = tail if head._item == '*': - for i, item in enumerate(target): - if all(item[k] == v for k, v in query.items() if k in item): + for i, item in enumerate(next_target): + _keys = [k for k in query.keys() if k in item] + if _keys and all(item[k] == query[k] for k in _keys): head._item = i break else: if create: - head._item = len(target) + head._item = len(next_target) if not hasattr(head, 'set_item'): head.set_item = self._find_setter(target, head) @@ -314,10 +322,13 @@ def get_from(self, target: dict | list) -> t.Any: return self._get_item(target, path) def update(self, target: t.Dict[str, t.Any] | t.List, value: t.Any, tags: t.Optional[dict] = None, **kwargs): - prefix, target, tail = self.resolve(target, create=True) - prefix.set_item(target, tail, value, **kwargs) - if tags is not None and kwargs: - tags[str(self)] = kwargs + prefix, _target, tail = self.resolve(target, create=True) + try: + prefix.set_item(_target, tail, value, **kwargs) + if tags is not None and kwargs: + tags[str(self)] = kwargs + except (KeyError, IndexError, TypeError, ValueError) as e: + raise errors.MergeError(self, _target, value, **kwargs) @classmethod 
     def make(cls, path: t.Iterable[str | int]) -> 'ContextPath':

From 57be25a5ddcbd9234f38873db59e37524527ae05 Mon Sep 17 00:00:00 2001
From: Michael Meinel
Date: Thu, 22 Sep 2022 16:19:06 +0200
Subject: [PATCH 19/52] Add missing documentation

---
 src/hermes/config.py | 1 +
 src/hermes/model/context.py | 7 ++++---
 src/hermes/model/errors.py | 16 +++++++++++++++-
 src/hermes/model/path.py | 28 ++++++++++++++++++++++++++++
 4 files changed, 48 insertions(+), 4 deletions(-)

diff --git a/src/hermes/config.py b/src/hermes/config.py
index c862499c..ffac295e 100644
--- a/src/hermes/config.py
+++ b/src/hermes/config.py
@@ -1,3 +1,4 @@
+# TODO this file contains only dummy implementations which in most cases will lead to a crash...
 import logging
 
 import toml
diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py
index 4652c321..e4094846 100644
--- a/src/hermes/model/context.py
+++ b/src/hermes/model/context.py
@@ -44,7 +44,10 @@ def __init__(self, project_dir: t.Optional[Path] = None):
         self._data = {}
         self._errors = []
 
-    def keys(self):
+    def keys(self) -> t.List[ContextPath]:
+        """
+        Get all the keys for the data stored in this context.
+        """
         return [ContextPath.parse(k) for k in self._data.keys()]
 
     def get_cache(self, *path: str, create: bool = False) -> Path:
@@ -183,8 +186,6 @@ def update(self, _key: str, _value: t.Any, **kwargs: t.Any):
 
         See :py:meth:`HermesContext.update` for more information.
         """
-        base_key = ContextPath.parse(_key)
-
         ts = kwargs.pop('ts', datetime.datetime.now().isoformat())
         ep = kwargs.pop('ep', self._ep.name)
 
diff --git a/src/hermes/model/errors.py b/src/hermes/model/errors.py
index 51d54665..b0a248c9 100644
--- a/src/hermes/model/errors.py
+++ b/src/hermes/model/errors.py
@@ -1,3 +1,6 @@
+import typing as t
+
+
 class HermesValidationError(Exception):
     """
     This exception should be thrown when input validation (e.g., during harvest) occurs.
@@ -16,7 +19,18 @@ class HermesValidationError(Exception):
 
 
 class MergeError(Exception):
-    def __init__(self, path, old_Value, new_value, **kwargs):
+    """
+    This exception should be raised when there is an error during a merge / set operation.
+    """
+    def __init__(self, path: 'ContextPath', old_Value: t.Any, new_value: t.Any, **kwargs):
+        """
+        Create a new merge incident.
+
+        :param path: The path where the merge error occurred.
+        :param old_Value: Old value that was stored at `path`.
+        :param new_value: New value that was to be assigned.
+        :param kwargs: Tag data for the new value.
+        """
         self.path = path
         self.old_value = old_Value
         self.new_value = new_value
diff --git a/src/hermes/model/path.py b/src/hermes/model/path.py
index bb56a6fa..f46b4819 100644
--- a/src/hermes/model/path.py
+++ b/src/hermes/model/path.py
@@ -318,10 +318,26 @@ def resolve(self, target: list | dict, create: bool = False, query: t.Any = None
         return head, next_target, tail
 
     def get_from(self, target: dict | list) -> t.Any:
+        """
+        Expand the path and return the referenced data from a concrete container.
+
+        :param target: The list or dict that this path points into.
+        :return: The value stored at the path.
+        """
         prefix, target, path = self.resolve(target)
         return self._get_item(target, path)
 
     def update(self, target: t.Dict[str, t.Any] | t.List, value: t.Any, tags: t.Optional[dict] = None, **kwargs):
+        """
+        Update the data stored at the path in a concrete container.
+
+        How this method actually behaves heavily depends on the active MergeStrategy for the path.
+
+        :param target: The dict inside which the value should be stored.
+        :param value: The value to store.
+        :param tags: Dictionary containing the tags for all stored values.
+        :param kwargs: The tag attributes for the new value.
+        """
         prefix, _target, tail = self.resolve(target, create=True)
         try:
             prefix.set_item(_target, tail, value, **kwargs)
@@ -332,6 +348,12 @@ def update(self, target: t.Dict[str, t.Any] | t.List, value: t.Any, tags: t.Opti
 
     @classmethod
     def make(cls, path: t.Iterable[str | int]) -> 'ContextPath':
+        """
+        Convert a list of item accessors into a ContextPath.
+
+        :param path: The items in the order of access.
+        :return: A ContextPath that references the selected value.
+        """
         head, *tail = path
         path = ContextPath(head)
         for next in tail:
@@ -340,5 +362,11 @@ def make(cls, path: t.Iterable[str | int]) -> 'ContextPath':
 
     @classmethod
     def parse(cls, path: str) -> 'ContextPath':
+        """
+        Parse a string representation of a ContextPath into a proper object.
+
+        :param path: The path to parse.
+        :return: A new ContextPath that references the selected path.
+        """
         path = cls.make(ContextPathGrammar.parse(path))
         return path

From 34dea0093ed791faafab2c4875da95df9e042594 Mon Sep 17 00:00:00 2001
From: Michael Meinel
Date: Thu, 22 Sep 2022 16:21:41 +0200
Subject: [PATCH 20/52] Revert hacky local file schema access for now

---
 src/hermes/commands/harvest/cff.py | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/src/hermes/commands/harvest/cff.py b/src/hermes/commands/harvest/cff.py
index 0f1ddf5c..f22d2ad4 100644
--- a/src/hermes/commands/harvest/cff.py
+++ b/src/hermes/commands/harvest/cff.py
@@ -76,14 +76,10 @@ def _validate(cff_file: pathlib.Path, cff_dict: t.Dict) -> bool:
     cff_schema_url = f'https://citation-file-format.github.io/{_CFF_VERSION}/schema.json'
 
-    with (pathlib.Path(__file__).parent / f'cff-schema@{_CFF_VERSION}.json').open('r') as cff_schema_file:
-        schema_data = json.load(cff_schema_file)
-
-    if not schema_data:
-        # TODO: we should ship the schema we reference to by default to avoid unnecessary network traffic.
-        # If the requested version is not already downloaded, go ahead and download it.
-        with urllib.request.urlopen(cff_schema_url) as cff_schema_response:
-            schema_data = json.loads(cff_schema_response.read())
+    # TODO: we should ship the schema we reference to by default to avoid unnecessary network traffic.
+    # If the requested version is not already downloaded, go ahead and download it.
+    with urllib.request.urlopen(cff_schema_url) as cff_schema_response:
+        schema_data = json.loads(cff_schema_response.read())
 
     validator = jsonschema.Draft7Validator(schema_data)
     errors = sorted(validator.iter_errors(cff_dict), key=lambda e: e.path)

From cdce797dbf67cddc7a3ae540687b04bff443c265 Mon Sep 17 00:00:00 2001
From: Michael Meinel
Date: Thu, 22 Sep 2022 16:40:40 +0200
Subject: [PATCH 21/52] Remove method that was of no use anymore

---
 src/hermes/commands/harvest/cff.py | 14 --------------
 1 file changed, 14 deletions(-)

diff --git a/src/hermes/commands/harvest/cff.py b/src/hermes/commands/harvest/cff.py
index f22d2ad4..4b8cc7f4 100644
--- a/src/hermes/commands/harvest/cff.py
+++ b/src/hermes/commands/harvest/cff.py
@@ -118,17 +118,3 @@ def _get_single_cff(path: pathlib.Path) -> t.Optional[pathlib.Path]:
     # TODO: Do we want to hand down a logging instance via Hermes context or just encourage
     # peeps to use the Click context?
     return None
-
-
-def _build_nodepath_str(absolute_path: collections.deque) -> str:
-    # Path deque starts with field name, then index, then field name, etc.
- path_str = "'" - for index, value in enumerate(absolute_path): - if index == 0: # First value - path_str += f'{value}' - elif index % 2 == 0: # value is a field name - path_str += f' -> {value}' - else: # Value is an index - path_str += f' {int(value) + 1}' # Use index starting at 1 - path_str += "'" - return path_str From 401dfc315b01463e8010a900dfeeb5eb8c6c4af9 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 23 Sep 2022 12:41:20 +0200 Subject: [PATCH 22/52] Remove tests for removed functionality (nodepath) --- test/hermes_test/commands/harvest/test_cff.py | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/test/hermes_test/commands/harvest/test_cff.py b/test/hermes_test/commands/harvest/test_cff.py index 514fd82b..87114e55 100644 --- a/test/hermes_test/commands/harvest/test_cff.py +++ b/test/hermes_test/commands/harvest/test_cff.py @@ -44,23 +44,6 @@ def test_convert_cff_to_codemeta(valid_minimal_cff, codemeta): assert codemeta == actual_result -@pytest.mark.parametrize("path, path_str", [ - (deque(['str1', 0]), "'str1 1'"), - (deque(['str1', 0, 'str2', 1, 'str3', 2]), "'str1 1 -> str2 2 -> str3 3'"), -]) -def test_build_nodepath_str(path, path_str): - assert harvest._build_nodepath_str(path) == path_str - - -@pytest.mark.parametrize("path, path_str", [ - ('str1', "'str1 1'"), - (deque([0, 'str1', 1, 'str2', 2, 'str3']), "'str1 1 -> str2 2 -> str3 3'"), -]) -def test_build_nodepath_str_fail(path, path_str): - with pytest.raises(Exception): - assert harvest._build_nodepath_str(path) == path_str - - def test_get_single_cff(tmp_path): assert harvest._get_single_cff(tmp_path) is None single_cff = tmp_path / 'CITATION.cff' From 8ce033f1d97f734e0aba3c5cd25242f0777b90ea Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Wed, 28 Sep 2022 15:20:36 +0200 Subject: [PATCH 23/52] Update dependencies: add toml, markdown --- poetry.lock | 379 ++++++++++++++++++++++++++++++++++++------------- pyproject.toml | 2 + 2 files changed, 285 insertions(+), 96 deletions(-) diff --git a/poetry.lock b/poetry.lock index ac6aac8e..1e78453a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -6,30 +6,22 @@ category = "dev" optional = false python-versions = "*" -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] -name = "babel" +name = "Babel" version = "2.10.3" description = "Internationalization utilities" category = "dev" @@ -41,7 +33,7 @@ pytz = ">=2015.7" [[package]] name = "certifi" -version = "2022.6.15" +version = "2022.9.24" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -53,7 +45,7 @@ version = "2.0.0" description = "Command line program to validate and convert CITATION.cff files." category = "main" optional = false -python-versions = ">=3.6" +python-versions = "*" [package.dependencies] click = ">=7.0,<9" @@ -63,13 +55,13 @@ requests = ">=2.20,<3" "ruamel.yaml" = ">=0.16.0" [package.extras] -publishing = ["wheel", "twine"] +dev = ["isort", "prospector[with_pyroma] (>=1.4)", "pytest (>=6)", "pytest-cov"] gcloud = ["flask"] -dev = ["pytest-cov", "pytest (>=6)", "isort", "prospector[with_pyroma] (>=1.4)"] +publishing = ["twine", "wheel"] [[package]] name = "charset-normalizer" -version = "2.1.0" +version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false @@ -99,7 +91,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "6.4.2" +version = "6.4.4" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -129,7 +121,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "idna" -version = "3.3" +version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false @@ -152,7 +144,7 @@ optional = false python-versions = "*" [[package]] -name = "jinja2" +name = "Jinja2" version = "3.1.2" description = "A very fast and expressive template engine." 
category = "dev" @@ -176,11 +168,23 @@ python-versions = "*" [package.dependencies] attrs = ">=17.4.0" pyrsistent = ">=0.14.0" +setuptools = "*" six = ">=1.11.0" [package.extras] format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] +format_nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "Markdown" +version = "3.4.1" +description = "Python implementation of Markdown." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +testing = ["coverage", "pyyaml"] [[package]] name = "markdown-it-py" @@ -200,11 +204,11 @@ compare = ["commonmark (>=0.9.1,<0.10.0)", "markdown (>=3.3.6,<3.4.0)", "mistlet linkify = ["linkify-it-py (>=1.0,<2.0)"] plugins = ["mdit-py-plugins"] profiling = ["gprof2dot"] -rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx-book-theme"] +rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] -name = "markupsafe" +name = "MarkupSafe" version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." category = "dev" @@ -213,23 +217,23 @@ python-versions = ">=3.7" [[package]] name = "mdit-py-plugins" -version = "0.3.0" +version = "0.3.1" description = "Collection of plugins for markdown-it-py" category = "dev" optional = false -python-versions = "~=3.6" +python-versions = ">=3.7" [package.dependencies] markdown-it-py = ">=1.0.0,<3.0.0" [package.extras] -code_style = ["pre-commit (==2.6)"] -rtd = ["myst-parser (>=0.14.0,<0.15.0)", "sphinx-book-theme (>=0.1.0,<0.2.0)"] -testing = ["coverage", "pytest (>=3.6,<4)", "pytest-cov", "pytest-regressions"] +code_style = ["pre-commit"] +rtd = ["attrs", "myst-parser (>=0.16.1,<0.17.0)", "sphinx-book-theme (>=0.1.0,<0.2.0)"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "mdurl" -version = "0.1.1" +version = "0.1.2" description = "Markdown URL utilities" category = "dev" optional = false @@ -255,8 +259,8 @@ typing-extensions = "*" [package.extras] code_style = ["pre-commit (>=2.12,<3.0)"] linkify = ["linkify-it-py (>=1.0,<2.0)"] -rtd = ["ipython", "sphinx-book-theme", "sphinx-panels", "sphinxcontrib-bibtex (>=2.4,<3.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)", "sphinxcontrib.mermaid (>=0.7.1,<0.8.0)", "sphinxext-opengraph (>=0.6.3,<0.7.0)"] -testing = ["beautifulsoup4", "coverage", "docutils (>=0.17.0,<0.18.0)", "pytest (>=6,<7)", "pytest-cov", "pytest-regressions", "pytest-param-files (>=0.3.4,<0.4.0)"] +rtd = ["ipython", "sphinx-book-theme", "sphinx-panels", "sphinxcontrib-bibtex (>=2.4,<3.0)", "sphinxcontrib.mermaid (>=0.7.1,<0.8.0)", "sphinxext-opengraph (>=0.6.3,<0.7.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage", "docutils (>=0.17.0,<0.18.0)", "pytest (>=6,<7)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions"] [[package]] name = "packaging" @@ -290,13 +294,16 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] -name = "pygments" -version = "2.12.0" +name = "Pygments" +version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "dev" optional = false python-versions = ">=3.6" +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pykwalify" version = "1.8.0" @@ -319,7 +326,7 @@ optional = false python-versions = ">=3.6.8" [package.extras] -diagrams = ["railroad-diagrams", "jinja2"] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyrsistent" @@ -331,14 +338,13 @@ python-versions = ">=3.7" [[package]] name = "pytest" -version = "7.1.2" +version = "7.1.3" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" @@ -363,7 +369,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "python-dateutil" @@ -378,14 +384,14 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2022.1" +version = "2022.2.1" description = "World timezone definitions, modern and historical" category = "dev" optional = false python-versions = "*" [[package]] -name = "pyyaml" +name = "PyYAML" version = "6.0" description = "YAML parser and emitter for Python" category = "dev" @@ -433,6 +439,19 @@ category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "setuptools" +version = "65.4.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -450,7 +469,7 @@ optional = false python-versions = "*" [[package]] -name = "sphinx" +name = "Sphinx" version = "4.5.0" description = "Python documentation generator" category = "dev" @@ -477,8 +496,8 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "docutils-stubs", "types-typed-ast", "types-requests"] -test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] [[package]] name = "sphinxcontrib-applehelp" @@ -489,7 +508,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = 
["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -501,7 +520,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -513,8 +532,8 @@ optional = false python-versions = ">=3.6" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest", "html5lib"] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" @@ -525,7 +544,7 @@ optional = false python-versions = ">=3.5" [package.extras] -test = ["pytest", "flake8", "mypy"] +test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" @@ -536,7 +555,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -548,9 +567,17 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + [[package]] name = "tomli" version = "2.0.1" @@ -569,69 +596,143 @@ python-versions = ">=3.7" [[package]] name = "urllib3" -version = "1.26.11" +version = "1.26.12" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] -[extras] -cff = ["ruamel.yaml", "cffconvert"] - [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "26df1c5939930d6665fefd1cb82cacaa158c6aae96ac3a582ff830db10cef04f" +content-hash = "e20eaecb8eff51fd77232b44f60851cba10a968e7518668bd63311ebdecde000" [metadata.files] alabaster = [ {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, ] -atomicwrites = [] attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, +] +Babel = [ + {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, + {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, +] +certifi = [ + {file = 
"certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, + {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, +] +cffconvert = [ + {file = "cffconvert-2.0.0-py3-none-any.whl", hash = "sha256:573c825e4e16173d99396dc956bd22ff5d4f84215cc16b6ab05299124f5373bb"}, + {file = "cffconvert-2.0.0.tar.gz", hash = "sha256:b4379ee415c6637dc9e3e7ba196605cb3cedcea24613e4ea242c607d9e98eb50"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, ] -babel = [] -certifi = [] -cffconvert = [] -charset-normalizer = [] click = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] -colorama = [] -coverage = [] -docopt = [] +colorama = [ + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, +] +coverage = [ + {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"}, + {file = "coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"}, + {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"}, + {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"}, + {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"}, + 
{file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"}, + {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"}, + {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"}, + {file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"}, + {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"}, + {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"}, + {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"}, + {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"}, + {file = 
"coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"}, + {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"}, + {file = "coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"}, + {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"}, + {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"}, + {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash = "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"}, + {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"}, + {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"}, + {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"}, +] +docopt = [ + {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, +] docutils = [ {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] +imagesize = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] -imagesize = [] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = 
"iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] -jinja2 = [ +Jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] -jsonschema = [] +jsonschema = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] +Markdown = [ + {file = "Markdown-3.4.1-py3-none-any.whl", hash = "sha256:08fb8465cffd03d10b9dd34a5c3fea908e20391a2a90b88d66362cb05beed186"}, + {file = "Markdown-3.4.1.tar.gz", hash = "sha256:3b809086bb6efad416156e00a0da66fe47618a5d6918dd688f53f40c8e4cfeff"}, +] markdown-it-py = [ {file = "markdown-it-py-2.1.0.tar.gz", hash = "sha256:cf7e59fed14b5ae17c0006eff14a2d9a00ed5f3a846148153899a0224e2c07da"}, {file = "markdown_it_py-2.1.0-py3-none-any.whl", hash = "sha256:93de681e5c021a432c63147656fe21790bc01231e0cd2da73626f1aa3ac0fe27"}, ] -markupsafe = [ +MarkupSafe = [ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, @@ -674,12 +775,12 @@ markupsafe = [ {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] mdit-py-plugins = [ - {file = "mdit-py-plugins-0.3.0.tar.gz", hash = "sha256:ecc24f51eeec6ab7eecc2f9724e8272c2fb191c2e93cf98109120c2cace69750"}, - {file = "mdit_py_plugins-0.3.0-py3-none-any.whl", hash = "sha256:b1279701cee2dbf50e188d3da5f51fee8d78d038cdf99be57c6b9d1aa93b4073"}, + {file = "mdit-py-plugins-0.3.1.tar.gz", hash = "sha256:3fc13298497d6e04fe96efdd41281bfe7622152f9caa1815ea99b5c893de9441"}, + {file = "mdit_py_plugins-0.3.1-py3-none-any.whl", hash = "sha256:606a7f29cf56dbdfaf914acb21709b8f8ee29d857e8f29dcc33d8cb84c57bfa1"}, ] mdurl = [ - {file = "mdurl-0.1.1-py3-none-any.whl", hash = "sha256:6a8f6804087b7128040b2fb2ebe242bdc2affaeaa034d5fc9feeed30b443651b"}, - {file = "mdurl-0.1.1.tar.gz", hash = "sha256:f79c9709944df218a4cdb0fcc0b0c7ead2f44594e3e84dc566606f04ad749c20"}, + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] myst-parser = [ {file = "myst-parser-0.17.2.tar.gz", hash = "sha256:4c076d649e066f9f5c7c661bae2658be1ca06e76b002bb97f02a09398707686c"}, @@ -697,30 +798,58 @@ py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] -pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = 
"sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, +Pygments = [ + {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, + {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, +] +pykwalify = [ + {file = "pykwalify-1.8.0-py2.py3-none-any.whl", hash = "sha256:731dfa87338cca9f559d1fca2bdea37299116e3139b73f78ca90a543722d6651"}, + {file = "pykwalify-1.8.0.tar.gz", hash = "sha256:796b2ad3ed4cb99b88308b533fb2f559c30fa6efb4fa9fda11347f483d245884"}, ] -pykwalify = [] pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] -pyrsistent = [] +pyrsistent = [ + {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, + {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, + {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, + {file = 
"pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, + {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, +] pytest = [ - {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, - {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, + {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, + {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, ] pytest-cov = [ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, ] -python-dateutil = [] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] pytz = [ - {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, - {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, + {file = "pytz-2022.2.1-py2.py3-none-any.whl", hash = "sha256:220f481bdafa09c3955dfbdddb7b57780e9a94f5127e35456a48589b9e0c0197"}, + {file = "pytz-2022.2.1.tar.gz", hash = "sha256:cea221417204f2d1a2aa03ddae3e867921971d0d76f14d87abb4414415bbdcf5"}, ] -pyyaml = [ +PyYAML = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, @@ -728,6 +857,13 @@ pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, @@ -755,9 +891,50 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] -requests = [] -"ruamel.yaml" = [] -"ruamel.yaml.clib" = [] +requests = [ + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +] +"ruamel.yaml" = [ + {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, + {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, +] +"ruamel.yaml.clib" = [ + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:066f886bc90cc2ce44df8b5f7acfc6a7e2b2e672713f027136464492b0c34d7c"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = 
"sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d3c620a54748a3d4cf0bcfe623e388407c8e85a4b06b8188e126302bcab93ea8"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:210c8fcfeff90514b7133010bf14e3bad652c8efde6b20e00c43854bf94fa5a6"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:61bc5e5ca632d95925907c569daa559ea194a4d16084ba86084be98ab1cec1c6"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1b4139a6ffbca8ef60fdaf9b33dec05143ba746a6f0ae0f9d11d38239211d335"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"}, + {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"}, +] +setuptools = [ + {file = "setuptools-65.4.0-py3-none-any.whl", hash = "sha256:c2d2709550f15aab6c9110196ea312f468f41cd546bceb24127a1be6fdcaeeb1"}, + {file = "setuptools-65.4.0.tar.gz", hash = "sha256:a8f6e213b4b0661f590ccf40de95d28a177cd747d098624ad3f69c40287297e9"}, +] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = 
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -766,7 +943,7 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] -sphinx = [ +Sphinx = [ {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, ] @@ -794,9 +971,19 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -typing-extensions = [] -urllib3 = [] +typing-extensions = [ + {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, + {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, +] +urllib3 = [ + {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, + {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, +] diff --git a/pyproject.toml b/pyproject.toml index f41f81e7..c19c423e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,6 +10,8 @@ click = "^8.1" "ruamel.yaml" = "^0.17.21" jsonschema = "^3.0.0" cffconvert = "^2.0.0" +toml = "^0.10.2" +Markdown = "^3.4.1" [tool.poetry.dev-dependencies] pytest = "^7.1.1" From e4fa5be27b5ad2003bc5b32c8863d69bc1281946 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 4 Oct 2022 21:44:15 +0200 Subject: [PATCH 24/52] Update CITATION.cff with @poikilotherm --- CITATION.cff | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CITATION.cff b/CITATION.cff index 277723f6..900e873f 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -23,3 +23,8 @@ authors: email: j.kelling@hzdr.de affiliation: Helmholtz Zentrum Dresden-Rossendorf (HZDR) orcid: 'https://orcid.org/0000-0003-1761-2591' + - given-names: Oliver + family-names: Bertuch + email: o.bertuch@fz-juelich.de + affiliation: Forschungszentrum Jülich GmbH (FZJ) + orcid: 'https://orcid.org/0000-0002-2702-3419' From 4ba9c76210bbf6de6a79f4f84c1117c4f86b5dae Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 7 Oct 2022 14:32:21 +0200 Subject: [PATCH 25/52] Update pyproject.toml to support Markdown output --- pyproject.toml | 31 +++++++++++++++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/pyproject.toml 
b/pyproject.toml index f41f81e7..6944e808 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,24 @@ name = "hermes" version = "0.1.0" description = "Workflow to publish research software with rich metadata" -authors = ["Stephan Druskat ", "Michael Meinel "] +homepage = "https://software-metadata.pub" +license = "Apache-2.0" +authors = [ + "Stephan Druskat ", + "Michael Meinel ", + "Oliver Bertuch ", + "Jeffrey Kelling ", + "Oliver Knodel " +] + +readme = "README.md" +repository = "https://github.com/hermes-hmc/workflow" +documentation = "https://docs.software-metadata.pub" +keywords = ["publishing", "metadata", "automation"] + +include = [ + "hermes/schema/*.json", +] [tool.poetry.dependencies] python = "^3.10" @@ -10,12 +27,21 @@ click = "^8.1" "ruamel.yaml" = "^0.17.21" jsonschema = "^3.0.0" cffconvert = "^2.0.0" +toml = "^0.10.2" +Markdown = "^3.4.1" [tool.poetry.dev-dependencies] pytest = "^7.1.1" pytest-cov = "^3.0.0" Sphinx = "^4.5.0" myst-parser = "^0.17.2" +sphinx-book-theme = "^0.3.3" +sphinx-favicon = "^0.2" +sphinxcontrib-contentui = "^0.2.5" +sphinxcontrib-images = "^0.9.4" +sphinx-icon = "^0.1.2" +sphinxemoji = "^0.2.0" +sphinxext-opengraph = "^0.6.3" [tool.poetry.plugins.console_scripts] haggis = "hermes.cli:haggis" @@ -29,6 +55,7 @@ haggis = "hermes.cli:haggis" 000_cff = "hermes.commands.process.cff:add_name" 020_git = "hermes.commands.process.git:flag_authors" + [tool.hermes.harvest] 000_git.enabled = false @@ -74,7 +101,7 @@ level = "DEBUG" handlers = ["terminal"] [tool.hermes.logging.loggers.hermes] -level = "WARNING" +level = "DEBUG" handlers = ["terminal", "logfile"] [tool.hermes.logging.loggers.audit] From 40d4566dd60c65dc6678695ca5c70871acdb4df4 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 30 Nov 2022 08:30:33 +0100 Subject: [PATCH 26/52] Remove CITATION.cff from this branch --- CITATION.cff | 30 ------------------------------ 1 file changed, 30 deletions(-) delete mode 100644 CITATION.cff diff --git a/CITATION.cff b/CITATION.cff deleted file mode 100644 index 900e873f..00000000 --- a/CITATION.cff +++ /dev/null @@ -1,30 +0,0 @@ -# This CITATION.cff file was generated with cffinit. -# Visit https://bit.ly/cffinit to generate yours today! - -cff-version: 1.2.0 -title: HERMES Aggregated Interface Script -message: >- - If you use this software, please cite it using the - metadata from this file. 
-type: software -authors: - - given-names: Michael - family-names: Meinel - email: michael.meinel@dlr.de - affiliation: German Aerospace Center (DLR) - orcid: 'https://orcid.org/0000-0001-6372-3853' - - given-names: Stephan - family-names: Druskat - email: stephan.druskat@dlr.de - affiliation: German Aerospace Center (DLR) - orcid: 'https://orcid.org/0000-0003-4925-7248' - - given-names: Jeffrey - family-names: Kelling - email: j.kelling@hzdr.de - affiliation: Helmholtz Zentrum Dresden-Rossendorf (HZDR) - orcid: 'https://orcid.org/0000-0003-1761-2591' - - given-names: Oliver - family-names: Bertuch - email: o.bertuch@fz-juelich.de - affiliation: Forschungszentrum Jülich GmbH (FZJ) - orcid: 'https://orcid.org/0000-0002-2702-3419' From beaeed90288162ef5e4f74d318dbc4ea80d076f7 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 30 Nov 2022 08:50:59 +0100 Subject: [PATCH 27/52] Fix a number of typos Co-authored-by: Stephan Druskat --- src/hermes/model/context.py | 2 +- src/hermes/model/merge.py | 2 +- src/hermes/model/path.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py index e4094846..61dfce59 100644 --- a/src/hermes/model/context.py +++ b/src/hermes/model/context.py @@ -274,7 +274,7 @@ def get_data(self, data: t.Optional[dict] = None, path: t.Optional['ContextPath' :param data: Optional a target dictionary where the data is stored. If not given, a new one is created. :param path: The path to extract data from. - :param tags: An optional dictionary to collect the tags that belog to the extracted data. + :param tags: An optional dictionary to collect the tags that belong to the extracted data. The full path will be used as key for this dictionary. :return: The extracted data (i.e., the `data` parameter if it was given). """ diff --git a/src/hermes/model/merge.py b/src/hermes/model/merge.py index 4d8f9799..3ee5bc2d 100644 --- a/src/hermes/model/merge.py +++ b/src/hermes/model/merge.py @@ -46,7 +46,7 @@ def __init__(self, **filter): def _check(self, key, filter, value): if key in filter: - check = self.checks.get(key, lambda item, vaue: item in value) + check = self.checks.get(key, lambda item, value: item in value) return check(filter[key], value) return True diff --git a/src/hermes/model/path.py b/src/hermes/model/path.py index f46b4819..42f84a10 100644 --- a/src/hermes/model/path.py +++ b/src/hermes/model/path.py @@ -33,7 +33,7 @@ class ContextPath: """ This class is used to access the different contexts. - On the one hand, the class allows you to define and manage pathes. + On the one hand, the class allows you to define and manage paths. You can simply build them up like follows: >>> path = ContextPath('spam')['eggs'][1]['ham'] @@ -263,7 +263,7 @@ def _set_item(self, target: dict | list, path: 'ContextPath', value: t.Any, **kw def resolve(self, target: list | dict, create: bool = False, query: t.Any = None) -> ('ContextPath', list | dict, 'ContextPath'): """ - Resolve a given path releative to a given target. + Resolve a given path relative to a given target. The method will incrementally try to resolve the entries in the `_target.path`. It stops when the requested item was found or when the resolution could not be completed. 
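
Note on the `ContextPath.resolve` behaviour touched by the docstring fix above: the incremental resolution it describes can be pictured with a small standalone sketch. This is illustrative only and not hermes code; the helper name `walk_path` and its return shape are assumptions made purely for the example.

# Illustrative sketch (not part of hermes): step through a nested dict/list
# one path entry at a time and stop as soon as an entry cannot be resolved,
# reporting how far the resolution got -- the behaviour described for
# ContextPath.resolve above.
import typing as t

def walk_path(target: dict | list, steps: list) -> t.Tuple[list, t.Any, list]:
    resolved = []            # path entries that could be resolved so far
    current = target
    for i, step in enumerate(steps):
        try:
            current = current[step]
        except (KeyError, IndexError, TypeError):
            # Resolution could not be completed; return the remainder untouched.
            return resolved, current, steps[i:]
        resolved.append(step)
    return resolved, current, []

data = {"spam": {"eggs": [None, {"ham": 42}]}}
walk_path(data, ["spam", "eggs", 1, "ham"])      # resolves fully, value 42
walk_path(data, ["spam", "missing", "ham"])      # stops after "spam"
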
From 3fe1014d08f5f87f0009fd863ceef1ceefb0da48 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 11:17:26 +0100 Subject: [PATCH 28/52] Add missing SPDX identifiers --- src/hermes/commands/harvest/cff.py | 4 +--- src/hermes/commands/harvest/codemeta.py | 7 +++++++ src/hermes/commands/harvest/git.py | 8 +++++++- src/hermes/commands/process/cff.py | 6 ++++++ src/hermes/commands/process/git.py | 6 ++++++ src/hermes/config.py | 6 ++++++ src/hermes/model/merge.py | 6 ++++++ 7 files changed, 39 insertions(+), 4 deletions(-) diff --git a/src/hermes/commands/harvest/cff.py b/src/hermes/commands/harvest/cff.py index 51c2ea01..360fbfd5 100644 --- a/src/hermes/commands/harvest/cff.py +++ b/src/hermes/commands/harvest/cff.py @@ -1,4 +1,3 @@ -import logging # SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) # # SPDX-License-Identifier: Apache-2.0 @@ -6,9 +5,8 @@ # SPDX-FileContributor: Stephan Druskat # SPDX-FileContributor: Michael Meinel -import collections -import glob import json +import logging import pathlib import urllib.request import typing as t diff --git a/src/hermes/commands/harvest/codemeta.py b/src/hermes/commands/harvest/codemeta.py index ebe49aed..a1aeb44c 100644 --- a/src/hermes/commands/harvest/codemeta.py +++ b/src/hermes/commands/harvest/codemeta.py @@ -1,3 +1,10 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Stephan Druskat +# SPDX-FileContributor: Michael Meinel + import glob import json import pathlib diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py index 2c750084..94c3a9f2 100644 --- a/src/hermes/commands/harvest/git.py +++ b/src/hermes/commands/harvest/git.py @@ -1,4 +1,10 @@ -import datetime +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Jeffrey Kelling +# SPDX-FileContributor: Michael Meinel + import logging import os import pathlib diff --git a/src/hermes/commands/process/cff.py b/src/hermes/commands/process/cff.py index 1b178c73..76932d8b 100644 --- a/src/hermes/commands/process/cff.py +++ b/src/hermes/commands/process/cff.py @@ -1,3 +1,9 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + import logging from hermes.model.context import HermesHarvestContext, ContextPath, CodeMetaContext diff --git a/src/hermes/commands/process/git.py b/src/hermes/commands/process/git.py index b66ed899..ddb6f1a7 100644 --- a/src/hermes/commands/process/git.py +++ b/src/hermes/commands/process/git.py @@ -1,3 +1,9 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + import logging from hermes.model.context import CodeMetaContext, HermesHarvestContext, ContextPath diff --git a/src/hermes/config.py b/src/hermes/config.py index ffac295e..3ddd63d7 100644 --- a/src/hermes/config.py +++ b/src/hermes/config.py @@ -1,3 +1,9 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + # TODO this file contains only dummy implementations which in most cases will lead to a crash... 
import logging import toml diff --git a/src/hermes/model/merge.py b/src/hermes/model/merge.py index 3ee5bc2d..9b959c3e 100644 --- a/src/hermes/model/merge.py +++ b/src/hermes/model/merge.py @@ -1,3 +1,9 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + from hermes.model.path import ContextPath From 418dc3d8534bef8f7c1d68de17c5cbfc6508236d Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 11:17:49 +0100 Subject: [PATCH 29/52] Remove dependencies on markdown --- pyproject.toml | 1 - src/hermes/commands/workflow.py | 9 --------- 2 files changed, 10 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9e9dc311..089ab071 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,6 @@ click = "^8.1" jsonschema = "^3.0.0" cffconvert = "^2.0.0" toml = "^0.10.2" -Markdown = "^3.4.1" # Packages for developers [tool.poetry.group.dev.dependencies] diff --git a/src/hermes/commands/workflow.py b/src/hermes/commands/workflow.py index aa833e2c..84d2a688 100644 --- a/src/hermes/commands/workflow.py +++ b/src/hermes/commands/workflow.py @@ -10,7 +10,6 @@ from importlib import metadata import click -import markdown as markdown from hermes.model.context import HermesContext, HermesHarvestContext, CodeMetaContext from hermes.model.errors import MergeError @@ -108,14 +107,6 @@ def process(): logging.shutdown() - with open('hermes-audit.md', 'r') as auditlog_file: - html_data = markdown.markdown(auditlog_file.read(), extensions=['admonition', 'def_list', 'fenced_code']) - - with open('hermes-audit.html', 'w') as html_file: - html_file.write(_HTML_PREFIX) - html_file.write(html_data) - html_file.write('') - @click.group(invoke_without_command=True) def deposit(): From fc2b3c09f1e2c13417747bddfc8ecbd9f962ae72 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 11:24:36 +0100 Subject: [PATCH 30/52] Add more missing SPDX tags --- .mailmap | 10 +++++++--- src/hermes/model/path.py | 6 ++++++ test/hermes_test/commands/harvest/test_codemeta.py | 6 ++++++ 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/.mailmap b/.mailmap index b242681c..fb25ea93 100644 --- a/.mailmap +++ b/.mailmap @@ -1,10 +1,14 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: CC0-1.0 + # Mapping of email addresses only. Format (one pair per line): -# +# # Mapping of user names. 
Format (one pair per line): -# Real Name nickname -# Real Name Name, Real +# Real Name nickname +# Real Name Name, Real Jeffrey Kelling jkelling diff --git a/src/hermes/model/path.py b/src/hermes/model/path.py index 42f84a10..90a51b0d 100644 --- a/src/hermes/model/path.py +++ b/src/hermes/model/path.py @@ -1,3 +1,9 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + import logging import typing as t diff --git a/test/hermes_test/commands/harvest/test_codemeta.py b/test/hermes_test/commands/harvest/test_codemeta.py index 5ae066cd..0b524159 100644 --- a/test/hermes_test/commands/harvest/test_codemeta.py +++ b/test/hermes_test/commands/harvest/test_codemeta.py @@ -1,3 +1,9 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Stephan Druskat + import pathlib import json From 2cb8cfbb5efeb43419607adfc21098614b7ce915 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Wed, 11 Jan 2023 11:25:24 +0100 Subject: [PATCH 31/52] Fix #85: Improve email mapping terminology - Although this file was used initially for dogfooding and isn't part of documentation anymore, this is a relevant meta file for our project as a software project as such, and is therefore kept. - The proposed example strings replace the original strings, and typos are fixed --- .mailmap | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.mailmap b/.mailmap index b242681c..72c6e948 100644 --- a/.mailmap +++ b/.mailmap @@ -1,10 +1,10 @@ # Mapping of email addresses only. Format (one pair per line): -# +# # Mapping of user names. Format (one pair per line): -# Real Name nickname -# Real Name Name, Real +# Real Name nickname +# Real Name Name, Real Jeffrey Kelling jkelling From 43ac2aa7f53a356e7b923c0d6362bcffa256ad87 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 11:26:51 +0100 Subject: [PATCH 32/52] Add missing import for type spec --- src/hermes/model/errors.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/hermes/model/errors.py b/src/hermes/model/errors.py index 61f47b25..24c3ad64 100644 --- a/src/hermes/model/errors.py +++ b/src/hermes/model/errors.py @@ -6,6 +6,8 @@ import typing as t +from hermes.model import path as path_model + class HermesValidationError(Exception): """ @@ -28,7 +30,7 @@ class MergeError(Exception): """ This exception should be raised when there is an error during a merge / set operation. """ - def __init__(self, path: 'ContextPath', old_Value: t.Any, new_value: t.Any, **kwargs): + def __init__(self, path: path_model.ContextPath, old_Value: t.Any, new_value: t.Any, **kwargs): """ Create a new merge incident. 
From 134db0961f9c0d8e4db0167ccf6fc1289610ed74 Mon Sep 17 00:00:00 2001 From: David Pape Date: Wed, 11 Jan 2023 11:28:46 +0100 Subject: [PATCH 33/52] =?UTF-8?q?Rename=20role=20of=20git=20contributors?= =?UTF-8?q?=20Other=20=E2=86=92=20Contributor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/hermes/commands/process/git.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/hermes/commands/process/git.py b/src/hermes/commands/process/git.py index b66ed899..56dce92f 100644 --- a/src/hermes/commands/process/git.py +++ b/src/hermes/commands/process/git.py @@ -8,7 +8,7 @@ def flag_authors(ctx: CodeMetaContext, harverst_ctx: HermesHarvestContext): """ - Identify all authors that are not yet in the target context and flag them with role `Other`. + Identify all authors that are not yet in the target context and flag them with role `Contributor`. :param ctx: The target context containting harmonized data. :param harverst_ctx: Data as it was harvested. @@ -30,7 +30,7 @@ def flag_authors(ctx: CodeMetaContext, harverst_ctx: HermesHarvestContext): author_key, target, path = author_path['*'].resolve(ctx._data, query=query) if author_key._item == '*': - contributor['projectRole'] = 'Others' + contributor['projectRole'] = 'Contributor' audit_log.debug('- %s', contributor['name']) ctx.update(author_key, contributor, tags=tags) From f21b5be0383e843d854c83176dd1e155addb918a Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 11:31:04 +0100 Subject: [PATCH 34/52] Update poetry.lock --- poetry.lock | 428 +++++++++++++++++++++++++++------------------------- 1 file changed, 223 insertions(+), 205 deletions(-) diff --git a/poetry.lock b/poetry.lock index 86ca8ed3..9753da5b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -8,7 +8,7 @@ python-versions = "*" [[package]] name = "astroid" -version = "2.12.13" +version = "2.13.2" description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false @@ -16,6 +16,7 @@ python-versions = ">=3.7.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" +typing-extensions = ">=4.0.0" wrapt = [ {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, @@ -23,20 +24,21 @@ wrapt = [ [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] [[package]] -name = "Babel" +name = "babel" version = "2.11.0" description = "Internationalization utilities" category = "dev" @@ -73,7 +75,7 @@ python-versions = "*" chardet = ">=3.0.2" [[package]] -name = "boolean.py" +name = "boolean-py" version = "4.0" description = "Define boolean algebras, create and parse boolean expressions and create custom boolean DSL." category = "dev" @@ -82,7 +84,7 @@ python-versions = "*" [[package]] name = "certifi" -version = "2022.9.24" +version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." 
category = "main" optional = false @@ -104,17 +106,17 @@ requests = ">=2.20,<3" "ruamel.yaml" = ">=0.16.0" [package.extras] -dev = ["isort", "prospector[with_pyroma] (>=1.4)", "pytest (>=6)", "pytest-cov"] +dev = ["isort", "prospector[with-pyroma] (>=1.4)", "pytest (>=6)", "pytest-cov"] gcloud = ["flask"] publishing = ["twine", "wheel"] [[package]] name = "chardet" -version = "5.0.0" +version = "5.1.0" description = "Universal encoding detector for Python 3" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "charset-normalizer" @@ -125,7 +127,7 @@ optional = false python-versions = ">=3.6.0" [package.extras] -unicode_backport = ["unicodedata2"] +unicode-backport = ["unicodedata2"] [[package]] name = "click" @@ -148,7 +150,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7 [[package]] name = "coverage" -version = "6.5.0" +version = "7.0.5" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -161,7 +163,7 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 toml = ["tomli"] [[package]] -name = "Deprecated" +name = "deprecated" version = "1.2.13" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." category = "dev" @@ -192,7 +194,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false @@ -232,14 +234,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [[package]] -name = "Jinja2" +name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." category = "dev" @@ -268,11 +270,11 @@ six = ">=1.11.0" [package.extras] format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format_nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] [[package]] name = "lazy-object-proxy" -version = "1.8.0" +version = "1.9.0" description = "A fast and thorough lazy object proxy." category = "dev" optional = false @@ -318,7 +320,7 @@ mdurl = ">=0.1,<1.0" [package.extras] benchmarking = ["psutil", "pytest", "pytest-benchmark (>=3.2,<4.0)"] -code_style = ["pre-commit (==2.6)"] +code-style = ["pre-commit (==2.6)"] compare = ["commonmark (>=0.9.1,<0.10.0)", "markdown (>=3.3.6,<3.4.0)", "mistletoe (>=0.8.1,<0.9.0)", "mistune (>=2.0.2,<2.1.0)", "panflute (>=2.1.3,<2.2.0)"] linkify = ["linkify-it-py (>=1.0,<2.0)"] plugins = ["mdit-py-plugins"] @@ -327,7 +329,7 @@ rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx- testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] -name = "MarkupSafe" +name = "markupsafe" version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." 
category = "dev" @@ -344,7 +346,7 @@ python-versions = ">=3.6" [[package]] name = "mdit-py-plugins" -version = "0.3.1" +version = "0.3.3" description = "Collection of plugins for markdown-it-py" category = "dev" optional = false @@ -354,7 +356,7 @@ python-versions = ">=3.7" markdown-it-py = ">=1.0.0,<3.0.0" [package.extras] -code_style = ["pre-commit"] +code-style = ["pre-commit"] rtd = ["attrs", "myst-parser (>=0.16.1,<0.17.0)", "sphinx-book-theme (>=0.1.0,<0.2.0)"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] @@ -392,21 +394,18 @@ sphinx = ">=4,<6" typing-extensions = "*" [package.extras] -code_style = ["pre-commit (>=2.12,<3.0)"] +code-style = ["pre-commit (>=2.12,<3.0)"] linkify = ["linkify-it-py (>=1.0,<2.0)"] rtd = ["ipython", "sphinx-book-theme", "sphinx-design", "sphinxcontrib.mermaid (>=0.7.1,<0.8.0)", "sphinxext-opengraph (>=0.6.3,<0.7.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=6,<7)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx (<5.2)", "sphinx-pytest"] [[package]] name = "packaging" -version = "21.3" +version = "23.0" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" [[package]] name = "pluggy" @@ -468,8 +467,8 @@ optional = false python-versions = ">=3.6" [[package]] -name = "Pygments" -version = "2.13.0" +name = "pygments" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false @@ -491,20 +490,9 @@ docopt = ">=0.6.2" python-dateutil = ">=2.8.0" "ruamel.yaml" = ">=0.16.0" -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "dev" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - [[package]] name = "pyrsistent" -version = "0.19.2" +version = "0.19.3" description = "Persistent/Functional/Immutable data structures" category = "main" optional = false @@ -569,14 +557,14 @@ chardet = "*" [[package]] name = "pytz" -version = "2022.6" +version = "2022.7" description = "World timezone definitions, modern and historical" category = "dev" optional = false python-versions = "*" [[package]] -name = "PyYAML" +name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" category = "dev" @@ -599,27 +587,26 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "reuse" -version = "1.0.0" +version = "1.1.0" description = "reuse is a tool for compliance with the REUSE recommendations." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.2,<4.0.0" [package.dependencies] -binaryornot = "*" -"boolean.py" = "*" -Jinja2 = "*" -license-expression = "*" -python-debian = "*" -requests = "*" +binaryornot = ">=0.4.4,<0.5.0" +"boolean.py" = ">=3.8" +Jinja2 = ">=3.0.0,<4.0.0" +license-expression = ">=1.0" +python-debian = ">=0.1.38,<0.1.45 || >0.1.45,<0.1.46 || >0.1.46,<0.1.47 || >0.1.47,<0.2.0" setuptools = "*" [[package]] -name = "ruamel.yaml" +name = "ruamel-yaml" version = "0.17.21" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" category = "main" @@ -634,7 +621,7 @@ docs = ["ryd"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] -name = "ruamel.yaml.clib" +name = "ruamel-yaml-clib" version = "0.2.7" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" category = "main" @@ -643,7 +630,7 @@ python-versions = ">=3.5" [[package]] name = "setuptools" -version = "65.6.0" +version = "65.6.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false @@ -679,7 +666,7 @@ optional = false python-versions = ">=3.6" [[package]] -name = "Sphinx" +name = "sphinx" version = "4.5.0" description = "Python documentation generator" category = "dev" @@ -759,7 +746,7 @@ pyyaml = "*" sphinx = ">=3,<5" [package.extras] -code_style = ["pre-commit (>=2.7.0,<2.8.0)"] +code-style = ["pre-commit (>=2.7.0,<2.8.0)"] doc = ["ablog (>=0.10.13,<0.11.0)", "folium", "ipywidgets", "matplotlib", "myst-nb (>=0.13.2,<0.14.0)", "nbclient", "numpy", "numpydoc", "pandas", "plotly", "sphinx (>=4.0,<5.0)", "sphinx-copybutton", "sphinx-design", "sphinx-examples", "sphinx-tabs", "sphinx-thebe (>=0.1.1)", "sphinx-togglebutton (>=0.2.1)", "sphinxcontrib-bibtex (>=2.2,<3.0)", "sphinxcontrib-youtube", "sphinxext-opengraph"] test = ["beautifulsoup4 (>=4.6.1,<5)", "coverage", "myst-nb (>=0.13.2,<0.14.0)", "pytest (>=6.0.1,<6.1.0)", "pytest-cov", "pytest-regressions (>=2.0.1,<2.1.0)", "sphinx_thebe"] @@ -795,11 +782,11 @@ test = ["coverage", "pytest"] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +version = "1.0.3" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -923,6 +910,14 @@ mslex = {version = ">=0.3.0,<0.4.0", markers = "sys_platform == \"win32\""} psutil = ">=5.7.2,<6.0.0" tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version >= \"3.7\" and python_version < \"4.0\""} +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + [[package]] name = "tomli" version = "2.0.1" @@ -948,7 +943,7 @@ optional = false python-versions = ">=3.7" [[package]] -name = "Unidecode" +name = "unidecode" version = "1.3.6" description = "ASCII transliterations of Unicode text" category = "dev" @@ -957,11 +952,11 @@ python-versions = ">=3.5" [[package]] name = "urllib3" -version = "1.26.12" +version = "1.26.13" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] @@ -979,7 +974,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "a563183a806a39f7ecb6f4bedff46347ab7bb6ce69989bee6f506f06b75536cf" +content-hash = "f7532a1d95981f6e73e6d389e8ff96e6b5ade455d9ebf7660895682108499c6a" [metadata.files] alabaster = [ @@ -987,14 +982,14 @@ alabaster = [ {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, ] astroid = [ - {file = "astroid-2.12.13-py3-none-any.whl", hash = "sha256:10e0ad5f7b79c435179d0d0f0df69998c4eef4597534aae44910db060baeb907"}, - {file = "astroid-2.12.13.tar.gz", hash = "sha256:1493fe8bd3dfd73dc35bd53c9d5b6e49ead98497c47b2307662556a5692d29d7"}, + {file = "astroid-2.13.2-py3-none-any.whl", hash = "sha256:8f6a8d40c4ad161d6fc419545ae4b2f275ed86d1c989c97825772120842ee0d2"}, + {file = "astroid-2.13.2.tar.gz", hash = "sha256:3bc7834720e1a24ca797fd785d77efb14f7a28ee8e635ef040b6e2d80ccb3303"}, ] attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, ] -Babel = [ +babel = [ {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"}, {file = "Babel-2.11.0.tar.gz", hash = "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"}, ] @@ -1006,21 +1001,21 @@ binaryornot = [ {file = "binaryornot-0.4.4-py2.py3-none-any.whl", hash = "sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4"}, {file = "binaryornot-0.4.4.tar.gz", hash = "sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061"}, ] -"boolean.py" = [ +boolean-py = [ {file = "boolean.py-4.0-py3-none-any.whl", hash = "sha256:2876f2051d7d6394a531d82dc6eb407faa0b01a0a0b3083817ccd7323b8d96bd"}, {file = "boolean.py-4.0.tar.gz", hash = "sha256:17b9a181630e43dde1851d42bef546d616d5d9b4480357514597e78b203d06e4"}, ] certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, ] cffconvert = [ {file = "cffconvert-2.0.0-py3-none-any.whl", hash = "sha256:573c825e4e16173d99396dc956bd22ff5d4f84215cc16b6ab05299124f5373bb"}, {file = "cffconvert-2.0.0.tar.gz", hash = "sha256:b4379ee415c6637dc9e3e7ba196605cb3cedcea24613e4ea242c607d9e98eb50"}, ] chardet = [ - {file = "chardet-5.0.0-py3-none-any.whl", hash = "sha256:d3e64f022d254183001eccc5db4040520c0f23b1a3f33d6413e099eb7f126557"}, - 
{file = "chardet-5.0.0.tar.gz", hash = "sha256:0368df2bfd78b5fc20572bb4e9bb7fb53e2c094f60ae9993339e8671d0afb8aa"}, + {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"}, + {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"}, ] charset-normalizer = [ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, @@ -1035,58 +1030,59 @@ colorama = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = 
"sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = 
"coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] -Deprecated = [ + {file = "coverage-7.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a7f23bbaeb2a87f90f607730b45564076d870f1fb07b9318d0c21f36871932b"}, + {file = "coverage-7.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c18d47f314b950dbf24a41787ced1474e01ca816011925976d90a88b27c22b89"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef14d75d86f104f03dea66c13188487151760ef25dd6b2dbd541885185f05f40"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66e50680e888840c0995f2ad766e726ce71ca682e3c5f4eee82272c7671d38a2"}, + {file = "coverage-7.0.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9fed35ca8c6e946e877893bbac022e8563b94404a605af1d1e6accc7eb73289"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d8d04e755934195bdc1db45ba9e040b8d20d046d04d6d77e71b3b34a8cc002d0"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e109f1c9a3ece676597831874126555997c48f62bddbcace6ed17be3e372de8"}, + {file = "coverage-7.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0a1890fca2962c4f1ad16551d660b46ea77291fba2cc21c024cd527b9d9c8809"}, + {file = "coverage-7.0.5-cp310-cp310-win32.whl", hash = "sha256:be9fcf32c010da0ba40bf4ee01889d6c737658f4ddff160bd7eb9cac8f094b21"}, + {file = "coverage-7.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:cbfcba14a3225b055a28b3199c3d81cd0ab37d2353ffd7f6fd64844cebab31ad"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30b5fec1d34cc932c1bc04017b538ce16bf84e239378b8f75220478645d11fca"}, + {file = "coverage-7.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1caed2367b32cc80a2b7f58a9f46658218a19c6cfe5bc234021966dc3daa01f0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d254666d29540a72d17cc0175746cfb03d5123db33e67d1020e42dae611dc196"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19245c249aa711d954623d94f23cc94c0fd65865661f20b7781210cb97c471c0"}, + {file = "coverage-7.0.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b05ed4b35bf6ee790832f68932baf1f00caa32283d66cc4d455c9e9d115aafc"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:29de916ba1099ba2aab76aca101580006adfac5646de9b7c010a0f13867cba45"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e057e74e53db78122a3979f908973e171909a58ac20df05c33998d52e6d35757"}, + {file = "coverage-7.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:411d4ff9d041be08fdfc02adf62e89c735b9468f6d8f6427f8a14b6bb0a85095"}, + {file = "coverage-7.0.5-cp311-cp311-win32.whl", hash = "sha256:52ab14b9e09ce052237dfe12d6892dd39b0401690856bcfe75d5baba4bfe2831"}, + {file = "coverage-7.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:1f66862d3a41674ebd8d1a7b6f5387fe5ce353f8719040a986551a545d7d83ea"}, + {file = "coverage-7.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b69522b168a6b64edf0c33ba53eac491c0a8f5cc94fa4337f9c6f4c8f2f5296c"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436e103950d05b7d7f55e39beeb4d5be298ca3e119e0589c0227e6d0b01ee8c7"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c56bec53d6e3154eaff6ea941226e7bd7cc0d99f9b3756c2520fc7a94e6d96"}, + {file = "coverage-7.0.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a38362528a9115a4e276e65eeabf67dcfaf57698e17ae388599568a78dcb029"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f67472c09a0c7486e27f3275f617c964d25e35727af952869dd496b9b5b7f6a3"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:220e3fa77d14c8a507b2d951e463b57a1f7810a6443a26f9b7591ef39047b1b2"}, + {file = "coverage-7.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ecb0f73954892f98611e183f50acdc9e21a4653f294dfbe079da73c6378a6f47"}, + {file = "coverage-7.0.5-cp37-cp37m-win32.whl", hash = "sha256:d8f3e2e0a1d6777e58e834fd5a04657f66affa615dae61dd67c35d1568c38882"}, + {file = "coverage-7.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9e662e6fc4f513b79da5d10a23edd2b87685815b337b1a30cd11307a6679148d"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:790e4433962c9f454e213b21b0fd4b42310ade9c077e8edcb5113db0818450cb"}, + {file = "coverage-7.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49640bda9bda35b057b0e65b7c43ba706fa2335c9a9896652aebe0fa399e80e6"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d66187792bfe56f8c18ba986a0e4ae44856b1c645336bd2c776e3386da91e1dd"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:276f4cd0001cd83b00817c8db76730938b1ee40f4993b6a905f40a7278103b3a"}, + {file = "coverage-7.0.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95304068686545aa368b35dfda1cdfbbdbe2f6fe43de4a2e9baa8ebd71be46e2"}, + {file = 
"coverage-7.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:17e01dd8666c445025c29684d4aabf5a90dc6ef1ab25328aa52bedaa95b65ad7"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea76dbcad0b7b0deb265d8c36e0801abcddf6cc1395940a24e3595288b405ca0"}, + {file = "coverage-7.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:50a6adc2be8edd7ee67d1abc3cd20678987c7b9d79cd265de55941e3d0d56499"}, + {file = "coverage-7.0.5-cp38-cp38-win32.whl", hash = "sha256:e4ce984133b888cc3a46867c8b4372c7dee9cee300335e2925e197bcd45b9e16"}, + {file = "coverage-7.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4a950f83fd3f9bca23b77442f3a2b2ea4ac900944d8af9993743774c4fdc57af"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c2155943896ac78b9b0fd910fb381186d0c345911f5333ee46ac44c8f0e43ab"}, + {file = "coverage-7.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:54f7e9705e14b2c9f6abdeb127c390f679f6dbe64ba732788d3015f7f76ef637"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee30375b409d9a7ea0f30c50645d436b6f5dfee254edffd27e45a980ad2c7f4"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b78729038abea6a5df0d2708dce21e82073463b2d79d10884d7d591e0f385ded"}, + {file = "coverage-7.0.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13250b1f0bd023e0c9f11838bdeb60214dd5b6aaf8e8d2f110c7e232a1bff83b"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c407b1950b2d2ffa091f4e225ca19a66a9bd81222f27c56bd12658fc5ca1209"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c76a3075e96b9c9ff00df8b5f7f560f5634dffd1658bafb79eb2682867e94f78"}, + {file = "coverage-7.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f26648e1b3b03b6022b48a9b910d0ae209e2d51f50441db5dce5b530fad6d9b1"}, + {file = "coverage-7.0.5-cp39-cp39-win32.whl", hash = "sha256:ba3027deb7abf02859aca49c865ece538aee56dcb4871b4cced23ba4d5088904"}, + {file = "coverage-7.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:949844af60ee96a376aac1ded2a27e134b8c8d35cc006a52903fc06c24a3296f"}, + {file = "coverage-7.0.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:b9727ac4f5cf2cbf87880a63870b5b9730a8ae3a4a360241a0fdaa2f71240ff0"}, + {file = "coverage-7.0.5.tar.gz", hash = "sha256:051afcbd6d2ac39298d62d340f94dbb6a1f31de06dfaf6fcef7b759dd3860c45"}, +] +deprecated = [ {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, ] @@ -1098,8 +1094,8 @@ docutils = [ {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, ] flake8 = [ {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = 
"sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, @@ -1114,10 +1110,10 @@ imagesize = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -Jinja2 = [ +jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] @@ -1126,25 +1122,42 @@ jsonschema = [ {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, ] lazy-object-proxy = [ - {file = "lazy-object-proxy-1.8.0.tar.gz", hash = "sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0"}, - {file = 
"lazy_object_proxy-1.8.0-pp37-pypy37_pp73-any.whl", hash = "sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891"}, - {file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"}, - {file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"}, + {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, ] license-expression = [ {file = "license-expression-30.0.0.tar.gz", hash = "sha256:ad638292aa8493f84354909b517922cb823582c2ce2c4d880e42544a86bea8dd"}, @@ -1157,7 +1170,7 @@ markdown-it-py = [ {file = "markdown-it-py-2.1.0.tar.gz", hash = "sha256:cf7e59fed14b5ae17c0006eff14a2d9a00ed5f3a846148153899a0224e2c07da"}, {file = "markdown_it_py-2.1.0-py3-none-any.whl", hash = "sha256:93de681e5c021a432c63147656fe21790bc01231e0cd2da73626f1aa3ac0fe27"}, ] -MarkupSafe = [ +markupsafe = [ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, @@ -1204,8 +1217,8 @@ mccabe = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] mdit-py-plugins = [ - {file = "mdit-py-plugins-0.3.1.tar.gz", hash = "sha256:3fc13298497d6e04fe96efdd41281bfe7622152f9caa1815ea99b5c893de9441"}, - {file = "mdit_py_plugins-0.3.1-py3-none-any.whl", hash = "sha256:606a7f29cf56dbdfaf914acb21709b8f8ee29d857e8f29dcc33d8cb84c57bfa1"}, + {file = "mdit-py-plugins-0.3.3.tar.gz", hash = "sha256:5cfd7e7ac582a594e23ba6546a2f406e94e42eb33ae596d0734781261c251260"}, + {file = "mdit_py_plugins-0.3.3-py3-none-any.whl", hash = "sha256:36d08a29def19ec43acdcd8ba471d3ebab132e7879d442760d963f19913e04b9"}, ] mdurl = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, @@ -1220,8 +1233,8 @@ myst-parser = [ {file = "myst_parser-0.18.1-py3-none-any.whl", hash = "sha256:61b275b85d9f58aa327f370913ae1bec26ebad372cc99f3ab85c8ec3ee8d9fb8"}, ] packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, @@ -1255,41 +1268,42 @@ pyflakes = [ {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, ] -Pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, +pygments = [ + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, ] pykwalify = [ {file = "pykwalify-1.8.0-py2.py3-none-any.whl", hash = "sha256:731dfa87338cca9f559d1fca2bdea37299116e3139b73f78ca90a543722d6651"}, {file = "pykwalify-1.8.0.tar.gz", hash = "sha256:796b2ad3ed4cb99b88308b533fb2f559c30fa6efb4fa9fda11347f483d245884"}, ] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] pyrsistent = [ - {file = "pyrsistent-0.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a"}, - {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a"}, - {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed"}, - {file = "pyrsistent-0.19.2-cp310-cp310-win32.whl", hash = "sha256:456cb30ca8bff00596519f2c53e42c245c09e1a4543945703acd4312949bfd41"}, - {file = "pyrsistent-0.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:b39725209e06759217d1ac5fcdb510e98670af9e37223985f330b611f62e7425"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aede922a488861de0ad00c7630a6e2d57e8023e4be72d9d7147a9fcd2d30712"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879b4c2f4d41585c42df4d7654ddffff1239dc4065bc88b745f0341828b83e78"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c43bec251bbd10e3cb58ced80609c5c1eb238da9ca78b964aea410fb820d00d6"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-win32.whl", hash = "sha256:d690b18ac4b3e3cab73b0b7aa7dbe65978a172ff94970ff98d82f2031f8971c2"}, - {file = "pyrsistent-0.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:3ba4134a3ff0fc7ad225b6b457d1309f4698108fb6b35532d015dca8f5abed73"}, - {file = "pyrsistent-0.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a178209e2df710e3f142cbd05313ba0c5ebed0a55d78d9945ac7a4e09d923308"}, - {file = "pyrsistent-0.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e371b844cec09d8dc424d940e54bba8f67a03ebea20ff7b7b0d56f526c71d584"}, - {file = "pyrsistent-0.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb"}, - {file = "pyrsistent-0.19.2-cp38-cp38-win32.whl", hash = "sha256:e5d8f84d81e3729c3b506657dddfe46e8ba9c330bf1858ee33108f8bb2adb38a"}, - {file = "pyrsistent-0.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:9cd3e9978d12b5d99cbdc727a3022da0430ad007dacf33d0bf554b96427f33ab"}, - {file = "pyrsistent-0.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770"}, - {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21455e2b16000440e896ab99e8304617151981ed40c29e9507ef1c2e4314ee95"}, - {file = "pyrsistent-0.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd880614c6237243ff53a0539f1cb26987a6dc8ac6e66e0c5a40617296a045e"}, - {file = "pyrsistent-0.19.2-cp39-cp39-win32.whl", hash = "sha256:71d332b0320642b3261e9fee47ab9e65872c2bd90260e5d225dabeed93cbd42b"}, - {file = "pyrsistent-0.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:dec3eac7549869365fe263831f576c8457f6c833937c68542d08fde73457d291"}, - {file = "pyrsistent-0.19.2-py3-none-any.whl", hash = "sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0"}, - {file = "pyrsistent-0.19.2.tar.gz", hash = "sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2"}, + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = 
"pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = 
"pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, ] pytest = [ {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, @@ -1308,10 +1322,10 @@ python-debian = [ {file = "python_debian-0.1.49-py3-none-any.whl", hash = "sha256:880f3bc52e31599f2a9b432bd7691844286825087fccdcf2f6ffd5cd79a26f9f"}, ] pytz = [ - {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"}, - {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"}, + {file = "pytz-2022.7-py2.py3-none-any.whl", hash = "sha256:93007def75ae22f7cd991c84e02d434876818661f8df9ad5df9e950ff4e52cfd"}, + {file = "pytz-2022.7.tar.gz", hash = "sha256:7ccfae7b4b2c067464a6733c6261673fdb8fd1be905460396b97a073e9fa683a"}, ] -PyYAML = [ +pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, @@ -1358,14 +1372,14 @@ requests = [ {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] reuse = [ - {file = "reuse-1.0.0-py3-none-any.whl", hash = "sha256:e2605e796311c424465d741ea2a1e1ad03bbb90b921d74750119c331ca5af46e"}, - {file = "reuse-1.0.0.tar.gz", hash = "sha256:db3022be2d87f69c8f508b928023de3026f454ce17d01e22f770f7147ac1e8d4"}, + {file = "reuse-1.1.0-cp311-cp311-manylinux_2_36_x86_64.whl", hash = "sha256:b0f3fb9091ff513af04b555d14a4c529ab05f6a575ab192dd9b68244f1e0721d"}, + {file = "reuse-1.1.0.tar.gz", hash = "sha256:7a054f6e372ad02d0b1b07368030fc38746b50ed45f5422a81994e7a88b52f1f"}, ] -"ruamel.yaml" = [ +ruamel-yaml = [ {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, ] -"ruamel.yaml.clib" = [ +ruamel-yaml-clib = [ {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, @@ -1401,8 +1415,8 @@ reuse = [ {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, ] setuptools = [ - {file = "setuptools-65.6.0-py3-none-any.whl", hash = "sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840"}, - {file = "setuptools-65.6.0.tar.gz", hash = "sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d"}, + 
{file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -1416,7 +1430,7 @@ soupsieve = [ {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, ] -Sphinx = [ +sphinx = [ {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, ] @@ -1440,8 +1454,8 @@ sphinx-icon = [ {file = "sphinx-icon-0.1.2.tar.gz", hash = "sha256:e4adc9922e2e2b19f97813a3994d5e6ccd01e9a21ae73b755f7114ac4247fdf5"}, ] sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, + {file = "sphinxcontrib.applehelp-1.0.3-py3-none-any.whl", hash = "sha256:ba0f2a22e6eeada8da6428d0d520215ee8864253f32facf958cca81e426f661d"}, + {file = "sphinxcontrib.applehelp-1.0.3.tar.gz", hash = "sha256:83749f09f6ac843b8cb685277dbc818a8bf2d76cc19602699094fe9a74db529e"}, ] sphinxcontrib-contentui = [ {file = "sphinxcontrib_contentui-0.2.5-py3-none-any.whl", hash = "sha256:a01c7a0cfe360c99692999d3286b6a4d93ebfc94d0eff2619622fd5e6086ab36"}, @@ -1481,6 +1495,10 @@ taskipy = [ {file = "taskipy-1.10.3-py3-none-any.whl", hash = "sha256:4c0070ca53868d97989f7ab5c6f237525d52ee184f9b967576e8fe427ed9d0b8"}, {file = "taskipy-1.10.3.tar.gz", hash = "sha256:112beaf21e3d5569950b99162a1de003fa885fabee9e450757a6b874be914877"}, ] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, @@ -1502,13 +1520,13 @@ typing-extensions = [ {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, ] -Unidecode = [ +unidecode = [ {file = "Unidecode-1.3.6-py3-none-any.whl", hash = "sha256:547d7c479e4f377b430dd91ac1275d593308dce0fc464fb2ab7d41f82ec653be"}, {file = "Unidecode-1.3.6.tar.gz", hash = "sha256:fed09cf0be8cf415b391642c2a5addfc72194407caee4f98719e40ec2a72b830"}, ] urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, + {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = 
"sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, + {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, ] wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, From a18cf399eebac5c90129fc9ea7306bb705cb2b7c Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 13:13:29 +0100 Subject: [PATCH 35/52] Fix flake8 errors - Max complexity was increased to 15 for now --- .github/workflows/tests.yml | 2 +- src/hermes/commands/harvest/cff.py | 3 +- src/hermes/commands/harvest/git.py | 32 ++++++++++--------- src/hermes/commands/process/cff.py | 3 +- src/hermes/model/context.py | 10 ++++-- src/hermes/model/merge.py | 16 +++++----- src/hermes/model/path.py | 19 ++++++----- test/hermes_test/commands/harvest/test_cff.py | 1 - 8 files changed, 49 insertions(+), 37 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cc536d2a..5f6235fd 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -33,7 +33,7 @@ jobs: # stop the build if there are Python syntax errors or undefined names flake8 ./test/ ./src/ --count --select=E9,F63,F7,F82 --show-source --statistics # Stop build on errors - flake8 ./test/ ./src/ --count --max-complexity=10 --max-line-length=120 --statistics + flake8 ./test/ ./src/ --count --max-complexity=15 --max-line-length=120 --statistics - name: Test with pytest run: | pip install -e . diff --git a/src/hermes/commands/harvest/cff.py b/src/hermes/commands/harvest/cff.py index 360fbfd5..d5aa692d 100644 --- a/src/hermes/commands/harvest/cff.py +++ b/src/hermes/commands/harvest/cff.py @@ -98,7 +98,8 @@ def _validate(cff_file: pathlib.Path, cff_dict: t.Dict) -> bool: audit_log.info('') audit_log.info('See the Citation File Format schema guide for further details:') - audit_log.info(f'.') + audit_log.info( + f'.') return False elif len(errors) == 0: diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py index 94c3a9f2..33f4d7a6 100644 --- a/src/hermes/commands/harvest/git.py +++ b/src/hermes/commands/harvest/git.py @@ -14,7 +14,7 @@ import subprocess import shutil -from hermes.model.context import HermesHarvestContext, ContextPath +from hermes.model.context import HermesHarvestContext _log = logging.getLogger('harvest.git') @@ -53,8 +53,10 @@ def __init__(self, name: str | t.List[str], email: str | t.List[str], ts: str | def __str__(self): parts = [] - if self.name: parts.append(self.name[0]) - if self.email: parts.append(f'<{self.email[0]}>') + if self.name: + parts.append(self.name[0]) + if self.email: + parts.append(f'<{self.email[0]}>') return f'"{" ".join(parts)}"' def _update_attr(self, target, value, unique=True): @@ -215,22 +217,20 @@ def update(self, **kwargs): def _audit_authors(authors, audit_log: logging.Logger): # Collect all authors that have ambiguous data - unmapped_authors = [] - for author in authors._all: - if len(author.email) > 1 or len(author.name) > 1: - unmapped_authors.append(author) + unmapped_authors = [a for a in authors._all if len(a.email) > 1 or len(a.name) > 1] if unmapped_authors: # Report to the audit about our findings audit_log.warning('!!! 
warning "You have unmapped authors in your Git history."') for author in unmapped_authors: if len(author.email) > 1: - audit_log.info(f" - %s has alternate email: %s", str(author), ', '.join(author.email[1:])) + audit_log.info(" - %s has alternate email: %s", str(author), ', '.join(author.email[1:])) if len(author.name) > 1: - audit_log.info(f" - %s has alternate names: %s", str(author), ', '.join(author.name[1:])) + audit_log.info(" - %s has alternate names: %s", str(author), ', '.join(author.name[1:])) audit_log.warning('') - audit_log.info("Please consider adding a `.maillog` file to your repository to disambiguate these contributors.") + audit_log.info( + "Please consider adding a `.maillog` file to your repository to disambiguate these contributors.") audit_log.info('') audit_log.info('``` .mailmap') @@ -300,18 +300,20 @@ def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): p = subprocess.run([git_exe, "rev-parse", "--abbrev-ref", "HEAD"], capture_output=True) if p.returncode: - raise RuntimeError("`git branch` command failed with code {}: '{}'!".format(p.returncode, p.stderr.decode(SHELL_ENCODING))) + raise RuntimeError(f"`git branch` command failed with code {p.returncode}: " + f"'{p.stderr.decode(SHELL_ENCODING)}'!") git_branch = p.stdout.decode(SHELL_ENCODING).strip() # TODO: should we warn or error if the HEAD is detached? p = subprocess.run([git_exe, "log", f"--pretty={_GIT_SEP.join(_GIT_FORMAT)}"] + _GIT_ARGS, capture_output=True) if p.returncode: - raise RuntimeError("`git log` command failed with code {}: '{}'!".format(p.returncode, p.stderr.decode(SHELL_ENCODING))) + raise RuntimeError(f"`git log` command failed with code {p.returncode}: " + f"'{p.stderr.decode(SHELL_ENCODING)}'!") log = p.stdout.decode(SHELL_ENCODING).split('\n') - for l in log: + for line in log: try: - name, email, ts = l.split(_GIT_SEP) + name, email, ts = line.split(_GIT_SEP) except ValueError: continue @@ -331,7 +333,7 @@ def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): try: ctx.get_data() - except ValueError as e: + except ValueError: audit_log.error('!!! 
warning "Inconsistent data"') audit_log.info(' The data collected from git is ambiguous.') audit_log.info(' Consider deleting `%s` to avoid problems.', ctx.hermes_dir) diff --git a/src/hermes/commands/process/cff.py b/src/hermes/commands/process/cff.py index 76932d8b..cfba41e0 100644 --- a/src/hermes/commands/process/cff.py +++ b/src/hermes/commands/process/cff.py @@ -27,5 +27,6 @@ def add_name(ctx: CodeMetaContext, harvest_ctx: HermesHarvestContext): for i, author in enumerate(data.get('author', [])): if 'name' not in author: - harvest_ctx.update(str(author_path[i]["name"]), f"{author['givenName']} {author['familyName']}", stage='preprocess') + harvest_ctx.update(str(author_path[i]["name"]), f"{author['givenName']} {author['familyName']}", + stage='preprocess') audit_log.debug(f"- {author['givenName']} {author['familyName']}") diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py index 5cef0480..bd82e03d 100644 --- a/src/hermes/model/context.py +++ b/src/hermes/model/context.py @@ -94,7 +94,10 @@ def update(self, _key: str, _value: t.Any, **kwargs: t.Any): pass - def get_data(self, data: t.Optional[dict] = None, path: t.Optional['ContextPath'] = None, tags: t.Optional[dict] = None) -> dict: + def get_data(self, + data: t.Optional[dict] = None, + path: t.Optional['ContextPath'] = None, + tags: t.Optional[dict] = None) -> dict: if data is None: data = {} if path is not None: @@ -271,7 +274,10 @@ def _check_values(self, path, values): raise ValueError(f'{path}') return value, tag - def get_data(self, data: t.Optional[dict] = None, path: t.Optional['ContextPath'] = None, tags: t.Optional[dict] = None) -> dict: + def get_data(self, + data: t.Optional[dict] = None, + path: t.Optional['ContextPath'] = None, + tags: t.Optional[dict] = None) -> dict: """ Retrieve the data from a given path. 
diff --git a/src/hermes/model/merge.py b/src/hermes/model/merge.py index 9b959c3e..578dc477 100644 --- a/src/hermes/model/merge.py +++ b/src/hermes/model/merge.py @@ -80,8 +80,8 @@ def __call__(self, target, path, value, **kwargs): match target, path._item: case list(), int() as index if index < len(target): match target[index]: - case dict() as t: t.update(value) - case list() as l: l[:] = value + case dict() as item: item.update(value) + case list() as item: item[:] = value case _: target[index] = value case list(), '*': @@ -98,8 +98,8 @@ def __call__(self, target, path, value, **kwargs): case dict(), str() as key if key in target: match target[key]: - case dict() as t: t.update(value) - case list() as l: l[:] = value + case dict() as item: item.update(value) + case list() as item: item[:] = value case _: target[key] = value case dict(), str() as key: @@ -129,8 +129,8 @@ def __call__(self, target, path, value, **kwargs): match target, path._item: case dict(), str() as key if key in target: match target[key]: - case dict() as t: t.update(value) - case list() as l: l[:] = value + case dict() as item: item.update(value) + case list() as item: item[:] = value case _: target[key] = value case dict(), str() as key: @@ -141,8 +141,8 @@ def __call__(self, target, path, value, **kwargs): case list(), int() as index if index < len(target): match target[index]: - case dict() as t: t.update(value) - case list() as l: l[:] = value + case dict() as item: item.update(value) + case list() as item: item[:] = value case _: target[index] = value case list(), '*': diff --git a/src/hermes/model/path.py b/src/hermes/model/path.py index 90a51b0d..db11190e 100644 --- a/src/hermes/model/path.py +++ b/src/hermes/model/path.py @@ -220,9 +220,9 @@ def _find_setter(self, target: dict | list, path: 'ContextPath', value: t.Any = case list(): filter['type'] = 'list' case dict(): filter['type'] = 'map' elif path._type is list: - filter['type'] = 'list' + filter['type'] = 'list' elif path._type is dict: - filter['type'] = 'map' + filter['type'] = 'map' if ep := kwargs.get('ep', None): filter['ep'] = ep @@ -237,14 +237,14 @@ def _set_item(self, target: dict | list, path: 'ContextPath', value: t.Any, **kw match target, path._item: case list(), int() as index if index < len(target): match target[index]: - case dict() as t: t.update(value) - case list() as l: l[:] = value + case dict() as item: item.update(value) + case list() as item: item[:] = value case _: target[index] = value case dict(), str() as key if key in target: match target[key]: - case dict() as t: t.update(value) - case list() as l: l[:] = value + case dict() as item: item.update(value) + case list() as item: item[:] = value case _: target[key] = value case dict(), str() as key: @@ -267,7 +267,10 @@ def _set_item(self, target: dict | list, path: 'ContextPath', value: t.Any, **kw return value - def resolve(self, target: list | dict, create: bool = False, query: t.Any = None) -> ('ContextPath', list | dict, 'ContextPath'): + def resolve(self, + target: list | dict, + create: bool = False, + query: t.Any = None) -> ('ContextPath', list | dict, 'ContextPath'): """ Resolve a given path relative to a given target. 
@@ -349,7 +352,7 @@ def update(self, target: t.Dict[str, t.Any] | t.List, value: t.Any, tags: t.Opti prefix.set_item(_target, tail, value, **kwargs) if tags is not None and kwargs: tags[str(self)] = kwargs - except (KeyError, IndexError, TypeError, ValueError) as e: + except (KeyError, IndexError, TypeError, ValueError): raise errors.MergeError(self, _target, value, **kwargs) @classmethod diff --git a/test/hermes_test/commands/harvest/test_cff.py b/test/hermes_test/commands/harvest/test_cff.py index 06ba5a1f..3837fedd 100644 --- a/test/hermes_test/commands/harvest/test_cff.py +++ b/test/hermes_test/commands/harvest/test_cff.py @@ -6,7 +6,6 @@ # SPDX-FileContributor: Michael Meinel import pathlib -from collections import deque import json from ruamel.yaml import YAML From a8de56da53e732b4925c279c8fdc49deca8e5e4b Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Wed, 11 Jan 2023 13:22:29 +0100 Subject: [PATCH 36/52] Remove hint logger from configuration --- pyproject.toml | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 089ab071..702eb5df 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -109,12 +109,6 @@ level = "DEBUG" filename = "hermes-audit.md" mode = "w" -[tool.hermes.logging.handlers.hintfile] -class = "logging.FileHandler" -formatter = "plain" -level = "DEBUG" -filename = "quickfix.sh" - [tool.hermes.logging.loggers.cli] level = "DEBUG" handlers = ["terminal"] @@ -127,11 +121,6 @@ handlers = ["terminal", "logfile"] level = "DEBUG" handlers = ["terminal", "auditfile"] -[tool.hermes.logging.loggers.audit.hint] -level = "DEBUG" -propagate = false -handlers = ["terminal", "hintfile"] - [tool.taskipy.tasks] docs-build = "poetry run sphinx-build -M html docs/source docs/build -W" docs-clean = "poetry run sphinx-build -M clean docs/source docs/build" From 0aa1609e49fa2539ba469b48dd1fda13f5e5795f Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Wed, 11 Jan 2023 13:22:45 +0100 Subject: [PATCH 37/52] Remove hint logging from git harvester --- src/hermes/commands/harvest/git.py | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py index 94c3a9f2..864105d7 100644 --- a/src/hermes/commands/harvest/git.py +++ b/src/hermes/commands/harvest/git.py @@ -234,32 +234,6 @@ def _audit_authors(authors, audit_log: logging.Logger): audit_log.info('') audit_log.info('``` .mailmap') - # Provide some example configuration for the hint log - hint_log = audit_log.parent.getChild('hints') - hint_log.debug("# '.maillog' to resolve git ambiguities.") - - unmapped_email = [a for a in unmapped_authors if a.email[1:]] - if unmapped_email: - hint_log.debug('# Mapping of email addresses only. Format (one pair per line):') - hint_log.debug('# ') - - for author in unmapped_email: - for email in author.email[1:]: - hint_log.info("<%s> <%s>", str(author.email[0]), str(email)) - hint_log.debug('') - - unmapped_name = [a for a in unmapped_authors if a.name[1:]] - if unmapped_name: - hint_log.debug('# Mapping of user names. 
Format (one pair per line):') - hint_log.debug('# Real Name nickname') - hint_log.debug('# Real Name Name, Real') - - for author in [a for a in unmapped_authors if a.name[1:]]: - for name in author.name[1:]: - hint_log.info('%s <%s> %s', str(author.name[0]), str(author.email[0]), str(name)) - - hint_log.info('') - audit_log.info('```') From fcd43ac65e7452278350e563269ff1908f81839b Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 13:22:48 +0100 Subject: [PATCH 38/52] Add missing dependency pyparsing --- poetry.lock | 17 ++++++++++++++++- pyproject.toml | 1 + 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 9753da5b..633d4cbf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -490,6 +490,17 @@ docopt = ">=0.6.2" python-dateutil = ">=2.8.0" "ruamel.yaml" = ">=0.16.0" +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + [[package]] name = "pyrsistent" version = "0.19.3" @@ -974,7 +985,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "f7532a1d95981f6e73e6d389e8ff96e6b5ade455d9ebf7660895682108499c6a" +content-hash = "2cdb639f26b5ef399e9a69be17fc502b13a5a1aacdb3fbe237bfcce00d722078" [metadata.files] alabaster = [ @@ -1276,6 +1287,10 @@ pykwalify = [ {file = "pykwalify-1.8.0-py2.py3-none-any.whl", hash = "sha256:731dfa87338cca9f559d1fca2bdea37299116e3139b73f78ca90a543722d6651"}, {file = "pykwalify-1.8.0.tar.gz", hash = "sha256:796b2ad3ed4cb99b88308b533fb2f559c30fa6efb4fa9fda11347f483d245884"}, ] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] pyrsistent = [ {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, diff --git a/pyproject.toml b/pyproject.toml index 089ab071..7854cdac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ click = "^8.1" jsonschema = "^3.0.0" cffconvert = "^2.0.0" toml = "^0.10.2" +pyparsing = "^3.0.9" # Packages for developers [tool.poetry.group.dev.dependencies] From 8a50d4d1307a9353cbc5f86006c1fd9fa7e36a87 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 13:32:17 +0100 Subject: [PATCH 39/52] Reduce accuracy of timestamp to seconds --- src/hermes/model/context.py | 2 +- .../hermes_test/model/test_harvest_context.py | 38 +++++++++++-------- 2 files changed, 24 insertions(+), 16 deletions(-) diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py index bd82e03d..7dcff9d0 100644 --- a/src/hermes/model/context.py +++ b/src/hermes/model/context.py @@ -195,7 +195,7 @@ def update(self, _key: str, _value: t.Any, **kwargs: t.Any): See :py:meth:`HermesContext.update` for more information. 
""" - ts = kwargs.pop('ts', datetime.datetime.now().isoformat()) + ts = kwargs.pop('ts', datetime.datetime.now().isoformat(timespec='seconds')) ep = kwargs.pop('ep', self._ep.name) if _key not in self._data: diff --git a/test/hermes_test/model/test_harvest_context.py b/test/hermes_test/model/test_harvest_context.py index 6459da0b..7ea9a1a2 100644 --- a/test/hermes_test/model/test_harvest_context.py +++ b/test/hermes_test/model/test_harvest_context.py @@ -25,7 +25,7 @@ def test_context_default(harvest_ctx): harvest_ctx.update('spam', 'eggs', test=True) assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_default'}] + ['eggs', {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_default'}] ] @@ -34,8 +34,10 @@ def test_context_update_append(harvest_ctx): harvest_ctx.update('spam', 'eggs', index=1) assert harvest_ctx._data['spam'] == [ - ['noodles', {'index': 0, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_update_append'}], - ['eggs', {'index': 1, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_update_append'}] + ['noodles', {'index': 0, + 'ts': datetime.now().isoformat(timespec='seconds'), + 'ep': 'test_context_update_append'}], + ['eggs', {'index': 1, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_update_append'}] ] @@ -44,7 +46,9 @@ def test_context_update_replace(harvest_ctx): harvest_ctx.update('spam', 'eggs', test=True) assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_update_replace'}] + ['eggs', {'test': True, + 'ts': datetime.now().isoformat(timespec='seconds'), + 'ep': 'test_context_update_replace'}] ] @@ -55,10 +59,10 @@ def test_context_bulk_flat(harvest_ctx): }, test=True) assert harvest_ctx._data['ans'] == [ - [42, {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_flat'}] + [42, {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_flat'}] ] assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_flat'}] + ['eggs', {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_flat'}] ] @@ -72,18 +76,20 @@ def test_context_bulk_complex(harvest_ctx): }, test=True) assert harvest_ctx._data['ans'] == [ - [42, {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_complex'}] + [42, {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_complex'}] ] assert harvest_ctx._data['author[0].name'] == [ - ['Monty Python', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), + ['Monty Python', {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_complex'}] ] assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), + ['eggs@spam.io', {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_complex'}] ] assert harvest_ctx._data['author[1].name'] == [ - ['Herr Mes', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_complex'}] + ['Herr Mes', {'test': True, + 'ts': datetime.now().isoformat(timespec='seconds'), + 'ep': 'test_context_bulk_complex'}] ] @@ -92,10 +98,12 @@ def 
test_context_bulk_replace(harvest_ctx): harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, test=True) assert harvest_ctx._data['author[0].name'] == [ - ['Herr Mes', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_replace'}] + ['Herr Mes', {'test': True, + 'ts': datetime.now().isoformat(timespec='seconds'), + 'ep': 'test_context_bulk_replace'}] ] assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'test': True, 'ts': pytest.approx(datetime.now().isoformat()), + ['eggs@spam.io', {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_replace'}] ] @@ -105,11 +113,11 @@ def test_context_bulk_append(harvest_ctx): harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, index=1) assert harvest_ctx._data['author[0].name'] == [ - ['Monty Python', {'index': 0, 'ts': pytest.approx(datetime.now().isoformat()), + ['Monty Python', {'index': 0, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_append'}], - ['Herr Mes', {'index': 1, 'ts': pytest.approx(datetime.now().isoformat()), 'ep': 'test_context_bulk_append'}] + ['Herr Mes', {'index': 1, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_append'}] ] assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'index': 1, 'ts': pytest.approx(datetime.now().isoformat()), + ['eggs@spam.io', {'index': 1, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_append'}] ] From 12c8f3c537a01780e4e345b69c0877eec13d0fb0 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 11 Jan 2023 13:37:04 +0100 Subject: [PATCH 40/52] fix(docs): fix wrong Sphinx domains in docstrings --- src/hermes/commands/harvest/git.py | 4 ++-- src/hermes/model/path.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py index 33f4d7a6..8dbc4949 100644 --- a/src/hermes/commands/harvest/git.py +++ b/src/hermes/commands/harvest/git.py @@ -80,7 +80,7 @@ def update(self, name=None, email=None, ts=None): def merge(self, other: 'ContributorData'): """ - Merge another :ref:`ContributorData` instance into this one. + Merge another :py:class:`ContributorData` instance into this one. All attributes will be merged yet kept unique if required. @@ -135,7 +135,7 @@ class NodeRegister: """ Helper class to unify Git commit authors / contributors. - This class keeps track of all registered instances and merges two :ref:`ContributorData` instances if some + This class keeps track of all registered instances and merges two :py:class:`ContributorData` instances if some attributes match. """ diff --git a/src/hermes/model/path.py b/src/hermes/model/path.py index db11190e..9474ad81 100644 --- a/src/hermes/model/path.py +++ b/src/hermes/model/path.py @@ -161,7 +161,7 @@ def new(self) -> t.Any: """ Create a new instance of the container this node represents. - For this to work, the node need to have at least on child node derive (e.g., by using `self["child"]'). + For this to work, the node need to have at least on child node derive (e.g., by using ``self["child"]``). 
""" if self._type is not None: return self._type() From 8ff4d4c055a99208a685ef819a73b47025f70f7d Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 11 Jan 2023 13:00:13 +0100 Subject: [PATCH 41/52] fix: ignore more HERMES cli outputs in git --- .gitignore | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 3f9539fb..c3ad18e1 100644 --- a/.gitignore +++ b/.gitignore @@ -11,7 +11,6 @@ __pycache__ .coverage htmlcov .pytest_cache -.hermes docs/source/api docs/build/ @@ -20,3 +19,9 @@ docs/build/ .idea/ .venv/ dist/ + +# HERMES workflow specifics +.hermes +hermes-audit.md +hermes.log +quickfix.sh From 9275646df9bba25043869968112b360e340be8cb Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 11 Jan 2023 13:02:09 +0100 Subject: [PATCH 42/52] refactor(cli): rename from haggis to hermes #29 --- README.md | 6 +-- docs/source/dev/data_model.md | 2 +- pyproject.toml | 2 +- src/hermes/__main__.py | 4 +- src/hermes/cli.py | 10 ++--- .../commands/harvest/test_codemeta.py | 6 +-- test/hermes_test/test_cli.py | 40 +++++++++---------- 7 files changed, 35 insertions(+), 35 deletions(-) diff --git a/README.md b/README.md index a20deb50..a0ba921f 100644 --- a/README.md +++ b/README.md @@ -37,12 +37,12 @@ This project uses ## Usage -The `haggis` application provides the entry point for the HERMES workflow. +The `hermes` application provides the entry point for the HERMES workflow. After installation, you can run it from your command line environment: ```shell -haggis --help -haggis harvest +hermes --help +hermes harvest ``` You can also call the `hermes` package as Python module: diff --git a/docs/source/dev/data_model.md b/docs/source/dev/data_model.md index 66a7b38a..a5f4c727 100644 --- a/docs/source/dev/data_model.md +++ b/docs/source/dev/data_model.md @@ -10,7 +10,7 @@ SPDX-FileContributor: Michael Meinel # HERMES Data Model -*haggis* uses an internal data model to store the output of the different stages. +*hermes* uses an internal data model to store the output of the different stages. All the data is collected in a directory called `.hermes` located in the root of the project directory. You should not need to interact with this data directly. 
diff --git a/pyproject.toml b/pyproject.toml index 7854cdac..c0e48621 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ sphinxext-opengraph = "^0.6.3" reuse = "^1.0.0" [tool.poetry.plugins.console_scripts] -haggis = "hermes.cli:haggis" +hermes = "hermes.cli:main" [tool.poetry.plugins."hermes.harvest"] 000_cff = "hermes.commands.harvest.cff:harvest_cff" diff --git a/src/hermes/__main__.py b/src/hermes/__main__.py index 4c599a23..b9400fcb 100644 --- a/src/hermes/__main__.py +++ b/src/hermes/__main__.py @@ -4,8 +4,8 @@ # SPDX-FileContributor: Michael Meinel -from hermes.cli import haggis +from hermes.cli import main if __name__ == '__main__': - haggis() + main() diff --git a/src/hermes/cli.py b/src/hermes/cli.py index 8e9847b5..81cc28ba 100644 --- a/src/hermes/cli.py +++ b/src/hermes/cli.py @@ -143,7 +143,7 @@ def _process_result(value: t.Any) -> t.Any: @click.option("--post", is_flag=True, default=False) @click.option('--path', default=pathlib.Path('./'), help='Working path', type=pathlib.Path) @click.pass_context -def haggis(ctx: click.Context, *args, **kwargs) -> None: +def main(ctx: click.Context, *args, **kwargs) -> None: """ HERMES aggregated interface script @@ -153,7 +153,7 @@ def haggis(ctx: click.Context, *args, **kwargs) -> None: pass -haggis.add_command(workflow.harvest) -haggis.add_command(workflow.process) -haggis.add_command(workflow.deposit) -haggis.add_command(workflow.post) +main.add_command(workflow.harvest) +main.add_command(workflow.process) +main.add_command(workflow.deposit) +main.add_command(workflow.post) diff --git a/test/hermes_test/commands/harvest/test_codemeta.py b/test/hermes_test/commands/harvest/test_codemeta.py index 0b524159..f1cedde8 100644 --- a/test/hermes_test/commands/harvest/test_codemeta.py +++ b/test/hermes_test/commands/harvest/test_codemeta.py @@ -108,10 +108,10 @@ } ], "targetProduct": { - "@id": "/commandlineapplication/haggis", + "@id": "/commandlineapplication/hermes", "@type": "CommandLineApplication", - "executableName": "haggis", - "name": "haggis", + "executableName": "hermes", + "name": "hermes", "runtimePlatform": "Python 3" }, "url": [ diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index a083f860..d535a357 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -73,71 +73,71 @@ def test_workflow_invoke_with_cb(): cb_mock.assert_called_with(["spam", "eggs"]) -def test_haggis_full(): +def test_hermes_full(): runner = CliRunner() - result = runner.invoke(cli.haggis) + result = runner.invoke(cli.main) assert not result.exception -def test_haggis_harvest(): +def test_hermes_harvest(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('harvest', )) + result = runner.invoke(cli.main, args=('harvest', )) assert not result.exception -def test_haggis_process(): +def test_hermes_process(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('process', )) + result = runner.invoke(cli.main, args=('process', )) assert not result.exception -def test_haggis_with_deposit(): +def test_hermes_with_deposit(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--deposit', )) + result = runner.invoke(cli.main, args=('--deposit', )) assert not result.exception -def test_haggis_with_post(): +def test_hermes_with_post(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--post', )) + result = runner.invoke(cli.main, args=('--post', )) assert not result.exception -def test_haggis_with_path(): +def test_hermes_with_path(): runner = CliRunner() 
- result = runner.invoke(cli.haggis, args=('--path', './')) + result = runner.invoke(cli.main, args=('--path', './')) assert not result.exception -def test_haggis_with_deposit_and_post(): +def test_hermes_with_deposit_and_post(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--deposit', '--post')) + result = runner.invoke(cli.main, args=('--deposit', '--post')) assert not result.exception -def test_haggis_with_deposit_and_path(): +def test_hermes_with_deposit_and_path(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--deposit', '--path', './')) + result = runner.invoke(cli.main, args=('--deposit', '--path', './')) assert not result.exception -def test_haggis_with_path_and_post(): +def test_hermes_with_path_and_post(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--path', './', '--post')) + result = runner.invoke(cli.main, args=('--path', './', '--post')) assert not result.exception -def test_haggis_with_deposit_and_post_and_path(): +def test_hermes_with_deposit_and_post_and_path(): runner = CliRunner() - result = runner.invoke(cli.haggis, args=('--deposit', '--post', '--path', './')) + result = runner.invoke(cli.main, args=('--deposit', '--post', '--path', './')) assert not result.exception From 509a063f9a86d89ad8cba1a3a5ca22238fd1c8e7 Mon Sep 17 00:00:00 2001 From: Jeffrey Kelling Date: Wed, 11 Jan 2023 11:29:43 +0100 Subject: [PATCH 43/52] constex, harvest: ep -> harverster; ts -> timestamp --- src/hermes/commands/harvest/git.py | 32 ++++++------- src/hermes/model/context.py | 20 ++++---- .../hermes_test/model/test_harvest_context.py | 48 +++++++++---------- 3 files changed, 50 insertions(+), 50 deletions(-) diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py index 8dbc4949..ed76e599 100644 --- a/src/hermes/commands/harvest/git.py +++ b/src/hermes/commands/harvest/git.py @@ -37,19 +37,19 @@ class ContributorData: Stores contributor data information from Git history. """ - def __init__(self, name: str | t.List[str], email: str | t.List[str], ts: str | t.List[str]): + def __init__(self, name: str | t.List[str], email: str | t.List[str], timestamp: str | t.List[str]): """ Initialize a new contributor dataset. :param name: Name as returned by the `git log` command (i.e., with `.mailmap` applied). :param email: Email address as returned by the `git log` command (also with `.mailmap` applied). - :param ts: Timestamp when the respective commit was done. + :param timestamp: Timestamp when the respective commit was done. """ self.name = [] self.email = [] - self.ts = [] + self.timestamp = [] - self.update(name=name, email=email, ts=ts) + self.update(name=name, email=email, timestamp=timestamp) def __str__(self): parts = [] @@ -66,17 +66,17 @@ def _update_attr(self, target, value, unique=True): case str() if not unique or value not in target: target.append(value) - def update(self, name=None, email=None, ts=None): + def update(self, name=None, email=None, timestamp=None): """ Update the current contributor with the given data. :param name: New name to assign (addtionally). :param email: New email to assign (additionally). - :param ts: New timestamp to adapt time range. + :param timestamp: New timestamp to adapt time range. 
""" self._update_attr(self.name, name) self._update_attr(self.email, email) - self._update_attr(self.ts, ts, unique=False) + self._update_attr(self.timestamp, timestamp, unique=False) def merge(self, other: 'ContributorData'): """ @@ -88,7 +88,7 @@ def merge(self, other: 'ContributorData'): """ self.name += [n for n in other.name if n not in self.name] self.email += [e for e in other.email if e not in self.email] - self.ts += other.ts + self.timestamp += other.timestamp def to_codemeta(self) -> dict: """ @@ -110,10 +110,10 @@ def to_codemeta(self) -> dict: if self.email: res['contactPoint'] = [{'@type': 'ContactPoint', 'email': email} for email in self.email] - if self.ts: - ts_start, *_, ts_end = sorted(self.ts + [self.ts[0]]) - res['startTime'] = ts_start - res['endTime'] = ts_end + if self.timestamp: + timestamp_start, *_, timestamp_end = sorted(self.timestamp + [self.timestamp[0]]) + res['startTime'] = timestamp_start + res['endTime'] = timestamp_end return res @@ -127,8 +127,8 @@ def from_codemeta(cls, data) -> 'ContributorData': """ name = [data['name']] + data.get('alternateName', []) email = [data['email']] + [contact['email'] for contact in data.get('contactPoint', [])] - ts = [data['startTime'], data['endTime']] - return cls(name, email, ts) + timestamp = [data['startTime'], data['endTime']] + return cls(name, email, timestamp) class NodeRegister: @@ -313,11 +313,11 @@ def harvest_git(click_ctx: click.Context, ctx: HermesHarvestContext): log = p.stdout.decode(SHELL_ENCODING).split('\n') for line in log: try: - name, email, ts = line.split(_GIT_SEP) + name, email, timestamp = line.split(_GIT_SEP) except ValueError: continue - authors.update(email=email, name=name, ts=ts) + authors.update(email=email, name=name, timestamp=timestamp) _audit_authors(authors, logging.getLogger('audit.git')) diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py index 7dcff9d0..d3c2eba1 100644 --- a/src/hermes/model/context.py +++ b/src/hermes/model/context.py @@ -195,30 +195,30 @@ def update(self, _key: str, _value: t.Any, **kwargs: t.Any): See :py:meth:`HermesContext.update` for more information. 
""" - ts = kwargs.pop('ts', datetime.datetime.now().isoformat(timespec='seconds')) - ep = kwargs.pop('ep', self._ep.name) + timestamp = kwargs.pop('timestamp', datetime.datetime.now().isoformat(timespec='seconds')) + harvester = kwargs.pop('harvester', self._ep.name) if _key not in self._data: self._data[_key] = [] for entry in self._data[_key]: value, tag = entry - tag_ts = tag.pop('ts') - tag_ep = tag.pop('ep') + tag_timestamp = tag.pop('timestamp') + tag_harvester = tag.pop('harvester') if tag == kwargs: self._log.debug("Update %s: %s -> %s (%s)", _key, str(value), _value, str(tag)) entry[0] = _value - tag['ts'] = ts - tag['ep'] = ep + tag['timestamp'] = timestamp + tag['harvester'] = harvester break - tag['ts'] = tag_ts - tag['ep'] = tag_ep + tag['timestamp'] = tag_timestamp + tag['harvester'] = tag_harvester else: - kwargs['ts'] = ts - kwargs['ep'] = ep + kwargs['timestamp'] = timestamp + kwargs['harvester'] = harvester self._data[_key].append([_value, kwargs]) def _update_key_from(self, _key: ContextPath, _value: t.Any, **kwargs): diff --git a/test/hermes_test/model/test_harvest_context.py b/test/hermes_test/model/test_harvest_context.py index 7ea9a1a2..3b38ff3c 100644 --- a/test/hermes_test/model/test_harvest_context.py +++ b/test/hermes_test/model/test_harvest_context.py @@ -25,7 +25,7 @@ def test_context_default(harvest_ctx): harvest_ctx.update('spam', 'eggs', test=True) assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_default'}] + ['eggs', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_default'}] ] @@ -35,20 +35,20 @@ def test_context_update_append(harvest_ctx): assert harvest_ctx._data['spam'] == [ ['noodles', {'index': 0, - 'ts': datetime.now().isoformat(timespec='seconds'), - 'ep': 'test_context_update_append'}], - ['eggs', {'index': 1, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_update_append'}] + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_update_append'}], + ['eggs', {'index': 1, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_update_append'}] ] -def test_context_update_replace(harvest_ctx): +def test_context_update_rharvesterlace(harvest_ctx): harvest_ctx.update('spam', 'noodles', test=True) harvest_ctx.update('spam', 'eggs', test=True) assert harvest_ctx._data['spam'] == [ ['eggs', {'test': True, - 'ts': datetime.now().isoformat(timespec='seconds'), - 'ep': 'test_context_update_replace'}] + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_update_rharvesterlace'}] ] @@ -59,10 +59,10 @@ def test_context_bulk_flat(harvest_ctx): }, test=True) assert harvest_ctx._data['ans'] == [ - [42, {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_flat'}] + [42, {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_bulk_flat'}] ] assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_flat'}] + ['eggs', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_bulk_flat'}] ] @@ -76,20 +76,20 @@ def test_context_bulk_complex(harvest_ctx): }, test=True) assert harvest_ctx._data['ans'] == [ - [42, {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 
'test_context_bulk_complex'}] + [42, {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_bulk_complex'}] ] assert harvest_ctx._data['author[0].name'] == [ - ['Monty Python', {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), - 'ep': 'test_context_bulk_complex'}] + ['Monty Python', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_complex'}] ] assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), - 'ep': 'test_context_bulk_complex'}] + ['eggs@spam.io', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_complex'}] ] assert harvest_ctx._data['author[1].name'] == [ ['Herr Mes', {'test': True, - 'ts': datetime.now().isoformat(timespec='seconds'), - 'ep': 'test_context_bulk_complex'}] + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_complex'}] ] @@ -99,11 +99,11 @@ def test_context_bulk_replace(harvest_ctx): assert harvest_ctx._data['author[0].name'] == [ ['Herr Mes', {'test': True, - 'ts': datetime.now().isoformat(timespec='seconds'), - 'ep': 'test_context_bulk_replace'}] + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_replace'}] ] assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'test': True, 'ts': datetime.now().isoformat(timespec='seconds'), + ['eggs@spam.io', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_replace'}] ] @@ -113,11 +113,11 @@ def test_context_bulk_append(harvest_ctx): harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, index=1) assert harvest_ctx._data['author[0].name'] == [ - ['Monty Python', {'index': 0, 'ts': datetime.now().isoformat(timespec='seconds'), - 'ep': 'test_context_bulk_append'}], - ['Herr Mes', {'index': 1, 'ts': datetime.now().isoformat(timespec='seconds'), 'ep': 'test_context_bulk_append'}] + ['Monty Python', {'index': 0, 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_append'}], + ['Herr Mes', {'index': 1, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_bulk_append'}] ] assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'index': 1, 'ts': datetime.now().isoformat(timespec='seconds'), - 'ep': 'test_context_bulk_append'}] + ['eggs@spam.io', {'index': 1, 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_append'}] ] From b10d0d4b78e28e9eec0734b14f5c710110f59727 Mon Sep 17 00:00:00 2001 From: David Pape Date: Wed, 11 Jan 2023 13:16:34 +0100 Subject: [PATCH 44/52] Rename post to post[-_]process in various places --- src/hermes/cli.py | 4 ++-- src/hermes/commands/workflow.py | 2 +- test/hermes_test/test_cli.py | 16 ++++++++-------- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/hermes/cli.py b/src/hermes/cli.py index 81cc28ba..ca2fc93f 100644 --- a/src/hermes/cli.py +++ b/src/hermes/cli.py @@ -140,7 +140,7 @@ def _process_result(value: t.Any) -> t.Any: @click.group(cls=WorkflowCommand, invoke_without_command=True) @click.option("--deposit", is_flag=True, default=False) -@click.option("--post", is_flag=True, default=False) +@click.option("--post-process", is_flag=True, default=False) @click.option('--path', default=pathlib.Path('./'), help='Working path', 
type=pathlib.Path) @click.pass_context def main(ctx: click.Context, *args, **kwargs) -> None: @@ -156,4 +156,4 @@ def main(ctx: click.Context, *args, **kwargs) -> None: main.add_command(workflow.harvest) main.add_command(workflow.process) main.add_command(workflow.deposit) -main.add_command(workflow.post) +main.add_command(workflow.postprocess) diff --git a/src/hermes/commands/workflow.py b/src/hermes/commands/workflow.py index 84d2a688..58cdd72d 100644 --- a/src/hermes/commands/workflow.py +++ b/src/hermes/commands/workflow.py @@ -117,7 +117,7 @@ def deposit(): @click.group(invoke_without_command=True) -def post(): +def post_process(): """ Post-process metadata after deposition """ diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index d535a357..e80ed17d 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -101,9 +101,9 @@ def test_hermes_with_deposit(): assert not result.exception -def test_hermes_with_post(): +def test_haggis_with_post_process(): runner = CliRunner() - result = runner.invoke(cli.main, args=('--post', )) + result = runner.invoke(cli.main, args=('--post-process', )) assert not result.exception @@ -115,9 +115,9 @@ def test_hermes_with_path(): assert not result.exception -def test_hermes_with_deposit_and_post(): +def test_hermes_with_deposit_and_post_process(): runner = CliRunner() - result = runner.invoke(cli.main, args=('--deposit', '--post')) + result = runner.invoke(cli.main, args=('--deposit', '--post-process')) assert not result.exception @@ -129,15 +129,15 @@ def test_hermes_with_deposit_and_path(): assert not result.exception -def test_hermes_with_path_and_post(): +def test_hermes_with_path_and_post_process(): runner = CliRunner() - result = runner.invoke(cli.main, args=('--path', './', '--post')) + result = runner.invoke(cli.main, args=('--path', './', '--post-process')) assert not result.exception -def test_hermes_with_deposit_and_post_and_path(): +def test_hermes_with_deposit_and_post_process_and_path(): runner = CliRunner() - result = runner.invoke(cli.main, args=('--deposit', '--post', '--path', './')) + result = runner.invoke(cli.main, args=('--deposit', '--post-process', '--path', './')) assert not result.exception From 0d44d876d2b5d1cd00e84432afbac9c4cc54109c Mon Sep 17 00:00:00 2001 From: David Pape Date: Wed, 11 Jan 2023 13:40:50 +0100 Subject: [PATCH 45/52] Spell noun with, verb without hyphen --- src/hermes/cli.py | 2 +- src/hermes/commands/process/cff.py | 2 +- src/hermes/commands/workflow.py | 4 ++-- test/hermes_test/test_cli.py | 16 ++++++++-------- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/hermes/cli.py b/src/hermes/cli.py index ca2fc93f..d7519fff 100644 --- a/src/hermes/cli.py +++ b/src/hermes/cli.py @@ -140,7 +140,7 @@ def _process_result(value: t.Any) -> t.Any: @click.group(cls=WorkflowCommand, invoke_without_command=True) @click.option("--deposit", is_flag=True, default=False) -@click.option("--post-process", is_flag=True, default=False) +@click.option("--postprocess", is_flag=True, default=False) @click.option('--path', default=pathlib.Path('./'), help='Working path', type=pathlib.Path) @click.pass_context def main(ctx: click.Context, *args, **kwargs) -> None: diff --git a/src/hermes/commands/process/cff.py b/src/hermes/commands/process/cff.py index cfba41e0..88dc2b9b 100644 --- a/src/hermes/commands/process/cff.py +++ b/src/hermes/commands/process/cff.py @@ -13,7 +13,7 @@ def add_name(ctx: CodeMetaContext, harvest_ctx: HermesHarvestContext): """ Augment each author 
with a `name` attribute (if not present). - This will allow better matching against the git authors and can be removed in a post-process step. + This will allow better matching against the git authors and can be removed in a post-processing step. :param ctx: The resulting context that should contain the harmonized data. :param harvest_ctx: The harvest context containing all raw harvested data. diff --git a/src/hermes/commands/workflow.py b/src/hermes/commands/workflow.py index 58cdd72d..d22de701 100644 --- a/src/hermes/commands/workflow.py +++ b/src/hermes/commands/workflow.py @@ -117,8 +117,8 @@ def deposit(): @click.group(invoke_without_command=True) -def post_process(): +def postprocess(): """ - Post-process metadata after deposition + Postprocess metadata after deposition """ click.echo("Post-processing") diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index e80ed17d..14daf260 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -101,9 +101,9 @@ def test_hermes_with_deposit(): assert not result.exception -def test_haggis_with_post_process(): +def test_haggis_with_postprocess(): runner = CliRunner() - result = runner.invoke(cli.main, args=('--post-process', )) + result = runner.invoke(cli.main, args=('--postprocess', )) assert not result.exception @@ -115,9 +115,9 @@ def test_hermes_with_path(): assert not result.exception -def test_hermes_with_deposit_and_post_process(): +def test_haggis_with_deposit_and_postprocess(): runner = CliRunner() - result = runner.invoke(cli.main, args=('--deposit', '--post-process')) + result = runner.invoke(cli.main, args=('--deposit', '--postprocess')) assert not result.exception @@ -129,15 +129,15 @@ def test_hermes_with_deposit_and_path(): assert not result.exception -def test_hermes_with_path_and_post_process(): +def test_haggis_with_path_and_postprocess(): runner = CliRunner() - result = runner.invoke(cli.main, args=('--path', './', '--post-process')) + result = runner.invoke(cli.main, args=('--path', './', '--postprocess')) assert not result.exception -def test_hermes_with_deposit_and_post_process_and_path(): +def test_haggis_with_deposit_and_postprocess_and_path(): runner = CliRunner() - result = runner.invoke(cli.main, args=('--deposit', '--post-process', '--path', './')) + result = runner.invoke(cli.main, args=('--deposit', '--postprocess', '--path', './')) assert not result.exception From 41bcec506ba5c5cbe0b0885c3dffa3e25c9974b1 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 14:19:21 +0100 Subject: [PATCH 46/52] Replace missing ep tag by harvester --- test/hermes_test/model/test_harvest_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/hermes_test/model/test_harvest_context.py b/test/hermes_test/model/test_harvest_context.py index 3b38ff3c..bce1482f 100644 --- a/test/hermes_test/model/test_harvest_context.py +++ b/test/hermes_test/model/test_harvest_context.py @@ -104,7 +104,7 @@ def test_context_bulk_replace(harvest_ctx): ] assert harvest_ctx._data['author[0].email'] == [ ['eggs@spam.io', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), - 'ep': 'test_context_bulk_replace'}] + 'harvester': 'test_context_bulk_replace'}] ] From 523863a80e16e33dd61ca64919df12c4a1ddacb8 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 14:21:49 +0100 Subject: [PATCH 47/52] Fix some more flake8 issues (lines too long) --- .../hermes_test/model/test_harvest_context.py | 24 ++++++++++++++----- 1 file changed, 18 
insertions(+), 6 deletions(-) diff --git a/test/hermes_test/model/test_harvest_context.py b/test/hermes_test/model/test_harvest_context.py index bce1482f..2b315167 100644 --- a/test/hermes_test/model/test_harvest_context.py +++ b/test/hermes_test/model/test_harvest_context.py @@ -25,7 +25,9 @@ def test_context_default(harvest_ctx): harvest_ctx.update('spam', 'eggs', test=True) assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_default'}] + ['eggs', {'test': True, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_default'}] ] @@ -37,7 +39,9 @@ def test_context_update_append(harvest_ctx): ['noodles', {'index': 0, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_update_append'}], - ['eggs', {'index': 1, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_update_append'}] + ['eggs', {'index': 1, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_update_append'}] ] @@ -59,10 +63,14 @@ def test_context_bulk_flat(harvest_ctx): }, test=True) assert harvest_ctx._data['ans'] == [ - [42, {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_bulk_flat'}] + [42, {'test': True, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_flat'}] ] assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_bulk_flat'}] + ['eggs', {'test': True, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_flat'}] ] @@ -76,7 +84,9 @@ def test_context_bulk_complex(harvest_ctx): }, test=True) assert harvest_ctx._data['ans'] == [ - [42, {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_bulk_complex'}] + [42, {'test': True, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_complex'}] ] assert harvest_ctx._data['author[0].name'] == [ ['Monty Python', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), @@ -115,7 +125,9 @@ def test_context_bulk_append(harvest_ctx): assert harvest_ctx._data['author[0].name'] == [ ['Monty Python', {'index': 0, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_bulk_append'}], - ['Herr Mes', {'index': 1, 'timestamp': datetime.now().isoformat(timespec='seconds'), 'harvester': 'test_context_bulk_append'}] + ['Herr Mes', {'index': 1, + 'timestamp': datetime.now().isoformat(timespec='seconds'), + 'harvester': 'test_context_bulk_append'}] ] assert harvest_ctx._data['author[0].email'] == [ ['eggs@spam.io', {'index': 1, 'timestamp': datetime.now().isoformat(timespec='seconds'), From a6db2a8e7987ca1f4d53c6a5fa90abf2097b2493 Mon Sep 17 00:00:00 2001 From: jkelling Date: Wed, 11 Jan 2023 14:26:10 +0100 Subject: [PATCH 48/52] test/hermes_test/model/test_harvest_context.py: Fix misplaced harvester Co-authored-by: Stephan Druskat --- test/hermes_test/model/test_harvest_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/hermes_test/model/test_harvest_context.py b/test/hermes_test/model/test_harvest_context.py index 2b315167..fd50dcc1 100644 --- a/test/hermes_test/model/test_harvest_context.py +++ b/test/hermes_test/model/test_harvest_context.py @@ -52,7 +52,7 @@ def 
test_context_update_rharvesterlace(harvest_ctx): assert harvest_ctx._data['spam'] == [ ['eggs', {'test': True, 'timestamp': datetime.now().isoformat(timespec='seconds'), - 'harvester': 'test_context_update_rharvesterlace'}] + 'harvester': 'test_context_update_replace'}] ] From e27de93e6ad360597e4ec4fe776dfdfad6d3ea64 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Wed, 11 Jan 2023 14:27:59 +0100 Subject: [PATCH 49/52] Fix typo --- test/hermes_test/model/test_harvest_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/hermes_test/model/test_harvest_context.py b/test/hermes_test/model/test_harvest_context.py index fd50dcc1..92e351e2 100644 --- a/test/hermes_test/model/test_harvest_context.py +++ b/test/hermes_test/model/test_harvest_context.py @@ -45,7 +45,7 @@ def test_context_update_append(harvest_ctx): ] -def test_context_update_rharvesterlace(harvest_ctx): +def test_context_update_replace(harvest_ctx): harvest_ctx.update('spam', 'noodles', test=True) harvest_ctx.update('spam', 'eggs', test=True) From dd66d6eafae890bca05daf105f68058b48128154 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 14:44:25 +0100 Subject: [PATCH 50/52] Remove unneccessary HTML template --- src/hermes/commands/workflow.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/src/hermes/commands/workflow.py b/src/hermes/commands/workflow.py index d22de701..78189c11 100644 --- a/src/hermes/commands/workflow.py +++ b/src/hermes/commands/workflow.py @@ -45,22 +45,6 @@ def harvest(click_ctx: click.Context): audit_log.info('') -_HTML_PREFIX = """ - - - Hermes Report - - -""" - - @click.group(invoke_without_command=True) def process(): """ From be159eb8312280ffe66ef83dfba0342d31ebfeec Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 15:20:59 +0100 Subject: [PATCH 51/52] Add commiter instead of setting some custom field --- src/hermes/commands/harvest/git.py | 2 +- src/hermes/commands/process/git.py | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/hermes/commands/harvest/git.py b/src/hermes/commands/harvest/git.py index af57cec9..2b9182e7 100644 --- a/src/hermes/commands/harvest/git.py +++ b/src/hermes/commands/harvest/git.py @@ -97,7 +97,7 @@ def to_codemeta(self) -> dict: :return: The CodeMeta representation of this dataset. 
""" res = { - '@type': ['Person', 'hermes:contributor'], + '@type': 'Person', } if self.name: diff --git a/src/hermes/commands/process/git.py b/src/hermes/commands/process/git.py index 7460c6a2..832fea66 100644 --- a/src/hermes/commands/process/git.py +++ b/src/hermes/commands/process/git.py @@ -24,6 +24,8 @@ def flag_authors(ctx: CodeMetaContext, harverst_ctx: HermesHarvestContext): audit_log.info('### Flag new authors') author_path = ContextPath('author') + contributor_path = ContextPath('contributor') + tags = {} try: data = harverst_ctx.get_data(tags=tags) @@ -36,10 +38,12 @@ def flag_authors(ctx: CodeMetaContext, harverst_ctx: HermesHarvestContext): author_key, target, path = author_path['*'].resolve(ctx._data, query=query) if author_key._item == '*': - contributor['projectRole'] = 'Contributor' audit_log.debug('- %s', contributor['name']) - - ctx.update(author_key, contributor, tags=tags) + if contributor_path not in ctx.keys(): + ctx.update(contributor_path, []) + ctx.update(contributor_path['*'], contributor, tags=tags) + else: + ctx.update(author_key, contributor, tags=tags) ctx.tags.update(tags) harverst_ctx.finish() From fce075e007a5bc3963847e8918f30df6c060bbec Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Wed, 11 Jan 2023 15:29:24 +0100 Subject: [PATCH 52/52] Apply suggestions from code review Co-authored-by: Stephan Druskat --- pyproject.toml | 2 -- src/hermes/model/context.py | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6a6debf0..c8f37635 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,8 +76,6 @@ hermes = "hermes.cli:main" 020_git = "hermes.commands.process.git:flag_authors" -[tool.hermes.harvest] -000_git.enabled = false [tool.hermes.logging.formatters.plain] format = "%(message)s" diff --git a/src/hermes/model/context.py b/src/hermes/model/context.py index d3c2eba1..2d4aa527 100644 --- a/src/hermes/model/context.py +++ b/src/hermes/model/context.py @@ -160,7 +160,7 @@ def store_cache(self): data_file = self.get_cache('harvest', self._ep.name, create=True) self._log.debug("Writing cache to %s...", data_file) - json.dump(self._data, data_file.open('w'), indent=' ') + json.dump(self._data, data_file.open('w'), indent=2) def __enter__(self): self.load_cache()