From ad040467a2e4d7b85b8769b1976f6118fb9d220a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 21 Oct 2025 23:07:33 +0100 Subject: [PATCH 01/16] =?UTF-8?q?=F0=9F=9A=A7=20Added=20action=20classes?= =?UTF-8?q?=20for=20sender=20uploads?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 3 + simvue/offline/__init__.py | 3 + simvue/offline/actions.py | 451 +++++++++++++++++++++++++++++++++++++ simvue/offline/sender.py | 134 +++++++++++ simvue/utilities.py | 5 +- 5 files changed, 594 insertions(+), 2 deletions(-) create mode 100644 simvue/offline/__init__.py create mode 100644 simvue/offline/actions.py create mode 100644 simvue/offline/sender.py diff --git a/.gitignore b/.gitignore index ffa94acc..533bab88 100644 --- a/.gitignore +++ b/.gitignore @@ -149,3 +149,6 @@ offline/ Vagrantfile .sourcery* + +# Modules +!simvue/offline/ diff --git a/simvue/offline/__init__.py b/simvue/offline/__init__.py new file mode 100644 index 00000000..53f54e1e --- /dev/null +++ b/simvue/offline/__init__.py @@ -0,0 +1,3 @@ +from .sender import Sender + +__all__ = ["Sender"] diff --git a/simvue/offline/actions.py b/simvue/offline/actions.py new file mode 100644 index 00000000..9b40912b --- /dev/null +++ b/simvue/offline/actions.py @@ -0,0 +1,451 @@ +import abc +from collections.abc import Generator +from concurrent.futures import ThreadPoolExecutor +import json +import logging +import pathlib +import threading +import typing + +from simvue.api.objects import ( + Alert, + Artifact, + EventsAlert, + FileArtifact, + FileStorage, + Grid, + MetricsRangeAlert, + MetricsThresholdAlert, + ObjectArtifact, + Run, + S3Storage, + Storage, + Tag, + Tenant, + User, + UserAlert, +) +from simvue.api.objects.alert.fetch import AlertType +from simvue.api.objects.artifact.base import ArtifactBase +from simvue.api.objects.base import SimvueObject + +try: + from typing import override +except ImportError: + from typing_extensions import override # noqa: UP035 + + +class UploadAction: + """Defines the tasks to execute during upload.""" + + object_type: str = "" + logger: logging.Logger = logging.getLogger(__name__) + + @classmethod + def json_file(cls, cache_directory: pathlib.Path, offline_id: str) -> pathlib.Path: + return cache_directory.joinpath(f"{cls.object_type}", f"{offline_id}.json") + + @classmethod + def log_upload_failed( + cls, cache_directory: pathlib.Path, offline_id: str, data: dict[str, typing.Any] + ) -> None: + data["upload_failed"] = True + with cls.json_file(cache_directory, offline_id).open("w") as out_f: + json.dump(data, out_f, indent=2) + + @classmethod + def count(cls, cache_directory: pathlib.Path) -> int: + """Return number of objects to upload of this type.""" + return len(list(cls.uploadable_objects(cache_directory))) + + @classmethod + def pre_tasks( + cls, offline_id: str, data: dict[str, typing.Any], cache_directory: pathlib.Path + ) -> None: + """Pre-upload actions.""" + _ = offline_id + _ = data + _ = cache_directory + pass + + @classmethod + def post_tasks( + cls, + offline_id: str, + online_id: str, + data: dict[str, typing.Any], + cache_directory: pathlib.Path, + ) -> None: + """Post-upload actions.""" + _ = data + _ = online_id + cls.json_file(cache_directory, offline_id).unlink(missing_ok=True) + + @abc.abstractmethod + @classmethod + def initialise_object(cls, identifier: str, **data) -> SimvueObject: + """Initialise an instance.""" + pass + + @classmethod + def uploadable_objects(cls, cache_directory: 
pathlib.Path) -> Generator[str]: + """Iterate through uploadables.""" + for file in cache_directory.glob(f"{cls.object_type}/*.json"): + yield file.stem + + @classmethod + def _single_item_upload( + cls, + identifier: str, + id_mapping: dict[str, str], + cache_directory: pathlib.Path, + thread_lock: threading.Lock, + *, + throw_exceptions: bool = False, + retry_failed: bool = False, + ) -> None: + _json_file = cache_directory.joinpath(f"{cls.object_type}/{identifier}.json") + + with _json_file.open() as in_f: + _data = json.load(in_f) + + if _data.pop("upload_failed", False) and not retry_failed: + return + + try: + cls.pre_tasks( + offline_id=identifier, data=_data, cache_directory=cache_directory + ) + + _object = cls.initialise_object(identifier=identifier, **_data) + + _object.on_reconnect(id_mapping) + + if not isinstance(_object, ArtifactBase): + _object.commit() + + _object.read_only(True) + + except Exception as err: + if throw_exceptions: + raise err + cls.logger.exception( + "Failed to upload %s '%s'", cls.object_type, identifier + ) + cls.log_upload_failed(cache_directory, identifier, _data) + return + + if not _object.id: + cls.logger.error( + "No identifier retrieved for %s '%s'", + cls.object_type, + identifier, + ) + cls.log_upload_failed(cache_directory, identifier, _data) + return + + cls.logger.info( + "%s %s '%s'", + "Updated" if id_mapping.get(identifier) else "Created", + cls.object_type[:-1] if cls.object_type.endswith("s") else cls.object_type, + _object.id, + ) + + with thread_lock: + id_mapping[identifier] = _object.id + + cls.post_tasks( + offline_id=identifier, + online_id=_object.id, + data=_data, + cache_directory=cache_directory, + ) + + @classmethod + def upload( + cls, + id_mapping: dict[str, str], + cache_directory: pathlib.Path, + thread_lock: threading.Lock, + single_thread_limit: int, + max_thread_workers: int, + *, + throw_exceptions: bool = False, + retry_failed: bool = False, + ) -> None: + """Run upload of file category.""" + _iterable = cls.uploadable_objects(cache_directory) + if cls.count(cache_directory) < single_thread_limit: + for identifier in _iterable: + cls._single_item_upload( + identifier=identifier, + cache_directory=cache_directory, + thread_lock=thread_lock, + throw_exceptions=throw_exceptions, + retry_failed=retry_failed, + id_mapping=id_mapping, + ) + else: + with ThreadPoolExecutor( + max_workers=max_thread_workers, + thread_name_prefix="sender_session_upload", + ) as executor: + _results = executor.map( + lambda identifier: cls._single_item_upload( + identifier=identifier, + cache_directory=cache_directory, + thread_lock=thread_lock, + throw_exceptions=throw_exceptions, + retry_failed=retry_failed, + id_mapping=id_mapping, + ), + _iterable, + ) + # This will raise any exceptions encountered during sending + for result in _results: + pass + + +class ArtifactUploadAction(UploadAction): + object_type: str = "artifacts" + + @override + @classmethod + def post_tasks( + cls, + offline_id: str, + online_id: str, + data: dict[str, typing.Any], + cache_directory: pathlib.Path, + ) -> None: + _ = online_id + if not data.get("obj"): + return + cache_directory.joinpath(cls.object_type, f"{offline_id}.object").unlink() + + @override + @classmethod + def initialise_object( + cls, identifier: str | None, **data + ) -> FileArtifact | ObjectArtifact: + if not identifier: + if data.get("file_path"): + return FileArtifact.new(**data) + + return ObjectArtifact.new(**data) + + _sv_obj = Artifact(identifier=identifier) + _sv_obj.read_only(False) + 
return _sv_obj + + +class RunUploadAction(UploadAction): + object_type: str = "runs" + + @override + @classmethod + def initialise_object(cls, identifier: str | None, **data) -> Run: + if not identifier: + return Run.new(**data) + + _sv_obj = Run(identifier=identifier) + _sv_obj.read_only(False) + return _sv_obj + + @override + @classmethod + def post_tasks( + cls, + offline_id: str, + online_id: str, + data: dict[str, typing.Any], + cache_directory: pathlib.Path, + ) -> None: + super().post_tasks( + offline_id=offline_id, + online_id=online_id, + data=data, + cache_directory=cache_directory, + ) + + _ = cache_directory.joinpath("server_ids", f"{offline_id}.txt").write_text( + online_id + ) + + if not cache_directory.joinpath( + cls.object_type, f"{offline_id}.closed" + ).exists(): + return + + _alerts_list: list[str] = typing.cast("list[str]", data.get("alerts", [])) + + for _id in _alerts_list: + cache_directory.joinpath("server_ids", f"{_id}.txt").unlink() + + if _folder_id := data.get("folder_id"): + cache_directory.joinpath("server_ids", f"{_folder_id}.txt").unlink() + + cache_directory.joinpath("server_ids", f"{offline_id}.txt").unlink() + cache_directory.joinpath(cls.object_type, f"{offline_id}.closed").unlink() + cls.logger.info("Run '%s' closed - deleting cached copies...", offline_id) + + +class FolderUploadAction(UploadAction): + object_type: str = "folders" + + @classmethod + @override + def post_tasks( + cls, + offline_id: str, + online_id: str, + data: dict[str, typing.Any], + cache_directory: pathlib.Path, + ) -> None: + super().post_tasks( + offline_id=offline_id, + online_id=online_id, + data=data, + cache_directory=cache_directory, + ) + + _ = cache_directory.joinpath("server_ids", f"{offline_id}.txt").write_text( + online_id + ) + + +class TenantUploadAction(UploadAction): + object_type: str = "tenants" + + @classmethod + @override + def initialise_object(cls, identifier: str, **data) -> Tenant: + if not identifier: + return Tenant.new(**data) + + _sv_obj = Tenant(identifier=identifier) + _sv_obj.read_only(False) + return _sv_obj + + +class UserUploadAction(UploadAction): + object_type: str = "users" + + @classmethod + @override + def initialise_object(cls, identifier: str, **data) -> User: + if not identifier: + return User.new(**data) + + _sv_obj = User(identifier=identifier) + _sv_obj.read_only(False) + return _sv_obj + + +class TagUploadAction(UploadAction): + object_type: str = "tags" + + @classmethod + @override + def initialise_object(cls, identifier: str, **data) -> Tag: + if not identifier: + return Tag.new(**data) + + _sv_obj = Tag(identifier=identifier) + _sv_obj.read_only(False) + return _sv_obj + + @classmethod + @override + def post_tasks( + cls, + offline_id: str, + online_id: str, + data: dict[str, typing.Any], + cache_directory: pathlib.Path, + ) -> None: + super().post_tasks(offline_id, online_id, data, cache_directory) + _ = cache_directory.joinpath("server_ids", f"{offline_id}.txt").write_text( + online_id + ) + + +class AlertUploadAction(UploadAction): + object_type: str = "alerts" + + @classmethod + @override + def initialise_object(cls, identifier: str, **data) -> AlertType: + if not identifier: + _source: str = data["source"] + + if _source == "events": + return EventsAlert.new(**data) + elif _source == "metrics" and data.get("threshold"): + return MetricsThresholdAlert.new(**data) + elif _source == "metrics": + return MetricsRangeAlert.new(**data) + else: + return UserAlert.new(**data) + + _sv_obj = Alert(identifier=identifier) + 
_sv_obj.read_only(False) + return _sv_obj + + @classmethod + @override + def post_tasks( + cls, + offline_id: str, + online_id: str, + data: dict[str, typing.Any], + cache_directory: pathlib.Path, + ) -> None: + super().post_tasks(offline_id, online_id, data, cache_directory) + _ = cache_directory.joinpath("server_ids", f"{offline_id}.txt").write_text( + online_id + ) + + +class StorageUploadAction(UploadAction): + object_type: str = "storage" + + @classmethod + @override + def initialise_object(cls, identifier: str, **data) -> S3Storage | FileStorage: + if not identifier: + if data.get("config", {}).get("endpoint_url"): + return S3Storage.new(**data) + + return FileStorage.new(**data) + + _sv_obj = Storage(identifier=identifier) + _sv_obj.read_only(False) + return _sv_obj + + +class GridUploadAction(UploadAction): + object_type: str = "grids" + + @classmethod + @override + def initialise_object(cls, identifier: str, **data) -> Grid: + if not identifier: + return Grid.new(**data) + + _sv_obj = Grid(identifier=identifier) + _sv_obj.read_only(False) + return _sv_obj + + +UPLOAD_ORDER: tuple[type[UploadAction], ...] = ( + TenantUploadAction, + UserUploadAction, + StorageUploadAction, + FolderUploadAction, + TagUploadAction, + AlertUploadAction, + RunUploadAction, + GridUploadAction, + ArtifactUploadAction, +) diff --git a/simvue/offline/sender.py b/simvue/offline/sender.py new file mode 100644 index 00000000..fb0a6b77 --- /dev/null +++ b/simvue/offline/sender.py @@ -0,0 +1,134 @@ +"""Classes and methods for sending local objects to server. + +These are designed to be run with a cron task in cases where server connection +is either not possible on the simulation machine, or connection is limited. +""" + +from collections.abc import Iterable +import logging +import threading +import typing +import pydantic +import pathlib +import psutil + +from simvue.offline.actions import UPLOAD_ORDER +from simvue.config.user import SimvueConfiguration + +UploadItem = typing.Literal[ + "tenants", + "users", + "storage", + "folders", + "tags", + "alerts", + "runs", + "grids", + "artifacts", + "metrics", + "grid_metrics", + "events", +] + + +class Sender(pydantic.BaseModel): + cache_directory: pydantic.DirectoryPath + server_url: str + server_token: pydantic.SecretStr + max_workers: pydantic.PositiveInt = 5 + threading_threshold: pydantic.PositiveInt = 10 + throw_exceptions: bool = False + retry_failed_uploads: bool = False + _lock_path: pathlib.Path + _id_mapping: dict[str, str] + _thread_lock: threading.Lock = pydantic.PrivateAttr(threading.Lock()) + _logger: logging.Logger + _run_failed: bool = pydantic.PrivateAttr(False) + + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + frozen=True, extra="forbid" + ) + + @pydantic.model_validator(mode="before") + @classmethod + def set_credentials(cls, values: dict[str, object]) -> dict[str, object]: + """Set URL and token if unspecified.""" + _local_config: SimvueConfiguration = SimvueConfiguration.fetch( + server_url=values.get("server_url"), + server_token=values.get("server_token"), + ) + values["server_url"] = _local_config.server.url + values["server_token"] = _local_config.server.token + values["cache_directory"] = values.get( + "cache_directory", _local_config.offline.cache + ) + + return values + + def __post_init__(self) -> None: + """Creates required local directories.""" + self.cache_directory.joinpath("server_ids").mkdir(parents=True, exist_ok=True) + self._lock_path = self.cache_directory.joinpath("sender.lock") + 
self._id_mapping = { + file_path.name.split(".")[0]: file_path.read_text() + for file_path in self.cache_directory.glob("server_ids/*.txt") + } + self._logger = logging.getLogger(__name__) + + @property + def locked(self) -> bool: + """Check if dispatch locked by another sender.""" + if not self._lock_path: + raise RuntimeError("Expected lock file path, but none initialised.") + return self._lock_path.exists() and psutil.pid_exists( + int(self._lock_path.read_text()) + ) + + def _error(self, message: str, join_threads: bool = True) -> None: + """Raise an exception if necessary and log error + + Parameters + ---------- + message : str + message to display in exception or logger message + join_threads : bool + whether to join the threads on failure. This option exists to + prevent join being called in nested thread calls to this function. + + Raises + ------ + RuntimeError + exception throw + """ + self._logger.error(message) + + self._run_failed = True + + def _lock(self) -> None: + """Lock to this sender.""" + if self.locked: + raise RuntimeError("A sender is already running for this cache!") + _ = self._lock_path.write_text(f"{psutil.Process().pid}") + + def _release(self) -> None: + """Release lock to this sender.""" + self._lock_path.unlink() + + @pydantic.validate_call(config={"validate_default": True}) + def upload(self, objects_to_upload: Iterable[UploadItem] | None = None) -> None: + """Upload objects to server.""" + for action in UPLOAD_ORDER: + if objects_to_upload and action.object_type not in list(objects_to_upload): + continue + + _n_objects: int = action.count(self.cache_directory) + + action.upload( + cache_directory=self.cache_directory, + id_mapping=self._id_mapping, + thread_lock=self._thread_lock, + throw_exceptions=self.throw_exceptions, + retry_failed=self.retry_failed_uploads, + single_thread_limit=self.threading_threshold, + max_thread_workers=self.max_workers, + ) diff --git a/simvue/utilities.py b/simvue/utilities.py index e890698e..7b267e57 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -21,6 +21,7 @@ if typing.TYPE_CHECKING: from simvue.run import Run + from simvue.offline import Sender def find_first_instance_of_file( @@ -219,7 +220,7 @@ def parse_pydantic_error(error: pydantic.ValidationError) -> str: def skip_if_failed( - failure_attr: str, + failure_attr: str | None, ignore_exc_attr: str, on_failure_return: typing.Any | None = None, ) -> typing.Callable: @@ -248,7 +249,7 @@ def skip_if_failed( def decorator(class_func: typing.Callable) -> typing.Callable: @functools.wraps(class_func) - def wrapper(self: "Run", *args, **kwargs) -> typing.Any: + def wrapper(self: "Run | Sender", *args, **kwargs) -> typing.Any: if getattr(self, failure_attr, None) and getattr( self, ignore_exc_attr, None ): From a1c0b208d1b91a586853f9b890dfa9ea2d27a5cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 22 Oct 2025 10:38:12 +0100 Subject: [PATCH 02/16] =?UTF-8?q?=F0=9F=9A=A7=20Start=20tweaks=20to=20pass?= =?UTF-8?q?=20tests=20with=20new=20sender?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/api/objects/base.py | 9 +- simvue/bin/sender.py | 89 +++-- simvue/client.py | 3 +- simvue/config/user.py | 10 + simvue/models.py | 2 + simvue/offline/__init__.py | 3 - simvue/offline/sender.py | 134 -------- simvue/run.py | 7 +- simvue/sender.py | 324 ------------------ simvue/sender/__init__.py | 3 + simvue/{offline => sender}/actions.py | 384 ++++++++++++++++++---- simvue/sender/base.py | 107 
++++++ simvue/utilities.py | 5 +- tests/functional/test_run_class.py | 48 ++- tests/unit/test_event_alert.py | 19 +- tests/unit/test_events.py | 13 +- tests/unit/test_file_artifact.py | 18 +- tests/unit/test_file_storage.py | 11 +- tests/unit/test_folder.py | 23 +- tests/unit/test_grids.py | 16 +- tests/unit/test_metric_range_alert.py | 18 +- tests/unit/test_metric_threshold_alert.py | 16 +- tests/unit/test_metrics.py | 15 +- tests/unit/test_object_artifact.py | 7 +- tests/unit/test_run.py | 15 +- 25 files changed, 626 insertions(+), 673 deletions(-) delete mode 100644 simvue/offline/__init__.py delete mode 100644 simvue/offline/sender.py delete mode 100644 simvue/sender.py create mode 100644 simvue/sender/__init__.py rename simvue/{offline => sender}/actions.py (51%) create mode 100644 simvue/sender/base.py diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 730f41b8..08955462 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -22,7 +22,6 @@ from simvue.utilities import staging_merger from simvue.config.user import SimvueConfiguration from simvue.exception import ObjectNotFoundError -from simvue.version import __version__ from simvue.api.request import ( get as sv_get, get_paginated, @@ -226,13 +225,7 @@ def __init__( ) self._headers: dict[str, str] = ( - { - "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}", - "User-Agent": _user_agent or f"Simvue Python client {__version__}", - "Accept-Encoding": "gzip", - } - if not self._offline - else {} + self._user_config.headers if not self._offline else {} ) self._params: dict[str, str] = {} diff --git a/simvue/bin/sender.py b/simvue/bin/sender.py index 649b9d6f..0d62ba68 100644 --- a/simvue/bin/sender.py +++ b/simvue/bin/sender.py @@ -1,44 +1,69 @@ -"""Send runs to server""" +"""Send locally cached data to server.""" import logging +import pathlib +import click + +from simvue.sender import Sender, UPLOAD_ORDER, UploadItem -from simvue.sender import sender, UPLOAD_ORDER -import argparse _logger = logging.getLogger(__name__) _logger.setLevel(logging.INFO) -def run() -> None: - parser = argparse.ArgumentParser(description="My script description") - parser.add_argument( - "-w", - "--max-workers", - type=int, - required=False, - default=5, - help="The maximum number of worker threads to use in parallel, by default 5", - ) - parser.add_argument( - "-n", - "--threading-threshold", - type=int, - required=False, - default=10, - help="The number of objects of a given type above which items will be sent to the server in parallel, by default 10", - ) - parser.add_argument( - "-o", - "--objects-to-upload", - type=str, - nargs="+", - required=False, - default=UPLOAD_ORDER, - help="The object types to upload, by default All", - ) - args = parser.parse_args() +@click.command("simvue-sender") +@click.option( + "--max-workers", + "-w", + type=int, + default=5, + required=False, + help="The maximum number of worker threads to use in parallel, by default 5", +) +@click.option( + "-n", + "--threading-threshold", + type=int, + required=False, + default=10, + help="The number of objects of a given type above which items will be sent to the server in parallel, by default 10", +) +@click.option( + "-o", + "--objects-to-upload", + type=str, + nargs=-1, + required=False, + default=UPLOAD_ORDER, + help="The object types to upload, by default All", +) +@click.option( + "-i", + "--cache-directory", + type=click.Path( + file_okay=False, + dir_okay=True, + exists=True, + writable=True, + 
path_type=pathlib.Path, + ), + help="Location of cache directory to use", + default=None, + required=False, +) +def run( + cache_directory: pathlib.Path | None, + objects_to_upload: list[UploadItem] | None, + threading_threshold: int, + max_workers: int, +) -> None: try: _logger.info("Starting Simvue Sender") - sender(**vars(args)) + _sender = Sender( + cache_directory=cache_directory, + max_workers=max_workers, + threading_threshold=threading_threshold, + ) + _sender.upload(objects_to_upload) except Exception as err: _logger.critical("Exception running sender: %s", str(err)) diff --git a/simvue/client.py b/simvue/client.py index 2130db38..67928525 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -92,8 +92,7 @@ def __init__( if not value: logger.warning(f"No {label} specified") - self._headers: dict[str, str] = { - "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}", + self._headers: dict[str, str] = self._user_config.headers | { "Accept-Encoding": "gzip", } diff --git a/simvue/config/user.py b/simvue/config/user.py index b1114102..d285918e 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -253,3 +253,13 @@ def config_file(cls) -> pathlib.Path: raise FileNotFoundError("Failed to find Simvue configuration file") return _config_file + + @property + def headers(self) -> dict[str, str]: + if not self.server.token: + raise ValueError("Cannot generate headers, no token provided.") + return { + "Authorization": f"Bearer {self.server.token.get_secret_value()}", + "User-Agent": f"Simvue Python client {__version__}", + "Accept-Encoding": "gzip", + } diff --git a/simvue/models.py b/simvue/models.py index 49c4ac19..7dec244d 100644 --- a/simvue/models.py +++ b/simvue/models.py @@ -9,6 +9,7 @@ NAME_REGEX: str = r"^[a-zA-Z0-9\-\_\s\/\.:]+$" METRIC_KEY_REGEX: str = r"^[a-zA-Z0-9\-\_\s\/\.:=><+\(\)]+$" DATETIME_FORMAT: str = "%Y-%m-%dT%H:%M:%S.%f" +OBJECT_ID: str = r"^[A-Za-z0-9]{22}$" MetadataKeyString = typing.Annotated[ str, pydantic.StringConstraints(pattern=r"^[\w\-\s\.]+$") @@ -17,6 +18,7 @@ MetricKeyString = typing.Annotated[ str, pydantic.StringConstraints(pattern=METRIC_KEY_REGEX) ] +ObjectID = typing.Annotated[str, pydantic.StringConstraints(pattern=OBJECT_ID)] def validate_timestamp(timestamp: str, raise_except: bool = True) -> bool: diff --git a/simvue/offline/__init__.py b/simvue/offline/__init__.py deleted file mode 100644 index 53f54e1e..00000000 --- a/simvue/offline/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .sender import Sender - -__all__ = ["Sender"] diff --git a/simvue/offline/sender.py b/simvue/offline/sender.py deleted file mode 100644 index fb0a6b77..00000000 --- a/simvue/offline/sender.py +++ /dev/null @@ -1,134 +0,0 @@ -"""Classes and methods for sending local objects to server. - -These are designed to be run with a cron task in cases where server connection -is either not possible on the simulation machine, or connection is limited. 
-""" - -from collections.abc import Iterable -import logging -import threading -import typing -import pydantic -import pathlib -import psutil - -from simvue.offline.actions import UPLOAD_ORDER -from simvue.config.user import SimvueConfiguration - -UploadItem = typing.Literal[ - "tenants", - "users", - "storage", - "folders", - "tags", - "alerts", - "runs", - "grids", - "artifacts", - "metrics", - "grid_metrics", - "events", -] - - -class Sender(pydantic.BaseModel): - cache_directory: pydantic.DirectoryPath - server_url: str - server_token: pydantic.SecretStr - max_workers: pydantic.PositiveInt = 5 - threading_threshold: pydantic.PositiveInt = 10 - throw_exceptions: bool = False - retry_failed_uploads: bool = False - _lock_path: pathlib.Path - _id_mapping: dict[str, str] - _thread_lock: threading.Lock = pydantic.PrivateAttr(threading.Lock()) - _logger: logging.Logger - _run_failed: bool = pydantic.PrivateAttr(False) - - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( - frozen=True, extra="forbid" - ) - - @pydantic.model_validator(mode="before") - @classmethod - def set_credentials(cls, values: dict[str, object]) -> dict[str, object]: - """Set URL and token if unspecified.""" - _local_config: SimvueConfiguration = SimvueConfiguration.fetch( - server_url=values.get("server_url"), - server_token=values.get("server_token"), - ) - values["server_url"] = _local_config.server.url - values["server_token"] = _local_config.server.token - values["cache_directory"] = values.get( - "cache_directory", _local_config.offline.cache - ) - - return values - - def __post_init__(self) -> None: - """Creates required local directories.""" - self.cache_directory.joinpath("server_ids").mkdir(parents=True, exist_ok=True) - self._lock_path = self.cache_directory.joinpath("sender.lock") - self._id_mapping = { - file_path.name.split(".")[0]: file_path.read_text() - for file_path in self.cache_directory.glob("server_ids/*.txt") - } - self._logger = logging.getLogger(__name__) - - @property - def locked(self) -> bool: - """Check if dispatch locked by another sender.""" - if not self._lock_path: - raise RuntimeError("Expected lock file path, but none initialised.") - return self._lock_path.exists() and psutil.pid_exists( - int(self._lock_path.read_text()) - ) - - def _error(self, message: str, join_threads: bool = True) -> None: - """Raise an exception if necessary and log error - - Parameters - ---------- - message : str - message to display in exception or logger message - join_threads : bool - whether to join the threads on failure. This option exists to - prevent join being called in nested thread calls to this function. 
- - Raises - ------ - RuntimeError - exception throw - """ - self._logger.error(message) - - self._run_failed = True - - def _lock(self) -> None: - """Lock to this sender.""" - if self.locked: - raise RuntimeError("A sender is already running for this cache!") - _ = self._lock_path.write_text(f"{psutil.Process().pid}") - - def _release(self) -> None: - """Release lock to this sender.""" - self._lock_path.unlink() - - @pydantic.validate_call(config={"validate_default": True}) - def upload(self, objects_to_upload: Iterable[UploadItem] | None = None) -> None: - """Upload objects to server.""" - for action in UPLOAD_ORDER: - if objects_to_upload and action.object_type not in list(objects_to_upload): - continue - - _n_objects: int = action.count(self.cache_directory) - - action.upload( - cache_directory=self.cache_directory, - id_mapping=self._id_mapping, - thread_lock=self._thread_lock, - throw_exceptions=self.throw_exceptions, - retry_failed=self.retry_failed_uploads, - single_thread_limit=self.threading_threshold, - max_thread_workers=self.max_workers, - ) diff --git a/simvue/run.py b/simvue/run.py index 5792a620..4b945183 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -202,12 +202,7 @@ def __init__( else self._user_config.metrics.system_metrics_interval ) self._headers: dict[str, str] = ( - { - "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}", - "Accept-Encoding": "gzip", - } - if mode != "offline" - else {} + self._user_config.headers if mode != "offline" else {} ) self._sv_obj: RunObject | None = None self._pid: int | None = 0 diff --git a/simvue/sender.py b/simvue/sender.py deleted file mode 100644 index 30e869d9..00000000 --- a/simvue/sender.py +++ /dev/null @@ -1,324 +0,0 @@ -""" -Simvue Sender -============== - -Function to send data cached by Simvue in Offline mode to the server. -""" - -import json -import pydantic -import logging -from concurrent.futures import ThreadPoolExecutor -import threading -import requests -import psutil -from simvue.config.user import SimvueConfiguration -import simvue.api.objects -from simvue.api.objects.artifact.base import ArtifactBase -from simvue.eco.emissions_monitor import CO2Monitor -from simvue.version import __version__ - -UPLOAD_ORDER: list[str] = [ - "tenants", - "users", - "storage", - "folders", - "tags", - "alerts", - "runs", - "grids", - "artifacts", - "metrics", - "grid_metrics", - "events", -] - -_logger = logging.getLogger(__name__) - - -def _log_upload_failed(file_path: pydantic.FilePath) -> None: - """Record that an object failed to upload in the object offline cache file. - - Parameters - ---------- - file_path : pydantic.FilePath - The path to the offline cache file for the object - """ - with file_path.open("r") as file: - _data = json.load(file) - _data["upload_failed"] = True - with file_path.open("w") as file: - json.dump(_data, file) - - -def upload_cached_file( - cache_dir: pydantic.DirectoryPath, - obj_type: str, - file_path: pydantic.FilePath, - id_mapping: dict[str, str], - throw_exceptions: bool, - retry_failed_uploads: bool, - lock: threading.Lock, -) -> None: - """Upload data stored in a cached file to the Simvue server. 
- - Parameters - ---------- - cache_dir : pydantic.DirectoryPath - The directory where cached files are stored - obj_type : str - The type of object which should be created for this cached file - file_path : pydantic.FilePath - The path to the cached file to upload - id_mapping : dict[str, str] - A mapping of offline to online object IDs - throw_exceptions : bool - Whether to throw exceptions, or just log them - retry_failed_uploads : bool - Whether to retry failed uploads or ignore them - lock : threading.Lock - A lock to prevent multiple threads accessing the id mapping directory at once - """ - _current_id = file_path.name.split(".")[0] - _data = json.load(file_path.open()) - _exact_type: str = _data.pop("obj_type") - - if _data.pop("upload_failed", False) and not retry_failed_uploads: - return - - try: - _instance_class = getattr(simvue.api.objects, _exact_type) - except AttributeError as error: - if throw_exceptions: - raise error - - _logger.error(f"Attempt to initialise unknown type '{_exact_type}'") - _log_upload_failed(file_path) - return - - # If it is an ObjectArtifact, need to load the object as bytes from a different file - if issubclass(_instance_class, simvue.api.objects.ObjectArtifact): - with open(file_path.parent.joinpath(f"{_current_id}.object"), "rb") as file: - _data["serialized"] = file.read() - try: - # We want to reconnect if there is an online ID stored for this file - if _online_id := id_mapping.get(_current_id): - obj_for_upload = _instance_class( - identifier=_online_id, _read_only=False, **_data - ) - else: - obj_for_upload = _instance_class.new(**_data) - - with lock: - obj_for_upload.on_reconnect(id_mapping) - - if not issubclass(_instance_class, ArtifactBase): - obj_for_upload.commit() - _new_id = obj_for_upload.id - - except Exception as error: - if "status 409" in str(error): - return - if throw_exceptions: - raise error - - _logger.error( - f"Error while committing '{_instance_class.__name__}': {str(error)}" - ) - _log_upload_failed(file_path) - return - if not _new_id: - _logger.error(f"Object of type '{_instance_class.__name__}' has no identifier") - _log_upload_failed(file_path) - return - - _logger.info( - f"{'Updated' if id_mapping.get(_current_id) else 'Created'} {_instance_class.__name__} '{_new_id}'" - ) - - file_path.unlink(missing_ok=True) - if issubclass(_instance_class, simvue.api.objects.ObjectArtifact): - file_path.parent.joinpath(f"{_current_id}.object").unlink() - - with lock: - id_mapping[_current_id] = _new_id - - if obj_type in {"alerts", "runs", "folders", "tags"}: - cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text(_new_id) - - if ( - obj_type == "runs" - and cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").exists() - ): - # Get alerts and folder created by this run - their IDs can be deleted - for id in _data.get("alerts", []): - cache_dir.joinpath("server_ids", f"{id}.txt").unlink() - if _folder_id := _data.get("folder_id"): - cache_dir.joinpath("server_ids", f"{_folder_id}.txt").unlink() - - cache_dir.joinpath("server_ids", f"{_current_id}.txt").unlink() - cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").unlink() - _logger.info(f"Run {_current_id} closed - deleting cached copies...") - - -def send_heartbeat( - file_path: pydantic.FilePath, - id_mapping: dict[str, str], - server_url: str, - headers: dict[str, str], -): - _offline_id = file_path.name.split(".")[0] - _online_id = id_mapping.get(_offline_id) - if not _online_id: - # Run has been closed - can just remove heartbeat and continue - 
file_path.unlink() - return - _logger.info(f"Sending heartbeat to run {_online_id}") - _response = requests.put( - f"{server_url}/runs/{_online_id}/heartbeat", - headers=headers, - ) - if _response.status_code == 200: - file_path.unlink() - else: - _logger.warning( - f"Attempting to send heartbeat to run {_online_id} returned status code {_response.status_code}." - ) - - -@pydantic.validate_call -def sender( - cache_dir: pydantic.DirectoryPath | None = None, - max_workers: int = 5, - threading_threshold: int = 10, - objects_to_upload: list[str] = UPLOAD_ORDER, - throw_exceptions: bool = False, - retry_failed_uploads: bool = False, -) -> dict[str, str]: - """Send data from a local cache directory to the Simvue server. - - Parameters - ---------- - cache_dir : pydantic.DirectoryPath - The directory where cached files are stored - max_workers : int - The maximum number of threads to use - threading_threshold : int - The number of cached files above which threading will be used - objects_to_upload : list[str] - Types of objects to upload, by default uploads all types of objects present in cache - throw_exceptions : bool, optional - Whether to throw exceptions as they are encountered in the sender, default is False (exceptions will be logged) - retry_failed_uploads : bool, optional - Whether to retry sending objects which previously failed, by default False - - Returns - ------- - id_mapping - mapping of local ID to server ID - """ - _user_config: SimvueConfiguration = SimvueConfiguration.fetch() - cache_dir = cache_dir or _user_config.offline.cache - - cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) - _lock_path = cache_dir.joinpath("sender.lock") - - # Check that no other sender is already currently running... - if _lock_path.exists() and psutil.pid_exists(int(_lock_path.read_text())): - raise RuntimeError("A sender is already running for this cache!") - - # Create lock file to prevent other senders running while this one isn't finished - _lock_path.write_text(str(psutil.Process().pid)) - - _id_mapping: dict[str, str] = { - file_path.name.split(".")[0]: file_path.read_text() - for file_path in cache_dir.glob("server_ids/*.txt") - } - _lock = threading.Lock() - _upload_order = [item for item in UPLOAD_ORDER if item in objects_to_upload] - # Glob all files to look in at the start, to prevent extra files being written while other types are being uploaded - _all_offline_files = { - obj_type: list(cache_dir.glob(f"{obj_type}/*.json")) - for obj_type in _upload_order - } - - for _obj_type in _upload_order: - _offline_files = _all_offline_files[_obj_type] - if len(_offline_files) < threading_threshold: - for file_path in _offline_files: - upload_cached_file( - cache_dir=cache_dir, - obj_type=_obj_type, - file_path=file_path, - id_mapping=_id_mapping, - throw_exceptions=throw_exceptions, - retry_failed_uploads=retry_failed_uploads, - lock=_lock, - ) - else: - with ThreadPoolExecutor( - max_workers=max_workers, thread_name_prefix="sender_session_upload" - ) as executor: - _results = executor.map( - lambda file_path: upload_cached_file( - cache_dir=cache_dir, - obj_type=_obj_type, - file_path=file_path, - id_mapping=_id_mapping, - throw_exceptions=throw_exceptions, - retry_failed_uploads=retry_failed_uploads, - lock=_lock, - ), - _offline_files, - ) - # This will raise any exceptions encountered during sending - for result in _results: - pass - - # Send heartbeats - _headers: dict[str, str] = { - "Authorization": f"Bearer {_user_config.server.token.get_secret_value()}", - 
"User-Agent": f"Simvue Python client {__version__}", - } - _heartbeat_files = list(cache_dir.glob("runs/*.heartbeat")) - if len(_heartbeat_files) < threading_threshold: - for _heartbeat_file in _heartbeat_files: - ( - send_heartbeat( - file_path=_heartbeat_file, - id_mapping=_id_mapping, - server_url=_user_config.server.url, - headers=_headers, - ), - ) - else: - with ThreadPoolExecutor( - max_workers=max_workers, thread_name_prefix="sender_heartbeat" - ) as executor: - _results = executor.map( - lambda _heartbeat_file: send_heartbeat( - file_path=_heartbeat_file, - id_mapping=_id_mapping, - server_url=_user_config.server.url, - headers=_headers, - ), - _heartbeat_files, - ) - - # If CO2 emissions are requested create a dummy monitor which just - # refreshes the CO2 intensity value if required. No emission metrics - # will be taken by the sender itself, values are assumed to be recorded - # by any offline runs being sent. - if _user_config.metrics.enable_emission_metrics: - CO2Monitor( - thermal_design_power_per_gpu=None, - thermal_design_power_per_cpu=None, - local_data_directory=cache_dir, - intensity_refresh_interval=_user_config.eco.intensity_refresh_interval, - co2_intensity=_user_config.eco.co2_intensity, - co2_signal_api_token=_user_config.eco.co2_signal_api_token, - ).check_refresh() - - # Remove lock file to allow another sender to start in the future - _lock_path.unlink() - return _id_mapping diff --git a/simvue/sender/__init__.py b/simvue/sender/__init__.py new file mode 100644 index 00000000..74340ecf --- /dev/null +++ b/simvue/sender/__init__.py @@ -0,0 +1,3 @@ +from .base import Sender, UPLOAD_ORDER, UploadItem + +__all__ = ["Sender", "UPLOAD_ORDER", "UploadItem"] diff --git a/simvue/offline/actions.py b/simvue/sender/actions.py similarity index 51% rename from simvue/offline/actions.py rename to simvue/sender/actions.py index 9b40912b..2c8fc009 100644 --- a/simvue/offline/actions.py +++ b/simvue/sender/actions.py @@ -1,19 +1,26 @@ import abc from collections.abc import Generator from concurrent.futures import ThreadPoolExecutor +import http import json import logging import pathlib import threading import typing +import requests + from simvue.api.objects import ( Alert, Artifact, + Events, EventsAlert, FileArtifact, FileStorage, + Folder, Grid, + GridMetrics, + Metrics, MetricsRangeAlert, MetricsThresholdAlert, ObjectArtifact, @@ -28,6 +35,10 @@ from simvue.api.objects.alert.fetch import AlertType from simvue.api.objects.artifact.base import ArtifactBase from simvue.api.objects.base import SimvueObject +from simvue.api.request import put as sv_put, get_json_from_response +from simvue.models import ObjectID +from simvue.config.user import SimvueConfiguration +from simvue.eco import CO2Monitor try: from typing import override @@ -40,6 +51,7 @@ class UploadAction: object_type: str = "" logger: logging.Logger = logging.getLogger(__name__) + singular_object: bool = True @classmethod def json_file(cls, cache_directory: pathlib.Path, offline_id: str) -> pathlib.Path: @@ -72,7 +84,7 @@ def pre_tasks( def post_tasks( cls, offline_id: str, - online_id: str, + online_id: ObjectID | None, data: dict[str, typing.Any], cache_directory: pathlib.Path, ) -> None: @@ -81,11 +93,14 @@ def post_tasks( _ = online_id cls.json_file(cache_directory, offline_id).unlink(missing_ok=True) - @abc.abstractmethod @classmethod - def initialise_object(cls, identifier: str, **data) -> SimvueObject: + @abc.abstractmethod + def initialise_object( + cls, online_id: ObjectID | None, **data + ) -> SimvueObject | 
None: """Initialise an instance.""" - pass + _ = online_id + _ = data @classmethod def uploadable_objects(cls, cache_directory: pathlib.Path) -> Generator[str]: @@ -117,9 +132,16 @@ def _single_item_upload( offline_id=identifier, data=_data, cache_directory=cache_directory ) - _object = cls.initialise_object(identifier=identifier, **_data) + _object = cls.initialise_object( + online_id=id_mapping.get(identifier), **_data + ) + + if not _object: + _out_msg: str = f"No initialiser defined for type '{cls.__name__}'" + raise RuntimeError(_out_msg) - _object.on_reconnect(id_mapping) + with thread_lock: + _object.on_reconnect(id_mapping) if not isinstance(_object, ArtifactBase): _object.commit() @@ -135,28 +157,39 @@ def _single_item_upload( cls.log_upload_failed(cache_directory, identifier, _data) return - if not _object.id: - cls.logger.error( - "No identifier retrieved for %s '%s'", - cls.object_type, - identifier, + if cls.singular_object: + if not _object.id: + cls.logger.error( + "No identifier retrieved for %s '%s'", + cls.object_type, + identifier, + ) + cls.log_upload_failed(cache_directory, identifier, _data) + return + + cls.logger.info( + "%s %s '%s'", + "Updated" if id_mapping.get(identifier) else "Created", + cls.object_type[:-1] + if cls.object_type.endswith("s") + else cls.object_type, + _object.id, ) - cls.log_upload_failed(cache_directory, identifier, _data) - return - - cls.logger.info( - "%s %s '%s'", - "Updated" if id_mapping.get(identifier) else "Created", - cls.object_type[:-1] if cls.object_type.endswith("s") else cls.object_type, - _object.id, - ) - with thread_lock: - id_mapping[identifier] = _object.id + with thread_lock: + id_mapping[identifier] = _object.id + else: + cls.logger.info( + "%s %s", + "Updated" if id_mapping.get(identifier) else "Created", + cls.object_type[:-1] + if cls.object_type.endswith("s") + else cls.object_type, + ) cls.post_tasks( offline_id=identifier, - online_id=_object.id, + online_id=_object.id if cls.singular_object else None, data=_data, cache_directory=cache_directory, ) @@ -209,33 +242,53 @@ def upload( class ArtifactUploadAction(UploadAction): object_type: str = "artifacts" + @override + @classmethod + def pre_tasks( + cls, + offline_id: str, + data: dict[str, typing.Any], + cache_directory: pathlib.Path, + ) -> None: + if data["obj_type"] != "ObjectArtifact": + return + with cache_directory.joinpath(cls.object_type, f"{offline_id}.object").open( + "rb" + ) as in_f: + data["serialized"] = in_f.read() + @override @classmethod def post_tasks( cls, offline_id: str, - online_id: str, + online_id: ObjectID | None, data: dict[str, typing.Any], cache_directory: pathlib.Path, ) -> None: _ = online_id - if not data.get("obj"): + super().post_tasks( + offline_id=offline_id, + online_id=online_id, + data=data, + cache_directory=cache_directory, + ) + if data["obj_type"] != "ObjectArtifact": return cache_directory.joinpath(cls.object_type, f"{offline_id}.object").unlink() @override @classmethod def initialise_object( - cls, identifier: str | None, **data + cls, online_id: ObjectID | None, **data ) -> FileArtifact | ObjectArtifact: - if not identifier: + if not online_id: if data.get("file_path"): return FileArtifact.new(**data) return ObjectArtifact.new(**data) - _sv_obj = Artifact(identifier=identifier) - _sv_obj.read_only(False) + _sv_obj = Artifact(identifier=online_id, _read_only=False, **data) return _sv_obj @@ -244,13 +297,11 @@ class RunUploadAction(UploadAction): @override @classmethod - def initialise_object(cls, identifier: str | None, 
**data) -> Run: - if not identifier: + def initialise_object(cls, online_id: ObjectID | None, **data) -> Run: + if not online_id: return Run.new(**data) - _sv_obj = Run(identifier=identifier) - _sv_obj.read_only(False) - return _sv_obj + return Run(identifier=online_id, _read_only=False, **data) @override @classmethod @@ -293,6 +344,14 @@ def post_tasks( class FolderUploadAction(UploadAction): object_type: str = "folders" + @classmethod + @override + def initialise_object(cls, online_id: ObjectID | None, **data) -> Folder: + if not online_id: + return Folder.new(**data) + + return Folder(identifier=online_id, _read_only=False, **data) + @classmethod @override def post_tasks( @@ -319,13 +378,11 @@ class TenantUploadAction(UploadAction): @classmethod @override - def initialise_object(cls, identifier: str, **data) -> Tenant: - if not identifier: + def initialise_object(cls, online_id: ObjectID | None, **data) -> Tenant: + if not online_id: return Tenant.new(**data) - _sv_obj = Tenant(identifier=identifier) - _sv_obj.read_only(False) - return _sv_obj + return Tenant(identifier=online_id, _read_only=False, **data) class UserUploadAction(UploadAction): @@ -333,13 +390,11 @@ class UserUploadAction(UploadAction): @classmethod @override - def initialise_object(cls, identifier: str, **data) -> User: - if not identifier: + def initialise_object(cls, online_id: ObjectID | None, **data) -> User: + if not online_id: return User.new(**data) - _sv_obj = User(identifier=identifier) - _sv_obj.read_only(False) - return _sv_obj + return User(identifier=online_id, _read_only=False, **data) class TagUploadAction(UploadAction): @@ -347,13 +402,11 @@ class TagUploadAction(UploadAction): @classmethod @override - def initialise_object(cls, identifier: str, **data) -> Tag: - if not identifier: + def initialise_object(cls, online_id: ObjectID | None, **data) -> Tag: + if not online_id: return Tag.new(**data) - _sv_obj = Tag(identifier=identifier) - _sv_obj.read_only(False) - return _sv_obj + return Tag(identifier=online_id, _read_only=False, **data) @classmethod @override @@ -375,8 +428,8 @@ class AlertUploadAction(UploadAction): @classmethod @override - def initialise_object(cls, identifier: str, **data) -> AlertType: - if not identifier: + def initialise_object(cls, online_id: ObjectID | None, **data) -> AlertType: + if not online_id: _source: str = data["source"] if _source == "events": @@ -388,9 +441,7 @@ def initialise_object(cls, identifier: str, **data) -> AlertType: else: return UserAlert.new(**data) - _sv_obj = Alert(identifier=identifier) - _sv_obj.read_only(False) - return _sv_obj + return Alert(identifier=online_id, _read_only=False, **data) @classmethod @override @@ -412,16 +463,16 @@ class StorageUploadAction(UploadAction): @classmethod @override - def initialise_object(cls, identifier: str, **data) -> S3Storage | FileStorage: - if not identifier: + def initialise_object( + cls, online_id: ObjectID | None, **data + ) -> S3Storage | FileStorage: + if not online_id: if data.get("config", {}).get("endpoint_url"): return S3Storage.new(**data) return FileStorage.new(**data) - _sv_obj = Storage(identifier=identifier) - _sv_obj.read_only(False) - return _sv_obj + return Storage(identifier=online_id, _read_only=False, **data) class GridUploadAction(UploadAction): @@ -429,16 +480,214 @@ class GridUploadAction(UploadAction): @classmethod @override - def initialise_object(cls, identifier: str, **data) -> Grid: - if not identifier: + def initialise_object(cls, online_id: ObjectID | None, **data) -> Grid: + if not 
online_id: return Grid.new(**data) - _sv_obj = Grid(identifier=identifier) - _sv_obj.read_only(False) - return _sv_obj + return Grid(identifier=online_id, _read_only=False, **data) + + +class MetricsUploadAction(UploadAction): + object_type: str = "metrics" + singular_object: bool = False + + @classmethod + @override + def initialise_object(cls, online_id: ObjectID | None, **data) -> Metrics: + _ = online_id + return Metrics.new(**data) + + +class GridMetricsUploadAction(UploadAction): + object_type: str = "grid_metrics" + singular_object: bool = False + + @classmethod + @override + def initialise_object(cls, online_id: ObjectID | None, **data) -> GridMetrics: + _ = online_id + return GridMetrics.new(**data) + + +class EventsUploadAction(UploadAction): + object_type: str = "events" + singular_object: bool = False + + @classmethod + @override + def initialise_object(cls, online_id: ObjectID | None, **data) -> Events: + _ = online_id + return Events.new(**data) + + +class HeartbeatUploadAction(UploadAction): + object_type: str = "heartbeat" + singular_object: bool = True + + @override + @classmethod + def initialise_object(cls, online_id: ObjectID | None, **data) -> None: + _ = online_id + _ = data + + @override + @classmethod + def pre_tasks( + cls, offline_id: str, data: dict[str, typing.Any], cache_directory: pathlib.Path + ) -> None: + _ = offline_id + _ = data + _ = cache_directory + pass + + @override + @classmethod + def uploadable_objects(cls, cache_directory: pathlib.Path) -> Generator[str]: + """Iterate through uploadables.""" + for file in cache_directory.glob("runs/*.heartbeat"): + yield file.stem + + @override + @classmethod + def _single_item_upload( + cls, + identifier: str, + id_mapping: dict[str, str], + cache_directory: pathlib.Path, + thread_lock: threading.Lock, + *, + throw_exceptions: bool = False, + retry_failed: bool = False, + ) -> None: + if not (_online_id := id_mapping.get(identifier)): + # Run has been closed - can just remove heartbeat and continue + cache_directory.joinpath(f"runs/{identifier}.heartbeat").unlink() + return + + _local_config: SimvueConfiguration = SimvueConfiguration.fetch() + + cls.logger.info("Sending heartbeat to run '%s'", identifier) + + _response: requests.Response = sv_put( + url=f"{_local_config.server.url}/runs/{_online_id}/heartbeat", + headers=_local_config.headers, + ) + + try: + _json_response = get_json_from_response( + expected_status=[http.HTTPStatus.OK], + scenario=f"Attempt to send heartbeat to run {_online_id}", + response=_response, + ) + except RuntimeError as e: + if throw_exceptions: + raise e + cls.logger.exception(e) + + @override + @classmethod + def post_tasks( + cls, + offline_id: str, + online_id: ObjectID | None, + data: dict[str, typing.Any], + cache_directory: pathlib.Path, + ) -> None: + pass + + +class CO2IntensityUploadAction(UploadAction): + object_type: str = "co2_intensity" + + @override + @classmethod + def initialise_object(cls, online_id: ObjectID | None, **data) -> None: + _ = online_id + _ = data + + @override + @classmethod + def pre_tasks( + cls, offline_id: str, data: dict[str, typing.Any], cache_directory: pathlib.Path + ) -> None: + _ = offline_id + _ = data + _ = cache_directory + + @override + @classmethod + def post_tasks( + cls, + offline_id: str, + online_id: ObjectID | None, + data: dict[str, typing.Any], + cache_directory: pathlib.Path, + ) -> None: + _ = offline_id + _ = data + _ = cache_directory + + @override + @classmethod + def uploadable_objects(cls, cache_directory: pathlib.Path) -> 
Generator[str]: + yield from () + + @override + @classmethod + def _single_item_upload( + cls, + identifier: str, + id_mapping: dict[str, str], + cache_directory: pathlib.Path, + thread_lock: threading.Lock, + *, + throw_exceptions: bool = False, + retry_failed: bool = False, + ) -> None: + _ = identifier + _ = id_mapping + _ = cache_directory + _ = thread_lock + + @override + @classmethod + def upload( + cls, + id_mapping: dict[str, str], + cache_directory: pathlib.Path, + thread_lock: threading.Lock, + single_thread_limit: int, + max_thread_workers: int, + *, + throw_exceptions: bool = False, + retry_failed: bool = False, + ) -> None: + _ = id_mapping + _ = thread_lock + _ = single_thread_limit + _ = max_thread_workers + + _local_config: SimvueConfiguration = SimvueConfiguration.fetch() + + if not _local_config.metrics.enable_emission_metrics: + return + + try: + CO2Monitor( + thermal_design_power_per_gpu=None, + thermal_design_power_per_cpu=None, + local_data_directory=cache_directory, + intensity_refresh_interval=_local_config.eco.intensity_refresh_interval, + co2_intensity=_local_config.eco.co2_intensity, + co2_signal_api_token=_local_config.eco.co2_signal_api_token, + ).check_refresh() + except (ValueError, RuntimeError) as e: + if throw_exceptions: + raise e + cls.logger.exception(e) -UPLOAD_ORDER: tuple[type[UploadAction], ...] = ( +UPLOAD_ACTION_ORDER: tuple[type[UploadAction], ...] = ( TenantUploadAction, UserUploadAction, StorageUploadAction, @@ -448,4 +697,9 @@ def initialise_object(cls, identifier: str, **data) -> Grid: RunUploadAction, GridUploadAction, ArtifactUploadAction, + MetricsUploadAction, + GridMetricsUploadAction, + EventsUploadAction, + HeartbeatUploadAction, + CO2IntensityUploadAction, ) diff --git a/simvue/sender/base.py b/simvue/sender/base.py new file mode 100644 index 00000000..876d71a1 --- /dev/null +++ b/simvue/sender/base.py @@ -0,0 +1,107 @@ +"""Classes and methods for sending local objects to server. + +These are designed to be run with a cron task in cases where server connection +is either not possible on the simulation machine, or connection is limited. 
+""" + +import logging +import threading +import typing +import pydantic +import psutil + +from simvue.sender.actions import UPLOAD_ACTION_ORDER +from simvue.config.user import SimvueConfiguration + +logger = logging.getLogger(__name__) + +UploadItem = typing.Literal[ + "tenants", + "users", + "storage", + "folders", + "tags", + "alerts", + "runs", + "grids", + "artifacts", + "metrics", + "grid_metrics", + "events", +] + +UPLOAD_ORDER: list[str] = [action.object_type for action in UPLOAD_ACTION_ORDER] + + +class Sender: + @pydantic.validate_call + def __init__( + self, + cache_directory: pydantic.DirectoryPath | None = None, + max_workers: pydantic.PositiveInt = 5, + threading_threshold: pydantic.PositiveInt = 10, + throw_exceptions: bool = False, + retry_failed_uploads: bool = False, + ) -> None: + """Creates required local directories.""" + _local_config: SimvueConfiguration = SimvueConfiguration.fetch() + self._cache_directory = cache_directory or _local_config.offline.cache + self._cache_directory.joinpath("server_ids").mkdir(parents=True, exist_ok=True) + self._throw_exceptions = throw_exceptions + self._threading_threshold = threading_threshold + self._retry_failed_uploads = retry_failed_uploads + self._max_workers = max_workers + self._lock_path = self._cache_directory.joinpath("sender.lock") + self._thread_lock = threading.Lock() + self._id_mapping = { + file_path.name.split(".")[0]: file_path.read_text() + for file_path in self._cache_directory.glob("server_ids/*.txt") + } + + @property + def locked(self) -> bool: + """Check if dispatch locked by another sender.""" + if not self._lock_path: + raise RuntimeError("Expected lock file path, but none initialised.") + return self._lock_path.exists() and psutil.pid_exists( + int(self._lock_path.read_text()) + ) + + @property + def id_mapping(self) -> dict[str, str]: + """Get the ID mapping from offline to online ID.""" + return self._id_mapping + + def _lock(self) -> None: + """Lock to this sender.""" + if self.locked: + raise RuntimeError("A sender is already running for this cache!") + _ = self._lock_path.write_text(f"{psutil.Process().pid}") + + def _release(self) -> None: + """Release lock to this sender.""" + self._lock_path.unlink() + + @pydantic.validate_call + def upload(self, objects_to_upload: list[UploadItem] | None = None) -> None: + """Upload objects to server.""" + self._lock() + + for action in UPLOAD_ACTION_ORDER: + if objects_to_upload and action.object_type not in objects_to_upload: + continue + + logger.info("Uploading %s", action.object_type) + + _n_objects: int = action.count(self._cache_directory) + + action.upload( + cache_directory=self._cache_directory, + id_mapping=self._id_mapping, + thread_lock=self._thread_lock, + throw_exceptions=self._throw_exceptions, + retry_failed=self._retry_failed_uploads, + single_thread_limit=self._threading_threshold, + max_thread_workers=self._max_workers, + ) + self._release() diff --git a/simvue/utilities.py b/simvue/utilities.py index 7b267e57..e890698e 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -21,7 +21,6 @@ if typing.TYPE_CHECKING: from simvue.run import Run - from simvue.offline import Sender def find_first_instance_of_file( @@ -220,7 +219,7 @@ def parse_pydantic_error(error: pydantic.ValidationError) -> str: def skip_if_failed( - failure_attr: str | None, + failure_attr: str, ignore_exc_attr: str, on_failure_return: typing.Any | None = None, ) -> typing.Callable: @@ -249,7 +248,7 @@ def skip_if_failed( def decorator(class_func: typing.Callable) -> 
typing.Callable: @functools.wraps(class_func) - def wrapper(self: "Run | Sender", *args, **kwargs) -> typing.Any: + def wrapper(self: "Run", *args, **kwargs) -> typing.Any: if getattr(self, failure_attr, None) and getattr( self, ignore_exc_attr, None ): diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 2654ff0a..8ac28d95 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -24,9 +24,9 @@ from simvue.eco.api_client import CO2SignalData, CO2SignalResponse from simvue.exception import ObjectNotFoundError, SimvueRunError from simvue.eco.emissions_monitor import TIME_FORMAT, CO2Monitor +from simvue.sender import Sender import simvue.run as sv_run import simvue.client as sv_cl -import simvue.sender as sv_send import simvue.config.user as sv_cfg from simvue.api.objects import Run as RunObject @@ -116,7 +116,9 @@ def test_run_with_emissions_offline(speedy_heartbeat, mock_co2_signal, create_pl run_created.config(enable_emission_metrics=True) time.sleep(5) # Run should continue, but fail to log metrics until sender runs and creates file - id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], throw_exceptions=True) + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], throw_exceptions=True) + _sender.upload() + id_mapping = _sender.id_mapping _run = RunObject(identifier=id_mapping[run_created.id]) _metric_names = [item[0] for item in _run.metrics] for _metric in ["emissions", "energy_consumed"]: @@ -126,7 +128,9 @@ def test_run_with_emissions_offline(speedy_heartbeat, mock_co2_signal, create_pl assert _delta_metric_name not in _metric_names # Sender should now have made a local file, and the run should be able to use it to create emissions metrics time.sleep(5) - id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], throw_exceptions=True) + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], throw_exceptions=True) + _sender.upload() + id_mapping = _sender.id_mapping _run.refresh() _metric_names = [item[0] for item in _run.metrics] client = sv_cl.Client() @@ -318,7 +322,9 @@ def test_log_metrics_offline( run.log_metrics(METRICS) time.sleep(1) - id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], throw_exceptions=True) + _sender.upload() + id_mapping = _sender.id_mapping time.sleep(1) if metric_type == "tensor": @@ -441,9 +447,11 @@ def test_visibility_offline( retention_period=os.environ.get("SIMVUE_TESTING_RETENTION_PERIOD", "2 mins"), ) _id = run.id - _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], throw_exceptions=True) + _sender.upload() + id_mapping = _sender.id_mapping run.close() - _retrieved_run = RunObject(identifier=_id_mapping.get(_id)) + _retrieved_run = RunObject(identifier=id_mapping.get(_id)) if visibility == "tenant": assert _retrieved_run.visibility.tenant @@ -478,7 +486,8 @@ def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) - run, _ = create_plain_run_offline run_name = run.name run.log_event(EVENT_MSG) - sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], throw_exceptions=True) + _sender.upload() client = sv_cl.Client() attempts: int = 0 @@ -488,7 +497,8 @@ 
def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) - not (event_data := client.get_events(client.get_run_id_from_name(run_name), count_limit=1)) ) and attempts < 5: time.sleep(1) - sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], max_workers=2, threading_threshold=10, throw_exceptions=True) + _sender.upload() attempts += 1 assert event_data[0].get("message", EVENT_MSG) @@ -496,8 +506,9 @@ def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) - @pytest.mark.run @pytest.mark.offline def test_offline_tags(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: - run, run_data = create_plain_run_offline - sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _, run_data = create_plain_run_offline + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], max_workers=2, threading_threshold=10, throw_exceptions=True) + _sender.upload() client = sv_cl.Client() tags = client.get_tags() @@ -557,7 +568,8 @@ def test_update_metadata_offline( # Try updating an already defined piece of metadata run.update_metadata({"a": 1}) - sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], max_workers=2, threading_threshold=10, throw_exceptions=True) + _sender.upload() client = sv_cl.Client() run_info = client.get_run(client.get_run_id_from_name(run_name)) @@ -945,7 +957,8 @@ def test_save_file_offline( "w", ) as out_f: out_f.write("updated file!") - sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], max_workers=2, threading_threshold=10, throw_exceptions=True) + _sender.upload() os.remove(out_name) client = sv_cl.Client() base_name = name or out_name.name @@ -1031,7 +1044,8 @@ def test_update_tags_offline( simvue_run.update_tags(["additional"]) - sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], max_workers=2, threading_threshold=10, throw_exceptions=True) + _sender.upload() client = sv_cl.Client() run_data = client.get_run(client.get_run_id_from_name(run_name)) @@ -1358,7 +1372,9 @@ def test_reconnect_functionality(mode, monkeypatch: pytest.MonkeyPatch) -> None: ) run_id = run.id if mode == "offline": - _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], max_workers=2, threading_threshold=10, throw_exceptions=True) + _sender.upload() + _id_mapping = _sender.id_mapping run_id = _id_mapping.get(run_id) client = simvue.Client() @@ -1372,7 +1388,9 @@ def test_reconnect_functionality(mode, monkeypatch: pytest.MonkeyPatch) -> None: run.log_event("Testing!") if mode == "offline": - _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], max_workers=2, threading_threshold=10, throw_exceptions=True) + _sender.upload() + _id_mapping = _sender.id_mapping _reconnected_run = client.get_run(run_id) assert dict(_reconnected_run.metrics)["test_metric"]["last"] == 1 diff --git a/tests/unit/test_event_alert.py b/tests/unit/test_event_alert.py index 
2e4d7722..764f7b54 100644 --- a/tests/unit/test_event_alert.py +++ b/tests/unit/test_event_alert.py @@ -5,7 +5,7 @@ import uuid from simvue.api.objects import Alert, EventsAlert -from simvue.sender import sender +from simvue.sender import Sender @pytest.mark.api @pytest.mark.online @@ -55,12 +55,13 @@ def test_event_alert_creation_offline(offline_cache_setup) -> None: assert _local_data.get("alert").get("pattern") == "completed" assert _local_data.get("name") == f"events_alert_{_uuid}" assert _local_data.get("notification") == "none" - - _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True) + + _sender = Sender(cache_directory=_alert._local_staging_file.parents[1], max_workers=1, threading_threshold=10, throw_exceptions=True) + _sender.upload(objects_to_upload=["alerts"]) time.sleep(1) # Get online ID and retrieve alert - _online_alert = Alert(_id_mapping.get(_alert.id)) + _online_alert = Alert(_sender.id_mapping.get(_alert.id)) assert _online_alert.source == "events" assert _online_alert.alert.frequency == 1 assert _online_alert.alert.pattern == "completed" @@ -106,11 +107,12 @@ def test_event_alert_modification_offline(offline_cache_setup) -> None: description=None ) _alert.commit() - _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True) + _sender = Sender(cache_directory=_alert._local_staging_file.parents[1], max_workers=1, threading_threshold=10, throw_exceptions=True) + _sender.upload(objects_to_upload=["alerts"]) time.sleep(1) # Get online ID and retrieve alert - _online_alert = Alert(_id_mapping.get(_alert.id)) + _online_alert = Alert(_sender.id_mapping.get(_alert.id)) assert _online_alert.source == "events" assert _online_alert.alert.frequency == 1 assert _online_alert.alert.pattern == "completed" @@ -118,6 +120,7 @@ def test_event_alert_modification_offline(offline_cache_setup) -> None: assert _online_alert.notification == "none" _new_alert = EventsAlert(_alert.id) + assert _new_alert._offline _new_alert.read_only(False) _new_alert.description = "updated!" _new_alert.commit() @@ -130,7 +133,9 @@ def test_event_alert_modification_offline(offline_cache_setup) -> None: _local_data = json.load(in_f) assert _local_data.get("description") == "updated!" - sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True) + _sender = Sender(cache_directory=_alert._local_staging_file.parents[1], max_workers=1, threading_threshold=10, throw_exceptions=True) + _sender.upload(objects_to_upload=["alerts"]) + time.sleep(1) _online_alert.refresh() diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py index 1839f067..f407e305 100644 --- a/tests/unit/test_events.py +++ b/tests/unit/test_events.py @@ -7,7 +7,7 @@ from simvue.api.objects import Events, Folder, Run from simvue.models import DATETIME_FORMAT -from simvue.sender import sender +from simvue.sender import Sender @pytest.mark.api @pytest.mark.online @@ -55,15 +55,16 @@ def test_events_creation_offline(offline_cache_setup) -> None: assert _local_data.get("run") == _run.id assert _local_data.get("events")[0].get("message") == "This is a test!" 
assert _local_data.get("events")[0].get("timestamp") == _timestamp - - _id_mapping = sender(_events._local_staging_file.parents[1], 1, 10, ["folders", "runs", "events"], throw_exceptions=True) + + _sender = Sender(cache_directory=_events._local_staging_file.parents[1], max_workers=1, threading_threshold=10, throw_exceptions=True) + _sender.upload(["folders", "runs", "events"]) time.sleep(1) # Get online version of events - _online_events = Events(_id_mapping.get(_events.id)) - _event_content = next(_online_events.get(run_id=_id_mapping.get(_run.id))) + _online_events = Events(_sender.id_mapping.get(_events.id)) + _event_content = next(_online_events.get(run_id=_sender.id_mapping.get(_run.id))) assert _event_content.message == "This is a test!" assert _event_content.timestamp == _timestamp _run.delete() - _folder.delete(recursive=True, delete_runs=True, runs_only=False) \ No newline at end of file + _folder.delete(recursive=True, delete_runs=True, runs_only=False) diff --git a/tests/unit/test_file_artifact.py b/tests/unit/test_file_artifact.py index ad736c3a..0c778d17 100644 --- a/tests/unit/test_file_artifact.py +++ b/tests/unit/test_file_artifact.py @@ -8,9 +8,8 @@ import json from simvue.api.objects import FileArtifact, Run, Artifact from simvue.api.objects.folder import Folder -from simvue.sender import sender -from simvue.client import Client -import logging +from simvue.sender import Sender + @pytest.mark.api @pytest.mark.online @@ -103,13 +102,14 @@ def test_file_artifact_creation_offline(offline_cache_setup, snapshot) -> None: # If snapshot, check artifact definition file and a copy of the actual file exist in staging area assert len(list(_artifact._local_staging_file.parent.iterdir())) == 2 if snapshot else 1 - _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10, throw_exceptions=True) + _sender = Sender(cache_directory=pathlib.Path(offline_cache_setup.name), max_workers=1, threading_threshold=10, throw_exceptions=True) + _sender.upload() time.sleep(1) # Check file(s) deleted after upload assert len(list(_artifact._local_staging_file.parent.iterdir())) == 0 - _online_artifact = Artifact(_id_mapping[_artifact.id]) + _online_artifact = Artifact(_sender.id_mapping[_artifact.id]) assert _online_artifact.name == _artifact.name _content = b"".join(_online_artifact.download_content()).decode("UTF-8") assert _content == f"Hello World! 
{_uuid}" @@ -159,13 +159,15 @@ def test_file_artifact_creation_offline_updated(offline_cache_setup, caplog, sna if not snapshot: with pytest.raises(RuntimeError, match="The SHA256 you specified did not match the calculated checksum."): - _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10, throw_exceptions=True) + _sender = Sender(cache_directory=pathlib.Path(offline_cache_setup.name), max_workers=1, threading_threshold=10, throw_exceptions=True) + _sender.upload() return else: - _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10, throw_exceptions=True) + _sender = Sender(cache_directory=pathlib.Path(offline_cache_setup.name), max_workers=1, threading_threshold=10, throw_exceptions=True) + _sender.upload() time.sleep(1) - _online_artifact = Artifact(_id_mapping[_artifact.id]) + _online_artifact = Artifact(_sender.id_mapping[_artifact.id]) assert _online_artifact.name == _artifact.name _content = b"".join(_online_artifact.download_content()).decode("UTF-8") # Since it was snapshotted, should be the state of the file before it was changed diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py index 1c5c9ce6..cbd74a34 100644 --- a/tests/unit/test_file_storage.py +++ b/tests/unit/test_file_storage.py @@ -4,7 +4,7 @@ import uuid from simvue.api.objects import FileStorage -from simvue.sender import sender +from simvue.sender import Sender @pytest.mark.api @pytest.mark.online @@ -37,13 +37,14 @@ def test_create_file_storage_offline(offline_cache_setup) -> None: assert _local_data.get("name") == _uuid assert _local_data.get("is_enabled") == False assert _local_data.get("is_default") == False - - _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10, ["storage"], throw_exceptions=True) + + _sender = Sender(cache_directory=_storage._local_staging_file.parents[1], max_workers=1, threading_threshold=10, throw_exceptions=True) + _sender.upload(["storage"]) time.sleep(1) - _online_storage = FileStorage(_id_mapping.get(_storage.id)) + _online_storage = FileStorage(_sender.id_mapping.get(_storage.id)) assert _online_storage.name == _uuid assert _online_storage.is_enabled == False assert _online_storage.is_default == False _online_storage.read_only(False) - _online_storage.delete() \ No newline at end of file + _online_storage.delete() diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 7004551f..8f0dd83a 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -1,14 +1,13 @@ -import typing import pytest import uuid import contextlib import json import time -import os from simvue.api.objects.folder import Folder -from simvue.sender import sender +from simvue.sender import Sender from simvue.client import Client + @pytest.mark.api @pytest.mark.online def test_folder_creation_online() -> None: @@ -42,14 +41,15 @@ def test_folder_creation_offline(offline_cache_setup) -> None: assert _folder._local_staging_file.name.split(".")[0] == _folder.id assert _local_data.get("path", None) == _path - sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"], throw_exceptions=True) + _sender = Sender(cache_directory=_folder._local_staging_file.parents[1], max_workers=2, threading_threshold=10, throw_exceptions=True) + _sender.upload(["folders"]) + time.sleep(1) - client = Client() - _folder_new = client.get_folder(_path) + _folder_new = Folder(identifier=_sender.id_mapping[_folder.id]) assert _folder_new.path == _path - _folder_new.delete() + _folder_new.delete(recursive=True, delete_runs=True) assert not 
_folder._local_staging_file.exists() @@ -96,11 +96,11 @@ def test_folder_modification_offline(offline_cache_setup) -> None: _folder = Folder.new(path=_path, offline=True) _folder.commit() - sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"], throw_exceptions=True) + _sender = Sender(cache_directory=_folder._local_staging_file.parents[1], max_workers=2, threading_threshold=10, throw_exceptions=True) + _sender.upload(["folders"]) time.sleep(1) - client = Client() - _folder_online = client.get_folder(_path) + _folder_online = Folder(identifier=_sender.id_mapping[_folder.id]) assert _folder_online.path == _path _folder_new = Folder(identifier=_folder.id) @@ -115,7 +115,8 @@ def test_folder_modification_offline(offline_cache_setup) -> None: assert _local_data.get("description", None) == _description assert _local_data.get("tags", None) == _tags - sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"], throw_exceptions=True) + _sender = Sender(cache_directory=_folder._local_staging_file.parents[1], max_workers=2, threading_threshold=10, throw_exceptions=True) + _sender.upload(["folders"]) time.sleep(1) _folder_online.refresh() diff --git a/tests/unit/test_grids.py b/tests/unit/test_grids.py index 91c5a05e..4e78d976 100644 --- a/tests/unit/test_grids.py +++ b/tests/unit/test_grids.py @@ -7,13 +7,9 @@ import contextlib import json import time -import os from simvue.api.objects import Grid, GridMetrics, Folder, Run -from simvue.models import GridMetricSet -from simvue.run import Run as sv_Run -from simvue.sender import sender -from simvue.client import Client +from simvue.sender import Sender @pytest.mark.api @pytest.mark.online @@ -72,10 +68,11 @@ def test_grid_creation_offline() -> None: assert _local_data.get("runs", [None])[0] == [_run.id, "A"] npt.assert_array_equal(numpy.array(_local_data.get("grid")), _grid_def) - _id_mapping = sender(_grid._local_staging_file.parents[1], 1, 10, ["folders", "runs", "grids"], throw_exceptions=True) + _sender = Sender(cache_directory=_grid._local_staging_file.parents[1], max_workers=1, threading_threshold=10, throw_exceptions=True) + _sender.upload(["folders", "runs", "grids"]) time.sleep(1) # Get online version of grid - _online_grid = Grid(_id_mapping.get(_grid.id)) + _online_grid = Grid(_sender.id_mapping.get(_grid.id)) npt.assert_array_equal(numpy.array(_online_grid.grid), _grid_def) _grid.delete() with contextlib.suppress(RuntimeError): @@ -184,9 +181,10 @@ def test_grid_metrics_creation_offline() -> None: _metrics.commit() _run.status = "completed" _run.commit() - _id_mapping = sender(_grid._local_staging_file.parents[1], 1, 10, ["folders", "runs", "grids", "grid_metrics"], throw_exceptions=True) + _sender = Sender(cache_directory=_grid._local_staging_file.parents[1], max_workers=1, threading_threshold=10, throw_exceptions=True) + _sender.upload(["folders", "runs", "grids", "grid_metrics"]) time.sleep(1) # Online metrics - assert list(GridMetrics.get(runs=[_id_mapping[_run.id]], metrics=["A"], step=_step)) + assert list(GridMetrics.get(runs=[_sender.id_mapping[_run.id]], metrics=["A"], step=_step)) _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py index 56347eff..a7efbaff 100644 --- a/tests/unit/test_metric_range_alert.py +++ b/tests/unit/test_metric_range_alert.py @@ -5,8 +5,7 @@ import uuid from simvue.api.objects import MetricsRangeAlert, Alert -from simvue.client import Client -from simvue.sender import 
sender +from simvue.sender import Sender @pytest.mark.api @pytest.mark.online @@ -62,12 +61,12 @@ def test_metric_range_alert_creation_offline(offline_cache_setup) -> None: assert _local_data.get("name") == f"metrics_range_alert_{_uuid}" assert _local_data.get("notification") == "none" assert _local_data.get("alert").get("range_low") == 10 - sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True) + _sender = Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["alerts"]) time.sleep(1) # Get online ID and retrieve alert - _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() - _online_alert = Alert(_online_id) + _online_alert = Alert(_sender.id_mapping[_alert.id]) assert _online_alert.source == "metrics" assert _online_alert.alert.frequency == 1 @@ -124,12 +123,12 @@ def test_metric_range_alert_modification_offline(offline_cache_setup) -> None: offline=True ) _alert.commit() - sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True) + _sender = Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["alerts"]) time.sleep(1) # Get online ID and retrieve alert - _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() - _online_alert = Alert(_online_id) + _online_alert = Alert(_sender.id_mapping[_alert.id]) assert _online_alert.source == "metrics" assert _online_alert.alert.frequency == 1 @@ -149,7 +148,8 @@ def test_metric_range_alert_modification_offline(offline_cache_setup) -> None: _local_data = json.load(in_f) assert _local_data.get("description") == "updated!" 
- sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True) + _sender = Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["alerts"]) time.sleep(1) _online_alert.refresh() diff --git a/tests/unit/test_metric_threshold_alert.py b/tests/unit/test_metric_threshold_alert.py index 8f04c698..dfd1209e 100644 --- a/tests/unit/test_metric_threshold_alert.py +++ b/tests/unit/test_metric_threshold_alert.py @@ -5,7 +5,7 @@ import uuid from simvue.api.objects import MetricsThresholdAlert, Alert -from simvue.sender import sender +from simvue.sender import Sender @pytest.mark.api @pytest.mark.online @@ -61,12 +61,12 @@ def test_metric_threshold_alert_creation_offline(offline_cache_setup) -> None: assert _local_data.get("notification") == "none" assert _local_data.get("alert").get("threshold") == 10 - sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True) + _sender = Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["alerts"]) time.sleep(1) # Get online ID and retrieve alert - _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() - _online_alert = Alert(_online_id) + _online_alert = Alert(_sender.id_mapping[_alert.id]) assert _online_alert.source == "metrics" assert _online_alert.alert.frequency == 1 @@ -123,12 +123,12 @@ def test_metric_threshold_alert_modification_offline(offline_cache_setup) -> Non ) _alert.commit() - sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True) + _sender = Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["alerts"]) time.sleep(1) # Get online ID and retrieve alert - _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() - _online_alert = MetricsThresholdAlert(_online_id) + _online_alert = MetricsThresholdAlert(_sender.id_mapping[_alert.id]) assert _online_alert.source == "metrics" assert _online_alert.alert.frequency == 1 @@ -149,7 +149,7 @@ def test_metric_threshold_alert_modification_offline(offline_cache_setup) -> Non _local_data = json.load(in_f) assert _local_data.get("description") == "updated!" 
- sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True) + Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True).upload(["alerts"]) time.sleep(1) _online_alert.refresh() diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py index fea65482..74a5c0aa 100644 --- a/tests/unit/test_metrics.py +++ b/tests/unit/test_metrics.py @@ -7,7 +7,7 @@ from simvue.api.objects import Metrics, Folder, Run from simvue.models import DATETIME_FORMAT -from simvue.sender import sender +from simvue.sender import Sender @pytest.mark.api @pytest.mark.online @@ -88,14 +88,15 @@ def test_metrics_creation_offline(offline_cache_setup) -> None: assert _local_data.get("metrics")[0].get("step") == _step assert _local_data.get("metrics")[0].get("time") == _time - _id_mapping = sender(_metrics._local_staging_file.parents[1], 1, 10, ["folders", "runs", "metrics"], throw_exceptions=True) + _sender = Sender(_metrics._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload( ["folders", "runs", "metrics"]) time.sleep(1) # Get online version of metrics - _online_metrics = Metrics(_id_mapping.get(_metrics.id)) - _data = next(_online_metrics.get(metrics=["x", "y", "aB0-_/.:=><+()"], runs=[_id_mapping.get(_run.id)], xaxis="step")) - assert sorted(_online_metrics.names(run_ids=[_id_mapping.get(_run.id)])) == sorted(_values.keys()) - assert _data.get(_id_mapping.get(_run.id)).get('y')[0].get('value') == 2.0 - assert _data.get(_id_mapping.get(_run.id)).get('y')[0].get('step') == 1 + _online_metrics = Metrics(_sender.id_mapping.get(_metrics.id)) + _data = next(_online_metrics.get(metrics=["x", "y", "aB0-_/.:=><+()"], runs=[_sender.id_mapping.get(_run.id)], xaxis="step")) + assert sorted(_online_metrics.names(run_ids=[_sender.id_mapping.get(_run.id)])) == sorted(_values.keys()) + assert _data.get(_sender.id_mapping.get(_run.id)).get('y')[0].get('value') == 2.0 + assert _data.get(_sender.id_mapping.get(_run.id)).get('y')[0].get('step') == 1 _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) diff --git a/tests/unit/test_object_artifact.py b/tests/unit/test_object_artifact.py index b45eef1a..1a60dda4 100644 --- a/tests/unit/test_object_artifact.py +++ b/tests/unit/test_object_artifact.py @@ -6,7 +6,7 @@ import json from simvue.api.objects import ObjectArtifact, Run, Artifact from simvue.api.objects.folder import Folder -from simvue.sender import sender +from simvue.sender import Sender from simvue.serialization import _deserialize_numpy_array @pytest.mark.api @@ -63,10 +63,11 @@ def test_object_artifact_creation_offline(offline_cache_setup) -> None: assert _local_data.get("mime_type") == "application/vnd.simvue.numpy.v1" assert _local_data.get("runs") == {_run.id: "input"} - _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10, throw_exceptions=True) + _sender = Sender(pathlib.Path(offline_cache_setup.name), 1, 10, throw_exceptions=True) + _sender.upload() time.sleep(1) - _online_artifact = Artifact(_id_mapping.get(_artifact.id)) + _online_artifact = Artifact(_sender.id_mapping.get(_artifact.id)) assert _online_artifact.name == f"test_object_artifact_offline_{_uuid}" assert _online_artifact.mime_type == "application/vnd.simvue.numpy.v1" diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index 395b658a..45dc71be 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -5,7 +5,7 @@ import datetime import uuid from simvue.api.objects.run import RunBatchArgs -from simvue.sender import sender 
+from simvue.sender import Sender from simvue.api.objects import Run, Folder from simvue.client import Client @@ -41,18 +41,17 @@ def test_run_creation_offline(offline_cache_setup) -> None: assert _local_data.get("name") == f"simvue_offline_run_{_uuid}" assert _local_data.get("folder") == _folder_name - sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"], throw_exceptions=True) + _sender = Sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"], throw_exceptions=True) + _sender.upload() time.sleep(1) # Get online ID and retrieve run - _online_id = _run._local_staging_file.parents[1].joinpath("server_ids", f"{_run._local_staging_file.name.split('.')[0]}.txt").read_text() - _online_run = Run(_online_id) + _online_run = Run(_sender.id_mapping[_run.id]) assert _online_run.name == _run_name assert _online_run.folder == _folder_name _run.delete() - _run._local_staging_file.parents[1].joinpath("server_ids", f"{_run._local_staging_file.name.split('.')[0]}.txt").unlink() client = Client() client.delete_folder(_folder_name, recursive=True, remove_runs=True) @@ -119,12 +118,12 @@ def test_run_modification_offline(offline_cache_setup) -> None: assert _new_run.description == "Simvue test run" assert _new_run.name == "simvue_test_run" - sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"], throw_exceptions=True) + _sender = Sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"], throw_exceptions=True) + _sender.upload() time.sleep(1) # Get online ID and retrieve run - _online_id = _run._local_staging_file.parents[1].joinpath("server_ids", f"{_run._local_staging_file.name.split('.')[0]}.txt").read_text() - _online_run = Run(_online_id) + _online_run = Run(_sender.id_mapping[_run.id]) assert _online_run.ttl == 120 assert _online_run.description == "Simvue test run" From 8ef470f35e556b9a9125c91916515ccec8f71f46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 22 Oct 2025 23:40:04 +0100 Subject: [PATCH 03/16] =?UTF-8?q?=F0=9F=9A=A7=20Added=20Simvue=20run=20to?= =?UTF-8?q?=20sender=20for=20monitoring=20uploads?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/sender/__init__.py | 2 ++ simvue/sender/actions.py | 17 ++++++++-- simvue/sender/base.py | 30 ++++++++++++++++++ tests/functional/test_run_execute_process.py | 5 +-- tests/unit/test_run.py | 11 ++++--- tests/unit/test_s3_storage.py | 7 +++-- tests/unit/test_sender.py | 33 +++++++++++--------- tests/unit/test_tag.py | 15 +++++---- tests/unit/test_tenant.py | 7 +++-- tests/unit/test_user.py | 7 +++-- tests/unit/test_user_alert.py | 28 ++++++++++------- 11 files changed, 111 insertions(+), 51 deletions(-) diff --git a/simvue/sender/__init__.py b/simvue/sender/__init__.py index 74340ecf..eb89cd52 100644 --- a/simvue/sender/__init__.py +++ b/simvue/sender/__init__.py @@ -1,3 +1,5 @@ +"""Simvue sender for sending locally cached data to the server.""" + from .base import Sender, UPLOAD_ORDER, UploadItem __all__ = ["Sender", "UPLOAD_ORDER", "UploadItem"] diff --git a/simvue/sender/actions.py b/simvue/sender/actions.py index 2c8fc009..9ef17c80 100644 --- a/simvue/sender/actions.py +++ b/simvue/sender/actions.py @@ -1,3 +1,5 @@ +"""Upload actions for cached files.""" + import abc from collections.abc import Generator from concurrent.futures import ThreadPoolExecutor @@ -39,6 +41,7 @@ from simvue.models import ObjectID from simvue.config.user import SimvueConfiguration from simvue.eco import CO2Monitor +from 
simvue.run import Run as SimvueRun try: from typing import override @@ -115,10 +118,12 @@ def _single_item_upload( id_mapping: dict[str, str], cache_directory: pathlib.Path, thread_lock: threading.Lock, + simvue_monitor_run: SimvueRun, *, throw_exceptions: bool = False, retry_failed: bool = False, ) -> None: + simvue_monitor_run.log_event(f"Uploading {cls.object_type} '{identifier}'") _json_file = cache_directory.joinpath(f"{cls.object_type}/{identifier}.json") with _json_file.open() as in_f: @@ -151,9 +156,14 @@ def _single_item_upload( except Exception as err: if throw_exceptions: raise err - cls.logger.exception( - "Failed to upload %s '%s'", cls.object_type, identifier + _exception_msg: str = ( + f"Error while committing {cls.object_type} '{identifier}': {err}" + ) + simvue_monitor_run.log_event(_exception_msg) + simvue_monitor_run.log_alert( + name="sender_object_upload_failure", state="critical" ) + cls.logger.error(_exception_msg) cls.log_upload_failed(cache_directory, identifier, _data) return @@ -202,6 +212,7 @@ def upload( thread_lock: threading.Lock, single_thread_limit: int, max_thread_workers: int, + simvue_monitor_run: SimvueRun, *, throw_exceptions: bool = False, retry_failed: bool = False, @@ -217,6 +228,7 @@ def upload( throw_exceptions=throw_exceptions, retry_failed=retry_failed, id_mapping=id_mapping, + simvue_monitor_run=simvue_monitor_run, ) else: with ThreadPoolExecutor( @@ -231,6 +243,7 @@ def upload( throw_exceptions=throw_exceptions, retry_failed=retry_failed, id_mapping=id_mapping, + simvue_monitor_run=simvue_monitor_run, ), _iterable, ) diff --git a/simvue/sender/base.py b/simvue/sender/base.py index 876d71a1..0c99f25f 100644 --- a/simvue/sender/base.py +++ b/simvue/sender/base.py @@ -4,6 +4,7 @@ is either not possible on the simulation machine, or connection is limited. 
""" +import datetime import logging import threading import typing @@ -12,6 +13,7 @@ from simvue.sender.actions import UPLOAD_ACTION_ORDER from simvue.config.user import SimvueConfiguration +from simvue.run import Run logger = logging.getLogger(__name__) @@ -42,6 +44,7 @@ def __init__( threading_threshold: pydantic.PositiveInt = 10, throw_exceptions: bool = False, retry_failed_uploads: bool = False, + run_notification: typing.Literal["none", "all", "email"] = "none", ) -> None: """Creates required local directories.""" _local_config: SimvueConfiguration = SimvueConfiguration.fetch() @@ -53,6 +56,7 @@ def __init__( self._max_workers = max_workers self._lock_path = self._cache_directory.joinpath("sender.lock") self._thread_lock = threading.Lock() + self._run_notification: typing.Literal["none", "email"] = run_notification self._id_mapping = { file_path.name.split(".")[0]: file_path.read_text() for file_path in self._cache_directory.glob("server_ids/*.txt") @@ -82,11 +86,36 @@ def _release(self) -> None: """Release lock to this sender.""" self._lock_path.unlink() + def _initialise_monitor_run(self) -> Run: + """Create a Simvue run for monitoring upload.""" + _time_stamp: str = datetime.datetime.now(tz=datetime.UTC).strftime( + "%Y_%m_%d_%H_%M_%S" + ) + _run = Run(mode="online") + _ = _run.init( + name=f"sender_upload_{_time_stamp}", + folder="/sender", + notification=self._run_notification, + timeout=None, + no_color=True, + ) + _ = _run.config(suppress_errors=True, enable_emission_metrics=False) + _run.create_user_alert( + name="sender_object_upload_failure", + description="Triggers when an object fails to send to the server.", + notification=self._run_notification, + trigger_abort=False, + ) + + return _run + @pydantic.validate_call def upload(self, objects_to_upload: list[UploadItem] | None = None) -> None: """Upload objects to server.""" self._lock() + _monitor_run = self._initialise_monitor_run() + for action in UPLOAD_ACTION_ORDER: if objects_to_upload and action.object_type not in objects_to_upload: continue @@ -103,5 +132,6 @@ def upload(self, objects_to_upload: list[UploadItem] | None = None) -> None: retry_failed=self._retry_failed_uploads, single_thread_limit=self._threading_threshold, max_thread_workers=self._max_workers, + simvue_monitor_run=_monitor_run, ) self._release() diff --git a/tests/functional/test_run_execute_process.py b/tests/functional/test_run_execute_process.py index 1337e301..66b8a4c9 100644 --- a/tests/functional/test_run_execute_process.py +++ b/tests/functional/test_run_execute_process.py @@ -9,7 +9,7 @@ from simvue import Run, Client from simvue.executor import get_current_shell -from simvue.sender import sender +from simvue.sender import Sender @pytest.mark.executor def test_monitor_processes(create_plain_run_offline: tuple[Run, dict]): @@ -24,7 +24,8 @@ def test_monitor_processes(create_plain_run_offline: tuple[Run, dict]): _run.add_process(f"process_1_{os.environ.get('PYTEST_XDIST_WORKER', 0)}", Command="Write-Output 'Hello World!'", executable="powershell") _run.add_process(f"process_2_{os.environ.get('PYTEST_XDIST_WORKER', 0)}", Command="Get-ChildItem", executable="powershell") _run.add_process(f"process_3_{os.environ.get('PYTEST_XDIST_WORKER', 0)}", Command="exit 0", executable="powershell") - sender(_run._sv_obj._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"], throw_exceptions=True) + _sender = Sender(_run._sv_obj._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["folders", "runs", "alerts"], ) 
@pytest.mark.executor diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index 45dc71be..1ee2e6f9 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -41,8 +41,8 @@ def test_run_creation_offline(offline_cache_setup) -> None: assert _local_data.get("name") == f"simvue_offline_run_{_uuid}" assert _local_data.get("folder") == _folder_name - _sender = Sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"], throw_exceptions=True) - _sender.upload() + _sender = Sender(_run._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["folders", "runs"]) time.sleep(1) # Get online ID and retrieve run @@ -118,8 +118,8 @@ def test_run_modification_offline(offline_cache_setup) -> None: assert _new_run.description == "Simvue test run" assert _new_run.name == "simvue_test_run" - _sender = Sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"], throw_exceptions=True) - _sender.upload() + _sender = Sender(_run._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["folders", "runs"]) time.sleep(1) # Get online ID and retrieve run @@ -138,7 +138,8 @@ def test_run_modification_offline(offline_cache_setup) -> None: _online_run.refresh() assert _online_run.tags == [] - sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"], throw_exceptions=True) + _sender = Sender(_run._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["folders", "runs"]) time.sleep(1) _online_run.refresh() diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index 2246e98a..e251a034 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -5,7 +5,7 @@ from simvue.api.objects import S3Storage from simvue.api.objects.storage.fetch import Storage -from simvue.sender import sender +from simvue.sender import Sender @pytest.mark.api @pytest.mark.online @@ -71,8 +71,9 @@ def test_create_s3_offline(offline_cache_setup) -> None: assert not _local_data.get("user", None) assert not _local_data.get("usage", None) - _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10, ["storage"], throw_exceptions=True) - _online_id = _id_mapping[_storage.id] + _sender = Sender(_storage._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["storage"]) + _online_id = _sender.id_mapping[_storage.id] time.sleep(1) _online_storage = S3Storage(_online_id) diff --git a/tests/unit/test_sender.py b/tests/unit/test_sender.py index 5d0933e6..9177903d 100644 --- a/tests/unit/test_sender.py +++ b/tests/unit/test_sender.py @@ -1,13 +1,10 @@ -import contextlib import json import pytest import time import datetime import uuid -from simvue.api.objects.run import RunBatchArgs -from simvue.sender import sender +from simvue.sender import Sender from simvue.api.objects import Run, Metrics, Folder -from simvue.client import Client from simvue.models import DATETIME_FORMAT import logging import pathlib @@ -37,23 +34,26 @@ def test_sender_exception_handling(offline_cache_setup, caplog, throw_exceptions if throw_exceptions: with pytest.raises(ValueError): - sender(throw_exceptions=True, threading_threshold=1 if parallel else 10) + _sender = Sender(throw_exceptions=True, threading_threshold=1 if parallel else 10) + _sender.upload() return with caplog.at_level(logging.ERROR): - sender(threading_threshold=1 if parallel else 10) + _sender = Sender(threading_threshold=1 if parallel else 10) + _sender.upload() - assert "Error while committing 'Metrics'" in 
caplog.text + assert "Error while committing metrics" in caplog.text # Wait, then try sending again time.sleep(1) caplog.clear() with caplog.at_level(logging.ERROR): - sender(retry_failed_uploads=retry_failed_uploads, threading_threshold=1 if parallel else 10) + _sender = Sender(retry_failed_uploads=retry_failed_uploads, threading_threshold=1 if parallel else 10) + _sender.upload() if retry_failed_uploads: - assert "Error while committing 'Metrics'" in caplog.text + assert "Error while committing metrics" in caplog.text else: assert not caplog.text @@ -100,7 +100,8 @@ def test_sender_server_ids(offline_cache_setup, caplog, parallel): # Send both items with caplog.at_level(logging.ERROR): - sender(threading_threshold=1 if parallel else 10) + _sender = Sender(threading_threshold=1 if parallel else 10) + _sender.upload() assert not caplog.text @@ -139,7 +140,8 @@ def test_sender_server_ids(offline_cache_setup, caplog, parallel): # Run sender again, check online ID is correctly loaded from file and substituted for offline ID with caplog.at_level(logging.ERROR): - sender(threading_threshold=1 if parallel else 10) + _sender = Sender(threading_threshold=1 if parallel else 10) + _sender.upload() assert not caplog.text @@ -171,8 +173,9 @@ def test_send_heartbeat(offline_cache_setup, parallel, mocker): _offline_runs.append(_run) - _id_mapping = sender(threading_threshold=1 if parallel else 10) - _online_runs = [Run(identifier=_id_mapping.get(_offline_run.id)) for _offline_run in _offline_runs] + _sender = Sender(threading_threshold=1 if parallel else 10) + _sender.upload() + _online_runs = [Run(identifier=_sender.id_mapping.get(_offline_run.id)) for _offline_run in _offline_runs] assert all([_online_run.status == "running" for _online_run in _online_runs]) spy_put = mocker.spy(requests, "put") @@ -181,11 +184,11 @@ def test_send_heartbeat(offline_cache_setup, parallel, mocker): for i in range(10): time.sleep(0.5) [_offline_run.send_heartbeat() for _offline_run in _offline_runs] - sender(threading_threshold=1 if parallel else 10) + Sender(threading_threshold=1 if parallel else 10).upload() # Check requests.put() endpoint called 50 times - once for each of the 5 runs, on all 10 iterations assert spy_put.call_count == 50 # Get online runs and check all running [_online_run.refresh() for _online_run in _online_runs] - assert all([_online_run.status == "running" for _online_run in _online_runs]) \ No newline at end of file + assert all([_online_run.status == "running" for _online_run in _online_runs]) diff --git a/tests/unit/test_tag.py b/tests/unit/test_tag.py index 381234d2..1fa2f4de 100644 --- a/tests/unit/test_tag.py +++ b/tests/unit/test_tag.py @@ -5,7 +5,7 @@ import json import pydantic.color from simvue.api.objects.tag import Tag -from simvue.sender import sender +from simvue.sender import Sender @pytest.mark.api @pytest.mark.online @@ -35,10 +35,11 @@ def test_tag_creation_offline(offline_cache_setup) -> None: assert _local_data.get("name") == f"test_tag_{_uuid}" - _id_mapping = sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"], throw_exceptions=True) + _sender = Sender(_tag._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["tags"]) time.sleep(1) - _online_id = _id_mapping.get(_tag.id) + _online_id = _sender.id_mapping.get(_tag.id) _online_tag = Tag(_online_id) assert _online_tag.name == f"test_tag_{_uuid}" @@ -78,8 +79,9 @@ def test_tag_modification_offline(offline_cache_setup) -> None: assert _local_data.get("name") == f"test_tag_{_uuid}" - _id_mapping = 
sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"], throw_exceptions=True)
-    _online_id = _id_mapping.get(_tag.id)
+    _sender = Sender(_tag._local_staging_file.parents[1], 1, 10, throw_exceptions=True)
+    _sender.upload(["tags"])
+    _online_id = _sender.id_mapping.get(_tag.id)
     _online_tag = Tag(_online_id)
     assert _online_tag.name == f"test_tag_{_uuid}"
@@ -101,7 +103,8 @@ def test_tag_modification_offline(offline_cache_setup) -> None:
     assert pydantic.color.parse_str(_local_data.get("colour")).r == 250 / 255
     assert _local_data.get("description") == "modified test tag"
-    sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"], throw_exceptions=True)
+    _sender = Sender(_tag._local_staging_file.parents[1], 1, 10, throw_exceptions=True)
+    _sender.upload(["tags"])
     time.sleep(1)
     # Check online version is updated
diff --git a/tests/unit/test_tenant.py b/tests/unit/test_tenant.py
index 04684467..5ff854ae 100644
--- a/tests/unit/test_tenant.py
+++ b/tests/unit/test_tenant.py
@@ -5,7 +5,7 @@ import uuid
 from simvue.api.objects.administrator import Tenant
-from simvue.sender import sender
+from simvue.sender import Sender
 @pytest.mark.api
 @pytest.mark.online
@@ -40,9 +40,10 @@ def test_create_tenant_offline(offline_cache_setup) -> None:
     assert _local_data.get("name") == _uuid
     assert _local_data.get("is_enabled") == True
-    _id_mapping = sender(_new_tenant._local_staging_file.parents[1], 1, 10, ["tenants"], throw_exceptions=True)
+    _sender = Sender(_new_tenant._local_staging_file.parents[1], 1, 10, throw_exceptions=True)
+    _sender.upload(["tenants"])
     time.sleep(1)
-    _online_user = Tenant(_id_mapping.get(_new_tenant.id))
+    _online_user = Tenant(_sender.id_mapping.get(_new_tenant.id))
     assert _online_user.name == _uuid
     assert _online_user.is_enabled == True
diff --git a/tests/unit/test_user.py b/tests/unit/test_user.py
index a53f3cfd..5aac3c11 100644
--- a/tests/unit/test_user.py
+++ b/tests/unit/test_user.py
@@ -5,7 +5,7 @@ import uuid
 from simvue.api.objects.administrator import User, Tenant
-from simvue.sender import sender
+from simvue.sender import Sender
 @pytest.mark.api
 @pytest.mark.online
@@ -62,9 +62,10 @@ def test_create_user_offline(offline_cache_setup) -> None:
     assert _local_data.get("fullname") == "Joe Bloggs"
     assert _local_data.get("email") == "jbloggs@simvue.io"
-    _id_mapping = sender(_user._local_staging_file.parents[1], 1, 10, ["users"], throw_exceptions=True)
+    _sender = Sender(_user._local_staging_file.parents[1], 1, 10, throw_exceptions=True)
+    _sender.upload(["users"])
     time.sleep(1)
-    _online_user = User(_id_mapping.get(_user.id))
+    _online_user = User(_sender.id_mapping.get(_user.id))
     assert _online_user.username == "jbloggs"
     assert _online_user.fullname == "Joe Bloggs"
     assert _online_user.email == "jbloggs@simvue.io"
diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py
index f1f1acea..77141312 100644
--- a/tests/unit/test_user_alert.py
+++ b/tests/unit/test_user_alert.py
@@ -3,7 +3,7 @@ import contextlib
 import pytest
 import uuid
-from simvue.sender import sender
+from simvue.sender import Sender
 from simvue.api.objects import Alert, UserAlert, Run
 from simvue.api.objects.folder import Folder
@@ -46,11 +46,11 @@ def test_user_alert_creation_offline(offline_cache_setup) -> None:
     assert _local_data.get("name") == f"users_alert_{_uuid}"
     assert _local_data.get("notification") == "none"
-    _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
+    _sender = Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True)
+    _sender.upload(["alerts"])
     time.sleep(1)
-    _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text()
-    _online_alert = Alert(_online_id)
+    _online_alert = Alert(_sender.id_mapping[_alert.id])
     assert _online_alert.source == "user"
     assert _online_alert.name == f"users_alert_{_uuid}"
@@ -94,12 +94,13 @@ def test_user_alert_modification_offline(offline_cache_setup) -> None:
     )
     _alert.commit()
-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
+    _sender = Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True)
+    _sender.upload(["alerts"])
+    time.sleep(1)
     # Get online ID and retrieve alert
-    _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text()
-    _online_alert = UserAlert(_online_id)
+    _online_alert = UserAlert(_sender.id_mapping[_alert.id])
     assert _online_alert.source == "user"
     assert _online_alert.name == f"users_alert_{_uuid}"
@@ -117,7 +118,8 @@ def test_user_alert_modification_offline(offline_cache_setup) -> None:
     with _alert._local_staging_file.open() as in_f:
         _local_data = json.load(in_f)
         assert _local_data.get("description") == "updated!"
-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
+    _sender = Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True)
+    _sender.upload(["alerts"])
     time.sleep(1)
     _online_alert.refresh()
@@ -191,11 +193,12 @@ def test_user_alert_status_offline(offline_cache_setup) -> None:
     _run.alerts = [_alert.id]
     _run.commit()
-    _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"], throw_exceptions=True)
+    _sender = Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True)
+    _sender.upload(["folders", "runs", "alerts"])
     time.sleep(1)
     # Get online aler, check status is not set
-    _online_alert = UserAlert(_id_mapping.get(_alert.id))
-    assert not _online_alert.get_status(run_id=_id_mapping.get(_run.id))
+    _online_alert = UserAlert(_sender.id_mapping.get(_alert.id))
+    assert not _online_alert.get_status(run_id=_sender.id_mapping.get(_run.id))
     _alert.set_status(_run.id, "critical")
@@ -206,12 +209,13 @@ def test_user_alert_status_offline(offline_cache_setup) -> None:
     _online_alert.refresh()
-    assert not _online_alert.get_status(run_id=_id_mapping.get(_run.id))
+    assert not _online_alert.get_status(run_id=_sender.id_mapping.get(_run.id))
-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
+    _sender = Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True)
+    _sender.upload(["alerts"])
     time.sleep(1)
     # Check online status has been updated
     _online_alert.refresh()
-    assert _online_alert.get_status(run_id=_id_mapping.get(_run.id)) == "critical"
+    assert _online_alert.get_status(run_id=_sender.id_mapping.get(_run.id)) == "critical"
     _run.delete()
     _folder.delete(recursive=True, runs_only=False, delete_runs=True)

From 99bfa4288ec75112f91a727e41a379cd26b38dd2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kristian=20Zar=C4=99bski?=
Date: Thu, 23 Oct 2025 08:50:00 +0100
Subject: [PATCH 04/16] =?UTF-8?q?=F0=9F=90=9B=20Fixed=20some=20bugs=20with?=
 =?UTF-8?q?=20new=20sender=20and=20finalised=20docstrings?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 CHANGELOG.md                |   4 +
 pyproject.toml              |   2 +-
 simvue/config/parameters.py |   3 +
 simvue/sender/actions.py    | 434 +++++++++++++++++++++++++++++++++---
 simvue/sender/base.py       |  42 +++-
 5 files changed, 455 insertions(+), 30 deletions(-)
diff 
--git a/CHANGELOG.md b/CHANGELOG.md index c3fb2c6e..3f0ba6ac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ # Change Log +## Unreleased + +- Refactored sender functionality introducing new `Sender` class. + ## [v2.2.2](https://github.com/simvue-io/client/releases/tag/v2.2.2) - 2025-10-14 - Enforced use of UTC for all datetime recording. diff --git a/pyproject.toml b/pyproject.toml index bb76d034..92a2df07 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "simvue" -version = "2.2.2" +version = "2.3.0" description = "Simulation tracking and monitoring" authors = [ {name = "Simvue Development Team", email = "info@simvue.io"} diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index be54de9e..9647ce93 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -22,6 +22,9 @@ class ServerSpecifications(pydantic.BaseModel): + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + extra="forbid", strict=True + ) url: pydantic.AnyHttpUrl | None token: pydantic.SecretStr | None diff --git a/simvue/sender/actions.py b/simvue/sender/actions.py index 9ef17c80..63471dd4 100644 --- a/simvue/sender/actions.py +++ b/simvue/sender/actions.py @@ -58,26 +58,66 @@ class UploadAction: @classmethod def json_file(cls, cache_directory: pathlib.Path, offline_id: str) -> pathlib.Path: + """Returns the local cache JSON file for an upload. + + Parameters + ---------- + cache_directory : pathlib.Path + the cache directory to search + offline_id : str + the offline identifier for the upload + + Returns + ------- + pathlib.Path + path of local JSON file + """ return cache_directory.joinpath(f"{cls.object_type}", f"{offline_id}.json") @classmethod - def log_upload_failed( + def _log_upload_failed( cls, cache_directory: pathlib.Path, offline_id: str, data: dict[str, typing.Any] ) -> None: + """Log a failing upload to the local cache.""" data["upload_failed"] = True with cls.json_file(cache_directory, offline_id).open("w") as out_f: json.dump(data, out_f, indent=2) @classmethod def count(cls, cache_directory: pathlib.Path) -> int: - """Return number of objects to upload of this type.""" + """Return number of objects to upload of this type. + + Parameters + ---------- + cache_directory : pathlib.Path + the local cache directory to read from. + + Returns + ------- + int + the number of objects of this type pending upload. + """ return len(list(cls.uploadable_objects(cache_directory))) @classmethod def pre_tasks( cls, offline_id: str, data: dict[str, typing.Any], cache_directory: pathlib.Path ) -> None: - """Pre-upload actions.""" + """Pre-upload actions. + + For this object type no pre-actions are performed. + + Parameters + ----------- + offline_id : str + the offline identifier for the upload. + online_id : str + the recorded online identifier after upload. + data : dict[str, Any] + the data sent during upload. + cache_directory : pathlib.Path + the local cache directory to read from. + """ _ = offline_id _ = data _ = cache_directory @@ -91,7 +131,21 @@ def post_tasks( data: dict[str, typing.Any], cache_directory: pathlib.Path, ) -> None: - """Post-upload actions.""" + """Post-upload actions. + + Removes local JSON data on successful upload. + + Parameters + ----------- + offline_id : str + the offline identifier for the upload. + online_id : str + the recorded online identifier after upload. + data : dict[str, Any] + the data sent during upload. + cache_directory : pathlib.Path + the local cache directory to read from. 
+        """
         _ = data
         _ = online_id
         cls.json_file(cache_directory, offline_id).unlink(missing_ok=True)
@@ -101,13 +155,27 @@ def post_tasks(
     def initialise_object(
         cls, online_id: ObjectID | None, **data
     ) -> SimvueObject | None:
-        """Initialise an instance."""
+        """Initialise an instance of an object."""
         _ = online_id
         _ = data
     @classmethod
     def uploadable_objects(cls, cache_directory: pathlib.Path) -> Generator[str]:
-        """Iterate through uploadables."""
+        """Iterate through uploadables.
+
+        Returns the offline identifiers of objects awaiting upload for this type.
+
+        Parameters
+        ----------
+        cache_directory : pathlib.Path
+            the local cache directory to read from.
+
+        Yields
+        ------
+        str
+            offline identifier
+        """
         for file in cache_directory.glob(f"{cls.object_type}/*.json"):
             yield file.stem
@@ -122,8 +190,11 @@ def _single_item_upload(
         *,
         throw_exceptions: bool = False,
         retry_failed: bool = False,
+        upload_status: dict[str, int | float] | None,
     ) -> None:
-        simvue_monitor_run.log_event(f"Uploading {cls.object_type} '{identifier}'")
+        """Upload a single item of this object type."""
+        _label: str = cls.object_type[:-1] if cls.singular_object else cls.object_type
+        simvue_monitor_run.log_event(f"Uploading {_label} '{identifier}'")
         _json_file = cache_directory.joinpath(f"{cls.object_type}/{identifier}.json")
         with _json_file.open() as in_f:
@@ -157,32 +228,30 @@ def _single_item_upload(
             if throw_exceptions:
                 raise err
             _exception_msg: str = (
-                f"Error while committing {cls.object_type} '{identifier}': {err}"
+                f"Error while committing {_label} '{identifier}': {err}"
             )
             simvue_monitor_run.log_event(_exception_msg)
             simvue_monitor_run.log_alert(
                 name="sender_object_upload_failure", state="critical"
             )
             cls.logger.error(_exception_msg)
-            cls.log_upload_failed(cache_directory, identifier, _data)
+            cls._log_upload_failed(cache_directory, identifier, _data)
             return
         if cls.singular_object:
             if not _object.id:
                 cls.logger.error(
                     "No identifier retrieved for %s '%s'",
-                    cls.object_type,
+                    _label,
                     identifier,
                 )
-                cls.log_upload_failed(cache_directory, identifier, _data)
+                cls._log_upload_failed(cache_directory, identifier, _data)
                 return
             cls.logger.info(
                 "%s %s '%s'",
                 "Updated" if id_mapping.get(identifier) else "Created",
-                cls.object_type[:-1]
-                if cls.object_type.endswith("s")
-                else cls.object_type,
+                _label,
                 _object.id,
             )
@@ -190,13 +259,17 @@ def _single_item_upload(
                 id_mapping[identifier] = _object.id
         else:
             cls.logger.info(
-                "%s %s",
-                "Updated" if id_mapping.get(identifier) else "Created",
-                cls.object_type[:-1]
-                if cls.object_type.endswith("s")
-                else cls.object_type,
+                "%s %s", "Updated" if id_mapping.get(identifier) else "Created", _label
             )
+        if upload_status is not None:
+            with thread_lock:
+                upload_status.setdefault(cls.object_type, 0)
+                upload_status[cls.object_type] += 1
+                simvue_monitor_run.log_metrics(
+                    {f"uploads.{cls.object_type}": upload_status[cls.object_type]}
+                )
+
         cls.post_tasks(
             offline_id=identifier,
             online_id=_object.id if cls.singular_object else None,
@@ -210,16 +283,38 @@ def upload(
         id_mapping: dict[str, str],
         cache_directory: pathlib.Path,
         thread_lock: threading.Lock,
-        single_thread_limit: int,
+        threading_threshold: int,
        max_thread_workers: int,
         simvue_monitor_run: SimvueRun,
         *,
         throw_exceptions: bool = False,
         retry_failed: bool = False,
+        upload_status: dict[str, int | float] | None = None,
     ) -> None:
-        """Run upload of file category."""
+        """Run upload of all objects of this type.
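+
+        Typically invoked per action type by ``Sender.upload``; a direct call is
+        sketched below (illustrative only, where ``cache_dir`` and ``monitor_run``
+        are assumed to be a populated cache directory and a pre-initialised
+        monitoring run)::
+
+            RunUploadAction.upload(
+                id_mapping={},
+                cache_directory=cache_dir,
+                thread_lock=threading.Lock(),
+                threading_threshold=10,
+                max_thread_workers=5,
+                simvue_monitor_run=monitor_run,
+            )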
+ + Parameters + ---------- + id_mapping : dict[str, str] + the offline-online mapping to update after upload. + cache_directory : pathlib.Path + the local cache directory to read from. + thread_lock : threading.Lock + the thread lock to use when uploading via multithreading + to ensure mappings are modified correctly. + threading_threshold : int + the number of cached files above which threading will be used. + max_thread_workers : int + the maximum number of threads to use. + throw_exceptions : bool, optional + whether to throw exceptions and terminate, default False. + retry_failed : bool, optional + whether to retry failed uploads, default False. + upload_status : dict[str, int | float] | None, optional + a mapping which will be updated with upload status, default None. + """ _iterable = cls.uploadable_objects(cache_directory) - if cls.count(cache_directory) < single_thread_limit: + if cls.count(cache_directory) < threading_threshold: for identifier in _iterable: cls._single_item_upload( identifier=identifier, @@ -229,6 +324,7 @@ def upload( retry_failed=retry_failed, id_mapping=id_mapping, simvue_monitor_run=simvue_monitor_run, + upload_status=upload_status, ) else: with ThreadPoolExecutor( @@ -244,11 +340,12 @@ def upload( retry_failed=retry_failed, id_mapping=id_mapping, simvue_monitor_run=simvue_monitor_run, + upload_status=upload_status, ), _iterable, ) # This will raise any exceptions encountered during sending - for result in _results: + for _ in _results: pass @@ -263,6 +360,22 @@ def pre_tasks( cls, offline_id: str, data: dict[str, typing.Any], cache_directory: pathlib.Path, ) -> None: + """Pre-upload actions. + + For object-based artifacts the local data is serialized in + preparation for the upload. + + Parameters + ----------- + offline_id : str + the offline identifier for the upload. + online_id : str + the recorded online identifier after upload. + data : dict[str, Any] + the data sent during upload. + cache_directory : pathlib.Path + the local cache directory to read from. + """ if data["obj_type"] != "ObjectArtifact": return with cache_directory.joinpath(cls.object_type, f"{offline_id}.object").open( @@ -279,6 +392,22 @@ def post_tasks( data: dict[str, typing.Any], cache_directory: pathlib.Path, ) -> None: + """Post-upload actions. + + Removes local JSON data on successful upload; if the artifact + is object-based, the locally serialized data is also removed. + + Parameters + ----------- + offline_id : str + the offline identifier for the upload. + online_id : str + the recorded online identifier after upload. + data : dict[str, Any] + the data sent during upload. + cache_directory : pathlib.Path + the local cache directory to read from. + """ _ = online_id super().post_tasks( offline_id=offline_id, @@ -295,6 +424,20 @@ def post_tasks( def initialise_object( cls, online_id: ObjectID | None, **data ) -> FileArtifact | ObjectArtifact: + """Initialise/update an Artifact object. + + Parameters + ---------- + online_id : str | None, optional + the online identifier for an object to update, default None. + **data + data to create/modify an artifact. + + Returns + ------- + simvue.api.objects.FileArtifact | simvue.api.objects.ObjectArtifact + a local representation of the server object. + """ if not online_id: if data.get("file_path"): return FileArtifact.new(**data) @@ -311,6 +454,20 @@ class RunUploadAction(UploadAction): @override @classmethod def initialise_object(cls, online_id: ObjectID | None, **data) -> Run: + """Initialise/update a Run object.
+ + Parameters + ---------- + online_id : str | None, optional + the online identifier for an object to update, default None. + **data + data to create/modify a run. + + Returns + ------- + simvue.api.objects.Run + a local representation of the server object. + """ if not online_id: return Run.new(**data) @@ -325,6 +482,22 @@ def post_tasks( data: dict[str, typing.Any], cache_directory: pathlib.Path, ) -> None: + """Post-upload actions. + + Removes local JSON data on successful upload, and handles removal + of additional files defining related identifiers. + + Parameters + ----------- + offline_id : str + the offline identifier for the upload. + online_id : str + the recorded online identifier after upload. + data : dict[str, Any] + the data sent during upload. + cache_directory : pathlib.Path + the local cache directory to read from. + """ super().post_tasks( offline_id=offline_id, online_id=online_id, @@ -360,6 +533,20 @@ class FolderUploadAction(UploadAction): @classmethod @override def initialise_object(cls, online_id: ObjectID | None, **data) -> Folder: + """Initialise/update a Folder object. + + Parameters + ---------- + online_id : str | None, optional + the online identifier for an object to update, default None. + **data + data to create/modify a folder. + + Returns + ------- + simvue.api.objects.Folder + a local representation of the server object. + """ if not online_id: return Folder.new(**data) @@ -374,6 +561,22 @@ def post_tasks( data: dict[str, typing.Any], cache_directory: pathlib.Path, ) -> None: + """Post-upload actions. + + Removes local JSON data on successful upload, and handles removal + of additional files defining related identifiers. + + Parameters + ----------- + offline_id : str + the offline identifier for the upload. + online_id : str + the recorded online identifier after upload. + data : dict[str, Any] + the data sent during upload. + cache_directory : pathlib.Path + the local cache directory to read from. + """ super().post_tasks( offline_id=offline_id, online_id=online_id, @@ -392,6 +595,20 @@ class TenantUploadAction(UploadAction): @classmethod @override def initialise_object(cls, online_id: ObjectID | None, **data) -> Tenant: + """Initialise/update a Tenant object. + + Parameters + ---------- + online_id : str | None, optional + the online identifier for an object to update, default None. + **data + data to create/modify a tenant. + + Returns + ------- + simvue.api.objects.administrator.Tenant + a local representation of the server object. + """ if not online_id: return Tenant.new(**data) @@ -404,6 +621,20 @@ class UserUploadAction(UploadAction): @classmethod @override def initialise_object(cls, online_id: ObjectID | None, **data) -> User: + """Initialise/update a User object. + + Parameters + ---------- + online_id : str | None, optional + the online identifier for an object to update, default None. + **data + data to create/modify a user. + + Returns + ------- + simvue.api.objects.administrator.User + a local representation of the server object. + """ if not online_id: return User.new(**data) @@ -416,6 +647,20 @@ class TagUploadAction(UploadAction): @classmethod @override def initialise_object(cls, online_id: ObjectID | None, **data) -> Tag: + """Initialise/update a Tag object. + + Parameters + ---------- + online_id : str | None, optional + the online identifier for an object to update, default None. + **data + data to create/modify a tag. + + Returns + ------- + simvue.api.objects.Tag + a local representation of the server object.
+ """ if not online_id: return Tag.new(**data) @@ -430,6 +675,22 @@ def post_tasks( data: dict[str, typing.Any], cache_directory: pathlib.Path, ) -> None: + """Post-upload actions. + + Removes local JSON data on successful upload, also handles removal + of additional files defining related identifiers. + + Parameters + ----------- + offline_id : str + the offline identifier for the upload. + online_id : str + the recorded online identifier after upload. + data : dict[str, Any] + the data sent during upload. + cache_directory : pathlib.Path + the local cache directory to read from. + """ super().post_tasks(offline_id, online_id, data, cache_directory) _ = cache_directory.joinpath("server_ids", f"{offline_id}.txt").write_text( online_id @@ -442,6 +703,20 @@ class AlertUploadAction(UploadAction): @classmethod @override def initialise_object(cls, online_id: ObjectID | None, **data) -> AlertType: + """Initialise/update an Alert object. + + Parameters + ---------- + online_id : str | None, optional + the online identifier for an object to update, default None. + **data + data to create/modify a run. + + Returns + ------- + simvue.api.objects.AlertType + a local representation of the server object. + """ if not online_id: _source: str = data["source"] @@ -465,6 +740,22 @@ def post_tasks( data: dict[str, typing.Any], cache_directory: pathlib.Path, ) -> None: + """Post-upload actions. + + Removes local JSON data on successful upload, also handles removal + of additional files defining related identifiers. + + Parameters + ----------- + offline_id : str + the offline identifier for the upload. + online_id : str + the recorded online identifier after upload. + data : dict[str, Any] + the data sent during upload. + cache_directory : pathlib.Path + the local cache directory to read from. + """ super().post_tasks(offline_id, online_id, data, cache_directory) _ = cache_directory.joinpath("server_ids", f"{offline_id}.txt").write_text( online_id @@ -479,6 +770,20 @@ class StorageUploadAction(UploadAction): def initialise_object( cls, online_id: ObjectID | None, **data ) -> S3Storage | FileStorage: + """Initialise/update an Storage object. + + Parameters + ---------- + online_id : str | None, optional + the online identifier for an object to update, default None. + **data + data to create/modify a run. + + Returns + ------- + simvue.api.objects.S3Storage | simvue.api.objects.FileStorage + a local representation of the server object. + """ if not online_id: if data.get("config", {}).get("endpoint_url"): return S3Storage.new(**data) @@ -494,6 +799,20 @@ class GridUploadAction(UploadAction): @classmethod @override def initialise_object(cls, online_id: ObjectID | None, **data) -> Grid: + """Initialise/update an Grid object. + + Parameters + ---------- + online_id : str | None, optional + the online identifier for an object to update, default None. + **data + data to create/modify a run. + + Returns + ------- + simvue.api.objects.Grid + a local representation of the server object. + """ if not online_id: return Grid.new(**data) @@ -507,6 +826,21 @@ class MetricsUploadAction(UploadAction): @classmethod @override def initialise_object(cls, online_id: ObjectID | None, **data) -> Metrics: + """Initialise Metrics. + + Parameters + ---------- + online_id : str | None, optional + parameter is ignored in this case, + update is ambiguous in this context + **data + data to create/modify a run. + + Returns + ------- + simvue.api.objects.Grid + a local representation of the server object. 
+ """ _ = online_id return Metrics.new(**data) @@ -518,6 +852,21 @@ class GridMetricsUploadAction(UploadAction): @classmethod @override def initialise_object(cls, online_id: ObjectID | None, **data) -> GridMetrics: + """Initialise GridMetrics. + + Parameters + ---------- + online_id : str | None, optional + parameter is ignored in this case, + update is ambiguous in this context + **data + data to create/modify a run. + + Returns + ------- + simvue.api.objects.GridMetrics + a local representation of the server object. + """ _ = online_id return GridMetrics.new(**data) @@ -529,6 +878,21 @@ class EventsUploadAction(UploadAction): @classmethod @override def initialise_object(cls, online_id: ObjectID | None, **data) -> Events: + """Initialise Events. + + Parameters + ---------- + online_id : str | None, optional + parameter is ignored in this case, + update is ambiguous in this context + **data + data to create/modify a run. + + Returns + ------- + simvue.api.objects.Events + a local representation of the server object. + """ _ = online_id return Events.new(**data) @@ -540,6 +904,7 @@ class HeartbeatUploadAction(UploadAction): @override @classmethod def initialise_object(cls, online_id: ObjectID | None, **data) -> None: + """No initialiser for this action.""" _ = online_id _ = data @@ -548,6 +913,7 @@ def initialise_object(cls, online_id: ObjectID | None, **data) -> None: def pre_tasks( cls, offline_id: str, data: dict[str, typing.Any], cache_directory: pathlib.Path ) -> None: + """No pre-tasks for this action.""" _ = offline_id _ = data _ = cache_directory @@ -556,7 +922,7 @@ def pre_tasks( @override @classmethod def uploadable_objects(cls, cache_directory: pathlib.Path) -> Generator[str]: - """Iterate through uploadables.""" + """Iterate through uploadable heartbeat run identifiers.""" for file in cache_directory.glob("runs/*.heartbeat"): yield file.stem @@ -572,6 +938,7 @@ def _single_item_upload( throw_exceptions: bool = False, retry_failed: bool = False, ) -> None: + """Upload a single heartbeat item.""" if not (_online_id := id_mapping.get(identifier)): # Run has been closed - can just remove heartbeat and continue cache_directory.joinpath(f"runs/{identifier}.heartbeat").unlink() @@ -606,6 +973,11 @@ def post_tasks( data: dict[str, typing.Any], cache_directory: pathlib.Path, ) -> None: + """No post-tasks for this action.""" + _ = offline_id + _ = data + _ = cache_directory + _ = online_id pass @@ -615,6 +987,7 @@ class CO2IntensityUploadAction(UploadAction): @override @classmethod def initialise_object(cls, online_id: ObjectID | None, **data) -> None: + """No initialiser for this action.""" _ = online_id _ = data @@ -623,6 +996,7 @@ def initialise_object(cls, online_id: ObjectID | None, **data) -> None: def pre_tasks( cls, offline_id: str, data: dict[str, typing.Any], cache_directory: pathlib.Path ) -> None: + """No pre-tasks for this action.""" _ = offline_id _ = data _ = cache_directory @@ -636,6 +1010,7 @@ def post_tasks( data: dict[str, typing.Any], cache_directory: pathlib.Path, ) -> None: + """No post-tasks for this action.""" _ = offline_id _ = data _ = cache_directory @@ -643,6 +1018,7 @@ def post_tasks( @override @classmethod def uploadable_objects(cls, cache_directory: pathlib.Path) -> Generator[str]: + """No uploadable object file data for this action.""" yield from () @override @@ -669,16 +1045,21 @@ def upload( id_mapping: dict[str, str], cache_directory: pathlib.Path, thread_lock: threading.Lock, - single_thread_limit: int, + threading_threshold: int, max_thread_workers: 
int, *, throw_exceptions: bool = False, retry_failed: bool = False, + upload_status: dict[str, str | float] | None = None, + simvue_monitor_run: dict[str, str | float] | None = None, ) -> None: + """Upload CO2 intensity data.""" _ = id_mapping _ = thread_lock - _ = single_thread_limit + _ = threading_threshold _ = max_thread_workers + _ = simvue_monitor_run + _ = upload_status _local_config: SimvueConfiguration = SimvueConfiguration.fetch() @@ -700,6 +1081,7 @@ def upload( cls.logger.exception(e) +# Define the upload action ordering UPLOAD_ACTION_ORDER: tuple[type[UploadAction], ...] = ( TenantUploadAction, UserUploadAction, diff --git a/simvue/sender/base.py b/simvue/sender/base.py index 0c99f25f..e9036cbe 100644 --- a/simvue/sender/base.py +++ b/simvue/sender/base.py @@ -45,8 +45,24 @@ def __init__( throw_exceptions: bool = False, retry_failed_uploads: bool = False, run_notification: typing.Literal["none", "all", "email"] = "none", + run_retention_period: str | None = None, ) -> None: - """Creates required local directories.""" + """Initialise a local data sender. + + Parameters + ---------- + cache_directory : pydantic.DirectoryPath | None, optional + The directory where cached files are stored, else use default. + max_workers : int, optional + The maximum number of threads to use, default 5. + threading_threshold : int, optional + The number of cached files above which threading will be used, default 10. + throw_exceptions : bool, optional + Whether to throw exceptions as they are encountered in the sender, + default is False (exceptions will be logged) + retry_failed_uploads : bool, optional + Whether to retry sending objects which previously failed, by default False + """ _local_config: SimvueConfiguration = SimvueConfiguration.fetch() self._cache_directory = cache_directory or _local_config.offline.cache self._cache_directory.joinpath("server_ids").mkdir(parents=True, exist_ok=True) @@ -57,6 +73,8 @@ def __init__( self._lock_path = self._cache_directory.joinpath("sender.lock") self._thread_lock = threading.Lock() self._run_notification: typing.Literal["none", "email"] = run_notification + self._run_retention_period: str | None = run_retention_period + self._upload_status: dict[str, str | float] = {} self._id_mapping = { file_path.name.split(".")[0]: file_path.read_text() for file_path in self._cache_directory.glob("server_ids/*.txt") @@ -96,7 +114,14 @@ def _initialise_monitor_run(self) -> Run: name=f"sender_upload_{_time_stamp}", folder="/sender", notification=self._run_notification, + description="Simvue sender upload session.", + retention_period=self._run_retention_period, timeout=None, + metadata={ + f"sender.item_count.{upload_object.object_type}": _obj_count + for upload_object in UPLOAD_ACTION_ORDER + if (_obj_count := upload_object.count(self._cache_directory)) > 0 + }, no_color=True, ) _ = _run.config(suppress_errors=True, enable_emission_metrics=False) @@ -107,14 +132,23 @@ def _initialise_monitor_run(self) -> Run: trigger_abort=False, ) + _run.upload_count = 0 + return _run @pydantic.validate_call def upload(self, objects_to_upload: list[UploadItem] | None = None) -> None: - """Upload objects to server.""" + """Upload objects to server. 
+ + Parameters + ---------- + objects_to_upload : list[str] + Types of objects to upload, by default uploads all types of objects present in cache + """ self._lock() _monitor_run = self._initialise_monitor_run() + self._upload_status = {} for action in UPLOAD_ACTION_ORDER: if objects_to_upload and action.object_type not in objects_to_upload: @@ -130,8 +164,10 @@ def upload(self, objects_to_upload: list[UploadItem] | None = None) -> None: thread_lock=self._thread_lock, throw_exceptions=self._throw_exceptions, retry_failed=self._retry_failed_uploads, - single_thread_limit=self._threading_threshold, + threading_threshold=self._threading_threshold, max_thread_workers=self._max_workers, simvue_monitor_run=_monitor_run, + upload_status=self._upload_status, ) + _monitor_run.close() self._release() From 1ae29f8390dc8b369a39963b80c16aa92b5330ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 23 Oct 2025 08:53:03 +0100 Subject: [PATCH 05/16] =?UTF-8?q?=F0=9F=90=9B=20Re-applied=20fixes=20from?= =?UTF-8?q?=20dev?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/sender/actions.py | 2 +- simvue/sender/base.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/simvue/sender/actions.py b/simvue/sender/actions.py index 63471dd4..7f955945 100644 --- a/simvue/sender/actions.py +++ b/simvue/sender/actions.py @@ -944,7 +944,7 @@ def _single_item_upload( cache_directory.joinpath(f"runs/{identifier}.heartbeat").unlink() return - _local_config: SimvueConfiguration = SimvueConfiguration.fetch() + _local_config: SimvueConfiguration = SimvueConfiguration.fetch(mode="online") cls.logger.info("Sending heartbeat to run '%s'", identifier) diff --git a/simvue/sender/base.py b/simvue/sender/base.py index e9036cbe..d20da298 100644 --- a/simvue/sender/base.py +++ b/simvue/sender/base.py @@ -63,7 +63,7 @@ def __init__( retry_failed_uploads : bool, optional Whether to retry sending objects which previously failed, by default False """ - _local_config: SimvueConfiguration = SimvueConfiguration.fetch() + _local_config: SimvueConfiguration = SimvueConfiguration.fetch(mode="online") self._cache_directory = cache_directory or _local_config.offline.cache self._cache_directory.joinpath("server_ids").mkdir(parents=True, exist_ok=True) self._throw_exceptions = throw_exceptions From 824714293b1feeeb5b8c846ddf43b5c305c03755 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 23 Oct 2025 10:49:55 +0100 Subject: [PATCH 06/16] =?UTF-8?q?=F0=9F=90=9B=20Ensure=20monitor=20argumen?= =?UTF-8?q?t=20present=20in=20all=20single=20item=20upload=20methods?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/sender/actions.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/simvue/sender/actions.py b/simvue/sender/actions.py index 7f955945..fcae0e04 100644 --- a/simvue/sender/actions.py +++ b/simvue/sender/actions.py @@ -934,11 +934,13 @@ def _single_item_upload( id_mapping: dict[str, str], cache_directory: pathlib.Path, thread_lock: threading.Lock, + simvue_monitor_run: SimvueRun, *, throw_exceptions: bool = False, retry_failed: bool = False, ) -> None: """Upload a single heartbeat item.""" + _ = simvue_monitor_run if not (_online_id := id_mapping.get(identifier)): # Run has been closed - can just remove heartbeat and continue cache_directory.joinpath(f"runs/{identifier}.heartbeat").unlink() @@ -1029,10 +1031,12 @@ def _single_item_upload( id_mapping: dict[str, str], 
cache_directory: pathlib.Path, thread_lock: threading.Lock, + simvue_monitor_run: SimvueRun, *, throw_exceptions: bool = False, retry_failed: bool = False, ) -> None: + _ = simvue_monitor_run _ = identifier _ = id_mapping _ = cache_directory From 0532f0c61a964e32b7621c1367fe8ae25858cb1a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 28 Oct 2025 10:21:58 +0000 Subject: [PATCH 07/16] =?UTF-8?q?=F0=9F=90=9B=20Add=20missing=20arguments?= =?UTF-8?q?=20to=20CO2=20and=20heartbeat=20upload=20actions?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/sender/actions.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/simvue/sender/actions.py b/simvue/sender/actions.py index fcae0e04..946a5a5a 100644 --- a/simvue/sender/actions.py +++ b/simvue/sender/actions.py @@ -938,6 +938,7 @@ def _single_item_upload( *, throw_exceptions: bool = False, retry_failed: bool = False, + upload_status: dict[str, str | float] | None, ) -> None: """Upload a single heartbeat item.""" _ = simvue_monitor_run @@ -1035,6 +1036,7 @@ def _single_item_upload( *, throw_exceptions: bool = False, retry_failed: bool = False, + upload_status: dict[str, str | float] | None, ) -> None: _ = simvue_monitor_run _ = identifier From aafae9976abcc28e16f315877ece32b344dc386c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 28 Oct 2025 11:20:03 +0000 Subject: [PATCH 08/16] =?UTF-8?q?=F0=9F=90=9B=20More=20bug=20fixes=20to=20?= =?UTF-8?q?offline=20Sender=20class=20tests?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/api/objects/administrator/tenant.py | 1 + simvue/sender/actions.py | 2 +- tests/functional/test_run_class.py | 22 +++++++++++----------- tests/unit/test_tag.py | 2 +- tests/unit/test_tenant.py | 4 ++-- 5 files changed, 16 insertions(+), 15 deletions(-) diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index 88dd3a14..ffb71918 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -30,6 +30,7 @@ def new( max_runs: int = 0, max_data_volume: int = 0, offline: bool = False, + **_, ) -> Self: """Create a new tenant on the Simvue server. 
diff --git a/simvue/sender/actions.py b/simvue/sender/actions.py index 946a5a5a..ea679710 100644 --- a/simvue/sender/actions.py +++ b/simvue/sender/actions.py @@ -1067,7 +1067,7 @@ def upload( _ = simvue_monitor_run _ = upload_status - _local_config: SimvueConfiguration = SimvueConfiguration.fetch() + _local_config: SimvueConfiguration = SimvueConfiguration.fetch(mode="online") if not _local_config.metrics.enable_emission_metrics: return diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index fb6ddc99..0a990629 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -21,9 +21,7 @@ import simvue from simvue.api.objects import Alert, Metrics from simvue.api.objects.grids import GridMetrics -from simvue.eco.api_client import CO2SignalData, CO2SignalResponse from simvue.exception import ObjectNotFoundError, SimvueRunError -from simvue.eco.emissions_monitor import TIME_FORMAT, CO2Monitor from simvue.sender import Sender import simvue.run as sv_run import simvue.client as sv_cl @@ -1222,27 +1220,30 @@ def test_add_alerts_offline(monkeypatch) -> None: rule="is inside range", ) - _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) - _online_run = RunObject(identifier=_id_mapping.get(run.id)) + _sender = Sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender.upload() + _online_run = RunObject(identifier=_sender.id_mapping.get(run.id)) # Check that there is no duplication - assert sorted(_online_run.alerts) == sorted([_id_mapping.get(_id) for _id in _expected_alerts]) + assert sorted(_online_run.alerts) == sorted([_sender.id_mapping.get(_id) for _id in _expected_alerts]) # Create another run without adding to run _id = run.create_user_alert(name=f"user_alert_{_uuid}", attach_to_run=False) - _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender = Sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender.upload() # Check alert is not added _online_run.refresh() - assert sorted(_online_run.alerts) == sorted([_id_mapping.get(_id) for _id in _expected_alerts]) + assert sorted(_online_run.alerts) == sorted([_sender.id_mapping.get(_id) for _id in _expected_alerts]) # Try adding alerts with IDs, check there is no duplication _expected_alerts.append(_id) run.add_alerts(ids=_expected_alerts) - _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender = Sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True) + _sender.upload() _online_run.refresh() - assert sorted(_online_run.alerts) == sorted([_id_mapping.get(_id) for _id in _expected_alerts]) + assert sorted(_online_run.alerts) == sorted([_sender.id_mapping.get(_id) for _id in _expected_alerts]) run.close() @@ -1253,7 +1254,7 @@ def test_add_alerts_offline(monkeypatch) -> None: remove_runs=True, recursive=True ) - for _id in [_id_mapping.get(_id) for _id in _expected_alerts]: + for _id in [_sender.id_mapping.get(_id) for _id in _expected_alerts]: client.delete_alert(_id) @@ -1478,7 +1479,6 @@ def test_reconnect_functionality(mode, monkeypatch: pytest.MonkeyPatch) -> None: if mode == "offline": _sender = Sender(cache_directory=os.environ["SIMVUE_OFFLINE_DIRECTORY"], max_workers=2, threading_threshold=10, throw_exceptions=True) _sender.upload() - _id_mapping = _sender.id_mapping _reconnected_run = client.get_run(run_id) assert 
dict(_reconnected_run.metrics)["test_metric"]["last"] == 1 diff --git a/tests/unit/test_tag.py b/tests/unit/test_tag.py index 1fa2f4de..c91af8c1 100644 --- a/tests/unit/test_tag.py +++ b/tests/unit/test_tag.py @@ -79,7 +79,7 @@ def test_tag_modification_offline(offline_cache_setup) -> None: assert _local_data.get("name") == f"test_tag_{_uuid}" - _sender = sender(_tag._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender = Sender(_tag._local_staging_file.parents[1], 1, 10, throw_exceptions=True) _sender.upload(["tags"]) _online_id = _sender.id_mapping.get(_tag.id) _online_tag = Tag(_online_id) diff --git a/tests/unit/test_tenant.py b/tests/unit/test_tenant.py index 5ff854ae..73117ddd 100644 --- a/tests/unit/test_tenant.py +++ b/tests/unit/test_tenant.py @@ -40,8 +40,8 @@ def test_create_tenant_offline(offline_cache_setup) -> None: assert _local_data.get("name") == _uuid assert _local_data.get("is_enabled") == True - _sender = Sender(_new_tenant._local_staging_file.parents[1], 1, 10, ["tenants"], throw_exceptions=True) - _sender.upload() + _sender = Sender(_new_tenant._local_staging_file.parents[1], 1, 10, throw_exceptions=True) + _sender.upload(["tenants"]) time.sleep(1) _online_user = Tenant(_sender.id_mapping.get(_new_tenant.id)) assert _online_user.name == _uuid From c3ded7746fa9d6423832c9c1e46b3faad07b16ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 29 Oct 2025 09:39:33 +0000 Subject: [PATCH 09/16] =?UTF-8?q?=F0=9F=A7=AA=20Fix=20sender=20tests?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/unit/test_user_alert.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py index 77141312..f13248c3 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -199,7 +199,7 @@ def test_user_alert_status_offline(offline_cache_setup) -> None: # Get online aler, check status is not set _online_alert = UserAlert(_sender.id_mapping.get(_alert.id)) - assert not _online_alert.get_status(run_id=_id_mapping.get(_run.id)) + assert not _online_alert.get_status(run_id=_sender.id_mapping.get(_run.id)) _alert.set_status(_run.id, "critical") _alert.commit() @@ -207,7 +207,7 @@ def test_user_alert_status_offline(offline_cache_setup) -> None: # Check online status is still not set as change has not been sent _online_alert.refresh() - assert not _online_alert.get_status(run_id=_id_mapping.get(_run.id)) + assert not _online_alert.get_status(run_id=_sender.id_mapping.get(_run.id)) _sender = Sender(_alert._local_staging_file.parents[1], 1, 10, throw_exceptions=True) _sender.upload(["alerts"]) From 7cee8950d36e31270c3fec10e060785e324b0712 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 29 Oct 2025 18:18:14 +0000 Subject: [PATCH 10/16] =?UTF-8?q?=F0=9F=92=9A=20Use=20parallelised=20jobs?= =?UTF-8?q?=20in=20CI?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../workflows/test_client_macos_nightlies.yml | 16 +++++++-------- .github/workflows/test_client_ubuntu.yml | 16 +++++++-------- .../test_client_ubuntu_nightlies.yml | 20 +++++++++---------- .../test_client_windows_nightlies.yml | 18 ++++++++--------- .github/workflows/test_multiple_python.yml | 18 ++++++++--------- 5 files changed, 44 insertions(+), 44 deletions(-) diff --git a/.github/workflows/test_client_macos_nightlies.yml b/.github/workflows/test_client_macos_nightlies.yml index 
7a4e723b..b046ef0c 100644 --- a/.github/workflows/test_client_macos_nightlies.yml +++ b/.github/workflows/test_client_macos_nightlies.yml @@ -37,7 +37,7 @@ jobs: run: >- python -m pytest -x -m object_retrieval -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache object_removal: runs-on: ubuntu-latest timeout-minutes: 30 @@ -62,7 +62,7 @@ jobs: run: >- python -m pytest -x -m object_removal -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache dispatch_tests: runs-on: ubuntu-latest timeout-minutes: 30 @@ -86,7 +86,7 @@ jobs: run: >- python -m pytest -x -m dispatch -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache run_tests_online: runs-on: ubuntu-latest timeout-minutes: 30 @@ -110,8 +110,8 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: >- python -m pytest -x - -m run -m online -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -m "online and not api" -c /dev/null -p no:warnings + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache run_tests_offline: runs-on: ubuntu-latest timeout-minutes: 30 @@ -135,7 +135,7 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: >- python -m pytest -x - -m run -m offline -c /dev/null -p no:warnings + -m "offline and not api" -c /dev/null -p no:warnings -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache config_tests: runs-on: ubuntu-latest @@ -160,7 +160,7 @@ jobs: run: >- python -m pytest -x -m config -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache executor_tests: runs-on: ubuntu-latest timeout-minutes: 30 @@ -232,4 +232,4 @@ jobs: run: >- python -m pytest -x -m local -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache diff --git a/.github/workflows/test_client_ubuntu.yml b/.github/workflows/test_client_ubuntu.yml index 23251bc7..bc8911e2 100644 --- a/.github/workflows/test_client_ubuntu.yml +++ b/.github/workflows/test_client_ubuntu.yml @@ -42,7 +42,7 @@ jobs: run: >- python -m pytest -x -m object_retrieval -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache object_removal: runs-on: ubuntu-latest timeout-minutes: 30 @@ -67,7 +67,7 @@ jobs: run: >- python -m pytest -x -m object_removal -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache dispatch_tests: runs-on: ubuntu-latest timeout-minutes: 30 @@ -91,7 +91,7 @@ jobs: run: >- python -m pytest -x -m dispatch -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache run_tests_online: runs-on: ubuntu-latest timeout-minutes: 30 @@ -115,8 +115,8 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: >- python -m pytest -x - -m run -m online -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -m "online and not api" -c /dev/null -p no:warnings + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache run_tests_offline: runs-on: ubuntu-latest timeout-minutes: 30 @@ -140,7 +140,7 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN 
}} run: >- python -m pytest -x - -m run -m offline -c /dev/null -p no:warnings + -m "offline and not api" -c /dev/null -p no:warnings -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache config_tests: runs-on: ubuntu-latest @@ -165,7 +165,7 @@ jobs: run: >- python -m pytest -x -m config -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache executor_tests: runs-on: ubuntu-latest timeout-minutes: 30 @@ -237,4 +237,4 @@ jobs: run: >- python -m pytest -x -m local -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache diff --git a/.github/workflows/test_client_ubuntu_nightlies.yml b/.github/workflows/test_client_ubuntu_nightlies.yml index 724c2ef7..e868da63 100644 --- a/.github/workflows/test_client_ubuntu_nightlies.yml +++ b/.github/workflows/test_client_ubuntu_nightlies.yml @@ -40,7 +40,7 @@ jobs: run: >- python -m pytest -x -m object_retrieval -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache object_removal: runs-on: ubuntu-latest timeout-minutes: 30 @@ -65,7 +65,7 @@ jobs: run: >- python -m pytest -x -m object_removal -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache dispatch_tests: runs-on: ubuntu-latest timeout-minutes: 30 @@ -89,8 +89,8 @@ jobs: run: >- python -m pytest -x -m dispatch -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache - run_tests_online: + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + online_online: runs-on: ubuntu-latest timeout-minutes: 30 steps: @@ -113,9 +113,9 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: >- python -m pytest -x - -m run -m online -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache - run_tests_offline: + -m "online and not api" -c /dev/null -p no:warnings + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + offline_tests: runs-on: ubuntu-latest timeout-minutes: 30 steps: @@ -138,7 +138,7 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: >- python -m pytest -x - -m run -m offline -c /dev/null -p no:warnings + -m "offline and not api" -c /dev/null -p no:warnings -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache config_tests: runs-on: ubuntu-latest @@ -163,7 +163,7 @@ jobs: run: >- python -m pytest -x -m config -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache executor_tests: runs-on: ubuntu-latest timeout-minutes: 30 @@ -235,4 +235,4 @@ jobs: run: >- python -m pytest -x -m local -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache diff --git a/.github/workflows/test_client_windows_nightlies.yml b/.github/workflows/test_client_windows_nightlies.yml index d852dd39..4b863b56 100644 --- a/.github/workflows/test_client_windows_nightlies.yml +++ b/.github/workflows/test_client_windows_nightlies.yml @@ -36,8 +36,8 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: >- python -m pytest -x - -m object_retrieval -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -m 'object_retrieval and not unix' -c /dev/null -p no:warnings + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache shell: 
pwsh object_removal: runs-on: windows-latest @@ -63,7 +63,7 @@ jobs: run: >- python -m pytest -x -m 'object_removal and not unix' -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache shell: pwsh dispatch_tests: runs-on: windows-latest @@ -88,7 +88,7 @@ jobs: run: >- python -m pytest -x -m 'dispatch and not unix' -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache shell: pwsh run_tests_online: runs-on: windows-latest @@ -113,8 +113,8 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: >- python -m pytest -x - -m run -m 'online and not unix' -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -m run -m 'online and not unix and not api' -c /dev/null -p no:warnings + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache shell: pwsh run_tests_offline: runs-on: windows-latest @@ -139,7 +139,7 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: >- python -m pytest -x - -m run -m 'offline and not unix' -c /dev/null -p no:warnings + -m run -m 'offline and not unix and not api' -c /dev/null -p no:warnings -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache shell: pwsh config_tests: @@ -165,7 +165,7 @@ jobs: run: >- python -m pytest -x -m 'config and not unix' -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache shell: pwsh executor_tests: runs-on: windows-latest @@ -240,5 +240,5 @@ jobs: run: >- python -m pytest -x -m 'local and not unix' -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache shell: pwsh diff --git a/.github/workflows/test_multiple_python.yml b/.github/workflows/test_multiple_python.yml index 7929acb7..7a44f92a 100644 --- a/.github/workflows/test_multiple_python.yml +++ b/.github/workflows/test_multiple_python.yml @@ -45,7 +45,7 @@ jobs: run: >- python -m pytest -x -m object_retrieval -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache object_removal: runs-on: ubuntu-latest name: Object Removal Tests @@ -75,7 +75,7 @@ jobs: run: >- python -m pytest -x -m object_removal -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache dispatch_tests: runs-on: ubuntu-latest timeout-minutes: 30 @@ -104,7 +104,7 @@ jobs: run: >- python -m pytest -x -m dispatch -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache run_tests_online: name: Run Tests Online runs-on: ubuntu-latest @@ -133,8 +133,8 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: >- python -m pytest -x - -m run -m online -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -m "online and not api" -c /dev/null -p no:warnings + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache run_tests_offline: runs-on: ubuntu-latest name: Run Tests Offline @@ -163,7 +163,7 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: >- python -m pytest -x - -m run -m offline -c /dev/null -p no:warnings + -m "offline and not api" -c /dev/null -p no:warnings -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache config_tests: runs-on: ubuntu-latest @@ -193,7 
+193,7 @@ jobs: run: >- python -m pytest -x -m config -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache executor_tests: runs-on: ubuntu-latest name: Executor Tests @@ -251,7 +251,7 @@ jobs: run: >- python -m pytest -x -m api -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache local_tests: runs-on: ubuntu-latest name: Local Tests @@ -280,4 +280,4 @@ jobs: run: >- python -m pytest -x -m local -c /dev/null -p no:warnings - -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache + -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache From 7bc878b7a3c0d2437a6d8c61bc7281854d05bb63 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 5 Nov 2025 09:17:10 +0000 Subject: [PATCH 11/16] =?UTF-8?q?=F0=9F=9A=A7=20Continue=20fixes=20and=20t?= =?UTF-8?q?est=20creation=20for=20sender=20CLI?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/bin/__init__.py | 3 +++ simvue/bin/sender.py | 4 +++- simvue/sender/base.py | 2 ++ tests/conftest.py | 8 ++++++-- 4 files changed, 14 insertions(+), 3 deletions(-) diff --git a/simvue/bin/__init__.py b/simvue/bin/__init__.py index e69de29b..dcca5b13 100644 --- a/simvue/bin/__init__.py +++ b/simvue/bin/__init__.py @@ -0,0 +1,3 @@ +from .sender import run as run_sender + +__all__ = ["run_sender"] diff --git a/simvue/bin/sender.py b/simvue/bin/sender.py index 0d62ba68..330e2b20 100644 --- a/simvue/bin/sender.py +++ b/simvue/bin/sender.py @@ -32,7 +32,7 @@ "-o", "--objects-to-upload", type=str, - nargs=-1, + multiple=True, required=False, default=UPLOAD_ORDER, help="The object types to upload, by default All", @@ -63,7 +63,9 @@ def run( cache_directory=cache_directory, max_workers=max_workers, threading_threshold=threading_threshold, + throw_exceptions=True, ) _sender.upload(objects_to_upload) except Exception as err: _logger.critical("Exception running sender: %s", str(err)) + raise click.Abort diff --git a/simvue/sender/base.py b/simvue/sender/base.py index d20da298..aacadc58 100644 --- a/simvue/sender/base.py +++ b/simvue/sender/base.py @@ -30,6 +30,8 @@ "metrics", "grid_metrics", "events", + "heartbeat", + "co2_intensity", ] UPLOAD_ORDER: list[str] = [action.object_type for action in UPLOAD_ACTION_ORDER] diff --git a/tests/conftest.py b/tests/conftest.py index ab574f2c..0e8570b8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -156,11 +156,12 @@ def testing_exit(status: int) -> None: with tempfile.TemporaryDirectory() as temp_d: monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d) with sv_run.Run("offline") as run: - yield run, setup_test_run(run, True, request) + _test_run_data = setup_test_run(run, True, request) + yield run, _test_run_data with contextlib.suppress(ObjectNotFoundError): sv_api_obj.Folder(identifier=run._folder.id).delete(recursive=True, delete_runs=True, runs_only=False) for alert_id in _test_run_data.get("alert_ids", []): - with contextlib.suppress(ObjectNotFoundError): + with contextlib.suppress(ObjectNotFoundError, RuntimeError): sv_api_obj.Alert(identifier=alert_id).delete() clear_out_files() @@ -172,6 +173,7 @@ def testing_exit(status: int) -> None: with sv_run.Run() as run: run.metric_spy = mocker.spy(run, "_get_internal_metrics") yield run, setup_test_run(run, False, request) + clear_out_files() @pytest.fixture @@ -306,6 +308,8 @@ def setup_test_run(run: sv_run.Run, 
create_objects: bool, request: pytest.Fixtur out_f.write( "print('Hello World!')" ) + print(test_script) + assert pathlib.Path(test_script).exists() run.save_file(test_script, category="code", name="test_code_upload") TEST_DATA["file_3"] = "test_code_upload" From 7d9ef3dd509357cff778d687c8229716bc7763af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 10 Nov 2025 11:43:01 +0000 Subject: [PATCH 12/16] =?UTF-8?q?=F0=9F=90=9B=20Add=20obj=5Ftype=20to=20li?= =?UTF-8?q?st=20of=20local=20attributes=20in=20LLAPI?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/api/objects/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 0d386320..0b38df6c 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -200,7 +200,7 @@ def __init__( # For simvue object initialisation, unlike the server there is no nested # arguments, however this means that there are extra keys during post which # need removing, this attribute handles that and should be set in subclasses. - self._local_only_args: list[str] = ["created"] + self._local_only_args: list[str] = ["created", "obj_type"] self._identifier: str | None = ( identifier if identifier is not None else f"offline_{uuid.uuid1()}" From ec02b45646daf93883d02162d6bbdb4b7894b37c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 10 Nov 2025 11:57:10 +0000 Subject: [PATCH 13/16] =?UTF-8?q?=F0=9F=90=9B=20Fixed=20local=20argument?= =?UTF-8?q?=20assembly?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/api/objects/alert/base.py | 2 +- simvue/api/objects/storage/s3.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 5c3953cb..7643a026 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -31,7 +31,7 @@ def __init__(self, identifier: str | None = None, **kwargs) -> None: """Retrieve an alert from the Simvue server by identifier""" self._label = "alert" super().__init__(identifier=identifier, **kwargs) - self._local_only_args = [ + self._local_only_args += [ "frequency", "pattern", "aggregation", diff --git a/simvue/api/objects/storage/s3.py b/simvue/api/objects/storage/s3.py index c4c522cc..9aee71a0 100644 --- a/simvue/api/objects/storage/s3.py +++ b/simvue/api/objects/storage/s3.py @@ -27,7 +27,7 @@ def __init__(self, identifier: str | None = None, **kwargs) -> None: """Initialise an S3Storage instance attaching a configuration""" self.config = Config(self) super().__init__(identifier, **kwargs) - self._local_only_args: list[str] = [ + self._local_only_args += [ "endpoint_url", "region_name", "access_key_id", From d5a9bcffceec5c5db6f5000ba177736eb298a0d2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 1 Dec 2025 09:25:40 +0000 Subject: [PATCH 14/16] =?UTF-8?q?=F0=9F=A7=AA=20Added=20sender=20CLI=20tes?= =?UTF-8?q?t?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 1178 +++++++++++++++--------------- pyproject.toml | 2 +- tests/cli/test_sender_command.py | 29 + tests/conftest.py | 54 +- 4 files changed, 653 insertions(+), 610 deletions(-) create mode 100644 tests/cli/test_sender_command.py diff --git a/poetry.lock b/poetry.lock index edde32f8..d4952045 100644 --- a/poetry.lock +++ b/poetry.lock @@ 
-26,115 +26,149 @@ files = [ [[package]] name = "certifi" -version = "2025.10.5" +version = "2025.11.12" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de"}, - {file = "certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43"}, + {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"}, + {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"}, ] [[package]] name = "charset-normalizer" -version = "3.4.3" +version = "3.4.4" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f"}, - {file = "charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91"}, - {file = 
"charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849"}, - {file = "charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37"}, - {file = "charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce"}, - {file = "charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce"}, - {file = "charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c"}, - {file = 
"charset_normalizer-3.4.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f2be7e0cf7754b9a30eb01f4295cc3d4358a479843b31f328afd210e2c7598c"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c60e092517a73c632ec38e290eba714e9627abe9d301c8c8a12ec32c314a2a4b"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:252098c8c7a873e17dd696ed98bbe91dbacd571da4b87df3736768efa7a792e4"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3653fad4fe3ed447a596ae8638b437f827234f01a8cd801842e43f3d0a6b281b"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8999f965f922ae054125286faf9f11bc6932184b93011d138925a1773830bbe9"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d95bfb53c211b57198bb91c46dd5a2d8018b3af446583aab40074bf7988401cb"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:5b413b0b1bfd94dbf4023ad6945889f374cd24e3f62de58d6bb102c4d9ae534a"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:b5e3b2d152e74e100a9e9573837aba24aab611d39428ded46f4e4022ea7d1942"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a2d08ac246bb48479170408d6c19f6385fa743e7157d716e144cad849b2dd94b"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-win32.whl", hash = "sha256:ec557499516fc90fd374bf2e32349a2887a876fbf162c160e3c01b6849eaf557"}, - {file = "charset_normalizer-3.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:5d8d01eac18c423815ed4f4a2ec3b439d654e55ee4ad610e153cf02faf67ea40"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-win32.whl", hash = 
"sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432"}, - {file = "charset_normalizer-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca"}, - {file = "charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a"}, - {file = "charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", 
hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = 
"sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, ] [[package]] name = "click" -version = "8.3.0" +version = "8.3.1" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" groups = ["main", "dev"] files = [ - {file = "click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc"}, - {file = "click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4"}, + {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"}, + {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"}, ] [package.dependencies] @@ -326,116 +360,104 @@ test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist" [[package]] name = "coverage" -version = "7.10.7" +version = "7.12.0" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["dev"] files = [ - {file = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"}, - {file = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"}, - {file = "coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17"}, - {file = "coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b"}, - {file = "coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87"}, - {file = "coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e"}, - {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e"}, - {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df"}, - {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0"}, - {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13"}, - {file = "coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b"}, - {file = "coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807"}, - {file = "coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59"}, - {file = "coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a"}, - {file = "coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699"}, - {file = "coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d"}, - {file = "coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e"}, - {file = "coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23"}, - {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab"}, - {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82"}, - {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2"}, - {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61"}, - {file = "coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14"}, - {file = "coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2"}, - {file = "coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a"}, - {file = 
"coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417"}, - {file = "coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973"}, - {file = "coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c"}, - {file = "coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7"}, - {file = "coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6"}, - {file = "coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59"}, - {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b"}, - {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a"}, - {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb"}, - {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1"}, - {file = "coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256"}, - {file = "coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba"}, - {file = "coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf"}, - {file = "coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d"}, - {file = "coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b"}, - {file = "coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e"}, - {file = "coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b"}, - {file = "coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49"}, - {file = "coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911"}, - {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0"}, - {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f"}, - {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c"}, - {file = 
"coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f"}, - {file = "coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698"}, - {file = "coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843"}, - {file = "coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546"}, - {file = "coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c"}, - {file = "coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15"}, - {file = "coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4"}, - {file = "coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0"}, - {file = "coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0"}, - {file = "coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65"}, - {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541"}, - {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6"}, - {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999"}, - {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2"}, - {file = "coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a"}, - {file = "coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb"}, - {file = "coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb"}, - {file = "coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520"}, - {file = "coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32"}, - {file = "coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f"}, - {file = "coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a"}, - {file = "coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360"}, - {file = 
"coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69"}, - {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14"}, - {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe"}, - {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e"}, - {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd"}, - {file = "coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2"}, - {file = "coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681"}, - {file = "coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880"}, - {file = "coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63"}, - {file = "coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2"}, - {file = "coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d"}, - {file = "coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0"}, - {file = "coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699"}, - {file = "coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9"}, - {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f"}, - {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1"}, - {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0"}, - {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399"}, - {file = "coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235"}, - {file = "coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d"}, - {file = "coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a"}, - {file = "coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3"}, - {file = "coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c"}, - {file = 
"coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396"}, - {file = "coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40"}, - {file = "coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594"}, - {file = "coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a"}, - {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b"}, - {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3"}, - {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0"}, - {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f"}, - {file = "coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431"}, - {file = "coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07"}, - {file = "coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260"}, - {file = "coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239"}, + {file = "coverage-7.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:32b75c2ba3f324ee37af3ccee5b30458038c50b349ad9b88cee85096132a575b"}, + {file = "coverage-7.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cb2a1b6ab9fe833714a483a915de350abc624a37149649297624c8d57add089c"}, + {file = "coverage-7.12.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5734b5d913c3755e72f70bf6cc37a0518d4f4745cde760c5d8e12005e62f9832"}, + {file = "coverage-7.12.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b527a08cdf15753279b7afb2339a12073620b761d79b81cbe2cdebdb43d90daa"}, + {file = "coverage-7.12.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9bb44c889fb68004e94cab71f6a021ec83eac9aeabdbb5a5a88821ec46e1da73"}, + {file = "coverage-7.12.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4b59b501455535e2e5dde5881739897967b272ba25988c89145c12d772810ccb"}, + {file = "coverage-7.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d8842f17095b9868a05837b7b1b73495293091bed870e099521ada176aa3e00e"}, + {file = "coverage-7.12.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c5a6f20bf48b8866095c6820641e7ffbe23f2ac84a2efc218d91235e404c7777"}, + {file = "coverage-7.12.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:5f3738279524e988d9da2893f307c2093815c623f8d05a8f79e3eff3a7a9e553"}, + {file = "coverage-7.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0d68c1f7eabbc8abe582d11fa393ea483caf4f44b0af86881174769f185c94d"}, + {file = "coverage-7.12.0-cp310-cp310-win32.whl", hash = 
"sha256:7670d860e18b1e3ee5930b17a7d55ae6287ec6e55d9799982aa103a2cc1fa2ef"}, + {file = "coverage-7.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:f999813dddeb2a56aab5841e687b68169da0d3f6fc78ccf50952fa2463746022"}, + {file = "coverage-7.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa124a3683d2af98bd9d9c2bfa7a5076ca7e5ab09fdb96b81fa7d89376ae928f"}, + {file = "coverage-7.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d93fbf446c31c0140208dcd07c5d882029832e8ed7891a39d6d44bd65f2316c3"}, + {file = "coverage-7.12.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:52ca620260bd8cd6027317bdd8b8ba929be1d741764ee765b42c4d79a408601e"}, + {file = "coverage-7.12.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f3433ffd541380f3a0e423cff0f4926d55b0cc8c1d160fdc3be24a4c03aa65f7"}, + {file = "coverage-7.12.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f7bbb321d4adc9f65e402c677cd1c8e4c2d0105d3ce285b51b4d87f1d5db5245"}, + {file = "coverage-7.12.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:22a7aade354a72dff3b59c577bfd18d6945c61f97393bc5fb7bd293a4237024b"}, + {file = "coverage-7.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3ff651dcd36d2fea66877cd4a82de478004c59b849945446acb5baf9379a1b64"}, + {file = "coverage-7.12.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:31b8b2e38391a56e3cea39d22a23faaa7c3fc911751756ef6d2621d2a9daf742"}, + {file = "coverage-7.12.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:297bc2da28440f5ae51c845a47c8175a4db0553a53827886e4fb25c66633000c"}, + {file = "coverage-7.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ff7651cc01a246908eac162a6a86fc0dbab6de1ad165dfb9a1e2ec660b44984"}, + {file = "coverage-7.12.0-cp311-cp311-win32.whl", hash = "sha256:313672140638b6ddb2c6455ddeda41c6a0b208298034544cfca138978c6baed6"}, + {file = "coverage-7.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a1783ed5bd0d5938d4435014626568dc7f93e3cb99bc59188cc18857c47aa3c4"}, + {file = "coverage-7.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:4648158fd8dd9381b5847622df1c90ff314efbfc1df4550092ab6013c238a5fc"}, + {file = "coverage-7.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:29644c928772c78512b48e14156b81255000dcfd4817574ff69def189bcb3647"}, + {file = "coverage-7.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8638cbb002eaa5d7c8d04da667813ce1067080b9a91099801a0053086e52b736"}, + {file = "coverage-7.12.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:083631eeff5eb9992c923e14b810a179798bb598e6a0dd60586819fc23be6e60"}, + {file = "coverage-7.12.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:99d5415c73ca12d558e07776bd957c4222c687b9f1d26fa0e1b57e3598bdcde8"}, + {file = "coverage-7.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e949ebf60c717c3df63adb4a1a366c096c8d7fd8472608cd09359e1bd48ef59f"}, + {file = "coverage-7.12.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d907ddccbca819afa2cd014bc69983b146cca2735a0b1e6259b2a6c10be1e70"}, + {file = "coverage-7.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b1518ecbad4e6173f4c6e6c4a46e49555ea5679bf3feda5edb1b935c7c44e8a0"}, + {file = "coverage-7.12.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:51777647a749abdf6f6fd8c7cffab12de68ab93aab15efc72fbbb83036c2a068"}, + {file = "coverage-7.12.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:42435d46d6461a3b305cdfcad7cdd3248787771f53fe18305548cba474e6523b"}, + {file = "coverage-7.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5bcead88c8423e1855e64b8057d0544e33e4080b95b240c2a355334bb7ced937"}, + {file = "coverage-7.12.0-cp312-cp312-win32.whl", hash = "sha256:dcbb630ab034e86d2a0f79aefd2be07e583202f41e037602d438c80044957baa"}, + {file = "coverage-7.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:2fd8354ed5d69775ac42986a691fbf68b4084278710cee9d7c3eaa0c28fa982a"}, + {file = "coverage-7.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:737c3814903be30695b2de20d22bcc5428fdae305c61ba44cdc8b3252984c49c"}, + {file = "coverage-7.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:47324fffca8d8eae7e185b5bb20c14645f23350f870c1649003618ea91a78941"}, + {file = "coverage-7.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ccf3b2ede91decd2fb53ec73c1f949c3e034129d1e0b07798ff1d02ea0c8fa4a"}, + {file = "coverage-7.12.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b365adc70a6936c6b0582dc38746b33b2454148c02349345412c6e743efb646d"}, + {file = "coverage-7.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bc13baf85cd8a4cfcf4a35c7bc9d795837ad809775f782f697bf630b7e200211"}, + {file = "coverage-7.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:099d11698385d572ceafb3288a5b80fe1fc58bf665b3f9d362389de488361d3d"}, + {file = "coverage-7.12.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:473dc45d69694069adb7680c405fb1e81f60b2aff42c81e2f2c3feaf544d878c"}, + {file = "coverage-7.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:583f9adbefd278e9de33c33d6846aa8f5d164fa49b47144180a0e037f0688bb9"}, + {file = "coverage-7.12.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2089cc445f2dc0af6f801f0d1355c025b76c24481935303cf1af28f636688f0"}, + {file = "coverage-7.12.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:950411f1eb5d579999c5f66c62a40961f126fc71e5e14419f004471957b51508"}, + {file = "coverage-7.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b1aab7302a87bafebfe76b12af681b56ff446dc6f32ed178ff9c092ca776e6bc"}, + {file = "coverage-7.12.0-cp313-cp313-win32.whl", hash = "sha256:d7e0d0303c13b54db495eb636bc2465b2fb8475d4c8bcec8fe4b5ca454dfbae8"}, + {file = "coverage-7.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:ce61969812d6a98a981d147d9ac583a36ac7db7766f2e64a9d4d059c2fe29d07"}, + {file = "coverage-7.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bcec6f47e4cb8a4c2dc91ce507f6eefc6a1b10f58df32cdc61dff65455031dfc"}, + {file = "coverage-7.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:459443346509476170d553035e4a3eed7b860f4fe5242f02de1010501956ce87"}, + {file = "coverage-7.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:04a79245ab2b7a61688958f7a855275997134bc84f4a03bc240cf64ff132abf6"}, + {file = "coverage-7.12.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:09a86acaaa8455f13d6a99221d9654df249b33937b4e212b4e5a822065f12aa7"}, + {file = "coverage-7.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:907e0df1b71ba77463687a74149c6122c3f6aac56c2510a5d906b2f368208560"}, + {file = 
"coverage-7.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9b57e2d0ddd5f0582bae5437c04ee71c46cd908e7bc5d4d0391f9a41e812dd12"}, + {file = "coverage-7.12.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:58c1c6aa677f3a1411fe6fb28ec3a942e4f665df036a3608816e0847fad23296"}, + {file = "coverage-7.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4c589361263ab2953e3c4cd2a94db94c4ad4a8e572776ecfbad2389c626e4507"}, + {file = "coverage-7.12.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:91b810a163ccad2e43b1faa11d70d3cf4b6f3d83f9fd5f2df82a32d47b648e0d"}, + {file = "coverage-7.12.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:40c867af715f22592e0d0fb533a33a71ec9e0f73a6945f722a0c85c8c1cbe3a2"}, + {file = "coverage-7.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:68b0d0a2d84f333de875666259dadf28cc67858bc8fd8b3f1eae84d3c2bec455"}, + {file = "coverage-7.12.0-cp313-cp313t-win32.whl", hash = "sha256:73f9e7fbd51a221818fd11b7090eaa835a353ddd59c236c57b2199486b116c6d"}, + {file = "coverage-7.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:24cff9d1f5743f67db7ba46ff284018a6e9aeb649b67aa1e70c396aa1b7cb23c"}, + {file = "coverage-7.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c87395744f5c77c866d0f5a43d97cc39e17c7f1cb0115e54a2fe67ca75c5d14d"}, + {file = "coverage-7.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a1c59b7dc169809a88b21a936eccf71c3895a78f5592051b1af8f4d59c2b4f92"}, + {file = "coverage-7.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8787b0f982e020adb732b9f051f3e49dd5054cebbc3f3432061278512a2b1360"}, + {file = "coverage-7.12.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5ea5a9f7dc8877455b13dd1effd3202e0bca72f6f3ab09f9036b1bcf728f69ac"}, + {file = "coverage-7.12.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fdba9f15849534594f60b47c9a30bc70409b54947319a7c4fd0e8e3d8d2f355d"}, + {file = "coverage-7.12.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a00594770eb715854fb1c57e0dea08cce6720cfbc531accdb9850d7c7770396c"}, + {file = "coverage-7.12.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5560c7e0d82b42eb1951e4f68f071f8017c824ebfd5a6ebe42c60ac16c6c2434"}, + {file = "coverage-7.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2e26b481c9159c2773a37947a9718cfdc58893029cdfb177531793e375cfc"}, + {file = "coverage-7.12.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6e1a8c066dabcde56d5d9fed6a66bc19a2883a3fe051f0c397a41fc42aedd4cc"}, + {file = "coverage-7.12.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f7ba9da4726e446d8dd8aae5a6cd872511184a5d861de80a86ef970b5dacce3e"}, + {file = "coverage-7.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e0f483ab4f749039894abaf80c2f9e7ed77bbf3c737517fb88c8e8e305896a17"}, + {file = "coverage-7.12.0-cp314-cp314-win32.whl", hash = "sha256:76336c19a9ef4a94b2f8dc79f8ac2da3f193f625bb5d6f51a328cd19bfc19933"}, + {file = "coverage-7.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:7c1059b600aec6ef090721f8f633f60ed70afaffe8ecab85b59df748f24b31fe"}, + {file = "coverage-7.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:172cf3a34bfef42611963e2b661302a8931f44df31629e5b1050567d6b90287d"}, + {file = "coverage-7.12.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = 
"sha256:aa7d48520a32cb21c7a9b31f81799e8eaec7239db36c3b670be0fa2403828d1d"}, + {file = "coverage-7.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:90d58ac63bc85e0fb919f14d09d6caa63f35a5512a2205284b7816cafd21bb03"}, + {file = "coverage-7.12.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca8ecfa283764fdda3eae1bdb6afe58bf78c2c3ec2b2edcb05a671f0bba7b3f9"}, + {file = "coverage-7.12.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:874fe69a0785d96bd066059cd4368022cebbec1a8958f224f0016979183916e6"}, + {file = "coverage-7.12.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b3c889c0b8b283a24d721a9eabc8ccafcfc3aebf167e4cd0d0e23bf8ec4e339"}, + {file = "coverage-7.12.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8bb5b894b3ec09dcd6d3743229dc7f2c42ef7787dc40596ae04c0edda487371e"}, + {file = "coverage-7.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:79a44421cd5fba96aa57b5e3b5a4d3274c449d4c622e8f76882d76635501fd13"}, + {file = "coverage-7.12.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:33baadc0efd5c7294f436a632566ccc1f72c867f82833eb59820ee37dc811c6f"}, + {file = "coverage-7.12.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:c406a71f544800ef7e9e0000af706b88465f3573ae8b8de37e5f96c59f689ad1"}, + {file = "coverage-7.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e71bba6a40883b00c6d571599b4627f50c360b3d0d02bfc658168936be74027b"}, + {file = "coverage-7.12.0-cp314-cp314t-win32.whl", hash = "sha256:9157a5e233c40ce6613dead4c131a006adfda70e557b6856b97aceed01b0e27a"}, + {file = "coverage-7.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:e84da3a0fd233aeec797b981c51af1cabac74f9bd67be42458365b30d11b5291"}, + {file = "coverage-7.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:01d24af36fedda51c2b1aca56e4330a3710f83b02a5ff3743a6b015ffa7c9384"}, + {file = "coverage-7.12.0-py3-none-any.whl", hash = "sha256:159d50c0b12e060b15ed3d39f87ed43d4f7f7ad40b8a534f4dd331adbb51104a"}, + {file = "coverage-7.12.0.tar.gz", hash = "sha256:fc11e0a4e372cb5f282f16ef90d4a585034050ccda536451901abfb19a57f40c"}, ] [package.dependencies] @@ -527,15 +549,15 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.3.0" +version = "1.3.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["dev"] markers = "python_version == \"3.10\"" files = [ - {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, - {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, + {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, + {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, ] [package.dependencies] @@ -546,14 +568,14 @@ test = ["pytest (>=6)"] [[package]] name = "execnet" -version = "2.1.1" +version = "2.1.2" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, - {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, 
+ {file = "execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec"}, + {file = "execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd"}, ] [package.extras] @@ -590,84 +612,76 @@ files = [ [[package]] name = "fonttools" -version = "4.60.1" +version = "4.61.0" description = "Tools to manipulate font files" optional = true -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["main"] markers = "extra == \"plot\"" files = [ - {file = "fonttools-4.60.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9a52f254ce051e196b8fe2af4634c2d2f02c981756c6464dc192f1b6050b4e28"}, - {file = "fonttools-4.60.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7420a2696a44650120cdd269a5d2e56a477e2bfa9d95e86229059beb1c19e15"}, - {file = "fonttools-4.60.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee0c0b3b35b34f782afc673d503167157094a16f442ace7c6c5e0ca80b08f50c"}, - {file = "fonttools-4.60.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:282dafa55f9659e8999110bd8ed422ebe1c8aecd0dc396550b038e6c9a08b8ea"}, - {file = "fonttools-4.60.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4ba4bd646e86de16160f0fb72e31c3b9b7d0721c3e5b26b9fa2fc931dfdb2652"}, - {file = "fonttools-4.60.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0b0835ed15dd5b40d726bb61c846a688f5b4ce2208ec68779bc81860adb5851a"}, - {file = "fonttools-4.60.1-cp310-cp310-win32.whl", hash = "sha256:1525796c3ffe27bb6268ed2a1bb0dcf214d561dfaf04728abf01489eb5339dce"}, - {file = "fonttools-4.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:268ecda8ca6cb5c4f044b1fb9b3b376e8cd1b361cef275082429dc4174907038"}, - {file = "fonttools-4.60.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7b4c32e232a71f63a5d00259ca3d88345ce2a43295bb049d21061f338124246f"}, - {file = "fonttools-4.60.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3630e86c484263eaac71d117085d509cbcf7b18f677906824e4bace598fb70d2"}, - {file = "fonttools-4.60.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5c1015318e4fec75dd4943ad5f6a206d9727adf97410d58b7e32ab644a807914"}, - {file = "fonttools-4.60.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e6c58beb17380f7c2ea181ea11e7db8c0ceb474c9dd45f48e71e2cb577d146a1"}, - {file = "fonttools-4.60.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec3681a0cb34c255d76dd9d865a55f260164adb9fa02628415cdc2d43ee2c05d"}, - {file = "fonttools-4.60.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f4b5c37a5f40e4d733d3bbaaef082149bee5a5ea3156a785ff64d949bd1353fa"}, - {file = "fonttools-4.60.1-cp311-cp311-win32.whl", hash = "sha256:398447f3d8c0c786cbf1209711e79080a40761eb44b27cdafffb48f52bcec258"}, - {file = "fonttools-4.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:d066ea419f719ed87bc2c99a4a4bfd77c2e5949cb724588b9dd58f3fd90b92bf"}, - {file = "fonttools-4.60.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:7b0c6d57ab00dae9529f3faf187f2254ea0aa1e04215cf2f1a8ec277c96661bc"}, - {file = "fonttools-4.60.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:839565cbf14645952d933853e8ade66a463684ed6ed6c9345d0faf1f0e868877"}, - {file = "fonttools-4.60.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8177ec9676ea6e1793c8a084a90b65a9f778771998eb919d05db6d4b1c0b114c"}, - {file = 
"fonttools-4.60.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:996a4d1834524adbb423385d5a629b868ef9d774670856c63c9a0408a3063401"}, - {file = "fonttools-4.60.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a46b2f450bc79e06ef3b6394f0c68660529ed51692606ad7f953fc2e448bc903"}, - {file = "fonttools-4.60.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6ec722ee589e89a89f5b7574f5c45604030aa6ae24cb2c751e2707193b466fed"}, - {file = "fonttools-4.60.1-cp312-cp312-win32.whl", hash = "sha256:b2cf105cee600d2de04ca3cfa1f74f1127f8455b71dbad02b9da6ec266e116d6"}, - {file = "fonttools-4.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:992775c9fbe2cf794786fa0ffca7f09f564ba3499b8fe9f2f80bd7197db60383"}, - {file = "fonttools-4.60.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6f68576bb4bbf6060c7ab047b1574a1ebe5c50a17de62830079967b211059ebb"}, - {file = "fonttools-4.60.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:eedacb5c5d22b7097482fa834bda0dafa3d914a4e829ec83cdea2a01f8c813c4"}, - {file = "fonttools-4.60.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b33a7884fabd72bdf5f910d0cf46be50dce86a0362a65cfc746a4168c67eb96c"}, - {file = "fonttools-4.60.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2409d5fb7b55fd70f715e6d34e7a6e4f7511b8ad29a49d6df225ee76da76dd77"}, - {file = "fonttools-4.60.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c8651e0d4b3bdeda6602b85fdc2abbefc1b41e573ecb37b6779c4ca50753a199"}, - {file = "fonttools-4.60.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:145daa14bf24824b677b9357c5e44fd8895c2a8f53596e1b9ea3496081dc692c"}, - {file = "fonttools-4.60.1-cp313-cp313-win32.whl", hash = "sha256:2299df884c11162617a66b7c316957d74a18e3758c0274762d2cc87df7bc0272"}, - {file = "fonttools-4.60.1-cp313-cp313-win_amd64.whl", hash = "sha256:a3db56f153bd4c5c2b619ab02c5db5192e222150ce5a1bc10f16164714bc39ac"}, - {file = "fonttools-4.60.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:a884aef09d45ba1206712c7dbda5829562d3fea7726935d3289d343232ecb0d3"}, - {file = "fonttools-4.60.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8a44788d9d91df72d1a5eac49b31aeb887a5f4aab761b4cffc4196c74907ea85"}, - {file = "fonttools-4.60.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e852d9dda9f93ad3651ae1e3bb770eac544ec93c3807888798eccddf84596537"}, - {file = "fonttools-4.60.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:154cb6ee417e417bf5f7c42fe25858c9140c26f647c7347c06f0cc2d47eff003"}, - {file = "fonttools-4.60.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5664fd1a9ea7f244487ac8f10340c4e37664675e8667d6fee420766e0fb3cf08"}, - {file = "fonttools-4.60.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:583b7f8e3c49486e4d489ad1deacfb8d5be54a8ef34d6df824f6a171f8511d99"}, - {file = "fonttools-4.60.1-cp314-cp314-win32.whl", hash = "sha256:66929e2ea2810c6533a5184f938502cfdaea4bc3efb7130d8cc02e1c1b4108d6"}, - {file = "fonttools-4.60.1-cp314-cp314-win_amd64.whl", hash = "sha256:f3d5be054c461d6a2268831f04091dc82753176f6ea06dc6047a5e168265a987"}, - {file = "fonttools-4.60.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:b6379e7546ba4ae4b18f8ae2b9bc5960936007a1c0e30b342f662577e8bc3299"}, - {file = "fonttools-4.60.1-cp314-cp314t-macosx_10_13_x86_64.whl", 
hash = "sha256:9d0ced62b59e0430b3690dbc5373df1c2aa7585e9a8ce38eff87f0fd993c5b01"}, - {file = "fonttools-4.60.1-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:875cb7764708b3132637f6c5fb385b16eeba0f7ac9fa45a69d35e09b47045801"}, - {file = "fonttools-4.60.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a184b2ea57b13680ab6d5fbde99ccef152c95c06746cb7718c583abd8f945ccc"}, - {file = "fonttools-4.60.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:026290e4ec76583881763fac284aca67365e0be9f13a7fb137257096114cb3bc"}, - {file = "fonttools-4.60.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f0e8817c7d1a0c2eedebf57ef9a9896f3ea23324769a9a2061a80fe8852705ed"}, - {file = "fonttools-4.60.1-cp314-cp314t-win32.whl", hash = "sha256:1410155d0e764a4615774e5c2c6fc516259fe3eca5882f034eb9bfdbee056259"}, - {file = "fonttools-4.60.1-cp314-cp314t-win_amd64.whl", hash = "sha256:022beaea4b73a70295b688f817ddc24ed3e3418b5036ffcd5658141184ef0d0c"}, - {file = "fonttools-4.60.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:122e1a8ada290423c493491d002f622b1992b1ab0b488c68e31c413390dc7eb2"}, - {file = "fonttools-4.60.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a140761c4ff63d0cb9256ac752f230460ee225ccef4ad8f68affc723c88e2036"}, - {file = "fonttools-4.60.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0eae96373e4b7c9e45d099d7a523444e3554360927225c1cdae221a58a45b856"}, - {file = "fonttools-4.60.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:596ecaca36367027d525b3b426d8a8208169d09edcf8c7506aceb3a38bfb55c7"}, - {file = "fonttools-4.60.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2ee06fc57512144d8b0445194c2da9f190f61ad51e230f14836286470c99f854"}, - {file = "fonttools-4.60.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b42d86938e8dda1cd9a1a87a6d82f1818eaf933348429653559a458d027446da"}, - {file = "fonttools-4.60.1-cp39-cp39-win32.whl", hash = "sha256:8b4eb332f9501cb1cd3d4d099374a1e1306783ff95489a1026bde9eb02ccc34a"}, - {file = "fonttools-4.60.1-cp39-cp39-win_amd64.whl", hash = "sha256:7473a8ed9ed09aeaa191301244a5a9dbe46fe0bf54f9d6cd21d83044c3321217"}, - {file = "fonttools-4.60.1-py3-none-any.whl", hash = "sha256:906306ac7afe2156fcf0042173d6ebbb05416af70f6b370967b47f8f00103bbb"}, - {file = "fonttools-4.60.1.tar.gz", hash = "sha256:ef00af0439ebfee806b25f24c8f92109157ff3fac5731dc7867957812e87b8d9"}, + {file = "fonttools-4.61.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dc25a4a9c1225653e4431a9413d0381b1c62317b0f543bdcec24e1991f612f33"}, + {file = "fonttools-4.61.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b493c32d2555e9944ec1b911ea649ff8f01a649ad9cba6c118d6798e932b3f0"}, + {file = "fonttools-4.61.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad751319dc532a79bdf628b8439af167181b4210a0cd28a8935ca615d9fdd727"}, + {file = "fonttools-4.61.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2de14557d113faa5fb519f7f29c3abe4d69c17fe6a5a2595cc8cda7338029219"}, + {file = "fonttools-4.61.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:59587bbe455dbdf75354a9dbca1697a35a8903e01fab4248d6b98a17032cee52"}, + {file = "fonttools-4.61.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:46cb3d9279f758ac0cf671dc3482da877104b65682679f01b246515db03dbb72"}, + {file = 
"fonttools-4.61.0-cp310-cp310-win32.whl", hash = "sha256:58b4f1b78dfbfe855bb8a6801b31b8cdcca0e2847ec769ad8e0b0b692832dd3b"}, + {file = "fonttools-4.61.0-cp310-cp310-win_amd64.whl", hash = "sha256:68704a8bbe0b61976262b255e90cde593dc0fe3676542d9b4d846bad2a890a76"}, + {file = "fonttools-4.61.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a32a16951cbf113d38f1dd8551b277b6e06e0f6f776fece0f99f746d739e1be3"}, + {file = "fonttools-4.61.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:328a9c227984bebaf69f3ac9062265f8f6acc7ddf2e4e344c63358579af0aa3d"}, + {file = "fonttools-4.61.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f0bafc8a3b3749c69cc610e5aa3da832d39c2a37a68f03d18ec9a02ecaac04a"}, + {file = "fonttools-4.61.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b5ca59b7417d149cf24e4c1933c9f44b2957424fc03536f132346d5242e0ebe5"}, + {file = "fonttools-4.61.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:df8cbce85cf482eb01f4551edca978c719f099c623277bda8332e5dbe7dba09d"}, + {file = "fonttools-4.61.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7fb5b84f48a6a733ca3d7f41aa9551908ccabe8669ffe79586560abcc00a9cfd"}, + {file = "fonttools-4.61.0-cp311-cp311-win32.whl", hash = "sha256:787ef9dfd1ea9fe49573c272412ae5f479d78e671981819538143bec65863865"}, + {file = "fonttools-4.61.0-cp311-cp311-win_amd64.whl", hash = "sha256:14fafda386377b6131d9e448af42d0926bad47e038de0e5ba1d58c25d621f028"}, + {file = "fonttools-4.61.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e24a1565c4e57111ec7f4915f8981ecbb61adf66a55f378fdc00e206059fcfef"}, + {file = "fonttools-4.61.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2bfacb5351303cae9f072ccf3fc6ecb437a6f359c0606bae4b1ab6715201d87"}, + {file = "fonttools-4.61.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0bdcf2e29d65c26299cc3d502f4612365e8b90a939f46cd92d037b6cb7bb544a"}, + {file = "fonttools-4.61.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e6cd0d9051b8ddaf7385f99dd82ec2a058e2b46cf1f1961e68e1ff20fcbb61af"}, + {file = "fonttools-4.61.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e074bc07c31406f45c418e17c1722e83560f181d122c412fa9e815df0ff74810"}, + {file = "fonttools-4.61.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a9b78da5d5faa17e63b2404b77feeae105c1b7e75f26020ab7a27b76e02039f"}, + {file = "fonttools-4.61.0-cp312-cp312-win32.whl", hash = "sha256:9821ed77bb676736b88fa87a737c97b6af06e8109667e625a4f00158540ce044"}, + {file = "fonttools-4.61.0-cp312-cp312-win_amd64.whl", hash = "sha256:0011d640afa61053bc6590f9a3394bd222de7cfde19346588beabac374e9d8ac"}, + {file = "fonttools-4.61.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba774b8cbd8754f54b8eb58124e8bd45f736b2743325ab1a5229698942b9b433"}, + {file = "fonttools-4.61.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c84b430616ed73ce46e9cafd0bf0800e366a3e02fb7e1ad7c1e214dbe3862b1f"}, + {file = "fonttools-4.61.0-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b2b734d8391afe3c682320840c8191de9bd24e7eb85768dd4dc06ed1b63dbb1b"}, + {file = "fonttools-4.61.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a5c5fff72bf31b0e558ed085e4fd7ed96eb85881404ecc39ed2a779e7cf724eb"}, + {file = 
"fonttools-4.61.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:14a290c5c93fcab76b7f451e6a4b7721b712d90b3b5ed6908f1abcf794e90d6d"}, + {file = "fonttools-4.61.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:13e3e20a5463bfeb77b3557d04b30bd6a96a6bb5c15c7b2e7908903e69d437a0"}, + {file = "fonttools-4.61.0-cp313-cp313-win32.whl", hash = "sha256:6781e7a4bb010be1cd69a29927b0305c86b843395f2613bdabe115f7d6ea7f34"}, + {file = "fonttools-4.61.0-cp313-cp313-win_amd64.whl", hash = "sha256:c53b47834ae41e8e4829171cc44fec0fdf125545a15f6da41776b926b9645a9a"}, + {file = "fonttools-4.61.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:96dfc9bc1f2302224e48e6ee37e656eddbab810b724b52e9d9c13a57a6abad01"}, + {file = "fonttools-4.61.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3b2065d94e5d63aafc2591c8b6ccbdb511001d9619f1bca8ad39b745ebeb5efa"}, + {file = "fonttools-4.61.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e0d87e81e4d869549585ba0beb3f033718501c1095004f5e6aef598d13ebc216"}, + {file = "fonttools-4.61.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cfa2eb9bae650e58f0e8ad53c49d19a844d6034d6b259f30f197238abc1ccee"}, + {file = "fonttools-4.61.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4238120002e68296d55e091411c09eab94e111c8ce64716d17df53fd0eb3bb3d"}, + {file = "fonttools-4.61.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b6ceac262cc62bec01b3bb59abccf41b24ef6580869e306a4e88b7e56bb4bdda"}, + {file = "fonttools-4.61.0-cp314-cp314-win32.whl", hash = "sha256:adbb4ecee1a779469a77377bbe490565effe8fce6fb2e6f95f064de58f8bac85"}, + {file = "fonttools-4.61.0-cp314-cp314-win_amd64.whl", hash = "sha256:02bdf8e04d1a70476564b8640380f04bb4ac74edc1fc71f1bacb840b3e398ee9"}, + {file = "fonttools-4.61.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:627216062d90ab0d98215176d8b9562c4dd5b61271d35f130bcd30f6a8aaa33a"}, + {file = "fonttools-4.61.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:7b446623c9cd5f14a59493818eaa80255eec2468c27d2c01b56e05357c263195"}, + {file = "fonttools-4.61.0-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:70e2a0c0182ee75e493ef33061bfebf140ea57e035481d2f95aa03b66c7a0e05"}, + {file = "fonttools-4.61.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9064b0f55b947e929ac669af5311ab1f26f750214db6dd9a0c97e091e918f486"}, + {file = "fonttools-4.61.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2cb5e45a824ce14b90510024d0d39dae51bd4fbb54c42a9334ea8c8cf4d95cbe"}, + {file = "fonttools-4.61.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6e5ca8c62efdec7972dfdfd454415c4db49b89aeaefaaacada432f3b7eea9866"}, + {file = "fonttools-4.61.0-cp314-cp314t-win32.whl", hash = "sha256:63c7125d31abe3e61d7bb917329b5543c5b3448db95f24081a13aaf064360fc8"}, + {file = "fonttools-4.61.0-cp314-cp314t-win_amd64.whl", hash = "sha256:67d841aa272be5500de7f447c40d1d8452783af33b4c3599899319f6ef9ad3c1"}, + {file = "fonttools-4.61.0-py3-none-any.whl", hash = "sha256:276f14c560e6f98d24ef7f5f44438e55ff5a67f78fa85236b218462c9f5d0635"}, + {file = "fonttools-4.61.0.tar.gz", hash = "sha256:ec520a1f0c7758d7a858a00f090c1745f6cde6a7c5e76fb70ea4044a15f712e7"}, ] [package.extras] -all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != 
\"CPython\"", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"] +all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.45.0)", "unicodedata2 (>=17.0.0) ; python_version <= \"3.14\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"] graphite = ["lz4 (>=1.7.4.2)"] interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""] lxml = ["lxml (>=4.0)"] pathops = ["skia-pathops (>=0.5.0)"] plot = ["matplotlib"] -repacker = ["uharfbuzz (>=0.23.0)"] +repacker = ["uharfbuzz (>=0.45.0)"] symfont = ["sympy"] type1 = ["xattr ; sys_platform == \"darwin\""] -unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""] +unicode = ["unicodedata2 (>=17.0.0) ; python_version <= \"3.14\""] woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"] [[package]] @@ -752,14 +766,14 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve [[package]] name = "idna" -version = "3.10" +version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, ] [package.extras] @@ -767,14 +781,14 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 [[package]] name = "iniconfig" -version = "2.1.0" +version = "2.3.0" description = "brain-dead simple config-ini parsing" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["dev"] files = [ - {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, - {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, ] [[package]] @@ -1186,21 +1200,21 @@ files = [ [[package]] name = "narwhals" -version = "2.7.0" +version = "2.12.0" description = "Extremely lightweight compatibility layer between dataframe libraries" optional = true python-versions = ">=3.9" groups = ["main"] markers = "extra == \"plot\"" files = [ - {file = "narwhals-2.7.0-py3-none-any.whl", 
hash = "sha256:010791aa0cee86d90bf2b658264aaec3eeea34fb4ddf2e83746ea4940bcffae3"}, - {file = "narwhals-2.7.0.tar.gz", hash = "sha256:e3fff7f1610fd3318ede78c969bc5954ce710d585eefdb689586fb69da3da43c"}, + {file = "narwhals-2.12.0-py3-none-any.whl", hash = "sha256:baeba5d448a30b04c299a696bd9ee5ff73e4742143e06c49ca316b46539a7cbb"}, + {file = "narwhals-2.12.0.tar.gz", hash = "sha256:075b6d56f3a222613793e025744b129439ecdff9292ea6615dd983af7ba6ea44"}, ] [package.extras] cudf = ["cudf (>=24.10.0)"] dask = ["dask[dataframe] (>=2024.8)"] -duckdb = ["duckdb (>=1.0)"] +duckdb = ["duckdb (>=1.1)"] ibis = ["ibis-framework (>=6.0.0)", "packaging", "pyarrow-hotfix", "rich"] modin = ["modin"] pandas = ["pandas (>=1.1.3)"] @@ -1278,87 +1292,87 @@ files = [ [[package]] name = "numpy" -version = "2.3.3" +version = "2.3.5" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.11" groups = ["main"] markers = "python_version >= \"3.11\"" files = [ - {file = "numpy-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ffc4f5caba7dfcbe944ed674b7eef683c7e94874046454bb79ed7ee0236f59d"}, - {file = "numpy-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7e946c7170858a0295f79a60214424caac2ffdb0063d4d79cb681f9aa0aa569"}, - {file = "numpy-2.3.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:cd4260f64bc794c3390a63bf0728220dd1a68170c169088a1e0dfa2fde1be12f"}, - {file = "numpy-2.3.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f0ddb4b96a87b6728df9362135e764eac3cfa674499943ebc44ce96c478ab125"}, - {file = "numpy-2.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afd07d377f478344ec6ca2b8d4ca08ae8bd44706763d1efb56397de606393f48"}, - {file = "numpy-2.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bc92a5dedcc53857249ca51ef29f5e5f2f8c513e22cfb90faeb20343b8c6f7a6"}, - {file = "numpy-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7af05ed4dc19f308e1d9fc759f36f21921eb7bbfc82843eeec6b2a2863a0aefa"}, - {file = "numpy-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:433bf137e338677cebdd5beac0199ac84712ad9d630b74eceeb759eaa45ddf30"}, - {file = "numpy-2.3.3-cp311-cp311-win32.whl", hash = "sha256:eb63d443d7b4ffd1e873f8155260d7f58e7e4b095961b01c91062935c2491e57"}, - {file = "numpy-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:ec9d249840f6a565f58d8f913bccac2444235025bbb13e9a4681783572ee3caa"}, - {file = "numpy-2.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:74c2a948d02f88c11a3c075d9733f1ae67d97c6bdb97f2bb542f980458b257e7"}, - {file = "numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf"}, - {file = "numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25"}, - {file = "numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe"}, - {file = "numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b"}, - {file = "numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8"}, - {file = "numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20"}, - {file = 
"numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea"}, - {file = "numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7"}, - {file = "numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf"}, - {file = "numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb"}, - {file = "numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5"}, - {file = "numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf"}, - {file = "numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7"}, - {file = "numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6"}, - {file = "numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7"}, - {file = "numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c"}, - {file = "numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93"}, - {file = "numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae"}, - {file = "numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86"}, - {file = "numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8"}, - {file = "numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf"}, - {file = "numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5"}, - {file = "numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc"}, - {file = "numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc"}, - {file = "numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b"}, - {file = "numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19"}, - {file = "numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30"}, - {file = "numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e"}, - {file = "numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3"}, - {file = "numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea"}, - {file = "numpy-2.3.3-cp313-cp313t-win32.whl", hash = 
"sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd"}, - {file = "numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d"}, - {file = "numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1"}, - {file = "numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593"}, - {file = "numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652"}, - {file = "numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7"}, - {file = "numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a"}, - {file = "numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe"}, - {file = "numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421"}, - {file = "numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021"}, - {file = "numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf"}, - {file = "numpy-2.3.3-cp314-cp314-win32.whl", hash = "sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0"}, - {file = "numpy-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8"}, - {file = "numpy-2.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe"}, - {file = "numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00"}, - {file = "numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a"}, - {file = "numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d"}, - {file = "numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a"}, - {file = "numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54"}, - {file = "numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e"}, - {file = "numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097"}, - {file = "numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970"}, - {file = "numpy-2.3.3-cp314-cp314t-win32.whl", hash = "sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5"}, - {file = "numpy-2.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f"}, - {file = "numpy-2.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b"}, - 
{file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1e02c7159791cd481e1e6d5ddd766b62a4d5acf8df4d4d1afe35ee9c5c33a41e"}, - {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:dca2d0fc80b3893ae72197b39f69d55a3cd8b17ea1b50aa4c62de82419936150"}, - {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:99683cbe0658f8271b333a1b1b4bb3173750ad59c0c61f5bbdc5b318918fffe3"}, - {file = "numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d9d537a39cc9de668e5cd0e25affb17aec17b577c6b3ae8a3d866b479fbe88d0"}, - {file = "numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8596ba2f8af5f93b01d97563832686d20206d303024777f6dfc2e7c7c3f1850e"}, - {file = "numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1ec5615b05369925bd1125f27df33f3b6c8bc10d788d5999ecd8769a1fa04db"}, - {file = "numpy-2.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2e267c7da5bf7309670523896df97f93f6e469fb931161f483cd6882b3b1a5dc"}, - {file = "numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029"}, + {file = "numpy-2.3.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de5672f4a7b200c15a4127042170a694d4df43c992948f5e1af57f0174beed10"}, + {file = "numpy-2.3.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:acfd89508504a19ed06ef963ad544ec6664518c863436306153e13e94605c218"}, + {file = "numpy-2.3.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:ffe22d2b05504f786c867c8395de703937f934272eb67586817b46188b4ded6d"}, + {file = "numpy-2.3.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:872a5cf366aec6bb1147336480fef14c9164b154aeb6542327de4970282cd2f5"}, + {file = "numpy-2.3.5-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3095bdb8dd297e5920b010e96134ed91d852d81d490e787beca7e35ae1d89cf7"}, + {file = "numpy-2.3.5-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8cba086a43d54ca804ce711b2a940b16e452807acebe7852ff327f1ecd49b0d4"}, + {file = "numpy-2.3.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6cf9b429b21df6b99f4dee7a1218b8b7ffbbe7df8764dc0bd60ce8a0708fed1e"}, + {file = "numpy-2.3.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:396084a36abdb603546b119d96528c2f6263921c50df3c8fd7cb28873a237748"}, + {file = "numpy-2.3.5-cp311-cp311-win32.whl", hash = "sha256:b0c7088a73aef3d687c4deef8452a3ac7c1be4e29ed8bf3b366c8111128ac60c"}, + {file = "numpy-2.3.5-cp311-cp311-win_amd64.whl", hash = "sha256:a414504bef8945eae5f2d7cb7be2d4af77c5d1cb5e20b296c2c25b61dff2900c"}, + {file = "numpy-2.3.5-cp311-cp311-win_arm64.whl", hash = "sha256:0cd00b7b36e35398fa2d16af7b907b65304ef8bb4817a550e06e5012929830fa"}, + {file = "numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e"}, + {file = "numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769"}, + {file = "numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5"}, + {file = "numpy-2.3.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3101e5177d114a593d79dd79658650fe28b5a0d8abeb8ce6f437c0e6df5be1a4"}, + {file = "numpy-2.3.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b973c57ff8e184109db042c842423ff4f60446239bd585a5131cc47f06f789d"}, + {file = 
"numpy-2.3.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d8163f43acde9a73c2a33605353a4f1bc4798745a8b1d73183b28e5b435ae28"}, + {file = "numpy-2.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51c1e14eb1e154ebd80e860722f9e6ed6ec89714ad2db2d3aa33c31d7c12179b"}, + {file = "numpy-2.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b46b4ec24f7293f23adcd2d146960559aaf8020213de8ad1909dba6c013bf89c"}, + {file = "numpy-2.3.5-cp312-cp312-win32.whl", hash = "sha256:3997b5b3c9a771e157f9aae01dd579ee35ad7109be18db0e85dbdbe1de06e952"}, + {file = "numpy-2.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:86945f2ee6d10cdfd67bcb4069c1662dd711f7e2a4343db5cecec06b87cf31aa"}, + {file = "numpy-2.3.5-cp312-cp312-win_arm64.whl", hash = "sha256:f28620fe26bee16243be2b7b874da327312240a7cdc38b769a697578d2100013"}, + {file = "numpy-2.3.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0f23b44f57077c1ede8c5f26b30f706498b4862d3ff0a7298b8411dd2f043ff"}, + {file = "numpy-2.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa5bc7c5d59d831d9773d1170acac7893ce3a5e130540605770ade83280e7188"}, + {file = "numpy-2.3.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:ccc933afd4d20aad3c00bcef049cb40049f7f196e0397f1109dba6fed63267b0"}, + {file = "numpy-2.3.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:afaffc4393205524af9dfa400fa250143a6c3bc646c08c9f5e25a9f4b4d6a903"}, + {file = "numpy-2.3.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c75442b2209b8470d6d5d8b1c25714270686f14c749028d2199c54e29f20b4d"}, + {file = "numpy-2.3.5-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e06aa0af8c0f05104d56450d6093ee639e15f24ecf62d417329d06e522e017"}, + {file = "numpy-2.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed89927b86296067b4f81f108a2271d8926467a8868e554eaf370fc27fa3ccaf"}, + {file = "numpy-2.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51c55fe3451421f3a6ef9a9c1439e82101c57a2c9eab9feb196a62b1a10b58ce"}, + {file = "numpy-2.3.5-cp313-cp313-win32.whl", hash = "sha256:1978155dd49972084bd6ef388d66ab70f0c323ddee6f693d539376498720fb7e"}, + {file = "numpy-2.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:00dc4e846108a382c5869e77c6ed514394bdeb3403461d25a829711041217d5b"}, + {file = "numpy-2.3.5-cp313-cp313-win_arm64.whl", hash = "sha256:0472f11f6ec23a74a906a00b48a4dcf3849209696dff7c189714511268d103ae"}, + {file = "numpy-2.3.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:414802f3b97f3c1eef41e530aaba3b3c1620649871d8cb38c6eaff034c2e16bd"}, + {file = "numpy-2.3.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5ee6609ac3604fa7780e30a03e5e241a7956f8e2fcfe547d51e3afa5247ac47f"}, + {file = "numpy-2.3.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:86d835afea1eaa143012a2d7a3f45a3adce2d7adc8b4961f0b362214d800846a"}, + {file = "numpy-2.3.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:30bc11310e8153ca664b14c5f1b73e94bd0503681fcf136a163de856f3a50139"}, + {file = "numpy-2.3.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1062fde1dcf469571705945b0f221b73928f34a20c904ffb45db101907c3454e"}, + {file = "numpy-2.3.5-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce581db493ea1a96c0556360ede6607496e8bf9b3a8efa66e06477267bc831e9"}, + {file = "numpy-2.3.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:cc8920d2ec5fa99875b670bb86ddeb21e295cb07aa331810d9e486e0b969d946"}, + {file = 
"numpy-2.3.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9ee2197ef8c4f0dfe405d835f3b6a14f5fee7782b5de51ba06fb65fc9b36e9f1"}, + {file = "numpy-2.3.5-cp313-cp313t-win32.whl", hash = "sha256:70b37199913c1bd300ff6e2693316c6f869c7ee16378faf10e4f5e3275b299c3"}, + {file = "numpy-2.3.5-cp313-cp313t-win_amd64.whl", hash = "sha256:b501b5fa195cc9e24fe102f21ec0a44dffc231d2af79950b451e0d99cea02234"}, + {file = "numpy-2.3.5-cp313-cp313t-win_arm64.whl", hash = "sha256:a80afd79f45f3c4a7d341f13acbe058d1ca8ac017c165d3fa0d3de6bc1a079d7"}, + {file = "numpy-2.3.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:bf06bc2af43fa8d32d30fae16ad965663e966b1a3202ed407b84c989c3221e82"}, + {file = "numpy-2.3.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:052e8c42e0c49d2575621c158934920524f6c5da05a1d3b9bab5d8e259e045f0"}, + {file = "numpy-2.3.5-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:1ed1ec893cff7040a02c8aa1c8611b94d395590d553f6b53629a4461dc7f7b63"}, + {file = "numpy-2.3.5-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:2dcd0808a421a482a080f89859a18beb0b3d1e905b81e617a188bd80422d62e9"}, + {file = "numpy-2.3.5-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:727fd05b57df37dc0bcf1a27767a3d9a78cbbc92822445f32cc3436ba797337b"}, + {file = "numpy-2.3.5-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fffe29a1ef00883599d1dc2c51aa2e5d80afe49523c261a74933df395c15c520"}, + {file = "numpy-2.3.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f7f0e05112916223d3f438f293abf0727e1181b5983f413dfa2fefc4098245c"}, + {file = "numpy-2.3.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2e2eb32ddb9ccb817d620ac1d8dae7c3f641c1e5f55f531a33e8ab97960a75b8"}, + {file = "numpy-2.3.5-cp314-cp314-win32.whl", hash = "sha256:66f85ce62c70b843bab1fb14a05d5737741e74e28c7b8b5a064de10142fad248"}, + {file = "numpy-2.3.5-cp314-cp314-win_amd64.whl", hash = "sha256:e6a0bc88393d65807d751a614207b7129a310ca4fe76a74e5c7da5fa5671417e"}, + {file = "numpy-2.3.5-cp314-cp314-win_arm64.whl", hash = "sha256:aeffcab3d4b43712bb7a60b65f6044d444e75e563ff6180af8f98dd4b905dfd2"}, + {file = "numpy-2.3.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:17531366a2e3a9e30762c000f2c43a9aaa05728712e25c11ce1dbe700c53ad41"}, + {file = "numpy-2.3.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d21644de1b609825ede2f48be98dfde4656aefc713654eeee280e37cadc4e0ad"}, + {file = "numpy-2.3.5-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:c804e3a5aba5460c73955c955bdbd5c08c354954e9270a2c1565f62e866bdc39"}, + {file = "numpy-2.3.5-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:cc0a57f895b96ec78969c34f682c602bf8da1a0270b09bc65673df2e7638ec20"}, + {file = "numpy-2.3.5-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:900218e456384ea676e24ea6a0417f030a3b07306d29d7ad843957b40a9d8d52"}, + {file = "numpy-2.3.5-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09a1bea522b25109bf8e6f3027bd810f7c1085c64a0c7ce050c1676ad0ba010b"}, + {file = "numpy-2.3.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04822c00b5fd0323c8166d66c701dc31b7fbd252c100acd708c48f763968d6a3"}, + {file = "numpy-2.3.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d6889ec4ec662a1a37eb4b4fb26b6100841804dac55bd9df579e326cdc146227"}, + {file = "numpy-2.3.5-cp314-cp314t-win32.whl", hash = "sha256:93eebbcf1aafdf7e2ddd44c2923e2672e1010bddc014138b229e49725b4d6be5"}, + {file = "numpy-2.3.5-cp314-cp314t-win_amd64.whl", hash = 
"sha256:c8a9958e88b65c3b27e22ca2a076311636850b612d6bbfb76e8d156aacde2aaf"}, + {file = "numpy-2.3.5-cp314-cp314t-win_arm64.whl", hash = "sha256:6203fdf9f3dc5bdaed7319ad8698e685c7a3be10819f41d32a0723e611733b42"}, + {file = "numpy-2.3.5-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f0963b55cdd70fad460fa4c1341f12f976bb26cb66021a5580329bd498988310"}, + {file = "numpy-2.3.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f4255143f5160d0de972d28c8f9665d882b5f61309d8362fdd3e103cf7bf010c"}, + {file = "numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:a4b9159734b326535f4dd01d947f919c6eefd2d9827466a696c44ced82dfbc18"}, + {file = "numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2feae0d2c91d46e59fcd62784a3a83b3fb677fead592ce51b5a6fbb4f95965ff"}, + {file = "numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ffac52f28a7849ad7576293c0cb7b9f08304e8f7d738a8cb8a90ec4c55a998eb"}, + {file = "numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63c0e9e7eea69588479ebf4a8a270d5ac22763cc5854e9a7eae952a3908103f7"}, + {file = "numpy-2.3.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f16417ec91f12f814b10bafe79ef77e70113a2f5f7018640e7425ff979253425"}, + {file = "numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0"}, ] [[package]] @@ -1476,141 +1490,125 @@ xml = ["lxml (>=4.9.2)"] [[package]] name = "pillow" -version = "11.3.0" -description = "Python Imaging Library (Fork)" +version = "12.0.0" +description = "Python Imaging Library (fork)" optional = true -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["main"] markers = "extra == \"plot\"" files = [ - {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}, - {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}, - {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"}, - {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"}, - {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"}, - {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"}, - {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"}, - {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"}, - {file = "pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"}, - {file = "pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"}, - {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"}, - {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = 
"sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"}, - {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"}, - {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"}, - {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"}, - {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"}, - {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"}, - {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"}, - {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"}, - {file = "pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"}, - {file = "pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"}, - {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"}, - {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"}, - {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"}, - {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"}, - {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"}, - {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"}, - {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"}, - {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"}, - {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d"}, - {file = "pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"}, - {file = "pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"}, - {file = "pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"}, - {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd"}, - {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = 
"sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8"}, - {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"}, - {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"}, - {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"}, - {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"}, - {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"}, - {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"}, - {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"}, - {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"}, - {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b"}, - {file = "pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3"}, - {file = "pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51"}, - {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"}, - {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"}, - {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"}, - {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"}, - {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"}, - {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"}, - {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"}, - {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"}, - {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c"}, - {file = "pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788"}, - {file = "pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31"}, - {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = 
"sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"}, - {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"}, - {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"}, - {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"}, - {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"}, - {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"}, - {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"}, - {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"}, - {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a"}, - {file = "pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214"}, - {file = "pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635"}, - {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"}, - {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"}, - {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"}, - {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"}, - {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"}, - {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"}, - {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"}, - {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"}, - {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b"}, - {file = "pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12"}, - {file = "pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db"}, - {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"}, - {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = 
"sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"}, - {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"}, - {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"}, - {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"}, - {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"}, - {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"}, - {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"}, - {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"}, - {file = "pillow-11.3.0-cp39-cp39-win32.whl", hash = "sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"}, - {file = "pillow-11.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"}, - {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"}, - {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"}, - {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"}, - {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"}, - {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"}, - {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"}, - {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"}, - {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"}, - {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"}, - {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"}, - {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"}, - {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"}, - {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"}, - {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"}, - {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"}, - {file = "pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"}, + {file = "pillow-12.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:3adfb466bbc544b926d50fe8f4a4e6abd8c6bffd28a26177594e6e9b2b76572b"}, + {file = "pillow-12.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ac11e8ea4f611c3c0147424eae514028b5e9077dd99ab91e1bd7bc33ff145e1"}, + {file = "pillow-12.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d49e2314c373f4c2b39446fb1a45ed333c850e09d0c59ac79b72eb3b95397363"}, + {file = "pillow-12.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c7b2a63fd6d5246349f3d3f37b14430d73ee7e8173154461785e43036ffa96ca"}, + {file = "pillow-12.0.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d64317d2587c70324b79861babb9c09f71fbb780bad212018874b2c013d8600e"}, + {file = "pillow-12.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d77153e14b709fd8b8af6f66a3afbb9ed6e9fc5ccf0b6b7e1ced7b036a228782"}, + {file = "pillow-12.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32ed80ea8a90ee3e6fa08c21e2e091bba6eda8eccc83dbc34c95169507a91f10"}, + {file = "pillow-12.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c828a1ae702fc712978bda0320ba1b9893d99be0badf2647f693cc01cf0f04fa"}, + {file = "pillow-12.0.0-cp310-cp310-win32.whl", hash = "sha256:bd87e140e45399c818fac4247880b9ce719e4783d767e030a883a970be632275"}, + {file = "pillow-12.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:455247ac8a4cfb7b9bc45b7e432d10421aea9fc2e74d285ba4072688a74c2e9d"}, + {file = "pillow-12.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:6ace95230bfb7cd79ef66caa064bbe2f2a1e63d93471c3a2e1f1348d9f22d6b7"}, + {file = "pillow-12.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0fd00cac9c03256c8b2ff58f162ebcd2587ad3e1f2e397eab718c47e24d231cc"}, + {file = "pillow-12.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3475b96f5908b3b16c47533daaa87380c491357d197564e0ba34ae75c0f3257"}, + {file = "pillow-12.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:110486b79f2d112cf6add83b28b627e369219388f64ef2f960fef9ebaf54c642"}, + {file = "pillow-12.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5269cc1caeedb67e6f7269a42014f381f45e2e7cd42d834ede3c703a1d915fe3"}, + {file = "pillow-12.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa5129de4e174daccbc59d0a3b6d20eaf24417d59851c07ebb37aeb02947987c"}, + {file = "pillow-12.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bee2a6db3a7242ea309aa7ee8e2780726fed67ff4e5b40169f2c940e7eb09227"}, + {file = "pillow-12.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:90387104ee8400a7b4598253b4c406f8958f59fcf983a6cea2b50d59f7d63d0b"}, + {file = "pillow-12.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc91a56697869546d1b8f0a3ff35224557ae7f881050e99f615e0119bf934b4e"}, + {file = "pillow-12.0.0-cp311-cp311-win32.whl", hash = 
"sha256:27f95b12453d165099c84f8a8bfdfd46b9e4bda9e0e4b65f0635430027f55739"}, + {file = "pillow-12.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b583dc9070312190192631373c6c8ed277254aa6e6084b74bdd0a6d3b221608e"}, + {file = "pillow-12.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:759de84a33be3b178a64c8ba28ad5c135900359e85fb662bc6e403ad4407791d"}, + {file = "pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371"}, + {file = "pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082"}, + {file = "pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f"}, + {file = "pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d"}, + {file = "pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953"}, + {file = "pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8"}, + {file = "pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79"}, + {file = "pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba"}, + {file = "pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0"}, + {file = "pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a"}, + {file = "pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad"}, + {file = "pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643"}, + {file = "pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4"}, + {file = "pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399"}, + {file = "pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5"}, + {file = "pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b"}, + {file = "pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3"}, + {file = "pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07"}, + {file = "pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e"}, + {file = "pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344"}, + {file = "pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27"}, + {file = "pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79"}, + {file = "pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098"}, + {file = "pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905"}, + {file = "pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a"}, + {file = "pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3"}, + {file = "pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced"}, + {file = "pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b"}, + {file = "pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d"}, + {file = "pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a"}, + {file = "pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe"}, + {file = "pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee"}, + {file = "pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef"}, + {file = "pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9"}, + {file = "pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b"}, + {file = "pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47"}, + {file = "pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:beeae3f27f62308f1ddbcfb0690bf44b10732f2ef43758f169d5e9303165d3f9"}, + {file = "pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d4827615da15cd59784ce39d3388275ec093ae3ee8d7f0c089b76fa87af756c2"}, + {file = "pillow-12.0.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:3e42edad50b6909089750e65c91aa09aaf1e0a71310d383f11321b27c224ed8a"}, + {file = "pillow-12.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e5d8efac84c9afcb40914ab49ba063d94f5dbdf5066db4482c66a992f47a3a3b"}, + {file = "pillow-12.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:266cd5f2b63ff316d5a1bba46268e603c9caf5606d44f38c2873c380950576ad"}, + {file = "pillow-12.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58eea5ebe51504057dd95c5b77d21700b77615ab0243d8152793dc00eb4faf01"}, + {file = "pillow-12.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13711b1a5ba512d647a0e4ba79280d3a9a045aaf7e0cc6fbe96b91d4cdf6b0c"}, + {file = "pillow-12.0.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:6846bd2d116ff42cba6b646edf5bf61d37e5cbd256425fa089fee4ff5c07a99e"}, + {file = "pillow-12.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c98fa880d695de164b4135a52fd2e9cd7b7c90a9d8ac5e9e443a24a95ef9248e"}, + {file = "pillow-12.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa3ed2a29a9e9d2d488b4da81dcb54720ac3104a20bf0bd273f1e4648aff5af9"}, + {file = "pillow-12.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d034140032870024e6b9892c692fe2968493790dd57208b2c37e3fb35f6df3ab"}, + {file = "pillow-12.0.0-cp314-cp314-win32.whl", hash = "sha256:1b1b133e6e16105f524a8dec491e0586d072948ce15c9b914e41cdadd209052b"}, + {file = "pillow-12.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8dc232e39d409036af549c86f24aed8273a40ffa459981146829a324e0848b4b"}, + {file = "pillow-12.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:d52610d51e265a51518692045e372a4c363056130d922a7351429ac9f27e70b0"}, + {file = "pillow-12.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1979f4566bb96c1e50a62d9831e2ea2d1211761e5662afc545fa766f996632f6"}, + {file = "pillow-12.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b2e4b27a6e15b04832fe9bf292b94b5ca156016bbc1ea9c2c20098a0320d6cf6"}, + {file = "pillow-12.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb3096c30df99fd01c7bf8e544f392103d0795b9f98ba71a8054bcbf56b255f1"}, + {file = "pillow-12.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7438839e9e053ef79f7112c881cef684013855016f928b168b81ed5835f3e75e"}, + {file = "pillow-12.0.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d5c411a8eaa2299322b647cd932586b1427367fd3184ffbb8f7a219ea2041ca"}, + {file = "pillow-12.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7e091d464ac59d2c7ad8e7e08105eaf9dafbc3883fd7265ffccc2baad6ac925"}, + {file = "pillow-12.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:792a2c0be4dcc18af9d4a2dfd8a11a17d5e25274a1062b0ec1c2d79c76f3e7f8"}, + {file = "pillow-12.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:afbefa430092f71a9593a99ab6a4e7538bc9eabbf7bf94f91510d3503943edc4"}, + {file = "pillow-12.0.0-cp314-cp314t-win32.whl", hash = "sha256:3830c769decf88f1289680a59d4f4c46c72573446352e2befec9a8512104fa52"}, + {file = "pillow-12.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:905b0365b210c73afb0ebe9101a32572152dfd1c144c7e28968a331b9217b94a"}, + {file = "pillow-12.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:99353a06902c2e43b43e8ff74ee65a7d90307d82370604746738a1e0661ccca7"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b22bd8c974942477156be55a768f7aa37c46904c175be4e158b6a86e3a6b7ca8"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:805ebf596939e48dbb2e4922a1d3852cfc25c38160751ce02da93058b48d252a"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cae81479f77420d217def5f54b5b9d279804d17e982e0f2fa19b1d1e14ab5197"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aeaefa96c768fc66818730b952a862235d68825c178f1b3ffd4efd7ad2edcb7c"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09f2d0abef9e4e2f349305a4f8cc784a8a6c2f58a8c4892eea13b10a943bd26e"}, + {file = 
"pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bdee52571a343d721fb2eb3b090a82d959ff37fc631e3f70422e0c2e029f3e76"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b290fd8aa38422444d4b50d579de197557f182ef1068b75f5aa8558638b8d0a5"}, + {file = "pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353"}, ] [package.extras] docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] -test-arrow = ["pyarrow"] -tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"] -typing = ["typing-extensions ; python_version < \"3.10\""] +test-arrow = ["arro3-compute", "arro3-core", "nanoarrow", "pyarrow"] +tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma (>=5)", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"] xmp = ["defusedxml"] [[package]] name = "plotly" -version = "6.3.1" +version = "6.5.0" description = "An open-source interactive data visualization library for Python" optional = true python-versions = ">=3.8" groups = ["main"] markers = "extra == \"plot\"" files = [ - {file = "plotly-6.3.1-py3-none-any.whl", hash = "sha256:8b4420d1dcf2b040f5983eed433f95732ed24930e496d36eb70d211923532e64"}, - {file = "plotly-6.3.1.tar.gz", hash = "sha256:dd896e3d940e653a7ce0470087e82c2bd903969a55e30d1b01bb389319461bb0"}, + {file = "plotly-6.5.0-py3-none-any.whl", hash = "sha256:5ac851e100367735250206788a2b1325412aa4a4917a4fe3e6f0bc5aa6f3d90a"}, + {file = "plotly-6.5.0.tar.gz", hash = "sha256:d5d38224883fd38c1409bef7d6a8dc32b74348d39313f3c52ca998b8e447f5c8"}, ] [package.dependencies] @@ -1623,7 +1621,7 @@ dev-build = ["build", "jupyter", "plotly[dev-core]"] dev-core = ["pytest", "requests", "ruff (==0.11.12)"] dev-optional = ["anywidget", "colorcet", "fiona (<=1.9.6) ; python_version <= \"3.8\"", "geopandas", "inflect", "numpy", "orjson", "pandas", "pdfrw", "pillow", "plotly-geo", "plotly[dev-build]", "plotly[kaleido]", "polars[timezone]", "pyarrow", "pyshp", "pytz", "scikit-image", "scipy", "shapely", "statsmodels", "vaex ; python_version <= \"3.9\"", "xarray"] express = ["numpy"] -kaleido = ["kaleido (>=1.0.0)"] +kaleido = ["kaleido (>=1.1.0)"] [[package]] name = "pluggy" @@ -1686,19 +1684,19 @@ files = [ [[package]] name = "pydantic" -version = "2.12.0" +version = "2.12.5" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic-2.12.0-py3-none-any.whl", hash = "sha256:f6a1da352d42790537e95e83a8bdfb91c7efbae63ffd0b86fa823899e807116f"}, - {file = "pydantic-2.12.0.tar.gz", hash = "sha256:c1a077e6270dbfb37bfd8b498b3981e2bb18f68103720e51fa6c306a5a9af563"}, + {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.41.1" +pydantic-core = "2.41.5" typing-extensions = ">=4.14.1" typing-inspection = ">=0.4.2" @@ -1708,125 +1706,133 @@ timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == 
\"Windows [[package]] name = "pydantic-core" -version = "2.41.1" +version = "2.41.5" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic_core-2.41.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e63036298322e9aea1c8b7c0a6c1204d615dbf6ec0668ce5b83ff27f07404a61"}, - {file = "pydantic_core-2.41.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:241299ca91fc77ef64f11ed909d2d9220a01834e8e6f8de61275c4dd16b7c936"}, - {file = "pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab7e594a2a5c24ab8013a7dc8cfe5f2260e80e490685814122081705c2cf2b0"}, - {file = "pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b054ef1a78519cb934b58e9c90c09e93b837c935dcd907b891f2b265b129eb6e"}, - {file = "pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2ab7d10d0ab2ed6da54c757233eb0f48ebfb4f86e9b88ccecb3f92bbd61a538"}, - {file = "pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2757606b7948bb853a27e4040820306eaa0ccb9e8f9f8a0fa40cb674e170f350"}, - {file = "pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cec0e75eb61f606bad0a32f2be87507087514e26e8c73db6cbdb8371ccd27917"}, - {file = "pydantic_core-2.41.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0234236514f44a5bf552105cfe2543a12f48203397d9d0f866affa569345a5b5"}, - {file = "pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1b974e41adfbb4ebb0f65fc4ca951347b17463d60893ba7d5f7b9bb087c83897"}, - {file = "pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:248dafb3204136113c383e91a4d815269f51562b6659b756cf3df14eefc7d0bb"}, - {file = "pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:678f9d76a91d6bcedd7568bbf6beb77ae8447f85d1aeebaab7e2f0829cfc3a13"}, - {file = "pydantic_core-2.41.1-cp310-cp310-win32.whl", hash = "sha256:dff5bee1d21ee58277900692a641925d2dddfde65182c972569b1a276d2ac8fb"}, - {file = "pydantic_core-2.41.1-cp310-cp310-win_amd64.whl", hash = "sha256:5042da12e5d97d215f91567110fdfa2e2595a25f17c19b9ff024f31c34f9b53e"}, - {file = "pydantic_core-2.41.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4f276a6134fe1fc1daa692642a3eaa2b7b858599c49a7610816388f5e37566a1"}, - {file = "pydantic_core-2.41.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07588570a805296ece009c59d9a679dc08fab72fb337365afb4f3a14cfbfc176"}, - {file = "pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28527e4b53400cd60ffbd9812ccb2b5135d042129716d71afd7e45bf42b855c0"}, - {file = "pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46a1c935c9228bad738c8a41de06478770927baedf581d172494ab36a6b96575"}, - {file = "pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:447ddf56e2b7d28d200d3e9eafa936fe40485744b5a824b67039937580b3cb20"}, - {file = "pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63892ead40c1160ac860b5debcc95c95c5a0035e543a8b5a4eac70dd22e995f4"}, - {file = "pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4a9543ca355e6df8fbe9c83e9faab707701e9103ae857ecb40f1c0cf8b0e94d"}, - {file = 
"pydantic_core-2.41.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2611bdb694116c31e551ed82e20e39a90bea9b7ad9e54aaf2d045ad621aa7a1"}, - {file = "pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fecc130893a9b5f7bfe230be1bb8c61fe66a19db8ab704f808cb25a82aad0bc9"}, - {file = "pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:1e2df5f8344c99b6ea5219f00fdc8950b8e6f2c422fbc1cc122ec8641fac85a1"}, - {file = "pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:35291331e9d8ed94c257bab6be1cb3a380b5eee570a2784bffc055e18040a2ea"}, - {file = "pydantic_core-2.41.1-cp311-cp311-win32.whl", hash = "sha256:2876a095292668d753f1a868c4a57c4ac9f6acbd8edda8debe4218d5848cf42f"}, - {file = "pydantic_core-2.41.1-cp311-cp311-win_amd64.whl", hash = "sha256:b92d6c628e9a338846a28dfe3fcdc1a3279388624597898b105e078cdfc59298"}, - {file = "pydantic_core-2.41.1-cp311-cp311-win_arm64.whl", hash = "sha256:7d82ae99409eb69d507a89835488fb657faa03ff9968a9379567b0d2e2e56bc5"}, - {file = "pydantic_core-2.41.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:db2f82c0ccbce8f021ad304ce35cbe02aa2f95f215cac388eed542b03b4d5eb4"}, - {file = "pydantic_core-2.41.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47694a31c710ced9205d5f1e7e8af3ca57cbb8a503d98cb9e33e27c97a501601"}, - {file = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e9decce94daf47baf9e9d392f5f2557e783085f7c5e522011545d9d6858e00"}, - {file = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab0adafdf2b89c8b84f847780a119437a0931eca469f7b44d356f2b426dd9741"}, - {file = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5da98cc81873f39fd56882e1569c4677940fbc12bce6213fad1ead784192d7c8"}, - {file = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:209910e88afb01fd0fd403947b809ba8dba0e08a095e1f703294fda0a8fdca51"}, - {file = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365109d1165d78d98e33c5bfd815a9b5d7d070f578caefaabcc5771825b4ecb5"}, - {file = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:706abf21e60a2857acdb09502bc853ee5bce732955e7b723b10311114f033115"}, - {file = "pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bf0bd5417acf7f6a7ec3b53f2109f587be176cb35f9cf016da87e6017437a72d"}, - {file = "pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:2e71b1c6ceb9c78424ae9f63a07292fb769fb890a4e7efca5554c47f33a60ea5"}, - {file = "pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:80745b9770b4a38c25015b517451c817799bfb9d6499b0d13d8227ec941cb513"}, - {file = "pydantic_core-2.41.1-cp312-cp312-win32.whl", hash = "sha256:83b64d70520e7890453f1aa21d66fda44e7b35f1cfea95adf7b4289a51e2b479"}, - {file = "pydantic_core-2.41.1-cp312-cp312-win_amd64.whl", hash = "sha256:377defd66ee2003748ee93c52bcef2d14fde48fe28a0b156f88c3dbf9bc49a50"}, - {file = "pydantic_core-2.41.1-cp312-cp312-win_arm64.whl", hash = "sha256:c95caff279d49c1d6cdfe2996e6c2ad712571d3b9caaa209a404426c326c4bde"}, - {file = "pydantic_core-2.41.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70e790fce5f05204ef4403159857bfcd587779da78627b0babb3654f75361ebf"}, - {file = "pydantic_core-2.41.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:9cebf1ca35f10930612d60bd0f78adfacee824c30a880e3534ba02c207cceceb"}, - {file = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:170406a37a5bc82c22c3274616bf6f17cc7df9c4a0a0a50449e559cb755db669"}, - {file = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12d4257fc9187a0ccd41b8b327d6a4e57281ab75e11dda66a9148ef2e1fb712f"}, - {file = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a75a33b4db105dd1c8d57839e17ee12db8d5ad18209e792fa325dbb4baeb00f4"}, - {file = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08a589f850803a74e0fcb16a72081cafb0d72a3cdda500106942b07e76b7bf62"}, - {file = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97939d6ea44763c456bd8a617ceada2c9b96bb5b8ab3dfa0d0827df7619014"}, - {file = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae423c65c556f09569524b80ffd11babff61f33055ef9773d7c9fabc11ed8d"}, - {file = "pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:4dc703015fbf8764d6a8001c327a87f1823b7328d40b47ce6000c65918ad2b4f"}, - {file = "pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:968e4ffdfd35698a5fe659e5e44c508b53664870a8e61c8f9d24d3d145d30257"}, - {file = "pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:fff2b76c8e172d34771cd4d4f0ade08072385310f214f823b5a6ad4006890d32"}, - {file = "pydantic_core-2.41.1-cp313-cp313-win32.whl", hash = "sha256:a38a5263185407ceb599f2f035faf4589d57e73c7146d64f10577f6449e8171d"}, - {file = "pydantic_core-2.41.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42ae7fd6760782c975897e1fdc810f483b021b32245b0105d40f6e7a3803e4b"}, - {file = "pydantic_core-2.41.1-cp313-cp313-win_arm64.whl", hash = "sha256:ad4111acc63b7384e205c27a2f15e23ac0ee21a9d77ad6f2e9cb516ec90965fb"}, - {file = "pydantic_core-2.41.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:440d0df7415b50084a4ba9d870480c16c5f67c0d1d4d5119e3f70925533a0edc"}, - {file = "pydantic_core-2.41.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71eaa38d342099405dae6484216dcf1e8e4b0bebd9b44a4e08c9b43db6a2ab67"}, - {file = "pydantic_core-2.41.1-cp313-cp313t-win_amd64.whl", hash = "sha256:555ecf7e50f1161d3f693bc49f23c82cf6cdeafc71fa37a06120772a09a38795"}, - {file = "pydantic_core-2.41.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:05226894a26f6f27e1deb735d7308f74ef5fa3a6de3e0135bb66cdcaee88f64b"}, - {file = "pydantic_core-2.41.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:85ff7911c6c3e2fd8d3779c50925f6406d770ea58ea6dde9c230d35b52b16b4a"}, - {file = "pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47f1f642a205687d59b52dc1a9a607f45e588f5a2e9eeae05edd80c7a8c47674"}, - {file = "pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df11c24e138876ace5ec6043e5cae925e34cf38af1a1b3d63589e8f7b5f5cdc4"}, - {file = "pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f0bf7f5c8f7bf345c527e8a0d72d6b26eda99c1227b0c34e7e59e181260de31"}, - {file = "pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82b887a711d341c2c47352375d73b029418f55b20bd7815446d175a70effa706"}, - {file = 
"pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5f1d5d6bbba484bdf220c72d8ecd0be460f4bd4c5e534a541bb2cd57589fb8b"}, - {file = "pydantic_core-2.41.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bf1917385ebe0f968dc5c6ab1375886d56992b93ddfe6bf52bff575d03662be"}, - {file = "pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:4f94f3ab188f44b9a73f7295663f3ecb8f2e2dd03a69c8f2ead50d37785ecb04"}, - {file = "pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:3925446673641d37c30bd84a9d597e49f72eacee8b43322c8999fa17d5ae5bc4"}, - {file = "pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:49bd51cc27adb980c7b97357ae036ce9b3c4d0bb406e84fbe16fb2d368b602a8"}, - {file = "pydantic_core-2.41.1-cp314-cp314-win32.whl", hash = "sha256:a31ca0cd0e4d12ea0df0077df2d487fc3eb9d7f96bbb13c3c5b88dcc21d05159"}, - {file = "pydantic_core-2.41.1-cp314-cp314-win_amd64.whl", hash = "sha256:1b5c4374a152e10a22175d7790e644fbd8ff58418890e07e2073ff9d4414efae"}, - {file = "pydantic_core-2.41.1-cp314-cp314-win_arm64.whl", hash = "sha256:4fee76d757639b493eb600fba668f1e17475af34c17dd61db7a47e824d464ca9"}, - {file = "pydantic_core-2.41.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f9b9c968cfe5cd576fdd7361f47f27adeb120517e637d1b189eea1c3ece573f4"}, - {file = "pydantic_core-2.41.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ebc7ab67b856384aba09ed74e3e977dded40e693de18a4f197c67d0d4e6d8e"}, - {file = "pydantic_core-2.41.1-cp314-cp314t-win_amd64.whl", hash = "sha256:8ae0dc57b62a762985bc7fbf636be3412394acc0ddb4ade07fe104230f1b9762"}, - {file = "pydantic_core-2.41.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:10ce489cf09a4956a1549af839b983edc59b0f60e1b068c21b10154e58f54f80"}, - {file = "pydantic_core-2.41.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff548c908caffd9455fd1342366bcf8a1ec8a3fca42f35c7fc60883d6a901074"}, - {file = "pydantic_core-2.41.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d43bf082025082bda13be89a5f876cc2386b7727c7b322be2d2b706a45cea8e"}, - {file = "pydantic_core-2.41.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:666aee751faf1c6864b2db795775dd67b61fdcf646abefa309ed1da039a97209"}, - {file = "pydantic_core-2.41.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b83aaeff0d7bde852c32e856f3ee410842ebc08bc55c510771d87dcd1c01e1ed"}, - {file = "pydantic_core-2.41.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:055c7931b0329cb8acde20cdde6d9c2cbc2a02a0a8e54a792cddd91e2ea92c65"}, - {file = "pydantic_core-2.41.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:530bbb1347e3e5ca13a91ac087c4971d7da09630ef8febd27a20a10800c2d06d"}, - {file = "pydantic_core-2.41.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65a0ea16cfea7bfa9e43604c8bd726e63a3788b61c384c37664b55209fcb1d74"}, - {file = "pydantic_core-2.41.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8fa93fadff794c6d15c345c560513b160197342275c6d104cc879f932b978afc"}, - {file = "pydantic_core-2.41.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:c8a1af9ac51969a494c6a82b563abae6859dc082d3b999e8fa7ba5ee1b05e8e8"}, - {file = "pydantic_core-2.41.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:30edab28829703f876897c9471a857e43d847b8799c3c9e2fbce644724b50aa4"}, - {file = "pydantic_core-2.41.1-cp39-cp39-win32.whl", hash = 
"sha256:84d0ff869f98be2e93efdf1ae31e5a15f0926d22af8677d51676e373abbfe57a"}, - {file = "pydantic_core-2.41.1-cp39-cp39-win_amd64.whl", hash = "sha256:b5674314987cdde5a5511b029fa5fb1556b3d147a367e01dd583b19cfa8e35df"}, - {file = "pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:68f2251559b8efa99041bb63571ec7cdd2d715ba74cc82b3bc9eff824ebc8bf0"}, - {file = "pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:c7bc140c596097cb53b30546ca257dbe3f19282283190b1b5142928e5d5d3a20"}, - {file = "pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2896510fce8f4725ec518f8b9d7f015a00db249d2fd40788f442af303480063d"}, - {file = "pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ced20e62cfa0f496ba68fa5d6c7ee71114ea67e2a5da3114d6450d7f4683572a"}, - {file = "pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b04fa9ed049461a7398138c604b00550bc89e3e1151d84b81ad6dc93e39c4c06"}, - {file = "pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:b3b7d9cfbfdc43c80a16638c6dc2768e3956e73031fca64e8e1a3ae744d1faeb"}, - {file = "pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eec83fc6abef04c7f9bec616e2d76ee9a6a4ae2a359b10c21d0f680e24a247ca"}, - {file = "pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6771a2d9f83c4038dfad5970a3eef215940682b2175e32bcc817bdc639019b28"}, - {file = "pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fabcbdb12de6eada8d6e9a759097adb3c15440fafc675b3e94ae5c9cb8d678a0"}, - {file = "pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:80e97ccfaf0aaf67d55de5085b0ed0d994f57747d9d03f2de5cc9847ca737b08"}, - {file = "pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34df1fe8fea5d332484a763702e8b6a54048a9d4fe6ccf41e34a128238e01f52"}, - {file = "pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:421b5595f845842fc093f7250e24ee395f54ca62d494fdde96f43ecf9228ae01"}, - {file = "pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dce8b22663c134583aaad24827863306a933f576c79da450be3984924e2031d1"}, - {file = "pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:300a9c162fea9906cc5c103893ca2602afd84f0ec90d3be36f4cc360125d22e1"}, - {file = "pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e019167628f6e6161ae7ab9fb70f6d076a0bf0d55aa9b20833f86a320c70dd65"}, - {file = "pydantic_core-2.41.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:13ab9cc2de6f9d4ab645a050ae5aee61a2424ac4d3a16ba23d4c2027705e0301"}, - {file = "pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:af2385d3f98243fb733862f806c5bb9122e5fba05b373e3af40e3c82d711cef1"}, - {file = "pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:6550617a0c2115be56f90c31a5370261d8ce9dbf051c3ed53b51172dd34da696"}, - {file = "pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc17b6ecf4983d298686014c92ebc955a9f9baf9f57dad4065e7906e7bee6222"}, - {file = 
"pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:42ae9352cf211f08b04ea110563d6b1e415878eea5b4c70f6bdb17dca3b932d2"}, - {file = "pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e82947de92068b0a21681a13dd2102387197092fbe7defcfb8453e0913866506"}, - {file = "pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e244c37d5471c9acdcd282890c6c4c83747b77238bfa19429b8473586c907656"}, - {file = "pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1e798b4b304a995110d41ec93653e57975620ccb2842ba9420037985e7d7284e"}, - {file = "pydantic_core-2.41.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f1fc716c0eb1663c59699b024428ad5ec2bcc6b928527b8fe28de6cb89f47efb"}, - {file = "pydantic_core-2.41.1.tar.gz", hash = "sha256:1ad375859a6d8c356b7704ec0f547a58e82ee80bb41baa811ad710e124bc8f2f"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = 
"pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = 
"sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, ] [package.dependencies] @@ -2333,14 +2339,14 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "termcolor" -version = "3.1.0" +version = "3.2.0" description = "ANSI color formatting for output in terminal" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["main", "dev"] files = [ - {file = "termcolor-3.1.0-py3-none-any.whl", hash = "sha256:591dd26b5c2ce03b9e43f391264626557873ce1d379019786f99b0c2bee140aa"}, - {file = "termcolor-3.1.0.tar.gz", hash = 
"sha256:6a6dd7fbee581909eeec6a756cff1d7f7c376063b14e4a298dc4980309e55970"}, + {file = "termcolor-3.2.0-py3-none-any.whl", hash = "sha256:a10343879eba4da819353c55cb8049b0933890c2ebf9ad5d3ecd2bb32ea96ea6"}, + {file = "termcolor-3.2.0.tar.gz", hash = "sha256:610e6456feec42c4bcd28934a8c87a06c3fa28b01561d46aa09a9881b8622c58"}, ] [package.extras] diff --git a/pyproject.toml b/pyproject.toml index 92a2df07..57b96e11 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,7 +68,7 @@ documentation = "https://docs.simvue.io" plot = ["plotly (>=6.0.0,<7.0.0)", "matplotlib (>=3.10.0,<4.0.0)"] [project.scripts] -simvue_sender = "simvue.bin.sender:run" +simvue-sender = "simvue.bin.sender:run" [tool.poetry.group.dev.dependencies] pytest = "^8.0.0" diff --git a/tests/cli/test_sender_command.py b/tests/cli/test_sender_command.py new file mode 100644 index 00000000..381418a5 --- /dev/null +++ b/tests/cli/test_sender_command.py @@ -0,0 +1,29 @@ +import tempfile +from typing import Any +import pytest +import click.testing +import os + +from simvue.bin.sender import run +from simvue.run import Run + +from conftest import create_test_run_offline, setup_test_run + +@pytest.mark.cli +def test_sender_command(request, monkeypatch) -> None: + with tempfile.TemporaryDirectory() as tempd: + monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", tempd) + _run = Run(mode="offline") + setup_test_run(_run, tempd, create_objects=True, request=request) + _run.close() + _runner = click.testing.CliRunner() + _result = _runner.invoke( + run, + [ + "-i", + tempd + ], + catch_exceptions=False + ) + assert _result.exit_code == 0, _result.output + diff --git a/tests/conftest.py b/tests/conftest.py index 0e8570b8..ce6355c1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,6 @@ import contextlib from _pytest import monkeypatch +import numpy import pytest import pytest_mock import typing @@ -156,7 +157,7 @@ def testing_exit(status: int) -> None: with tempfile.TemporaryDirectory() as temp_d: monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d) with sv_run.Run("offline") as run: - _test_run_data = setup_test_run(run, True, request) + _test_run_data = setup_test_run(run, temp_d, True, request) yield run, _test_run_data with contextlib.suppress(ObjectNotFoundError): sv_api_obj.Folder(identifier=run._folder.id).delete(recursive=True, delete_runs=True, runs_only=False) @@ -188,7 +189,8 @@ def create_plain_run_offline(request,prevent_script_exit,monkeypatch) -> typing. 
@@ -188,7 +189,8 @@ def create_plain_run_offline(request,prevent_script_exit,monkeypatch) -> typing.
     with tempfile.TemporaryDirectory() as temp_d:
         monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d)
         with sv_run.Run("offline") as run:
-            yield run, setup_test_run(run, False, request)
+            _temporary_directory = pathlib.Path(temp_d)
+            yield run, setup_test_run(run, _temporary_directory,False, request)
         clear_out_files()

@@ -205,7 +207,7 @@ def testing_exit(status: int) -> None:
         _folder.delete(recursive=True, runs_only=False, delete_runs=True)


-def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.FixtureRequest, created_only: bool=False):
+def setup_test_run(run: sv_run.Run, temp_dir: pathlib.Path, create_objects: bool, request: pytest.FixtureRequest, created_only: bool=False):
     fix_use_id: str = str(uuid.uuid4()).split('-', 1)[0]
     _test_name: str = request.node.name.replace("[", "_").replace("]", "")
     TEST_DATA = {
@@ -239,6 +241,13 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur
     _alert_ids = []

     if create_objects:
+
+        run.assign_metric_to_grid(
+            metric_name="grid_metric",
+            grid_name=f"test_grid_{fix_use_id}",
+            axes_ticks=[list(range(10)), list(range(10))],
+            axes_labels=["x", "y"]
+        )
         for i in range(5):
             run.log_event(f"{TEST_DATA['event_contains']} {i}")
@@ -279,11 +288,12 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur

     for i in range(5):
         run.log_metrics({"metric_counter": i, "metric_val": i*i - 1})
+        run.log_metrics({"grid_metric": i * numpy.identity(10)})

     run.update_metadata(TEST_DATA["metadata"])

     if create_objects:
-        TEST_DATA["metrics"] = ("metric_counter", "metric_val")
+        TEST_DATA["metrics"] = ("metric_counter", "metric_val", "grid_metric")
     TEST_DATA["run_id"] = run.id
     TEST_DATA["run_name"] = run.name
@@ -293,25 +303,23 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur
     TEST_DATA["system_metrics_interval"] = run._system_metrics_interval

     if create_objects:
-        with tempfile.TemporaryDirectory() as tempd:
-            with open((test_file := os.path.join(tempd, "test_file.txt")), "w") as out_f:
-                out_f.write("This is a test file")
-            run.save_file(test_file, category="input", name="test_file")
-            TEST_DATA["file_1"] = "test_file"
-
-            with open((test_json := os.path.join(tempd, f"test_attrs_{fix_use_id}.json")), "w") as out_f:
-                json.dump(TEST_DATA, out_f, indent=2)
-            run.save_file(test_json, category="output", name="test_attributes")
-            TEST_DATA["file_2"] = "test_attributes"
-
-            with open((test_script := os.path.join(tempd, "test_script.py")), "w") as out_f:
-                out_f.write(
-                    "print('Hello World!')"
-                )
-            print(test_script)
-            assert pathlib.Path(test_script).exists()
-            run.save_file(test_script, category="code", name="test_code_upload")
-            TEST_DATA["file_3"] = "test_code_upload"
+        with open((test_file := os.path.join(temp_dir, "test_file.txt")), "w") as out_f:
+            out_f.write("This is a test file")
+        run.save_file(test_file, category="input", name="test_file")
+        TEST_DATA["file_1"] = "test_file"
+
+        with open((test_json := os.path.join(temp_dir, f"test_attrs_{fix_use_id}.json")), "w") as out_f:
+            json.dump(TEST_DATA, out_f, indent=2)
+        run.save_file(test_json, category="output", name="test_attributes")
+        TEST_DATA["file_2"] = "test_attributes"
+
+        with open((test_script := os.path.join(temp_dir, "test_script.py")), "w") as out_f:
+            out_f.write(
+                "print('Hello World!')"
+            )
+        assert pathlib.Path(test_script).exists()
+        run.save_file(test_script, category="code", name="test_code_upload")
+        TEST_DATA["file_3"] = "test_code_upload"

     TEST_DATA["alert_ids"] = _alert_ids
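Note: the test added above carries a custom "cli" pytest marker, which the CI changes in the next two patches use for selection via "pytest -m". For that selection to run without unknown-marker warnings the marker has to be declared somewhere pytest can see it. A minimal sketch of one way to do this is given below; it assumes registration through a pytest_configure hook in tests/conftest.py, whereas the repository may instead declare its markers in pyproject.toml or pytest.ini (not shown in this series).

    # Illustrative sketch only: register the new "cli" marker so that
    # `pytest -m cli` selects tests/cli/test_sender_command.py cleanly.
    # Where simvue actually declares its markers is an assumption here.
    def pytest_configure(config) -> None:
        config.addinivalue_line(
            "markers",
            "cli: tests exercising the simvue-sender command line entry point",
        )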
From: =?UTF-8?q?Kristian=20Zar=C4=99bski?=
Date: Mon, 1 Dec 2025 09:39:55 +0000
Subject: [PATCH 15/16] =?UTF-8?q?=F0=9F=92=9A=20Include=20CLI=20test=20in?=
 =?UTF-8?q?=20CI?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .github/workflows/test_client_ubuntu.yml | 2 +-
 .github/workflows/test_client_ubuntu_nightlies.yml | 4 ++--
 .github/workflows/test_client_windows_nightlies.yml | 2 +-
 .github/workflows/test_multiple_python.yml | 2 +-
 4 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/test_client_ubuntu.yml b/.github/workflows/test_client_ubuntu.yml
index bc8911e2..87f419b7 100644
--- a/.github/workflows/test_client_ubuntu.yml
+++ b/.github/workflows/test_client_ubuntu.yml
@@ -140,7 +140,7 @@ jobs:
           SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }}
         run: >-
           python -m pytest -x
-          -m "offline and not api" -c /dev/null -p no:warnings
+          -m "offline and not api" -m cli -c /dev/null -p no:warnings
           -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache
   config_tests:
     runs-on: ubuntu-latest
diff --git a/.github/workflows/test_client_ubuntu_nightlies.yml b/.github/workflows/test_client_ubuntu_nightlies.yml
index e868da63..55ed6e35 100644
--- a/.github/workflows/test_client_ubuntu_nightlies.yml
+++ b/.github/workflows/test_client_ubuntu_nightlies.yml
@@ -90,7 +90,7 @@ jobs:
           python -m pytest -x
           -m dispatch -c /dev/null -p no:warnings
           -n 8 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache
-  online_online:
+  online_tests:
     runs-on: ubuntu-latest
     timeout-minutes: 30
     steps:
@@ -138,7 +138,7 @@ jobs:
           SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }}
         run: >-
           python -m pytest -x
-          -m "offline and not api" -c /dev/null -p no:warnings
+          -m "offline and not api" -m cli -c /dev/null -p no:warnings
           -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache
   config_tests:
     runs-on: ubuntu-latest
diff --git a/.github/workflows/test_client_windows_nightlies.yml b/.github/workflows/test_client_windows_nightlies.yml
index 4b863b56..82032bc5 100644
--- a/.github/workflows/test_client_windows_nightlies.yml
+++ b/.github/workflows/test_client_windows_nightlies.yml
@@ -139,7 +139,7 @@ jobs:
           SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }}
         run: >-
           python -m pytest -x
-          -m run -m 'offline and not unix and not api' -c /dev/null -p no:warnings
+          -m run -m 'offline and not unix and not api' -m cli -c /dev/null -p no:warnings
           -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache
         shell: pwsh
   config_tests:
diff --git a/.github/workflows/test_multiple_python.yml b/.github/workflows/test_multiple_python.yml
index 7a44f92a..1771985f 100644
--- a/.github/workflows/test_multiple_python.yml
+++ b/.github/workflows/test_multiple_python.yml
@@ -163,7 +163,7 @@ jobs:
           SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }}
         run: >-
           python -m pytest -x
-          -m "offline and not api" -c /dev/null -p no:warnings
+          -m "offline and not api" -m cli -c /dev/null -p no:warnings
           -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache
   config_tests:
     runs-on: ubuntu-latest

From a5c279a3b62cfb0a7deb6024c6ca354273f68879 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kristian=20Zar=C4=99bski?=
Date: Mon, 1 Dec 2025 10:43:58 +0000
Subject: [PATCH 16/16] =?UTF-8?q?=F0=9F=A7=AA=20Fix=20failing=20tests?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .github/workflows/test_client_macos_nightlies.yml | 2 +-
 .github/workflows/test_client_ubuntu.yml | 2 +-
 .github/workflows/test_client_ubuntu_nightlies.yml | 2 +-
 .github/workflows/test_client_windows_nightlies.yml | 2 +-
 .github/workflows/test_multiple_python.yml | 2 +-
 tests/cli/test_sender_command.py | 2 +-
 tests/functional/test_client.py | 2 +-
 7 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/test_client_macos_nightlies.yml b/.github/workflows/test_client_macos_nightlies.yml
index b046ef0c..cc8d48c4 100644
--- a/.github/workflows/test_client_macos_nightlies.yml
+++ b/.github/workflows/test_client_macos_nightlies.yml
@@ -135,7 +135,7 @@ jobs:
           SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }}
         run: >-
           python -m pytest -x
-          -m "offline and not api" -c /dev/null -p no:warnings
+          -m "(offline and not api) or cli" -c /dev/null -p no:warnings
           -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache
   config_tests:
     runs-on: ubuntu-latest
diff --git a/.github/workflows/test_client_ubuntu.yml b/.github/workflows/test_client_ubuntu.yml
index 87f419b7..f940d4d4 100644
--- a/.github/workflows/test_client_ubuntu.yml
+++ b/.github/workflows/test_client_ubuntu.yml
@@ -140,7 +140,7 @@ jobs:
           SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }}
         run: >-
           python -m pytest -x
-          -m "offline and not api" -m cli -c /dev/null -p no:warnings
+          -m "(offline and not api) or cli" -c /dev/null -p no:warnings
           -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache
   config_tests:
     runs-on: ubuntu-latest
diff --git a/.github/workflows/test_client_ubuntu_nightlies.yml b/.github/workflows/test_client_ubuntu_nightlies.yml
index 55ed6e35..ad9544f3 100644
--- a/.github/workflows/test_client_ubuntu_nightlies.yml
+++ b/.github/workflows/test_client_ubuntu_nightlies.yml
@@ -138,7 +138,7 @@ jobs:
           SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }}
         run: >-
           python -m pytest -x
-          -m "offline and not api" -m cli -c /dev/null -p no:warnings
+          -m "(offline and not api) or cli" -c /dev/null -p no:warnings
           -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache
   config_tests:
     runs-on: ubuntu-latest
diff --git a/.github/workflows/test_client_windows_nightlies.yml b/.github/workflows/test_client_windows_nightlies.yml
index 82032bc5..077fb958 100644
--- a/.github/workflows/test_client_windows_nightlies.yml
+++ b/.github/workflows/test_client_windows_nightlies.yml
@@ -139,7 +139,7 @@ jobs:
           SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }}
         run: >-
           python -m pytest -x
-          -m run -m 'offline and not unix and not api' -m cli -c /dev/null -p no:warnings
+          -m run -m '(offline and not unix and not api) or cli' -c /dev/null -p no:warnings
           -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache
         shell: pwsh
   config_tests:
diff --git a/.github/workflows/test_multiple_python.yml b/.github/workflows/test_multiple_python.yml
index 1771985f..aa1cbff6 100644
--- a/.github/workflows/test_multiple_python.yml
+++ b/.github/workflows/test_multiple_python.yml
@@ -163,7 +163,7 @@ jobs:
           SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }}
         run: >-
           python -m pytest -x
-          -m "offline and not api" -m cli -c /dev/null -p no:warnings
+          -m "(offline and not api) or cli" -c /dev/null -p no:warnings
           -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache
   config_tests:
     runs-on: ubuntu-latest
diff --git a/tests/cli/test_sender_command.py b/tests/cli/test_sender_command.py
index 381418a5..84815440 100644
--- a/tests/cli/test_sender_command.py
+++ b/tests/cli/test_sender_command.py
@@ -14,7 +14,7 @@ def test_sender_command(request, monkeypatch) -> None:
     with tempfile.TemporaryDirectory() as tempd:
         monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", tempd)
         _run = Run(mode="offline")
-        setup_test_run(_run, tempd, create_objects=True, request=request)
+        setup_test_run(_run, temp_dir=tempd, create_objects=True, request=request)
         _run.close()
         _runner = click.testing.CliRunner()
         _result = _runner.invoke(
diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py
index 1af18b51..feef0655 100644
--- a/tests/functional/test_client.py
+++ b/tests/functional/test_client.py
@@ -161,7 +161,7 @@ def test_plot_metrics(create_test_run: tuple[sv_run.Run, dict]) -> None:
         raise AssertionError("Failed to retrieve metrics.")
     client.plot_metrics(
         run_ids=[create_test_run[1]["run_id"]],
-        metric_names=list(create_test_run[1]["metrics"]),
+        metric_names=[m for m in create_test_run[1]["metrics"] if not m.startswith("grid")],
         xaxis="time",
     )